From 166b2352f32d7953e663bb5842093970700eae60 Mon Sep 17 00:00:00 2001 From: Julia Ryan Date: Thu, 18 Sep 2025 19:21:42 -0500 Subject: [PATCH 01/58] Respect user's font-smoothing setting (#38467) #37622 was incorrectly forcing font smoothing to be enabled on macos even when the user had disabled that setting at the OS level. See [this comment](https://github.com/zed-industries/zed/pull/37622#issuecomment-3310030659) for an example of the difference that font smoothing makes. Release Notes: - N/A --- crates/gpui/src/platform/mac/text_system.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/gpui/src/platform/mac/text_system.rs b/crates/gpui/src/platform/mac/text_system.rs index 9144b2a23a40bd527e1441cf71adcc2562c33f3c..7f765fbaac80e27f8db4b9c4f2d00de90e991a9a 100644 --- a/crates/gpui/src/platform/mac/text_system.rs +++ b/crates/gpui/src/platform/mac/text_system.rs @@ -397,7 +397,6 @@ impl MacTextSystemState { .subpixel_variant .map(|v| v as f32 / SUBPIXEL_VARIANTS as f32); cx.set_allows_font_smoothing(true); - cx.set_should_smooth_fonts(true); cx.set_text_drawing_mode(CGTextDrawingMode::CGTextFill); cx.set_gray_fill_color(0.0, 1.0); cx.set_allows_antialiasing(true); From e5e308ba78c3de679b822e50a07b3432a1624ce1 Mon Sep 17 00:00:00 2001 From: Nia Date: Fri, 19 Sep 2025 02:45:59 +0200 Subject: [PATCH 02/58] fuzzy: Fixup atomic ordering (#38468) Hopefully partially addresses some crashes that can be triggered in this code. Release Notes: - N/A --- crates/agent_ui/src/context_picker/file_context_picker.rs | 2 +- crates/assistant_slash_commands/src/diagnostics_command.rs | 2 +- crates/assistant_slash_commands/src/file_command.rs | 2 +- crates/file_finder/src/file_finder.rs | 6 +++--- crates/fuzzy/src/matcher.rs | 2 +- crates/fuzzy/src/paths.rs | 7 +++---- crates/fuzzy/src/strings.rs | 2 +- 7 files changed, 11 insertions(+), 12 deletions(-) diff --git a/crates/agent_ui/src/context_picker/file_context_picker.rs b/crates/agent_ui/src/context_picker/file_context_picker.rs index d64de23f4e42b8a79dc9bdcbc1c2fa9677c09372..d6f2af7083eb4049e168f6409cef22022cbe404b 100644 --- a/crates/agent_ui/src/context_picker/file_context_picker.rs +++ b/crates/agent_ui/src/context_picker/file_context_picker.rs @@ -251,7 +251,7 @@ pub(crate) fn search_files( fuzzy::match_path_sets( candidate_sets.as_slice(), query.as_str(), - None, + &None, false, 100, &cancellation_flag, diff --git a/crates/assistant_slash_commands/src/diagnostics_command.rs b/crates/assistant_slash_commands/src/diagnostics_command.rs index 8b1dbd515cabeb498d2a639387b426527dcda651..dd54565c2abc168bb995325f2ebf930bbde90793 100644 --- a/crates/assistant_slash_commands/src/diagnostics_command.rs +++ b/crates/assistant_slash_commands/src/diagnostics_command.rs @@ -73,7 +73,7 @@ impl DiagnosticsSlashCommand { fuzzy::match_path_sets( candidate_sets.as_slice(), query.as_str(), - None, + &None, false, 100, &cancellation_flag, diff --git a/crates/assistant_slash_commands/src/file_command.rs b/crates/assistant_slash_commands/src/file_command.rs index 261e15bc0ae8b9e886d4d146696db78e5c0c831d..4bf53bad9b5364c7fd488cf74644701c6f176b99 100644 --- a/crates/assistant_slash_commands/src/file_command.rs +++ b/crates/assistant_slash_commands/src/file_command.rs @@ -104,7 +104,7 @@ impl FileSlashCommand { fuzzy::match_path_sets( candidate_sets.as_slice(), query.as_str(), - None, + &None, false, 100, &cancellation_flag, diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index 
eda01466f6dda2f90fbdbd9f92f3cf812b083026..dadd3ea299304e845bbc0f412c3962d14e2006e4 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -886,14 +886,14 @@ impl FileFinderDelegate { .collect::>(); let search_id = util::post_inc(&mut self.search_count); - self.cancel_flag.store(true, atomic::Ordering::Relaxed); + self.cancel_flag.store(true, atomic::Ordering::Release); self.cancel_flag = Arc::new(AtomicBool::new(false)); let cancel_flag = self.cancel_flag.clone(); cx.spawn_in(window, async move |picker, cx| { let matches = fuzzy::match_path_sets( candidate_sets.as_slice(), query.path_query(), - relative_to, + &relative_to, false, 100, &cancel_flag, @@ -902,7 +902,7 @@ impl FileFinderDelegate { .await .into_iter() .map(ProjectPanelOrdMatch); - let did_cancel = cancel_flag.load(atomic::Ordering::Relaxed); + let did_cancel = cancel_flag.load(atomic::Ordering::Acquire); picker .update(cx, |picker, cx| { picker diff --git a/crates/fuzzy/src/matcher.rs b/crates/fuzzy/src/matcher.rs index e649d47dd646b80e312e2465f0929f630fecf81f..88253d4848b4b3866b9380256eccf1826213cfd1 100644 --- a/crates/fuzzy/src/matcher.rs +++ b/crates/fuzzy/src/matcher.rs @@ -76,7 +76,7 @@ impl<'a> Matcher<'a> { continue; } - if cancel_flag.load(atomic::Ordering::Relaxed) { + if cancel_flag.load(atomic::Ordering::Acquire) { break; } diff --git a/crates/fuzzy/src/paths.rs b/crates/fuzzy/src/paths.rs index 78030d5f964edb73e0f43f43ad412446dfbc9b34..de6284e957a5320b5eac15ad4ff23a8c4ff5b420 100644 --- a/crates/fuzzy/src/paths.rs +++ b/crates/fuzzy/src/paths.rs @@ -121,7 +121,7 @@ pub fn match_fixed_path_set( pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( candidate_sets: &'a [Set], query: &str, - relative_to: Option>, + relative_to: &Option>, smart_case: bool, max_results: usize, cancel_flag: &AtomicBool, @@ -148,7 +148,6 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( executor .scoped(|scope| { for (segment_idx, results) in segment_results.iter_mut().enumerate() { - let relative_to = relative_to.clone(); scope.spawn(async move { let segment_start = segment_idx * segment_size; let segment_end = segment_start + segment_size; @@ -157,7 +156,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( let mut tree_start = 0; for candidate_set in candidate_sets { - if cancel_flag.load(atomic::Ordering::Relaxed) { + if cancel_flag.load(atomic::Ordering::Acquire) { break; } @@ -209,7 +208,7 @@ pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>( }) .await; - if cancel_flag.load(atomic::Ordering::Relaxed) { + if cancel_flag.load(atomic::Ordering::Acquire) { return Vec::new(); } diff --git a/crates/fuzzy/src/strings.rs b/crates/fuzzy/src/strings.rs index 5bd7b66c0b5352370d010a479e85d01177aac8bd..7c866de05c4566c060fa01a362931e1355cd8c37 100644 --- a/crates/fuzzy/src/strings.rs +++ b/crates/fuzzy/src/strings.rs @@ -189,7 +189,7 @@ where }) .await; - if cancel_flag.load(atomic::Ordering::Relaxed) { + if cancel_flag.load(atomic::Ordering::Acquire) { return Vec::new(); } From c826ce6fc6cf6e269e44d21595026d28d6238f63 Mon Sep 17 00:00:00 2001 From: Nia Date: Fri, 19 Sep 2025 03:51:41 +0200 Subject: [PATCH 03/58] markdown: Use the faster hasher (#38469) Micro-optimisation in the markdown crate to use the faster hasher. 
Release Notes: - N/A --- Cargo.lock | 1 + crates/markdown/Cargo.toml | 1 + crates/markdown/src/markdown.rs | 7 +++--- crates/markdown/src/parser.rs | 32 +++++++++++++++---------- crates/zed/src/zed/component_preview.rs | 2 +- 5 files changed, 25 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5654d206701f5efd5d91cd33c9ab456701b1e667..3acfed9bd7cfa8bc2742bb4f006c38a4f65a1f0e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10418,6 +10418,7 @@ version = "0.1.0" dependencies = [ "assets", "base64 0.22.1", + "collections", "env_logger 0.11.8", "fs", "futures 0.3.31", diff --git a/crates/markdown/Cargo.toml b/crates/markdown/Cargo.toml index 9dfb3fdcd6c38f65357d93e5701cb0b72a6814a7..650338ef4f05485535313e408db64f0b7fe1188d 100644 --- a/crates/markdown/Cargo.toml +++ b/crates/markdown/Cargo.toml @@ -20,6 +20,7 @@ test-support = [ [dependencies] base64.workspace = true +collections.workspace = true futures.workspace = true gpui.workspace = true language.workspace = true diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index c2f8025e32d70cdd9500afdf0a4fc02a334a8521..fdf0f2bbf20190d15b533d02b9f0122746439c89 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -9,8 +9,6 @@ use log::Level; pub use path_range::{LineCol, PathWithRange}; use std::borrow::Cow; -use std::collections::HashMap; -use std::collections::HashSet; use std::iter; use std::mem; use std::ops::Range; @@ -19,6 +17,7 @@ use std::rc::Rc; use std::sync::Arc; use std::time::Duration; +use collections::{HashMap, HashSet}; use gpui::{ AnyElement, App, BorderStyle, Bounds, ClipboardItem, CursorStyle, DispatchPhase, Edges, Entity, FocusHandle, Focusable, FontStyle, FontWeight, GlobalElementId, Hitbox, Hsla, Image, @@ -176,7 +175,7 @@ impl Markdown { options: Options { parse_links_only: false, }, - copied_code_blocks: HashSet::new(), + copied_code_blocks: HashSet::default(), }; this.parse(cx); this @@ -199,7 +198,7 @@ impl Markdown { options: Options { parse_links_only: true, }, - copied_code_blocks: HashSet::new(), + copied_code_blocks: HashSet::default(), }; this.parse(cx); this diff --git a/crates/markdown/src/parser.rs b/crates/markdown/src/parser.rs index d60d34b41e7efc99970f72b15a8ea9c4c79eb6f9..1b4d5b5755c0b825124f37f68466bae7c0838b1a 100644 --- a/crates/markdown/src/parser.rs +++ b/crates/markdown/src/parser.rs @@ -4,7 +4,9 @@ pub use pulldown_cmark::TagEnd as MarkdownTagEnd; use pulldown_cmark::{ Alignment, CowStr, HeadingLevel, LinkType, MetadataBlockKind, Options, Parser, }; -use std::{collections::HashSet, ops::Range, path::Path, sync::Arc}; +use std::{ops::Range, path::Path, sync::Arc}; + +use collections::HashSet; use crate::path_range::PathWithRange; @@ -26,8 +28,8 @@ pub fn parse_markdown( HashSet>, ) { let mut events = Vec::new(); - let mut language_names = HashSet::new(); - let mut language_paths = HashSet::new(); + let mut language_names = HashSet::default(); + let mut language_paths = HashSet::default(); let mut within_link = false; let mut within_metadata = false; let mut parser = Parser::new_ext(text, PARSE_OPTIONS) @@ -579,8 +581,8 @@ mod tests { (30..37, Text), (30..37, End(MarkdownTagEnd::Paragraph)) ], - HashSet::new(), - HashSet::new() + HashSet::default(), + HashSet::default() ) ) } @@ -613,8 +615,8 @@ mod tests { (46..51, Text), (0..51, End(MarkdownTagEnd::Paragraph)) ], - HashSet::new(), - HashSet::new() + HashSet::default(), + HashSet::default() ) ); } @@ -670,8 +672,8 @@ mod tests { (43..53, SubstitutedText("–––––".into())), 
(0..53, End(MarkdownTagEnd::Paragraph)) ], - HashSet::new(), - HashSet::new() + HashSet::default(), + HashSet::default() ) ) } @@ -695,8 +697,12 @@ mod tests { (8..34, Text), (0..37, End(MarkdownTagEnd::CodeBlock)), ], - HashSet::from(["rust".into()]), - HashSet::new() + { + let mut h = HashSet::default(); + h.insert("rust".into()); + h + }, + HashSet::default() ) ); assert_eq!( @@ -716,8 +722,8 @@ mod tests { (4..16, Text), (4..16, End(MarkdownTagEnd::CodeBlock)) ], - HashSet::new(), - HashSet::new() + HashSet::default(), + HashSet::default() ) ); } diff --git a/crates/zed/src/zed/component_preview.rs b/crates/zed/src/zed/component_preview.rs index 176b176d59b23ed7f605988cf682c9d52dfdb95b..7a287cf3d83f24e7f4d42221bda420053a975860 100644 --- a/crates/zed/src/zed/component_preview.rs +++ b/crates/zed/src/zed/component_preview.rs @@ -216,7 +216,7 @@ impl ComponentPreview { } fn scope_ordered_entries(&self) -> Vec { - use std::collections::HashMap; + use collections::HashMap; let mut scope_groups: HashMap< ComponentScope, From e62dd2a0e584154886ff86cc1e9e3e060558b977 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 18 Sep 2025 22:28:17 -0600 Subject: [PATCH 04/58] Tighten up MergeFrom trait (#38473) Release Notes: - N/A --- crates/language/src/language_settings.rs | 2 +- crates/settings/src/merge_from.rs | 96 ++++++++++--------- crates/settings/src/settings_content.rs | 24 +++-- .../settings/src/settings_content/language.rs | 26 ++--- crates/settings/src/settings_store.rs | 16 ++-- crates/settings_macros/src/settings_macros.rs | 76 +++------------ 6 files changed, 97 insertions(+), 143 deletions(-) diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 64744ee99d24a56abb357e0c034e11afa4dae9d0..0e05123033bf92d537eef5eab258db4eac7e7a56 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -582,7 +582,7 @@ impl settings::Settings for AllLanguageSettings { let mut languages = HashMap::default(); for (language_name, settings) in &all_languages.languages.0 { let mut language_settings = all_languages.defaults.clone(); - settings::merge_from::MergeFrom::merge_from(&mut language_settings, Some(settings)); + settings::merge_from::MergeFrom::merge_from(&mut language_settings, settings); languages.insert( LanguageName(language_name.clone()), load_from_content(language_settings), diff --git a/crates/settings/src/merge_from.rs b/crates/settings/src/merge_from.rs index 11c0785bcb466e26de956475fb5bd4f9821c2790..c12994786ff5fadb6686c6ab1b93d9700195eb2a 100644 --- a/crates/settings/src/merge_from.rs +++ b/crates/settings/src/merge_from.rs @@ -1,29 +1,37 @@ -use std::rc::Rc; - /// Trait for recursively merging settings structures. /// -/// This trait allows settings objects to be merged from optional sources, -/// where `None` values are ignored and `Some` values override existing values. +/// When Zed starts it loads settinsg from `default.json` to initialize +/// everything. These may be further refined by loading the user's settings, +/// and any settings profiles; and then further refined by loading any +/// local project settings. +/// +/// The default behaviour of merging is: +/// * For objects with named keys (HashMap, structs, etc.). The values are merged deeply +/// (so if the default settings has languages.JSON.prettier.allowed = true, and the user's settings has +/// languages.JSON.tab_size = 4; the merged settings file will have both settings). 
+/// * For options, a None value is ignored, but Some values are merged recursively. +/// * For other types (including Vec), a merge overwrites the current value. /// -/// HashMaps, structs and similar types are merged by combining their contents key-wise, -/// but all other types (including Vecs) are last-write-wins. -/// (Though see also ExtendingVec and SaturatingBool) +/// If you want to break the rules you can (e.g. ExtendingVec, or SaturatingBool). #[allow(unused)] pub trait MergeFrom { + /// Merge from a source of the same type. + fn merge_from(&mut self, other: &Self); + /// Merge from an optional source of the same type. - /// If `other` is `None`, no changes are made. - /// If `other` is `Some(value)`, fields from `value` are merged into `self`. - fn merge_from(&mut self, other: Option<&Self>); + fn merge_from_option(&mut self, other: Option<&Self>) { + if let Some(other) = other { + self.merge_from(other); + } + } } macro_rules! merge_from_overwrites { ($($type:ty),+) => { $( impl MergeFrom for $type { - fn merge_from(&mut self, other: Option<&Self>) { - if let Some(value) = other { - *self = value.clone(); - } + fn merge_from(&mut self, other: &Self) { + *self = other.clone(); } } )+ @@ -51,25 +59,41 @@ merge_from_overwrites!( gpui::FontFeatures ); -impl MergeFrom for Vec { - fn merge_from(&mut self, other: Option<&Self>) { - if let Some(other) = other { - *self = other.clone() +impl MergeFrom for Option { + fn merge_from(&mut self, other: &Self) { + let Some(other) = other else { + return; + }; + if let Some(this) = self { + this.merge_from(other); + } else { + self.replace(other.clone()); } } } +impl MergeFrom for Vec { + fn merge_from(&mut self, other: &Self) { + *self = other.clone() + } +} + +impl MergeFrom for Box { + fn merge_from(&mut self, other: &Self) { + self.as_mut().merge_from(other.as_ref()) + } +} + // Implementations for collections that extend/merge their contents impl MergeFrom for collections::HashMap where K: Clone + std::hash::Hash + Eq, V: Clone + MergeFrom, { - fn merge_from(&mut self, other: Option<&Self>) { - let Some(other) = other else { return }; + fn merge_from(&mut self, other: &Self) { for (k, v) in other { if let Some(existing) = self.get_mut(k) { - existing.merge_from(Some(v)); + existing.merge_from(v); } else { self.insert(k.clone(), v.clone()); } @@ -82,11 +106,10 @@ where K: Clone + std::hash::Hash + Eq + Ord, V: Clone + MergeFrom, { - fn merge_from(&mut self, other: Option<&Self>) { - let Some(other) = other else { return }; + fn merge_from(&mut self, other: &Self) { for (k, v) in other { if let Some(existing) = self.get_mut(k) { - existing.merge_from(Some(v)); + existing.merge_from(v); } else { self.insert(k.clone(), v.clone()); } @@ -100,11 +123,10 @@ where // Q: ?Sized + std::hash::Hash + collections::Equivalent + Eq, V: Clone + MergeFrom, { - fn merge_from(&mut self, other: Option<&Self>) { - let Some(other) = other else { return }; + fn merge_from(&mut self, other: &Self) { for (k, v) in other { if let Some(existing) = self.get_mut(k) { - existing.merge_from(Some(v)); + existing.merge_from(v); } else { self.insert(k.clone(), v.clone()); } @@ -116,8 +138,7 @@ impl MergeFrom for collections::BTreeSet where T: Clone + Ord, { - fn merge_from(&mut self, other: Option<&Self>) { - let Some(other) = other else { return }; + fn merge_from(&mut self, other: &Self) { for item in other { self.insert(item.clone()); } @@ -128,8 +149,7 @@ impl MergeFrom for collections::HashSet where T: Clone + std::hash::Hash + Eq, { - fn merge_from(&mut self, 
other: Option<&Self>) { - let Some(other) = other else { return }; + fn merge_from(&mut self, other: &Self) { for item in other { self.insert(item.clone()); } @@ -137,13 +157,12 @@ where } impl MergeFrom for serde_json::Value { - fn merge_from(&mut self, other: Option<&Self>) { - let Some(other) = other else { return }; + fn merge_from(&mut self, other: &Self) { match (self, other) { (serde_json::Value::Object(this), serde_json::Value::Object(other)) => { for (k, v) in other { if let Some(existing) = this.get_mut(k) { - existing.merge_from(other.get(k)); + existing.merge_from(v); } else { this.insert(k.clone(), v.clone()); } @@ -153,12 +172,3 @@ impl MergeFrom for serde_json::Value { } } } - -impl MergeFrom for Rc { - fn merge_from(&mut self, other: Option<&Self>) { - let Some(other) = other else { return }; - let mut this: T = self.as_ref().clone(); - this.merge_from(Some(other.as_ref())); - *self = Rc::new(this) - } -} diff --git a/crates/settings/src/settings_content.rs b/crates/settings/src/settings_content.rs index 2ef42d8ebd730343f749d3e2e48055a2d02819ad..43402cae0e6c723b4cc2e94f28c1ba7d0c61c928 100644 --- a/crates/settings/src/settings_content.rs +++ b/crates/settings/src/settings_content.rs @@ -827,6 +827,14 @@ pub struct ReplSettingsContent { } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] +/// An ExtendingVec in the settings can only accumulate new values. +/// +/// This is useful for things like private files where you only want +/// to allow new values to be added. +/// +/// Consider using a HashMap instead of this type +/// (like auto_install_extensions) so that user settings files can both add +/// and remove values from the set. pub struct ExtendingVec(pub Vec); impl Into> for ExtendingVec { @@ -841,13 +849,15 @@ impl From> for ExtendingVec { } impl merge_from::MergeFrom for ExtendingVec { - fn merge_from(&mut self, other: Option<&Self>) { - if let Some(other) = other { - self.0.extend_from_slice(other.0.as_slice()); - } + fn merge_from(&mut self, other: &Self) { + self.0.extend_from_slice(other.0.as_slice()); } } +/// A SaturatingBool in the settings can only ever be set to true, +/// later attempts to set it to false will be ignored. +/// +/// Used by `disable_ai`. 
#[derive(Debug, Default, Copy, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] pub struct SaturatingBool(pub bool); @@ -858,9 +868,7 @@ impl From for SaturatingBool { } impl merge_from::MergeFrom for SaturatingBool { - fn merge_from(&mut self, other: Option<&Self>) { - if let Some(other) = other { - self.0 |= other.0 - } + fn merge_from(&mut self, other: &Self) { + self.0 |= other.0 } } diff --git a/crates/settings/src/settings_content/language.rs b/crates/settings/src/settings_content/language.rs index ef435d638359825128729d0c024cde8e5c5613c8..6052afee671edba49e05b56ddef147a01866e364 100644 --- a/crates/settings/src/settings_content/language.rs +++ b/crates/settings/src/settings_content/language.rs @@ -34,37 +34,27 @@ pub struct AllLanguageSettingsContent { pub file_types: HashMap, ExtendingVec>, } -fn merge_option(this: &mut Option, other: Option<&T>) { - let Some(other) = other else { return }; - if let Some(this) = this { - this.merge_from(Some(other)); - } else { - this.replace(other.clone()); - } -} - impl merge_from::MergeFrom for AllLanguageSettingsContent { - fn merge_from(&mut self, other: Option<&Self>) { - let Some(other) = other else { return }; - self.file_types.merge_from(Some(&other.file_types)); - merge_option(&mut self.features, other.features.as_ref()); - merge_option(&mut self.edit_predictions, other.edit_predictions.as_ref()); + fn merge_from(&mut self, other: &Self) { + self.file_types.merge_from(&other.file_types); + self.features.merge_from(&other.features); + self.edit_predictions.merge_from(&other.edit_predictions); // A user's global settings override the default global settings and // all default language-specific settings. // - self.defaults.merge_from(Some(&other.defaults)); + self.defaults.merge_from(&other.defaults); for language_settings in self.languages.0.values_mut() { - language_settings.merge_from(Some(&other.defaults)); + language_settings.merge_from(&other.defaults); } // A user's language-specific settings override default language-specific settings. 
for (language_name, user_language_settings) in &other.languages.0 { if let Some(existing) = self.languages.0.get_mut(language_name) { - existing.merge_from(Some(&user_language_settings)); + existing.merge_from(&user_language_settings); } else { let mut new_settings = self.defaults.clone(); - new_settings.merge_from(Some(&user_language_settings)); + new_settings.merge_from(&user_language_settings); self.languages.0.insert(language_name.clone(), new_settings); } diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index fe2a5cfdfc6493cf3ef374a66c389022748e088b..dc703e50f1de43aee8059e144dc4cb0815b3472d 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -870,15 +870,15 @@ impl SettingsStore { if changed_local_path.is_none() { let mut merged = self.default_settings.as_ref().clone(); - merged.merge_from(self.extension_settings.as_deref()); - merged.merge_from(self.global_settings.as_deref()); + merged.merge_from_option(self.extension_settings.as_deref()); + merged.merge_from_option(self.global_settings.as_deref()); if let Some(user_settings) = self.user_settings.as_ref() { - merged.merge_from(Some(&user_settings.content)); - merged.merge_from(user_settings.for_release_channel()); - merged.merge_from(user_settings.for_os()); - merged.merge_from(user_settings.for_profile(cx)); + merged.merge_from(&user_settings.content); + merged.merge_from_option(user_settings.for_release_channel()); + merged.merge_from_option(user_settings.for_os()); + merged.merge_from_option(user_settings.for_profile(cx)); } - merged.merge_from(self.server_settings.as_deref()); + merged.merge_from_option(self.server_settings.as_deref()); self.merged_settings = Rc::new(merged); for setting_value in self.setting_values.values_mut() { @@ -906,7 +906,7 @@ impl SettingsStore { } else { self.merged_settings.as_ref().clone() }; - merged_local_settings.merge_from(Some(local_settings)); + merged_local_settings.merge_from(local_settings); project_settings_stack.push(merged_local_settings); diff --git a/crates/settings_macros/src/settings_macros.rs b/crates/settings_macros/src/settings_macros.rs index 33c136b1f2b3e4bec3528d4dff632e05119bc516..1a7c391847e5093754f241ffccb079cc5ddd1a6b 100644 --- a/crates/settings_macros/src/settings_macros.rs +++ b/crates/settings_macros/src/settings_macros.rs @@ -1,13 +1,11 @@ use proc_macro::TokenStream; use quote::quote; -use syn::{Data, DeriveInput, Fields, Type, parse_macro_input}; +use syn::{Data, DeriveInput, Fields, parse_macro_input}; /// Derives the `MergeFrom` trait for a struct. /// /// This macro automatically implements `MergeFrom` by calling `merge_from` -/// on all fields in the struct. For `Option` fields, it merges by taking -/// the `other` value when `self` is `None`. For other types, it recursively -/// calls `merge_from` on the field. +/// on all fields in the struct. /// /// # Example /// @@ -30,61 +28,25 @@ pub fn derive_merge_from(input: TokenStream) -> TokenStream { Fields::Named(fields) => { let field_merges = fields.named.iter().map(|field| { let field_name = &field.ident; - let field_type = &field.ty; - - if is_option_type(field_type) { - // For Option fields, merge by taking the other value if self is None - quote! 
{ - if let Some(other_value) = other.#field_name.as_ref() { - if self.#field_name.is_none() { - self.#field_name = Some(other_value.clone()); - } else if let Some(self_value) = self.#field_name.as_mut() { - self_value.merge_from(Some(other_value)); - } - } - } - } else { - // For non-Option fields, recursively call merge_from - quote! { - self.#field_name.merge_from(Some(&other.#field_name)); - } + quote! { + self.#field_name.merge_from(&other.#field_name); } }); quote! { - if let Some(other) = other { - #(#field_merges)* - } + #(#field_merges)* } } Fields::Unnamed(fields) => { - let field_merges = fields.unnamed.iter().enumerate().map(|(i, field)| { + let field_merges = fields.unnamed.iter().enumerate().map(|(i, _)| { let field_index = syn::Index::from(i); - let field_type = &field.ty; - - if is_option_type(field_type) { - // For Option fields, merge by taking the other value if self is None - quote! { - if let Some(other_value) = other.#field_index.as_ref() { - if self.#field_index.is_none() { - self.#field_index = Some(other_value.clone()); - } else if let Some(self_value) = self.#field_index.as_mut() { - self_value.merge_from(Some(other_value)); - } - } - } - } else { - // For non-Option fields, recursively call merge_from - quote! { - self.#field_index.merge_from(Some(&other.#field_index)); - } + quote! { + self.#field_index.merge_from(&other.#field_index); } }); quote! { - if let Some(other) = other { - #(#field_merges)* - } + #(#field_merges)* } } Fields::Unit => { @@ -95,9 +57,7 @@ pub fn derive_merge_from(input: TokenStream) -> TokenStream { }, Data::Enum(_) => { quote! { - if let Some(other) = other { - *self = other.clone(); - } + *self = other.clone(); } } Data::Union(_) => { @@ -107,7 +67,7 @@ pub fn derive_merge_from(input: TokenStream) -> TokenStream { let expanded = quote! { impl #impl_generics crate::merge_from::MergeFrom for #name #ty_generics #where_clause { - fn merge_from(&mut self, other: ::core::option::Option<&Self>) { + fn merge_from(&mut self, other: &Self) { use crate::merge_from::MergeFrom as _; #merge_body } @@ -116,17 +76,3 @@ pub fn derive_merge_from(input: TokenStream) -> TokenStream { TokenStream::from(expanded) } - -/// Check if a type is `Option` -fn is_option_type(ty: &Type) -> bool { - match ty { - Type::Path(type_path) => { - if let Some(segment) = type_path.path.segments.last() { - segment.ident == "Option" - } else { - false - } - } - _ => false, - } -} From 66f2fda6252fcab00f5eb8de5fe04ccc2fd74f31 Mon Sep 17 00:00:00 2001 From: jneem Date: Fri, 19 Sep 2025 04:42:04 -0500 Subject: [PATCH 05/58] helix: Initial support for helix-mode paste (#37963) This is a redo of #29776. I went for a separate function -- instead of adding a bunch of conditions to `vim::Paste` -- because there were quite a few differences. 
Release Notes: - Added a `vim::HelixPaste` command that imitates Helix's paste behavior --------- Co-authored-by: Jakub Konka --- assets/keymaps/vim.json | 2 + crates/vim/src/helix.rs | 2 + crates/vim/src/helix/paste.rs | 413 ++++++++++++++++++++++++++++++++++ 3 files changed, 417 insertions(+) create mode 100644 crates/vim/src/helix/paste.rs diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 817198659657814dcc597926d689063ae2182c78..590e84cf7fc10f7af5dd317bc114b75390414e4f 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -433,6 +433,8 @@ "h": "vim::WrappingLeft", "l": "vim::WrappingRight", "y": "vim::HelixYank", + "p": "vim::HelixPaste", + "shift-p": ["vim::HelixPaste", { "before": true }], "alt-;": "vim::OtherEnd", "ctrl-r": "vim::Redo", "f": ["vim::PushFindForward", { "before": false, "multiline": true }], diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index cc235d67ae6efcae2fb5a5c5d899b9f7776cbda4..ec1618311f8b8e16b71a39fc1d29b5c60eb49c96 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -1,5 +1,6 @@ mod boundary; mod object; +mod paste; mod select; use editor::display_map::DisplaySnapshot; @@ -40,6 +41,7 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { Vim::action(editor, cx, Vim::helix_append); Vim::action(editor, cx, Vim::helix_yank); Vim::action(editor, cx, Vim::helix_goto_last_modification); + Vim::action(editor, cx, Vim::helix_paste); } impl Vim { diff --git a/crates/vim/src/helix/paste.rs b/crates/vim/src/helix/paste.rs new file mode 100644 index 0000000000000000000000000000000000000000..ecfdaa499257ad91d8518f488be9a4d4dbb51f1c --- /dev/null +++ b/crates/vim/src/helix/paste.rs @@ -0,0 +1,413 @@ +use editor::{ToOffset, movement}; +use gpui::{Action, Context, Window}; +use schemars::JsonSchema; +use serde::Deserialize; + +use crate::{Vim, state::Mode}; + +/// Pastes text from the specified register at the cursor position. +#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)] +#[action(namespace = vim)] +#[serde(deny_unknown_fields)] +pub struct HelixPaste { + #[serde(default)] + before: bool, +} + +impl Vim { + pub fn helix_paste( + &mut self, + action: &HelixPaste, + window: &mut Window, + cx: &mut Context, + ) { + self.record_current_action(cx); + self.store_visual_marks(window, cx); + let count = Vim::take_count(cx).unwrap_or(1); + // TODO: vim paste calls take_forced_motion here, but I don't know what that does + // (none of the other helix_ methods call it) + + self.update_editor(cx, |vim, editor, cx| { + editor.transact(window, cx, |editor, window, cx| { + editor.set_clip_at_line_ends(false, cx); + + let selected_register = vim.selected_register.take(); + + let Some((text, clipboard_selections)) = Vim::update_globals(cx, |globals, cx| { + globals.read_register(selected_register, Some(editor), cx) + }) + .and_then(|reg| { + (!reg.text.is_empty()) + .then_some(reg.text) + .zip(reg.clipboard_selections) + }) else { + return; + }; + + let (display_map, current_selections) = editor.selections.all_adjusted_display(cx); + + // The clipboard can have multiple selections, and there can + // be multiple selections. Helix zips them together, so the first + // clipboard entry gets pasted at the first selection, the second + // entry gets pasted at the second selection, and so on. If there + // are more clipboard selections than selections, the extra ones + // don't get pasted anywhere. 
If there are more selections than + // clipboard selections, the last clipboard selection gets + // pasted at all remaining selections. + + let mut edits = Vec::new(); + let mut new_selections = Vec::new(); + let mut start_offset = 0; + + let mut replacement_texts: Vec = Vec::new(); + + for ix in 0..current_selections.len() { + let to_insert = if let Some(clip_sel) = clipboard_selections.get(ix) { + let end_offset = start_offset + clip_sel.len; + let text = text[start_offset..end_offset].to_string(); + start_offset = end_offset + 1; + text + } else if let Some(last_text) = replacement_texts.last() { + // We have more current selections than clipboard selections: repeat the last one. + last_text.to_owned() + } else { + text.to_string() + }; + replacement_texts.push(to_insert); + } + + let line_mode = replacement_texts.iter().any(|text| text.ends_with('\n')); + + for (to_insert, sel) in replacement_texts.into_iter().zip(current_selections) { + // Helix doesn't care about the head/tail of the selection. + // Pasting before means pasting before the whole selection. + let display_point = if line_mode { + if action.before { + movement::line_beginning(&display_map, sel.start, false) + } else if sel.end.column() == 0 { + sel.end + } else { + movement::right( + &display_map, + movement::line_end(&display_map, sel.end, false), + ) + } + } else if action.before { + sel.start + } else if sel.start == sel.end { + // Helix and Zed differ in how they understand + // single-point cursors. In Helix, a single-point cursor + // is "on top" of some character, and pasting after that + // cursor means that the pasted content should go after + // that character. (If the cursor is at the end of a + // line, the pasted content goes on the next line.) + movement::right(&display_map, sel.end) + } else { + sel.end + }; + let point = display_point.to_point(&display_map); + let anchor = if action.before { + display_map.buffer_snapshot.anchor_after(point) + } else { + display_map.buffer_snapshot.anchor_before(point) + }; + edits.push((point..point, to_insert.repeat(count))); + new_selections.push((anchor, to_insert.len() * count)); + } + + editor.edit(edits, cx); + + editor.change_selections(Default::default(), window, cx, |s| { + let snapshot = s.buffer().clone(); + s.select_ranges(new_selections.into_iter().map(|(anchor, len)| { + let offset = anchor.to_offset(&snapshot); + if action.before { + offset.saturating_sub(len)..offset + } else { + offset..(offset + len) + } + })); + }) + }); + }); + + self.switch_mode(Mode::HelixNormal, true, window, cx); + } +} + +#[cfg(test)] +mod test { + use indoc::indoc; + + use crate::{state::Mode, test::VimTestContext}; + + #[gpui::test] + async fn test_paste(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + cx.set_state( + indoc! {" + The «quiˇ»ck brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("y w p"); + + cx.assert_state( + indoc! {" + The quick «quiˇ»brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + // Pasting before the selection: + cx.set_state( + indoc! {" + The quick brown + fox «jumpsˇ» over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("shift-p"); + cx.assert_state( + indoc! 
{" + The quick brown + fox «quiˇ»jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + } + + #[gpui::test] + async fn test_point_selection_paste(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + cx.set_state( + indoc! {" + The quiˇck brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("y"); + + // Pasting before the selection: + cx.set_state( + indoc! {" + The quick brown + fox jumpsˇ over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("shift-p"); + cx.assert_state( + indoc! {" + The quick brown + fox jumps«cˇ» over + the lazy dog."}, + Mode::HelixNormal, + ); + + // Pasting after the selection: + cx.set_state( + indoc! {" + The quick brown + fox jumpsˇ over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("p"); + cx.assert_state( + indoc! {" + The quick brown + fox jumps «cˇ»over + the lazy dog."}, + Mode::HelixNormal, + ); + + // Pasting after the selection at the end of a line: + cx.set_state( + indoc! {" + The quick brown + fox jumps overˇ + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("p"); + cx.assert_state( + indoc! {" + The quick brown + fox jumps over + «cˇ»the lazy dog."}, + Mode::HelixNormal, + ); + } + + #[gpui::test] + async fn test_multi_cursor_paste(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + // Select two blocks of text. + cx.set_state( + indoc! {" + The «quiˇ»ck brown + fox ju«mpsˇ» over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("y"); + + // Only one cursor: only the first block gets pasted. + cx.set_state( + indoc! {" + ˇThe quick brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("shift-p"); + cx.assert_state( + indoc! {" + «quiˇ»The quick brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + // Two cursors: both get pasted. + cx.set_state( + indoc! {" + ˇThe ˇquick brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("shift-p"); + cx.assert_state( + indoc! {" + «quiˇ»The «mpsˇ»quick brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + // Three cursors: the second yanked block is duplicated. + cx.set_state( + indoc! {" + ˇThe ˇquick brown + fox jumpsˇ over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("shift-p"); + cx.assert_state( + indoc! {" + «quiˇ»The «mpsˇ»quick brown + fox jumps«mpsˇ» over + the lazy dog."}, + Mode::HelixNormal, + ); + + // Again with three cursors. All three should be pasted twice. + cx.set_state( + indoc! {" + ˇThe ˇquick brown + fox jumpsˇ over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("2 shift-p"); + cx.assert_state( + indoc! {" + «quiquiˇ»The «mpsmpsˇ»quick brown + fox jumps«mpsmpsˇ» over + the lazy dog."}, + Mode::HelixNormal, + ); + } + + #[gpui::test] + async fn test_line_mode_paste(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + cx.set_state( + indoc! {" + The quick brow«n + ˇ»fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("y shift-p"); + + cx.assert_state( + indoc! {" + «n + ˇ»The quick brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + // In line mode, if we're in the middle of a line then pasting before pastes on + // the line before. + cx.set_state( + indoc! 
{" + The quick brown + fox jumpsˇ over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("shift-p"); + cx.assert_state( + indoc! {" + The quick brown + «n + ˇ»fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + // In line mode, if we're in the middle of a line then pasting after pastes on + // the line after. + cx.set_state( + indoc! {" + The quick brown + fox jumpsˇ over + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("p"); + cx.assert_state( + indoc! {" + The quick brown + fox jumps over + «n + ˇ»the lazy dog."}, + Mode::HelixNormal, + ); + + // If we're currently at the end of a line, "the line after" + // means right after the cursor. + cx.set_state( + indoc! {" + The quick brown + fox jumps over + ˇthe lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("p"); + cx.assert_state( + indoc! {" + The quick brown + fox jumps over + «n + ˇ»the lazy dog."}, + Mode::HelixNormal, + ); + } +} From 194a13ffb5623e2657a1916db4edab314914720a Mon Sep 17 00:00:00 2001 From: David Kleingeld Date: Fri, 19 Sep 2025 12:31:54 +0200 Subject: [PATCH 06/58] Add denoising & prepare for migrating to new samplerate & channel count (#38493) Uses the previously merged denoising crate (and fixes a bug in it that snug in during refactoring) in the microphone input. The experimental audio path now picks the samplerate and channel count depending on a setting. It can handle incoming streams with both the current (future legacy) and new samplerate & channel count. These are url-encoded into the livekit track name. --- Cargo.lock | 2 +- assets/settings/default.json | 32 +++- crates/audio/Cargo.toml | 1 + crates/audio/src/audio.rs | 65 +++++-- crates/audio/src/audio_settings.rs | 95 +++++++--- crates/audio/src/rodio_ext.rs | 167 +++++++++++++++++- crates/denoise/src/engine.rs | 6 +- crates/denoise/src/lib.rs | 2 +- crates/livekit_client/src/livekit_client.rs | 20 ++- .../src/livekit_client/playback.rs | 89 +++++++--- .../src/livekit_client/playback/source.rs | 40 +++-- crates/settings/src/settings_content.rs | 44 +++-- tooling/workspace-hack/Cargo.toml | 14 +- 13 files changed, 454 insertions(+), 123 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3acfed9bd7cfa8bc2742bb4f006c38a4f65a1f0e..be3e5b04ca18d56024eabe45f14562fca3d56375 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1405,6 +1405,7 @@ dependencies = [ "async-tar", "collections", "crossbeam", + "denoise", "gpui", "libwebrtc", "log", @@ -20742,7 +20743,6 @@ dependencies = [ "nix 0.29.0", "nix 0.30.1", "nom 7.1.3", - "num", "num-bigint", "num-bigint-dig", "num-complex", diff --git a/assets/settings/default.json b/assets/settings/default.json index 091231521470ebec50cf1351a76063e9205a3d24..d469638ab28ea02eb9b7675296ee9582e2de3ccd 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -413,15 +413,33 @@ "experimental.rodio_audio": false, // Requires 'rodio_audio: true' // - // Use the new audio systems automatic gain control for your microphone. - // This affects how loud you sound to others. - "experimental.control_input_volume": false, + // Automatically increase or decrease you microphone's volume. This affects how + // loud you sound to others. + // + // Recommended: off (default) + // Microphones are too quite in zed, until everyone is on experimental + // audio and has auto speaker volume on this will make you very loud + // compared to other speakers. 
+ "experimental.auto_microphone_volume": false, + // Requires 'rodio_audio: true' + // + // Automatically increate or decrease the volume of other call members. + // This only affects how things sound for you. + "experimental.auto_speaker_volume": true, // Requires 'rodio_audio: true' // - // Use the new audio systems automatic gain control on everyone in the - // call. This makes call members who are too quite louder and those who are - // too loud quieter. This only affects how things sound for you. - "experimental.control_output_volume": false + // Remove background noises. Works great for typing, cars, dogs, AC. Does + // not work well on music. + "experimental.denoise": true, + // Requires 'rodio_audio: true' + // + // Use audio parameters compatible with the previous versions of + // experimental audio and non-experimental audio. When this is false you + // will sound strange to anyone not on the latest experimental audio. In + // the future we will migrate by setting this to false + // + // You need to rejoin a call for this setting to apply + "experimental.legacy_audio_compatible": true }, // Scrollbar related settings "scrollbar": { diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml index c083c9a659e50aef37acc2cdfc239696bd469c1e..7f2fed80e2315e51fca7d8477b04885998336632 100644 --- a/crates/audio/Cargo.toml +++ b/crates/audio/Cargo.toml @@ -18,6 +18,7 @@ async-tar.workspace = true collections.workspace = true crossbeam.workspace = true gpui.workspace = true +denoise = { path = "../denoise" } log.workspace = true parking_lot.workspace = true rodio = { workspace = true, features = [ "wav", "playback", "wav_output" ] } diff --git a/crates/audio/src/audio.rs b/crates/audio/src/audio.rs index f60ddb87b9615d2da9c2be248ab397c19a463616..dc4d97a8fa47f11f9120cf5144a37ae6fd94bc2a 100644 --- a/crates/audio/src/audio.rs +++ b/crates/audio/src/audio.rs @@ -9,7 +9,7 @@ mod non_windows_and_freebsd_deps { pub(super) use log::info; pub(super) use parking_lot::Mutex; pub(super) use rodio::cpal::Sample; - pub(super) use rodio::source::{LimitSettings, UniformSourceIterator}; + pub(super) use rodio::source::LimitSettings; pub(super) use std::sync::Arc; } @@ -31,18 +31,20 @@ pub use rodio_ext::RodioExt; use crate::audio_settings::LIVE_SETTINGS; -// NOTE: We used to use WebRTC's mixer which only supported -// 16kHz, 32kHz and 48kHz. As 48 is the most common "next step up" -// for audio output devices like speakers/bluetooth, we just hard-code -// this; and downsample when we need to. +// We are migrating to 16kHz sample rate from 48kHz. In the future +// once we are reasonably sure most users have upgraded we will +// remove the LEGACY parameters. // -// Since most noise cancelling requires 16kHz we will move to -// that in the future. -pub const SAMPLE_RATE: NonZero = nz!(48000); -pub const CHANNEL_COUNT: NonZero = nz!(2); +// We migrate to 16kHz because it is sufficient for speech and required +// by the denoiser and future Speech to Text layers. 
+pub const SAMPLE_RATE: NonZero = nz!(16000); +pub const CHANNEL_COUNT: NonZero = nz!(1); pub const BUFFER_SIZE: usize = // echo canceller and livekit want 10ms of audio (SAMPLE_RATE.get() as usize / 100) * CHANNEL_COUNT.get() as usize; +pub const LEGACY_SAMPLE_RATE: NonZero = nz!(48000); +pub const LEGACY_CHANNEL_COUNT: NonZero = nz!(2); + pub const REPLAY_DURATION: Duration = Duration::from_secs(30); pub fn init(cx: &mut App) { @@ -106,6 +108,11 @@ impl Global for Audio {} impl Audio { fn ensure_output_exists(&mut self) -> Result<&Mixer> { + #[cfg(debug_assertions)] + log::warn!( + "Audio does not sound correct without optimizations. Use a release build to debug audio issues" + ); + if self.output_handle.is_none() { self.output_handle = Some( OutputStreamBuilder::open_default_stream() @@ -160,13 +167,20 @@ impl Audio { let stream = rodio::microphone::MicrophoneBuilder::new() .default_device()? .default_config()? - .prefer_sample_rates([SAMPLE_RATE, SAMPLE_RATE.saturating_mul(nz!(2))]) - // .prefer_channel_counts([nz!(1), nz!(2)]) + .prefer_sample_rates([ + SAMPLE_RATE, // sample rates trivially resamplable to `SAMPLE_RATE` + SAMPLE_RATE.saturating_mul(nz!(2)), + SAMPLE_RATE.saturating_mul(nz!(3)), + SAMPLE_RATE.saturating_mul(nz!(4)), + ]) + .prefer_channel_counts([nz!(1), nz!(2), nz!(3), nz!(4)]) .prefer_buffer_sizes(512..) .open_stream()?; info!("Opened microphone: {:?}", stream.config()); - let (replay, stream) = UniformSourceIterator::new(stream, CHANNEL_COUNT, SAMPLE_RATE) + let (replay, stream) = stream + .possibly_disconnected_channels_to_mono() + .constant_samplerate(SAMPLE_RATE) .limit(LimitSettings::live_performance()) .process_buffer::(move |buffer| { let mut int_buffer: [i16; _] = buffer.map(|s| s.to_sample()); @@ -187,15 +201,28 @@ impl Audio { } } }) - .automatic_gain_control(1.0, 4.0, 0.0, 5.0) + .denoise() + .context("Could not set up denoiser")? 
+ .periodic_access(Duration::from_millis(100), move |denoise| { + denoise.set_enabled(LIVE_SETTINGS.denoise.load(Ordering::Relaxed)); + }) + .automatic_gain_control(1.0, 2.0, 0.0, 5.0) .periodic_access(Duration::from_millis(100), move |agc_source| { - agc_source.set_enabled(LIVE_SETTINGS.control_input_volume.load(Ordering::Relaxed)); + agc_source + .set_enabled(LIVE_SETTINGS.auto_microphone_volume.load(Ordering::Relaxed)); }) .replayable(REPLAY_DURATION)?; voip_parts .replays .add_voip_stream("local microphone".to_string(), replay); + + let stream = if voip_parts.legacy_audio_compatible { + stream.constant_params(LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE) + } else { + stream.constant_params(CHANNEL_COUNT, SAMPLE_RATE) + }; + Ok(stream) } @@ -206,9 +233,10 @@ impl Audio { cx: &mut App, ) -> anyhow::Result<()> { let (replay_source, source) = source - .automatic_gain_control(1.0, 4.0, 0.0, 5.0) + .constant_params(CHANNEL_COUNT, SAMPLE_RATE) + .automatic_gain_control(1.0, 2.0, 0.0, 5.0) .periodic_access(Duration::from_millis(100), move |agc_source| { - agc_source.set_enabled(LIVE_SETTINGS.control_input_volume.load(Ordering::Relaxed)); + agc_source.set_enabled(LIVE_SETTINGS.auto_speaker_volume.load(Ordering::Relaxed)); }) .replayable(REPLAY_DURATION) .expect("REPLAY_DURATION is longer than 100ms"); @@ -269,6 +297,7 @@ impl Audio { pub struct VoipParts { echo_canceller: Arc>, replays: replays::Replays, + legacy_audio_compatible: bool, } #[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))] @@ -277,8 +306,12 @@ impl VoipParts { let (apm, replays) = cx.try_read_default_global::(|audio, _| { (Arc::clone(&audio.echo_canceller), audio.replays.clone()) })?; + let legacy_audio_compatible = + AudioSettings::try_read_global(cx, |settings| settings.legacy_audio_compatible) + .unwrap_or_default(); Ok(Self { + legacy_audio_compatible, echo_canceller: apm, replays, }) diff --git a/crates/audio/src/audio_settings.rs b/crates/audio/src/audio_settings.rs index 2c9db4989efa5edcf4ef84c4e3031b53980fad51..cba7d45c31f4674be6a69c10ab34f00e0b8cbbd1 100644 --- a/crates/audio/src/audio_settings.rs +++ b/crates/audio/src/audio_settings.rs @@ -6,18 +6,38 @@ use settings::{Settings, SettingsStore}; #[derive(Clone, Debug)] pub struct AudioSettings { /// Opt into the new audio system. + /// + /// You need to rejoin a call for this setting to apply pub rodio_audio: bool, // default is false /// Requires 'rodio_audio: true' /// - /// Use the new audio systems automatic gain control for your microphone. - /// This affects how loud you sound to others. - pub control_input_volume: bool, + /// Automatically increase or decrease you microphone's volume. This affects how + /// loud you sound to others. + /// + /// Recommended: off (default) + /// Microphones are too quite in zed, until everyone is on experimental + /// audio and has auto speaker volume on this will make you very loud + /// compared to other speakers. + pub auto_microphone_volume: bool, + /// Requires 'rodio_audio: true' + /// + /// Automatically increate or decrease the volume of other call members. + /// This only affects how things sound for you. + pub auto_speaker_volume: bool, + /// Requires 'rodio_audio: true' + /// + /// Remove background noises. Works great for typing, cars, dogs, AC. Does + /// not work well on music. + pub denoise: bool, /// Requires 'rodio_audio: true' /// - /// Use the new audio systems automatic gain control on everyone in the - /// call. 
This makes call members who are too quite louder and those who are - /// too loud quieter. This only affects how things sound for you. - pub control_output_volume: bool, + /// Use audio parameters compatible with the previous versions of + /// experimental audio and non-experimental audio. When this is false you + /// will sound strange to anyone not on the latest experimental audio. In + /// the future we will migrate by setting this to false + /// + /// You need to rejoin a call for this setting to apply + pub legacy_audio_compatible: bool, } /// Configuration of audio in Zed @@ -25,46 +45,66 @@ impl Settings for AudioSettings { fn from_settings(content: &settings::SettingsContent, _cx: &mut App) -> Self { let audio = &content.audio.as_ref().unwrap(); AudioSettings { - control_input_volume: audio.control_input_volume.unwrap(), - control_output_volume: audio.control_output_volume.unwrap(), rodio_audio: audio.rodio_audio.unwrap(), + auto_microphone_volume: audio.auto_microphone_volume.unwrap(), + auto_speaker_volume: audio.auto_speaker_volume.unwrap(), + denoise: audio.denoise.unwrap(), + legacy_audio_compatible: audio.legacy_audio_compatible.unwrap(), } } - - fn import_from_vscode( - _vscode: &settings::VsCodeSettings, - _current: &mut settings::SettingsContent, - ) { - } } /// See docs on [LIVE_SETTINGS] pub(crate) struct LiveSettings { - pub(crate) control_input_volume: AtomicBool, - pub(crate) control_output_volume: AtomicBool, + pub(crate) auto_microphone_volume: AtomicBool, + pub(crate) auto_speaker_volume: AtomicBool, + pub(crate) denoise: AtomicBool, } impl LiveSettings { pub(crate) fn initialize(&self, cx: &mut App) { cx.observe_global::(move |cx| { - LIVE_SETTINGS.control_input_volume.store( - AudioSettings::get_global(cx).control_input_volume, + LIVE_SETTINGS.auto_microphone_volume.store( + AudioSettings::get_global(cx).auto_microphone_volume, Ordering::Relaxed, ); - LIVE_SETTINGS.control_output_volume.store( - AudioSettings::get_global(cx).control_output_volume, + LIVE_SETTINGS.auto_speaker_volume.store( + AudioSettings::get_global(cx).auto_speaker_volume, Ordering::Relaxed, ); + + let denoise_enabled = AudioSettings::get_global(cx).denoise; + #[cfg(debug_assertions)] + { + static DENOISE_WARNING_SEND: AtomicBool = AtomicBool::new(false); + if denoise_enabled && !DENOISE_WARNING_SEND.load(Ordering::Relaxed) { + DENOISE_WARNING_SEND.store(true, Ordering::Relaxed); + log::warn!("Denoise does not work on debug builds, not enabling") + } + } + #[cfg(not(debug_assertions))] + LIVE_SETTINGS + .denoise + .store(denoise_enabled, Ordering::Relaxed); }) .detach(); let init_settings = AudioSettings::get_global(cx); LIVE_SETTINGS - .control_input_volume - .store(init_settings.control_input_volume, Ordering::Relaxed); + .auto_microphone_volume + .store(init_settings.auto_microphone_volume, Ordering::Relaxed); + LIVE_SETTINGS + .auto_speaker_volume + .store(init_settings.auto_speaker_volume, Ordering::Relaxed); + let denoise_enabled = AudioSettings::get_global(cx).denoise; + #[cfg(debug_assertions)] + if denoise_enabled { + log::warn!("Denoise does not work on debug builds, not enabling") + } + #[cfg(not(debug_assertions))] LIVE_SETTINGS - .control_output_volume - .store(init_settings.control_output_volume, Ordering::Relaxed); + .denoise + .store(denoise_enabled, Ordering::Relaxed); } } @@ -73,6 +113,7 @@ impl LiveSettings { /// real time and must each run in a dedicated OS thread, therefore we can not /// use the background executor. 
pub(crate) static LIVE_SETTINGS: LiveSettings = LiveSettings { - control_input_volume: AtomicBool::new(true), - control_output_volume: AtomicBool::new(true), + auto_microphone_volume: AtomicBool::new(true), + auto_speaker_volume: AtomicBool::new(true), + denoise: AtomicBool::new(true), }; diff --git a/crates/audio/src/rodio_ext.rs b/crates/audio/src/rodio_ext.rs index e80b00e15a8fdbd3fc438b78a9ca45d0902dcef1..af4cc89252dfdc1498471ec7ac09b56d59b62eca 100644 --- a/crates/audio/src/rodio_ext.rs +++ b/crates/audio/src/rodio_ext.rs @@ -1,4 +1,5 @@ use std::{ + num::NonZero, sync::{ Arc, Mutex, atomic::{AtomicBool, Ordering}, @@ -7,12 +8,22 @@ use std::{ }; use crossbeam::queue::ArrayQueue; -use rodio::{ChannelCount, Sample, SampleRate, Source}; +use denoise::{Denoiser, DenoiserError}; +use log::warn; +use rodio::{ + ChannelCount, Sample, SampleRate, Source, conversions::SampleRateConverter, nz, + source::UniformSourceIterator, +}; + +const MAX_CHANNELS: usize = 8; #[derive(Debug, thiserror::Error)] #[error("Replay duration is too short must be >= 100ms")] pub struct ReplayDurationTooShort; +// These all require constant sources (so the span is infinitely long) +// this is not guaranteed by rodio however we know it to be true in all our +// applications. Rodio desperately needs a constant source concept. pub trait RodioExt: Source + Sized { fn process_buffer(self, callback: F) -> ProcessBuffer where @@ -25,6 +36,14 @@ pub trait RodioExt: Source + Sized { duration: Duration, ) -> Result<(Replay, Replayable), ReplayDurationTooShort>; fn take_samples(self, n: usize) -> TakeSamples; + fn denoise(self) -> Result, DenoiserError>; + fn constant_params( + self, + channel_count: ChannelCount, + sample_rate: SampleRate, + ) -> UniformSourceIterator; + fn constant_samplerate(self, sample_rate: SampleRate) -> ConstantSampleRate; + fn possibly_disconnected_channels_to_mono(self) -> ToMono; } impl RodioExt for S { @@ -101,8 +120,149 @@ impl RodioExt for S { left_to_take: n, } } + fn denoise(self) -> Result, DenoiserError> { + let res = Denoiser::try_new(self); + res + } + fn constant_params( + self, + channel_count: ChannelCount, + sample_rate: SampleRate, + ) -> UniformSourceIterator { + UniformSourceIterator::new(self, channel_count, sample_rate) + } + fn constant_samplerate(self, sample_rate: SampleRate) -> ConstantSampleRate { + ConstantSampleRate::new(self, sample_rate) + } + fn possibly_disconnected_channels_to_mono(self) -> ToMono { + ToMono::new(self) + } +} + +pub struct ConstantSampleRate { + inner: SampleRateConverter, + channels: ChannelCount, + sample_rate: SampleRate, +} + +impl ConstantSampleRate { + fn new(source: S, target_rate: SampleRate) -> Self { + let input_sample_rate = source.sample_rate(); + let channels = source.channels(); + let inner = SampleRateConverter::new(source, input_sample_rate, target_rate, channels); + Self { + inner, + channels, + sample_rate: target_rate, + } + } +} + +impl Iterator for ConstantSampleRate { + type Item = rodio::Sample; + + fn next(&mut self) -> Option { + self.inner.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } +} + +impl Source for ConstantSampleRate { + fn current_span_len(&self) -> Option { + None + } + + fn channels(&self) -> ChannelCount { + self.channels + } + + fn sample_rate(&self) -> SampleRate { + self.sample_rate + } + + fn total_duration(&self) -> Option { + None // not supported (not used by us) + } +} + +const TYPICAL_NOISE_FLOOR: Sample = 1e-3; + +/// constant source, only works on a single span +pub 
struct ToMono { + inner: S, + input_channel_count: ChannelCount, + connected_channels: ChannelCount, + /// running mean of second channel 'volume' + means: [f32; MAX_CHANNELS], +} +impl ToMono { + fn new(input: S) -> Self { + let channels = input + .channels() + .min(const { NonZero::::new(MAX_CHANNELS as u16).unwrap() }); + if channels < input.channels() { + warn!("Ignoring input channels {}..", channels.get()); + } + + Self { + connected_channels: channels, + input_channel_count: channels, + inner: input, + means: [TYPICAL_NOISE_FLOOR; MAX_CHANNELS], + } + } +} + +impl Source for ToMono { + fn current_span_len(&self) -> Option { + None + } + + fn channels(&self) -> ChannelCount { + rodio::nz!(1) + } + + fn sample_rate(&self) -> SampleRate { + self.inner.sample_rate() + } + + fn total_duration(&self) -> Option { + self.inner.total_duration() + } +} + +fn update_mean(mean: &mut f32, sample: Sample) { + const HISTORY: f32 = 500.0; + *mean *= (HISTORY - 1.0) / HISTORY; + *mean += sample.abs() / HISTORY; +} + +impl Iterator for ToMono { + type Item = Sample; + + fn next(&mut self) -> Option { + let mut mono_sample = 0f32; + let mut active_channels = 0; + for channel in 0..self.input_channel_count.get() as usize { + let sample = self.inner.next()?; + mono_sample += sample; + + update_mean(&mut self.means[channel], sample); + if self.means[channel] > TYPICAL_NOISE_FLOOR / 10.0 { + active_channels += 1; + } + } + mono_sample /= self.connected_channels.get() as f32; + self.connected_channels = NonZero::new(active_channels).unwrap_or(nz!(1)); + + Some(mono_sample) + } } +/// constant source, only works on a single span pub struct TakeSamples { inner: S, left_to_take: usize, @@ -147,6 +307,7 @@ impl Source for TakeSamples { } } +/// constant source, only works on a single span #[derive(Debug)] struct ReplayQueue { inner: ArrayQueue>, @@ -193,6 +354,7 @@ impl ReplayQueue { } } +/// constant source, only works on a single span pub struct ProcessBuffer where S: Source + Sized, @@ -260,6 +422,7 @@ where } } +/// constant source, only works on a single span pub struct InspectBuffer where S: Source + Sized, @@ -324,6 +487,7 @@ where } } +/// constant source, only works on a single span #[derive(Debug)] pub struct Replayable { inner: S, @@ -375,6 +539,7 @@ impl Source for Replayable { } } +/// constant source, only works on a single span #[derive(Debug)] pub struct Replay { rx: Arc, diff --git a/crates/denoise/src/engine.rs b/crates/denoise/src/engine.rs index 5196b70b5ba02f665385c022a0dfa9cd22c1db9c..be0548c689e3b902342cd1cb6d6d8e29351e8be4 100644 --- a/crates/denoise/src/engine.rs +++ b/crates/denoise/src/engine.rs @@ -138,13 +138,13 @@ impl Engine { const SPECTRUM_INPUT: &str = "input_2"; const MEMORY_INPUT: &str = "input_3"; - let memory_input = + let spectrum = Tensor::from_slice::<_, f32>(&self.in_magnitude, (1, 1, FFT_OUT_SIZE), &Device::Cpu) .expect("the in magnitude has enough elements to fill the Tensor"); let inputs = HashMap::from([ - (MEMORY_INPUT.to_string(), memory_input), - (SPECTRUM_INPUT.to_string(), self.spectral_memory.clone()), + (SPECTRUM_INPUT.to_string(), spectrum), + (MEMORY_INPUT.to_string(), self.spectral_memory.clone()), ]); inputs } diff --git a/crates/denoise/src/lib.rs b/crates/denoise/src/lib.rs index 1422c81a4b915d571d35585447165c04d3695b73..f6cbf0fadf1f216cc6168c2b249f807b557869af 100644 --- a/crates/denoise/src/lib.rs +++ b/crates/denoise/src/lib.rs @@ -84,7 +84,7 @@ impl Denoiser { .spawn(move || { run_neural_denoiser(denoised_tx, input_rx); }) - .unwrap(); + 
.expect("Should be ablet to spawn threads"); Ok(Self { inner: source, diff --git a/crates/livekit_client/src/livekit_client.rs b/crates/livekit_client/src/livekit_client.rs index 45e929cb2ec0bebf054497632d614af1975f6397..04e669869ddbf64ffd92cbcad4bf927bfec55cb5 100644 --- a/crates/livekit_client/src/livekit_client.rs +++ b/crates/livekit_client/src/livekit_client.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use anyhow::{Context as _, Result}; +use anyhow::{Context as _, Result, anyhow}; use audio::AudioSettings; use collections::HashMap; use futures::{SinkExt, channel::mpsc}; @@ -12,7 +12,10 @@ use settings::Settings; mod playback; -use crate::{LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication}; +use crate::{ + LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication, + livekit_client::playback::Speaker, +}; pub use playback::AudioStream; pub(crate) use playback::{RemoteVideoFrame, play_remote_video_track}; @@ -132,11 +135,20 @@ impl Room { track: &RemoteAudioTrack, cx: &mut App, ) -> Result { + let speaker: Speaker = + serde_urlencoded::from_str(&track.0.name()).unwrap_or_else(|_| Speaker { + name: track.0.name(), + is_staff: false, + legacy_audio_compatible: true, + }); + if AudioSettings::get_global(cx).rodio_audio { info!("Using experimental.rodio_audio audio pipeline for output"); - playback::play_remote_audio_track(&track.0, cx) - } else { + playback::play_remote_audio_track(&track.0, speaker, cx) + } else if speaker.legacy_audio_compatible { Ok(self.playback.play_remote_audio_track(&track.0)) + } else { + Err(anyhow!("Client version too old to play audio in call")) } } } diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index df8b5ea54fb1ce11bf871faa912757bbff1fd7f9..b4cd68e08e4a88f9cb248e3b7ac64fbfca4c39de 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -1,6 +1,6 @@ use anyhow::{Context as _, Result}; -use audio::{AudioSettings, CHANNEL_COUNT, SAMPLE_RATE}; +use audio::{AudioSettings, CHANNEL_COUNT, LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE, SAMPLE_RATE}; use cpal::traits::{DeviceTrait, StreamTrait as _}; use futures::channel::mpsc::UnboundedSender; use futures::{Stream, StreamExt as _}; @@ -43,12 +43,17 @@ pub(crate) struct AudioStack { pub(crate) fn play_remote_audio_track( track: &livekit::track::RemoteAudioTrack, + speaker: Speaker, cx: &mut gpui::App, ) -> Result { + let stream = source::LiveKitStream::new( + cx.background_executor(), + track, + speaker.legacy_audio_compatible, + ); + let stop_handle = Arc::new(AtomicBool::new(false)); let stop_handle_clone = stop_handle.clone(); - let stream = source::LiveKitStream::new(cx.background_executor(), track); - let stream = stream .stoppable() .periodic_access(Duration::from_millis(50), move |s| { @@ -57,10 +62,6 @@ pub(crate) fn play_remote_audio_track( } }); - let speaker: Speaker = serde_urlencoded::from_str(&track.name()).unwrap_or_else(|_| Speaker { - name: track.name(), - is_staff: false, - }); audio::Audio::play_voip_stream(stream, speaker.name, speaker.is_staff, cx) .context("Could not play audio")?; @@ -152,17 +153,32 @@ impl AudioStack { is_staff: bool, cx: &AsyncApp, ) -> Result<(crate::LocalAudioTrack, AudioStream)> { - let source = NativeAudioSource::new( - // n.b. this struct's options are always ignored, noise cancellation is provided by apm. 
- AudioSourceOptions::default(), - SAMPLE_RATE.get(), - CHANNEL_COUNT.get().into(), - 10, - ); + let legacy_audio_compatible = + AudioSettings::try_read_global(cx, |setting| setting.legacy_audio_compatible) + .unwrap_or_default(); + + let source = if legacy_audio_compatible { + NativeAudioSource::new( + // n.b. this struct's options are always ignored, noise cancellation is provided by apm. + AudioSourceOptions::default(), + LEGACY_SAMPLE_RATE.get(), + LEGACY_CHANNEL_COUNT.get().into(), + 10, + ) + } else { + NativeAudioSource::new( + // n.b. this struct's options are always ignored, noise cancellation is provided by apm. + AudioSourceOptions::default(), + SAMPLE_RATE.get(), + CHANNEL_COUNT.get().into(), + 10, + ) + }; let track_name = serde_urlencoded::to_string(Speaker { name: user_name, is_staff, + legacy_audio_compatible, }) .context("Could not encode user information in track name")?; @@ -186,22 +202,32 @@ impl AudioStack { let capture_task = if rodio_pipeline { info!("Using experimental.rodio_audio audio pipeline"); let voip_parts = audio::VoipParts::new(cx)?; - // Audio needs to run real-time and should never be paused. That is why we are using a - // normal std::thread and not a background task + // Audio needs to run real-time and should never be paused. That is + // why we are using a normal std::thread and not a background task thread::Builder::new() - .name("AudioCapture".to_string()) + .name("MicrophoneToLivekit".to_string()) .spawn(move || { // microphone is non send on mac - let microphone = audio::Audio::open_microphone(voip_parts)?; + let microphone = match audio::Audio::open_microphone(voip_parts) { + Ok(m) => m, + Err(e) => { + log::error!("Could not open microphone: {e}"); + return; + } + }; send_to_livekit(frame_tx, microphone); - Ok::<(), anyhow::Error>(()) }) - .unwrap(); + .expect("should be able to spawn threads"); Task::ready(Ok(())) } else { self.executor.spawn(async move { - Self::capture_input(apm, frame_tx, SAMPLE_RATE.get(), CHANNEL_COUNT.get().into()) - .await + Self::capture_input( + apm, + frame_tx, + LEGACY_SAMPLE_RATE.get(), + LEGACY_CHANNEL_COUNT.get().into(), + ) + .await }) }; @@ -389,25 +415,30 @@ impl AudioStack { } #[derive(Serialize, Deserialize)] -struct Speaker { - name: String, - is_staff: bool, +pub struct Speaker { + pub name: String, + pub is_staff: bool, + pub legacy_audio_compatible: bool, } fn send_to_livekit(frame_tx: UnboundedSender>, mut microphone: impl Source) { use cpal::Sample; + let sample_rate = microphone.sample_rate().get(); + let num_channels = microphone.channels().get() as u32; + let buffer_size = sample_rate / 100 * num_channels; + loop { let sampled: Vec<_> = microphone .by_ref() - .take(audio::BUFFER_SIZE) + .take(buffer_size as usize) .map(|s| s.to_sample()) .collect(); if frame_tx .unbounded_send(AudioFrame { - sample_rate: SAMPLE_RATE.get(), - num_channels: CHANNEL_COUNT.get() as u32, - samples_per_channel: sampled.len() as u32 / CHANNEL_COUNT.get() as u32, + sample_rate, + num_channels, + samples_per_channel: sampled.len() as u32 / num_channels, data: Cow::Owned(sampled), }) .is_err() diff --git a/crates/livekit_client/src/livekit_client/playback/source.rs b/crates/livekit_client/src/livekit_client/playback/source.rs index f605b3d517cd816491f0eceadce5ac778ef75d21..cde4b19fda2e053346ad535e7c75b2abda60431a 100644 --- a/crates/livekit_client/src/livekit_client/playback/source.rs +++ b/crates/livekit_client/src/livekit_client/playback/source.rs @@ -3,17 +3,19 @@ use std::num::NonZero; use futures::StreamExt; use 
libwebrtc::{audio_stream::native::NativeAudioStream, prelude::AudioFrame}; use livekit::track::RemoteAudioTrack; -use rodio::{Source, buffer::SamplesBuffer, conversions::SampleTypeConverter, nz}; +use rodio::{ + ChannelCount, SampleRate, Source, buffer::SamplesBuffer, conversions::SampleTypeConverter, +}; -use audio::{CHANNEL_COUNT, SAMPLE_RATE}; +use audio::{CHANNEL_COUNT, LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE, SAMPLE_RATE}; fn frame_to_samplesbuffer(frame: AudioFrame) -> SamplesBuffer { let samples = frame.data.iter().copied(); let samples = SampleTypeConverter::<_, _>::new(samples); let samples: Vec = samples.collect(); SamplesBuffer::new( - nz!(2), // frame always has two channels - NonZero::new(frame.sample_rate).expect("audio frame sample rate is nonzero"), + NonZero::new(frame.num_channels as u16).expect("zero channels is nonsense"), + NonZero::new(frame.sample_rate).expect("samplerate zero is nonsense"), samples, ) } @@ -22,14 +24,26 @@ pub struct LiveKitStream { // shared_buffer: SharedBuffer, inner: rodio::queue::SourcesQueueOutput, _receiver_task: gpui::Task<()>, + channel_count: ChannelCount, + sample_rate: SampleRate, } impl LiveKitStream { - pub fn new(executor: &gpui::BackgroundExecutor, track: &RemoteAudioTrack) -> Self { + pub fn new( + executor: &gpui::BackgroundExecutor, + track: &RemoteAudioTrack, + legacy: bool, + ) -> Self { + let (channel_count, sample_rate) = if legacy { + (LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE) + } else { + (CHANNEL_COUNT, SAMPLE_RATE) + }; + let mut stream = NativeAudioStream::new( track.rtc_track(), - SAMPLE_RATE.get() as i32, - CHANNEL_COUNT.get().into(), + sample_rate.get() as i32, + channel_count.get().into(), ); let (queue_input, queue_output) = rodio::queue::queue(true); // spawn rtc stream @@ -45,6 +59,8 @@ impl LiveKitStream { LiveKitStream { _receiver_task: receiver_task, inner: queue_output, + sample_rate, + channel_count, } } } @@ -63,17 +79,11 @@ impl Source for LiveKitStream { } fn channels(&self) -> rodio::ChannelCount { - // This must be hardcoded because the playback source assumes constant - // sample rate and channel count. The queue upon which this is build - // will however report different counts and rates. Even though we put in - // only items with our (constant) CHANNEL_COUNT & SAMPLE_RATE this will - // play silence on one channel and at 44100 which is not what our - // constants are. - CHANNEL_COUNT + self.channel_count } fn sample_rate(&self) -> rodio::SampleRate { - SAMPLE_RATE // see comment on channels + self.sample_rate } fn total_duration(&self) -> Option { diff --git a/crates/settings/src/settings_content.rs b/crates/settings/src/settings_content.rs index 43402cae0e6c723b4cc2e94f28c1ba7d0c61c928..b47755be58445e8ba335c6ea64416265d176fc17 100644 --- a/crates/settings/src/settings_content.rs +++ b/crates/settings/src/settings_content.rs @@ -291,21 +291,43 @@ pub enum TitleBarVisibility { #[derive(Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] pub struct AudioSettingsContent { /// Opt into the new audio system. - #[serde(rename = "experimental.rodio_audio", default)] - pub rodio_audio: Option, + /// + /// You need to rejoin a call for this setting to apply + #[serde(rename = "experimental.rodio_audio")] + pub rodio_audio: Option, // default is false + /// Requires 'rodio_audio: true' + /// + /// Automatically increase or decrease you microphone's volume. This affects how + /// loud you sound to others. 
+ /// + /// Recommended: off (default) + /// Microphones are too quiet in Zed; until everyone is on experimental + /// audio and has auto speaker volume on, this will make you very loud + /// compared to other speakers. + #[serde(rename = "experimental.auto_microphone_volume")] + pub auto_microphone_volume: Option<bool>, /// Requires 'rodio_audio: true' /// - /// Use the new audio systems automatic gain control for your microphone. - /// This affects how loud you sound to others. - #[serde(rename = "experimental.control_input_volume", default)] - pub control_input_volume: Option<bool>, + /// Automatically increase or decrease the volume of other call members. + /// This only affects how things sound for you. + #[serde(rename = "experimental.auto_speaker_volume")] + pub auto_speaker_volume: Option<bool>, /// Requires 'rodio_audio: true' /// - /// Use the new audio systems automatic gain control on everyone in the - /// call. This makes call members who are too quite louder and those who are - /// too loud quieter. This only affects how things sound for you. - #[serde(rename = "experimental.control_output_volume", default)] - pub control_output_volume: Option<bool>, + /// Remove background noises. Works great for typing, cars, dogs, AC. Does + /// not work well on music. + #[serde(rename = "experimental.denoise")] + pub denoise: Option<bool>, + /// Requires 'rodio_audio: true' + /// + /// Use audio parameters compatible with the previous versions of + /// experimental audio and non-experimental audio. When this is false you + /// will sound strange to anyone not on the latest experimental audio. In + /// the future we will migrate by setting this to false + /// + /// You need to rejoin a call for this setting to apply + #[serde(rename = "experimental.legacy_audio_compatible")] + pub legacy_audio_compatible: Option<bool>, } /// Control what info is collected by Zed. 
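Taken together, these options replace the earlier `control_input_volume`/`control_output_volume` pair with four finer-grained switches. A minimal sketch of a `settings.json` that opts into the new pipeline might look like the following; the enclosing `"audio"` object and the chosen values are assumptions based on `assets/settings/default.json`, not something this patch prescribes:

```json
{
  "audio": {
    // Rejoin the call after toggling this.
    "experimental.rodio_audio": true,
    // Off by default so you are not much louder than people
    // still on the legacy pipeline.
    "experimental.auto_microphone_volume": false,
    // Evens out how loud other call members sound to you.
    "experimental.auto_speaker_volume": true,
    // Background noise removal; not suited to music.
    "experimental.denoise": true,
    // Keep this true until everyone you call is on the new pipeline.
    "experimental.legacy_audio_compatible": true
  }
}
```

Per the doc comments above, `experimental.rodio_audio` and `experimental.legacy_audio_compatible` only take effect after rejoining a call, and turning compatibility off will make you sound strange to anyone not on the latest experimental audio.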
diff --git a/tooling/workspace-hack/Cargo.toml b/tooling/workspace-hack/Cargo.toml index ec9629685d8366864b92a6160ece623450f72b0c..b50854abd55af883af1e97eac4afd51dbb31df3b 100644 --- a/tooling/workspace-hack/Cargo.toml +++ b/tooling/workspace-hack/Cargo.toml @@ -90,7 +90,6 @@ mime_guess = { version = "2" } miniz_oxide = { version = "0.8", features = ["simd"] } nom = { version = "7" } num-bigint = { version = "0.4" } -num-complex = { version = "0.4", features = ["bytemuck"] } num-integer = { version = "0.1", features = ["i128"] } num-iter = { version = "0.1", default-features = false, features = ["i128", "std"] } num-rational = { version = "0.4", features = ["num-bigint-std"] } @@ -229,7 +228,6 @@ mime_guess = { version = "2" } miniz_oxide = { version = "0.8", features = ["simd"] } nom = { version = "7" } num-bigint = { version = "0.4" } -num-complex = { version = "0.4", features = ["bytemuck"] } num-integer = { version = "0.1", features = ["i128"] } num-iter = { version = "0.1", default-features = false, features = ["i128", "std"] } num-rational = { version = "0.4", features = ["num-bigint-std"] } @@ -308,7 +306,6 @@ hyper-rustls = { version = "0.27", default-features = false, features = ["http1" livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } naga = { version = "25", features = ["msl-out", "wgsl-in"] } nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -num = { version = "0.4" } objc2 = { version = "0.6" } objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } @@ -338,7 +335,6 @@ hyper-rustls = { version = "0.27", default-features = false, features = ["http1" livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } naga = { version = "25", features = ["msl-out", "wgsl-in"] } nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -num = { version = "0.4" } objc2 = { version = "0.6" } objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } @@ -369,7 +365,6 @@ hyper-rustls = { version = "0.27", default-features = false, features = ["http1" livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } naga = { version = "25", features = ["msl-out", "wgsl-in"] } nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", 
features = ["fs", "pthread", "signal", "user"] } -num = { version = "0.4" } objc2 = { version = "0.6" } objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } @@ -399,7 +394,6 @@ hyper-rustls = { version = "0.27", default-features = false, features = ["http1" livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } naga = { version = "25", features = ["msl-out", "wgsl-in"] } nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } -num = { version = "0.4" } objc2 = { version = "0.6" } objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } @@ -442,6 +436,7 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } +num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -483,6 +478,7 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } +num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -522,6 +518,7 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", 
features = ["i128", "prime", "zeroize"] } +num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -563,6 +560,7 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } +num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -587,7 +585,6 @@ getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-f getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -num = { version = "0.4" } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } ring = { version = "0.17", features = ["std"] } rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "fs", "net"] } @@ -613,7 +610,6 @@ getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-f getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } -num = { version = "0.4" } proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } ring = { version = "0.17", features = ["std"] } @@ -655,6 +651,7 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } +num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -696,6 +693,7 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", 
"user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } +num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } From 5f728efccfe2dfc115934d5274d4eda4b37d1dea Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 19 Sep 2025 14:21:28 +0200 Subject: [PATCH 07/58] agent: Show custom MCP servers in agent configuration (#38500) Fixes a regression introduced in #38419 Release Notes: - N/A --- crates/agent_ui/src/agent_configuration.rs | 78 +++++++++------------- crates/project/src/context_server_store.rs | 21 +++--- 2 files changed, 43 insertions(+), 56 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 48203e75af3274fa30da826026c65869f96841f2..3fd78c44ec5a249c6acf4ddd9ac548988a51612c 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -543,35 +543,23 @@ impl AgentConfiguration { window: &mut Window, cx: &mut Context, ) -> impl IntoElement { - let mut registry_descriptors = self + let mut context_server_ids = self .context_server_store .read(cx) - .all_registry_descriptor_ids(cx); - let server_count = registry_descriptors.len(); - - // Sort context servers: non-mcp-server ones first, then mcp-server ones - registry_descriptors.sort_by(|a, b| { - let has_mcp_prefix_a = a.0.starts_with("mcp-server-"); - let has_mcp_prefix_b = b.0.starts_with("mcp-server-"); + .server_ids(cx) + .into_iter() + .collect::>(); - match (has_mcp_prefix_a, has_mcp_prefix_b) { + // Sort context servers: ones without mcp-server- prefix first, then prefixed ones + context_server_ids.sort_by(|a, b| { + const MCP_PREFIX: &str = "mcp-server-"; + match (a.0.strip_prefix(MCP_PREFIX), b.0.strip_prefix(MCP_PREFIX)) { // If one has mcp-server- prefix and other doesn't, non-mcp comes first - (true, false) => std::cmp::Ordering::Greater, - (false, true) => std::cmp::Ordering::Less, + (Some(_), None) => std::cmp::Ordering::Greater, + (None, Some(_)) => std::cmp::Ordering::Less, // If both have same prefix status, sort by appropriate key - _ => { - let get_sort_key = |server_id: &str| -> String { - if let Some(suffix) = server_id.strip_prefix("mcp-server-") { - suffix.to_string() - } else { - server_id.to_string() - } - }; - - let key_a = get_sort_key(&a.0); - let key_b = get_sort_key(&b.0); - key_a.cmp(&key_b) - } + (Some(a), Some(b)) => a.cmp(b), + (None, None) => a.0.cmp(&b.0), } }); @@ -636,8 +624,8 @@ impl AgentConfiguration { ) .child(add_server_popover), ) - .child(v_flex().w_full().gap_1().map(|parent| { - if registry_descriptors.is_empty() { + .child(v_flex().w_full().gap_1().map(|mut parent| { + if context_server_ids.is_empty() { parent.child( h_flex() .p_4() @@ -653,26 +641,18 @@ impl AgentConfiguration { ), ) } else { - { - parent.children(registry_descriptors.into_iter().enumerate().flat_map( - |(index, context_server_id)| { - let mut elements: Vec = vec![ - self.render_context_server(context_server_id, window, cx) - .into_any_element(), - ]; - - if index < server_count - 1 { - elements.push( - Divider::horizontal() - .color(DividerColor::BorderFaded) - 
.into_any_element(), - ); - } - - elements - }, - )) + for (index, context_server_id) in context_server_ids.into_iter().enumerate() { + if index > 0 { + parent = parent.child( + Divider::horizontal() + .color(DividerColor::BorderFaded) + .into_any_element(), + ); + } + parent = + parent.child(self.render_context_server(context_server_id, window, cx)); } + parent } })) } @@ -1106,7 +1086,13 @@ impl AgentConfiguration { IconName::AiClaude, "Claude Code", )) - .children(user_defined_agents), + .map(|mut parent| { + for agent in user_defined_agents { + parent = parent.child(Divider::horizontal().color(DividerColor::BorderFaded)) + .child(agent); + } + parent + }) ) } diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index 70f2bb53d42ed843178dad75eb8d503924fb87f5..364128ae4f8cf5703bf7987117b0109462fa4e3c 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -282,16 +282,17 @@ impl ContextServerStore { self.servers.get(id).map(|state| state.configuration()) } - pub fn all_server_ids(&self) -> Vec { - self.servers.keys().cloned().collect() - } - - pub fn all_registry_descriptor_ids(&self, cx: &App) -> Vec { - self.registry - .read(cx) - .context_server_descriptors() - .into_iter() - .map(|(id, _)| ContextServerId(id)) + pub fn server_ids(&self, cx: &App) -> HashSet { + self.servers + .keys() + .cloned() + .chain( + self.registry + .read(cx) + .context_server_descriptors() + .into_iter() + .map(|(id, _)| ContextServerId(id)), + ) .collect() } From 430ac5175f47dd39526f765fff4eb9bef8d181a0 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Fri, 19 Sep 2025 09:14:52 -0400 Subject: [PATCH 08/58] python: Install basedpyright with npm instead of pip (#38471) Closes #ISSUE Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/languages/src/lib.rs | 2 +- crates/languages/src/python.rs | 246 ++++++++++++++++----------------- 2 files changed, 117 insertions(+), 131 deletions(-) diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index f5a4a8c6f6480de7589f0a418157fafbf5fbe2ed..186d50d6ffbe9ea9861ccd5325a89c23062fd89e 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -94,7 +94,7 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime let ty_lsp_adapter = Arc::new(python::TyLspAdapter::new(fs.clone())); let python_context_provider = Arc::new(python::PythonContextProvider); let python_lsp_adapter = Arc::new(python::PyrightLspAdapter::new(node.clone())); - let basedpyright_lsp_adapter = Arc::new(BasedPyrightLspAdapter::new()); + let basedpyright_lsp_adapter = Arc::new(BasedPyrightLspAdapter::new(node.clone())); let ruff_lsp_adapter = Arc::new(RuffLspAdapter::new(fs.clone())); let python_toolchain_provider = Arc::new(python::PythonToolchainProvider); let rust_context_provider = Arc::new(rust::RustContextProvider); diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 91c1c675f8090d9d0161b6d3733d34ed386cfb50..d6d22399b4b8cbc04c87e416a27db8fcdc5eca24 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -29,7 +29,6 @@ use parking_lot::Mutex; use std::str::FromStr; use std::{ borrow::Cow, - ffi::OsString, fmt::Write, path::{Path, PathBuf}, sync::Arc, @@ -65,9 +64,6 @@ impl ManifestProvider for PyprojectTomlManifestProvider { } } -const SERVER_PATH: &str = "node_modules/pyright/langserver.index.js"; -const NODE_MODULE_RELATIVE_SERVER_PATH: &str = "pyright/langserver.index.js"; - enum TestRunner { UNITTEST, PYTEST, @@ -85,10 +81,6 @@ impl FromStr for TestRunner { } } -fn server_binary_arguments(server_path: &Path) -> Vec { - vec![server_path.into(), "--stdio".into()] -} - /// Pyright assigns each completion item a `sortText` of the form `XX.YYYY.name`. /// Where `XX` is the sorting category, `YYYY` is based on most recent usage, /// and `name` is the symbol name itself. 
@@ -334,10 +326,29 @@ pub struct PyrightLspAdapter { impl PyrightLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("pyright"); + const SERVER_PATH: &str = "node_modules/pyright/langserver.index.js"; + const NODE_MODULE_RELATIVE_SERVER_PATH: &str = "pyright/langserver.index.js"; pub fn new(node: NodeRuntime) -> Self { PyrightLspAdapter { node } } + + async fn get_cached_server_binary( + container_dir: PathBuf, + node: &NodeRuntime, + ) -> Option { + let server_path = container_dir.join(Self::SERVER_PATH); + if server_path.exists() { + Some(LanguageServerBinary { + path: node.binary_path().await.log_err()?, + env: None, + arguments: vec![server_path.into(), "--stdio".into()], + }) + } else { + log::error!("missing executable in directory {:?}", server_path); + None + } + } } #[async_trait(?Send)] @@ -550,13 +561,13 @@ impl LspInstaller for PyrightLspAdapter { .await .log_err()??; - let path = node_modules_path.join(NODE_MODULE_RELATIVE_SERVER_PATH); + let path = node_modules_path.join(Self::NODE_MODULE_RELATIVE_SERVER_PATH); let env = delegate.shell_env().await; Some(LanguageServerBinary { path: node, env: Some(env), - arguments: server_binary_arguments(&path), + arguments: vec![path.into(), "--stdio".into()], }) } } @@ -567,7 +578,7 @@ impl LspInstaller for PyrightLspAdapter { container_dir: PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Result { - let server_path = container_dir.join(SERVER_PATH); + let server_path = container_dir.join(Self::SERVER_PATH); self.node .npm_install_packages( @@ -580,7 +591,7 @@ impl LspInstaller for PyrightLspAdapter { Ok(LanguageServerBinary { path: self.node.binary_path().await?, env: Some(env), - arguments: server_binary_arguments(&server_path), + arguments: vec![server_path.into(), "--stdio".into()], }) } @@ -590,7 +601,7 @@ impl LspInstaller for PyrightLspAdapter { container_dir: &PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Option { - let server_path = container_dir.join(SERVER_PATH); + let server_path = container_dir.join(Self::SERVER_PATH); let should_install_language_server = self .node @@ -609,7 +620,7 @@ impl LspInstaller for PyrightLspAdapter { Some(LanguageServerBinary { path: self.node.binary_path().await.ok()?, env: Some(env), - arguments: server_binary_arguments(&server_path), + arguments: vec![server_path.into(), "--stdio".into()], }) } } @@ -619,29 +630,12 @@ impl LspInstaller for PyrightLspAdapter { container_dir: PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Option { - let mut binary = get_cached_server_binary(container_dir, &self.node).await?; + let mut binary = Self::get_cached_server_binary(container_dir, &self.node).await?; binary.env = Some(delegate.shell_env().await); Some(binary) } } -async fn get_cached_server_binary( - container_dir: PathBuf, - node: &NodeRuntime, -) -> Option { - let server_path = container_dir.join(SERVER_PATH); - if server_path.exists() { - Some(LanguageServerBinary { - path: node.binary_path().await.log_err()?, - env: None, - arguments: server_binary_arguments(&server_path), - }) - } else { - log::error!("missing executable in directory {:?}", server_path); - None - } -} - pub(crate) struct PythonContextProvider; const PYTHON_TEST_TARGET_TASK_VARIABLE: VariableName = @@ -1606,64 +1600,34 @@ impl LspInstaller for PyLspAdapter { } pub(crate) struct BasedPyrightLspAdapter { - python_venv_base: OnceCell, String>>, + node: NodeRuntime, } impl BasedPyrightLspAdapter { const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("basedpyright"); const BINARY_NAME: 
&'static str = "basedpyright-langserver"; + const SERVER_PATH: &str = "node_modules/basedpyright/langserver.index.js"; + const NODE_MODULE_RELATIVE_SERVER_PATH: &str = "basedpyright/langserver.index.js"; - pub(crate) fn new() -> Self { - Self { - python_venv_base: OnceCell::new(), - } + pub(crate) fn new(node: NodeRuntime) -> Self { + BasedPyrightLspAdapter { node } } - async fn ensure_venv(delegate: &dyn LspAdapterDelegate) -> Result> { - let python_path = Self::find_base_python(delegate) - .await - .context("Could not find Python installation for basedpyright")?; - let work_dir = delegate - .language_server_download_dir(&Self::SERVER_NAME) - .await - .context("Could not get working directory for basedpyright")?; - let mut path = PathBuf::from(work_dir.as_ref()); - path.push("basedpyright-venv"); - if !path.exists() { - util::command::new_smol_command(python_path) - .arg("-m") - .arg("venv") - .arg("basedpyright-venv") - .current_dir(work_dir) - .spawn() - .context("spawning child")? - .output() - .await - .context("getting child output")?; - } - - Ok(path.into()) - } - - // Find "baseline", user python version from which we'll create our own venv. - async fn find_base_python(delegate: &dyn LspAdapterDelegate) -> Option { - for path in ["python3", "python"] { - if let Some(path) = delegate.which(path.as_ref()).await { - return Some(path); - } - } - None - } - - async fn base_venv(&self, delegate: &dyn LspAdapterDelegate) -> Result, String> { - self.python_venv_base - .get_or_init(move || async move { - Self::ensure_venv(delegate) - .await - .map_err(|e| format!("{e}")) + async fn get_cached_server_binary( + container_dir: PathBuf, + node: &NodeRuntime, + ) -> Option { + let server_path = container_dir.join(Self::SERVER_PATH); + if server_path.exists() { + Some(LanguageServerBinary { + path: node.binary_path().await.log_err()?, + env: None, + arguments: vec![server_path.into(), "--stdio".into()], }) - .await - .clone() + } else { + log::error!("missing executable in directory {:?}", server_path); + None + } } } @@ -1853,90 +1817,112 @@ impl LspAdapter for BasedPyrightLspAdapter { } impl LspInstaller for BasedPyrightLspAdapter { - type BinaryVersion = (); + type BinaryVersion = String; async fn fetch_latest_server_version( &self, _: &dyn LspAdapterDelegate, _: bool, _: &mut AsyncApp, - ) -> Result<()> { - Ok(()) + ) -> Result { + self.node + .npm_package_latest_version(Self::SERVER_NAME.as_ref()) + .await } async fn check_if_user_installed( &self, delegate: &dyn LspAdapterDelegate, - toolchain: Option, + _: Option, _: &AsyncApp, ) -> Option { - if let Some(bin) = delegate.which(Self::BINARY_NAME.as_ref()).await { + if let Some(path) = delegate.which(Self::BINARY_NAME.as_ref()).await { let env = delegate.shell_env().await; Some(LanguageServerBinary { - path: bin, + path, env: Some(env), arguments: vec!["--stdio".into()], }) } else { - let path = Path::new(toolchain?.path.as_ref()) - .parent()? - .join(Self::BINARY_NAME); - delegate - .which(path.as_os_str()) + // TODO shouldn't this be self.node.binary_path()? 
+ let node = delegate.which("node".as_ref()).await?; + let (node_modules_path, _) = delegate + .npm_package_installed_version(Self::SERVER_NAME.as_ref()) .await - .map(|_| LanguageServerBinary { - path, - arguments: vec!["--stdio".into()], - env: None, - }) + .log_err()??; + + let path = node_modules_path.join(Self::NODE_MODULE_RELATIVE_SERVER_PATH); + + let env = delegate.shell_env().await; + Some(LanguageServerBinary { + path: node, + env: Some(env), + arguments: vec![path.into(), "--stdio".into()], + }) } } async fn fetch_server_binary( &self, - _latest_version: (), - _container_dir: PathBuf, + latest_version: Self::BinaryVersion, + container_dir: PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Result { - let venv = self.base_venv(delegate).await.map_err(|e| anyhow!(e))?; - let pip_path = venv.join(BINARY_DIR).join("pip3"); - ensure!( - util::command::new_smol_command(pip_path.as_path()) - .arg("install") - .arg("basedpyright") - .arg("--upgrade") - .output() - .await - .context("getting pip install output")? - .status - .success(), - "basedpyright installation failed" - ); - let path = venv.join(BINARY_DIR).join(Self::BINARY_NAME); - ensure!( - delegate.which(path.as_os_str()).await.is_some(), - "basedpyright installation was incomplete" - ); + let server_path = container_dir.join(Self::SERVER_PATH); + + self.node + .npm_install_packages( + &container_dir, + &[(Self::SERVER_NAME.as_ref(), latest_version.as_str())], + ) + .await?; + + let env = delegate.shell_env().await; Ok(LanguageServerBinary { - path, - env: None, - arguments: vec!["--stdio".into()], + path: self.node.binary_path().await?, + env: Some(env), + arguments: vec![server_path.into(), "--stdio".into()], }) } + async fn check_if_version_installed( + &self, + version: &Self::BinaryVersion, + container_dir: &PathBuf, + delegate: &dyn LspAdapterDelegate, + ) -> Option { + let server_path = container_dir.join(Self::SERVER_PATH); + + let should_install_language_server = self + .node + .should_install_npm_package( + Self::SERVER_NAME.as_ref(), + &server_path, + container_dir, + VersionStrategy::Latest(version), + ) + .await; + + if should_install_language_server { + None + } else { + let env = delegate.shell_env().await; + Some(LanguageServerBinary { + path: self.node.binary_path().await.ok()?, + env: Some(env), + arguments: vec![server_path.into(), "--stdio".into()], + }) + } + } + async fn cached_server_binary( &self, - _container_dir: PathBuf, + container_dir: PathBuf, delegate: &dyn LspAdapterDelegate, ) -> Option { - let venv = self.base_venv(delegate).await.ok()?; - let path = venv.join(BINARY_DIR).join(Self::BINARY_NAME); - delegate.which(path.as_os_str()).await?; - Some(LanguageServerBinary { - path, - env: None, - arguments: vec!["--stdio".into()], - }) + let mut binary = Self::get_cached_server_binary(container_dir, &self.node).await?; + binary.env = Some(delegate.shell_env().await); + Some(binary) } } From 9e6f1d5a6eac1b3b603d828470064718d97fbc01 Mon Sep 17 00:00:00 2001 From: Derek Nguyen <79728577+derekntnguyen@users.noreply.github.com> Date: Fri, 19 Sep 2025 09:29:40 -0400 Subject: [PATCH 09/58] python: Fix ty binary path and required args (#38458) Closes #38347 Release Notes: - Fixed path and args to ty lsp binary When attempting to use the new ty lsp integration in the preview, I noticed issues related to accessing the binary. 
After deleting the downloaded archive and adding the following changes that: - downloads the archive with the correct `AssetKind::TarGz` - uses the correct path to the extracted binary - adds the `server` argument to initialize the lsp (like ruff) After the above changes the LSP starts correctly ```bash 2025-09-18T16:17:03-05:00 INFO [lsp] starting language server process. binary path: "/Users/dereknguyen/Library/Application Support/Zed/languages/ty/ty-0.0.1-alpha.20/ty-aarch64-apple-darwin/ty", working directory: "/Users/dereknguyen/projects/test-project", args: ["server"] ``` image --------- Co-authored-by: Cole Miller --- crates/languages/src/python.rs | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index d6d22399b4b8cbc04c87e416a27db8fcdc5eca24..a8824d3776b08bdfdb99d216c8ab75e88e714c6c 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -100,7 +100,7 @@ pub struct TyLspAdapter { #[cfg(target_os = "macos")] impl TyLspAdapter { - const GITHUB_ASSET_KIND: AssetKind = AssetKind::Gz; + const GITHUB_ASSET_KIND: AssetKind = AssetKind::TarGz; const ARCH_SERVER_NAME: &str = "apple-darwin"; } @@ -216,15 +216,20 @@ impl LspInstaller for TyLspAdapter { digest: expected_digest, } = latest_version; let destination_path = container_dir.join(format!("ty-{name}")); + + async_fs::create_dir_all(&destination_path).await?; + let server_path = match Self::GITHUB_ASSET_KIND { - AssetKind::TarGz | AssetKind::Gz => destination_path.clone(), // Tar and gzip extract in place. - AssetKind::Zip => destination_path.clone().join("ty.exe"), // zip contains a .exe + AssetKind::TarGz | AssetKind::Gz => destination_path + .join(Self::build_asset_name()?.0) + .join("ty"), + AssetKind::Zip => destination_path.clone().join("ty.exe"), }; let binary = LanguageServerBinary { path: server_path.clone(), env: None, - arguments: Default::default(), + arguments: vec!["server".into()], }; let metadata_path = destination_path.with_extension("metadata"); @@ -283,7 +288,7 @@ impl LspInstaller for TyLspAdapter { Ok(LanguageServerBinary { path: server_path, env: None, - arguments: Default::default(), + arguments: vec!["server".into()], }) } @@ -305,14 +310,16 @@ impl LspInstaller for TyLspAdapter { let path = last.context("no cached binary")?; let path = match TyLspAdapter::GITHUB_ASSET_KIND { - AssetKind::TarGz | AssetKind::Gz => path, // Tar and gzip extract in place. - AssetKind::Zip => path.join("ty.exe"), // zip contains a .exe + AssetKind::TarGz | AssetKind::Gz => { + path.join(Self::build_asset_name()?.0).join("ty") + } + AssetKind::Zip => path.join("ty.exe"), }; anyhow::Ok(LanguageServerBinary { path, env: None, - arguments: Default::default(), + arguments: vec!["server".into()], }) }) .await From a3da66cec0f770b7d79b24891e960cbc3c81529d Mon Sep 17 00:00:00 2001 From: Bartosz Kaszubowski Date: Fri, 19 Sep 2025 15:41:52 +0200 Subject: [PATCH 10/58] editor: Correct "Toggle Excerpt Fold" tip on macOS (#38487) Show `"Option+click to toggle all"` instead of `"Alt+click to toggle all" on macOS. 
Screenshot 2025-09-19 at 10 16 11 Release Notes: - N/A --- crates/editor/src/element.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 3bc05ccd537710c34c1c4e8e6d63c26440360f2e..28fe68e71cb4fac36f84d1161020e16ba2d0605f 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -3838,7 +3838,11 @@ impl EditorElement { Tooltip::with_meta_in( "Toggle Excerpt Fold", Some(&ToggleFold), - "Alt+click to toggle all", + if cfg!(target_os = "macos") { + "Option+click to toggle all" + } else { + "Alt+click to toggle all" + }, &focus_handle, window, cx, From 3217bcb83efda44392047ceae5f3ac0febb3207d Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Fri, 19 Sep 2025 09:59:13 -0400 Subject: [PATCH 11/58] docs: Add Kotlin JAVA_HOME example (#38507) Closes: https://github.com/zed-extensions/kotlin/issues/46 Release Notes: - N/A --- docs/src/languages/kotlin.md | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/docs/src/languages/kotlin.md b/docs/src/languages/kotlin.md index 3955062a77783629c0bd838f7dd093af31f3aa0d..60d66f277eb62c2bdf9905687045abbca4db20b9 100644 --- a/docs/src/languages/kotlin.md +++ b/docs/src/languages/kotlin.md @@ -11,6 +11,12 @@ Report issues to: [https://github.com/zed-extensions/kotlin/issues](https://gith Workspace configuration options can be passed to the language server via lsp settings in `settings.json`. +The full list of lsp `settings` can be found +[here](https://github.com/fwcd/kotlin-language-server/blob/main/server/src/main/kotlin/org/javacs/kt/Configuration.kt) +under `class Configuration` and initialization_options under `class InitializationOptions`. + +### JVM Target + The following example changes the JVM target from `default` (which is 1.8) to `17`: @@ -30,5 +36,20 @@ The following example changes the JVM target from `default` (which is 1.8) to } ``` -The full list of workspace configuration options can be found -[here](https://github.com/fwcd/kotlin-language-server/blob/main/server/src/main/kotlin/org/javacs/kt/Configuration.kt). 
+### JAVA_HOME + +To use a specific java installation, just specify the `JAVA_HOME` environment variable with: + +```json +{ + "lsp": { + "kotlin-language-server": { + "binary": { + "env": { + "JAVA_HOME": "/Users/whatever/Applications/Work/Android Studio.app/Contents/jbr/Contents/Home" + } + } + } + } +} +``` From aa5b99dc1153db2faf02ad80f4e3287537f19190 Mon Sep 17 00:00:00 2001 From: David Kleingeld Date: Fri, 19 Sep 2025 16:12:49 +0200 Subject: [PATCH 12/58] Fully qualify images in Docker Compose (#38496) This enables podman-compose (easier to install and run on linux) as drop in replacement for docker-compose Release Notes: - N/A --- compose.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/compose.yml b/compose.yml index d0d9bac425356687bfb33efab9ee24e76d1b30a0..00a5780b597738260f90020f139627e7d0b0107c 100644 --- a/compose.yml +++ b/compose.yml @@ -1,6 +1,6 @@ services: postgres: - image: postgres:15 + image: docker.io/library/postgres:15 container_name: zed_postgres ports: - 5432:5432 @@ -23,7 +23,7 @@ services: - ./.blob_store:/data livekit_server: - image: livekit/livekit-server + image: docker.io/livekit/livekit-server container_name: livekit_server entrypoint: /livekit-server --config /livekit.yaml ports: @@ -34,7 +34,7 @@ services: - ./livekit.yaml:/livekit.yaml postgrest_app: - image: postgrest/postgrest + image: docker.io/postgrest/postgrest container_name: postgrest_app ports: - 8081:8081 @@ -47,7 +47,7 @@ services: - postgres postgrest_llm: - image: postgrest/postgrest + image: docker.io/postgrest/postgrest container_name: postgrest_llm ports: - 8082:8082 @@ -60,7 +60,7 @@ services: - postgres stripe-mock: - image: stripe/stripe-mock:v0.178.0 + image: docker.io/stripe/stripe-mock:v0.178.0 ports: - 12111:12111 - 12112:12112 From 2e97ef32c4391681ba891d25aaa5a9bdd3710d3a Mon Sep 17 00:00:00 2001 From: David Kleingeld Date: Fri, 19 Sep 2025 16:33:38 +0200 Subject: [PATCH 13/58] Revert "Audio fixes and mic denoise" (#38509) Reverts zed-industries/zed#38493 Release Notes: - N/A --- Cargo.lock | 2 +- assets/settings/default.json | 32 +--- crates/audio/Cargo.toml | 1 - crates/audio/src/audio.rs | 65 ++----- crates/audio/src/audio_settings.rs | 95 +++------- crates/audio/src/rodio_ext.rs | 167 +----------------- crates/denoise/src/engine.rs | 6 +- crates/denoise/src/lib.rs | 2 +- crates/livekit_client/src/livekit_client.rs | 20 +-- .../src/livekit_client/playback.rs | 89 +++------- .../src/livekit_client/playback/source.rs | 40 ++--- crates/settings/src/settings_content.rs | 44 ++--- tooling/workspace-hack/Cargo.toml | 14 +- 13 files changed, 123 insertions(+), 454 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index be3e5b04ca18d56024eabe45f14562fca3d56375..3acfed9bd7cfa8bc2742bb4f006c38a4f65a1f0e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1405,7 +1405,6 @@ dependencies = [ "async-tar", "collections", "crossbeam", - "denoise", "gpui", "libwebrtc", "log", @@ -20743,6 +20742,7 @@ dependencies = [ "nix 0.29.0", "nix 0.30.1", "nom 7.1.3", + "num", "num-bigint", "num-bigint-dig", "num-complex", diff --git a/assets/settings/default.json b/assets/settings/default.json index d469638ab28ea02eb9b7675296ee9582e2de3ccd..091231521470ebec50cf1351a76063e9205a3d24 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -413,33 +413,15 @@ "experimental.rodio_audio": false, // Requires 'rodio_audio: true' // - // Automatically increase or decrease you microphone's volume. This affects how - // loud you sound to others. 
- // - // Recommended: off (default) - // Microphones are too quite in zed, until everyone is on experimental - // audio and has auto speaker volume on this will make you very loud - // compared to other speakers. - "experimental.auto_microphone_volume": false, - // Requires 'rodio_audio: true' - // - // Automatically increate or decrease the volume of other call members. - // This only affects how things sound for you. - "experimental.auto_speaker_volume": true, + // Use the new audio systems automatic gain control for your microphone. + // This affects how loud you sound to others. + "experimental.control_input_volume": false, // Requires 'rodio_audio: true' // - // Remove background noises. Works great for typing, cars, dogs, AC. Does - // not work well on music. - "experimental.denoise": true, - // Requires 'rodio_audio: true' - // - // Use audio parameters compatible with the previous versions of - // experimental audio and non-experimental audio. When this is false you - // will sound strange to anyone not on the latest experimental audio. In - // the future we will migrate by setting this to false - // - // You need to rejoin a call for this setting to apply - "experimental.legacy_audio_compatible": true + // Use the new audio systems automatic gain control on everyone in the + // call. This makes call members who are too quite louder and those who are + // too loud quieter. This only affects how things sound for you. + "experimental.control_output_volume": false }, // Scrollbar related settings "scrollbar": { diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml index 7f2fed80e2315e51fca7d8477b04885998336632..c083c9a659e50aef37acc2cdfc239696bd469c1e 100644 --- a/crates/audio/Cargo.toml +++ b/crates/audio/Cargo.toml @@ -18,7 +18,6 @@ async-tar.workspace = true collections.workspace = true crossbeam.workspace = true gpui.workspace = true -denoise = { path = "../denoise" } log.workspace = true parking_lot.workspace = true rodio = { workspace = true, features = [ "wav", "playback", "wav_output" ] } diff --git a/crates/audio/src/audio.rs b/crates/audio/src/audio.rs index dc4d97a8fa47f11f9120cf5144a37ae6fd94bc2a..f60ddb87b9615d2da9c2be248ab397c19a463616 100644 --- a/crates/audio/src/audio.rs +++ b/crates/audio/src/audio.rs @@ -9,7 +9,7 @@ mod non_windows_and_freebsd_deps { pub(super) use log::info; pub(super) use parking_lot::Mutex; pub(super) use rodio::cpal::Sample; - pub(super) use rodio::source::LimitSettings; + pub(super) use rodio::source::{LimitSettings, UniformSourceIterator}; pub(super) use std::sync::Arc; } @@ -31,20 +31,18 @@ pub use rodio_ext::RodioExt; use crate::audio_settings::LIVE_SETTINGS; -// We are migrating to 16kHz sample rate from 48kHz. In the future -// once we are reasonably sure most users have upgraded we will -// remove the LEGACY parameters. +// NOTE: We used to use WebRTC's mixer which only supported +// 16kHz, 32kHz and 48kHz. As 48 is the most common "next step up" +// for audio output devices like speakers/bluetooth, we just hard-code +// this; and downsample when we need to. // -// We migrate to 16kHz because it is sufficient for speech and required -// by the denoiser and future Speech to Text layers. -pub const SAMPLE_RATE: NonZero = nz!(16000); -pub const CHANNEL_COUNT: NonZero = nz!(1); +// Since most noise cancelling requires 16kHz we will move to +// that in the future. 
+pub const SAMPLE_RATE: NonZero = nz!(48000); +pub const CHANNEL_COUNT: NonZero = nz!(2); pub const BUFFER_SIZE: usize = // echo canceller and livekit want 10ms of audio (SAMPLE_RATE.get() as usize / 100) * CHANNEL_COUNT.get() as usize; -pub const LEGACY_SAMPLE_RATE: NonZero = nz!(48000); -pub const LEGACY_CHANNEL_COUNT: NonZero = nz!(2); - pub const REPLAY_DURATION: Duration = Duration::from_secs(30); pub fn init(cx: &mut App) { @@ -108,11 +106,6 @@ impl Global for Audio {} impl Audio { fn ensure_output_exists(&mut self) -> Result<&Mixer> { - #[cfg(debug_assertions)] - log::warn!( - "Audio does not sound correct without optimizations. Use a release build to debug audio issues" - ); - if self.output_handle.is_none() { self.output_handle = Some( OutputStreamBuilder::open_default_stream() @@ -167,20 +160,13 @@ impl Audio { let stream = rodio::microphone::MicrophoneBuilder::new() .default_device()? .default_config()? - .prefer_sample_rates([ - SAMPLE_RATE, // sample rates trivially resamplable to `SAMPLE_RATE` - SAMPLE_RATE.saturating_mul(nz!(2)), - SAMPLE_RATE.saturating_mul(nz!(3)), - SAMPLE_RATE.saturating_mul(nz!(4)), - ]) - .prefer_channel_counts([nz!(1), nz!(2), nz!(3), nz!(4)]) + .prefer_sample_rates([SAMPLE_RATE, SAMPLE_RATE.saturating_mul(nz!(2))]) + // .prefer_channel_counts([nz!(1), nz!(2)]) .prefer_buffer_sizes(512..) .open_stream()?; info!("Opened microphone: {:?}", stream.config()); - let (replay, stream) = stream - .possibly_disconnected_channels_to_mono() - .constant_samplerate(SAMPLE_RATE) + let (replay, stream) = UniformSourceIterator::new(stream, CHANNEL_COUNT, SAMPLE_RATE) .limit(LimitSettings::live_performance()) .process_buffer::(move |buffer| { let mut int_buffer: [i16; _] = buffer.map(|s| s.to_sample()); @@ -201,28 +187,15 @@ impl Audio { } } }) - .denoise() - .context("Could not set up denoiser")? 
- .periodic_access(Duration::from_millis(100), move |denoise| { - denoise.set_enabled(LIVE_SETTINGS.denoise.load(Ordering::Relaxed)); - }) - .automatic_gain_control(1.0, 2.0, 0.0, 5.0) + .automatic_gain_control(1.0, 4.0, 0.0, 5.0) .periodic_access(Duration::from_millis(100), move |agc_source| { - agc_source - .set_enabled(LIVE_SETTINGS.auto_microphone_volume.load(Ordering::Relaxed)); + agc_source.set_enabled(LIVE_SETTINGS.control_input_volume.load(Ordering::Relaxed)); }) .replayable(REPLAY_DURATION)?; voip_parts .replays .add_voip_stream("local microphone".to_string(), replay); - - let stream = if voip_parts.legacy_audio_compatible { - stream.constant_params(LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE) - } else { - stream.constant_params(CHANNEL_COUNT, SAMPLE_RATE) - }; - Ok(stream) } @@ -233,10 +206,9 @@ impl Audio { cx: &mut App, ) -> anyhow::Result<()> { let (replay_source, source) = source - .constant_params(CHANNEL_COUNT, SAMPLE_RATE) - .automatic_gain_control(1.0, 2.0, 0.0, 5.0) + .automatic_gain_control(1.0, 4.0, 0.0, 5.0) .periodic_access(Duration::from_millis(100), move |agc_source| { - agc_source.set_enabled(LIVE_SETTINGS.auto_speaker_volume.load(Ordering::Relaxed)); + agc_source.set_enabled(LIVE_SETTINGS.control_input_volume.load(Ordering::Relaxed)); }) .replayable(REPLAY_DURATION) .expect("REPLAY_DURATION is longer than 100ms"); @@ -297,7 +269,6 @@ impl Audio { pub struct VoipParts { echo_canceller: Arc>, replays: replays::Replays, - legacy_audio_compatible: bool, } #[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))] @@ -306,12 +277,8 @@ impl VoipParts { let (apm, replays) = cx.try_read_default_global::(|audio, _| { (Arc::clone(&audio.echo_canceller), audio.replays.clone()) })?; - let legacy_audio_compatible = - AudioSettings::try_read_global(cx, |settings| settings.legacy_audio_compatible) - .unwrap_or_default(); Ok(Self { - legacy_audio_compatible, echo_canceller: apm, replays, }) diff --git a/crates/audio/src/audio_settings.rs b/crates/audio/src/audio_settings.rs index cba7d45c31f4674be6a69c10ab34f00e0b8cbbd1..2c9db4989efa5edcf4ef84c4e3031b53980fad51 100644 --- a/crates/audio/src/audio_settings.rs +++ b/crates/audio/src/audio_settings.rs @@ -6,38 +6,18 @@ use settings::{Settings, SettingsStore}; #[derive(Clone, Debug)] pub struct AudioSettings { /// Opt into the new audio system. - /// - /// You need to rejoin a call for this setting to apply pub rodio_audio: bool, // default is false /// Requires 'rodio_audio: true' /// - /// Automatically increase or decrease you microphone's volume. This affects how - /// loud you sound to others. - /// - /// Recommended: off (default) - /// Microphones are too quite in zed, until everyone is on experimental - /// audio and has auto speaker volume on this will make you very loud - /// compared to other speakers. - pub auto_microphone_volume: bool, - /// Requires 'rodio_audio: true' - /// - /// Automatically increate or decrease the volume of other call members. - /// This only affects how things sound for you. - pub auto_speaker_volume: bool, - /// Requires 'rodio_audio: true' - /// - /// Remove background noises. Works great for typing, cars, dogs, AC. Does - /// not work well on music. - pub denoise: bool, + /// Use the new audio systems automatic gain control for your microphone. + /// This affects how loud you sound to others. 
+ pub control_input_volume: bool, /// Requires 'rodio_audio: true' /// - /// Use audio parameters compatible with the previous versions of - /// experimental audio and non-experimental audio. When this is false you - /// will sound strange to anyone not on the latest experimental audio. In - /// the future we will migrate by setting this to false - /// - /// You need to rejoin a call for this setting to apply - pub legacy_audio_compatible: bool, + /// Use the new audio systems automatic gain control on everyone in the + /// call. This makes call members who are too quite louder and those who are + /// too loud quieter. This only affects how things sound for you. + pub control_output_volume: bool, } /// Configuration of audio in Zed @@ -45,66 +25,46 @@ impl Settings for AudioSettings { fn from_settings(content: &settings::SettingsContent, _cx: &mut App) -> Self { let audio = &content.audio.as_ref().unwrap(); AudioSettings { + control_input_volume: audio.control_input_volume.unwrap(), + control_output_volume: audio.control_output_volume.unwrap(), rodio_audio: audio.rodio_audio.unwrap(), - auto_microphone_volume: audio.auto_microphone_volume.unwrap(), - auto_speaker_volume: audio.auto_speaker_volume.unwrap(), - denoise: audio.denoise.unwrap(), - legacy_audio_compatible: audio.legacy_audio_compatible.unwrap(), } } + + fn import_from_vscode( + _vscode: &settings::VsCodeSettings, + _current: &mut settings::SettingsContent, + ) { + } } /// See docs on [LIVE_SETTINGS] pub(crate) struct LiveSettings { - pub(crate) auto_microphone_volume: AtomicBool, - pub(crate) auto_speaker_volume: AtomicBool, - pub(crate) denoise: AtomicBool, + pub(crate) control_input_volume: AtomicBool, + pub(crate) control_output_volume: AtomicBool, } impl LiveSettings { pub(crate) fn initialize(&self, cx: &mut App) { cx.observe_global::(move |cx| { - LIVE_SETTINGS.auto_microphone_volume.store( - AudioSettings::get_global(cx).auto_microphone_volume, + LIVE_SETTINGS.control_input_volume.store( + AudioSettings::get_global(cx).control_input_volume, Ordering::Relaxed, ); - LIVE_SETTINGS.auto_speaker_volume.store( - AudioSettings::get_global(cx).auto_speaker_volume, + LIVE_SETTINGS.control_output_volume.store( + AudioSettings::get_global(cx).control_output_volume, Ordering::Relaxed, ); - - let denoise_enabled = AudioSettings::get_global(cx).denoise; - #[cfg(debug_assertions)] - { - static DENOISE_WARNING_SEND: AtomicBool = AtomicBool::new(false); - if denoise_enabled && !DENOISE_WARNING_SEND.load(Ordering::Relaxed) { - DENOISE_WARNING_SEND.store(true, Ordering::Relaxed); - log::warn!("Denoise does not work on debug builds, not enabling") - } - } - #[cfg(not(debug_assertions))] - LIVE_SETTINGS - .denoise - .store(denoise_enabled, Ordering::Relaxed); }) .detach(); let init_settings = AudioSettings::get_global(cx); LIVE_SETTINGS - .auto_microphone_volume - .store(init_settings.auto_microphone_volume, Ordering::Relaxed); - LIVE_SETTINGS - .auto_speaker_volume - .store(init_settings.auto_speaker_volume, Ordering::Relaxed); - let denoise_enabled = AudioSettings::get_global(cx).denoise; - #[cfg(debug_assertions)] - if denoise_enabled { - log::warn!("Denoise does not work on debug builds, not enabling") - } - #[cfg(not(debug_assertions))] + .control_input_volume + .store(init_settings.control_input_volume, Ordering::Relaxed); LIVE_SETTINGS - .denoise - .store(denoise_enabled, Ordering::Relaxed); + .control_output_volume + .store(init_settings.control_output_volume, Ordering::Relaxed); } } @@ -113,7 +73,6 @@ impl LiveSettings { /// real 
time and must each run in a dedicated OS thread, therefore we can not /// use the background executor. pub(crate) static LIVE_SETTINGS: LiveSettings = LiveSettings { - auto_microphone_volume: AtomicBool::new(true), - auto_speaker_volume: AtomicBool::new(true), - denoise: AtomicBool::new(true), + control_input_volume: AtomicBool::new(true), + control_output_volume: AtomicBool::new(true), }; diff --git a/crates/audio/src/rodio_ext.rs b/crates/audio/src/rodio_ext.rs index af4cc89252dfdc1498471ec7ac09b56d59b62eca..e80b00e15a8fdbd3fc438b78a9ca45d0902dcef1 100644 --- a/crates/audio/src/rodio_ext.rs +++ b/crates/audio/src/rodio_ext.rs @@ -1,5 +1,4 @@ use std::{ - num::NonZero, sync::{ Arc, Mutex, atomic::{AtomicBool, Ordering}, @@ -8,22 +7,12 @@ use std::{ }; use crossbeam::queue::ArrayQueue; -use denoise::{Denoiser, DenoiserError}; -use log::warn; -use rodio::{ - ChannelCount, Sample, SampleRate, Source, conversions::SampleRateConverter, nz, - source::UniformSourceIterator, -}; - -const MAX_CHANNELS: usize = 8; +use rodio::{ChannelCount, Sample, SampleRate, Source}; #[derive(Debug, thiserror::Error)] #[error("Replay duration is too short must be >= 100ms")] pub struct ReplayDurationTooShort; -// These all require constant sources (so the span is infinitely long) -// this is not guaranteed by rodio however we know it to be true in all our -// applications. Rodio desperately needs a constant source concept. pub trait RodioExt: Source + Sized { fn process_buffer(self, callback: F) -> ProcessBuffer where @@ -36,14 +25,6 @@ pub trait RodioExt: Source + Sized { duration: Duration, ) -> Result<(Replay, Replayable), ReplayDurationTooShort>; fn take_samples(self, n: usize) -> TakeSamples; - fn denoise(self) -> Result, DenoiserError>; - fn constant_params( - self, - channel_count: ChannelCount, - sample_rate: SampleRate, - ) -> UniformSourceIterator; - fn constant_samplerate(self, sample_rate: SampleRate) -> ConstantSampleRate; - fn possibly_disconnected_channels_to_mono(self) -> ToMono; } impl RodioExt for S { @@ -120,149 +101,8 @@ impl RodioExt for S { left_to_take: n, } } - fn denoise(self) -> Result, DenoiserError> { - let res = Denoiser::try_new(self); - res - } - fn constant_params( - self, - channel_count: ChannelCount, - sample_rate: SampleRate, - ) -> UniformSourceIterator { - UniformSourceIterator::new(self, channel_count, sample_rate) - } - fn constant_samplerate(self, sample_rate: SampleRate) -> ConstantSampleRate { - ConstantSampleRate::new(self, sample_rate) - } - fn possibly_disconnected_channels_to_mono(self) -> ToMono { - ToMono::new(self) - } -} - -pub struct ConstantSampleRate { - inner: SampleRateConverter, - channels: ChannelCount, - sample_rate: SampleRate, -} - -impl ConstantSampleRate { - fn new(source: S, target_rate: SampleRate) -> Self { - let input_sample_rate = source.sample_rate(); - let channels = source.channels(); - let inner = SampleRateConverter::new(source, input_sample_rate, target_rate, channels); - Self { - inner, - channels, - sample_rate: target_rate, - } - } -} - -impl Iterator for ConstantSampleRate { - type Item = rodio::Sample; - - fn next(&mut self) -> Option { - self.inner.next() - } - - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} - -impl Source for ConstantSampleRate { - fn current_span_len(&self) -> Option { - None - } - - fn channels(&self) -> ChannelCount { - self.channels - } - - fn sample_rate(&self) -> SampleRate { - self.sample_rate - } - - fn total_duration(&self) -> Option { - None // not supported (not used by us) - } 
-} - -const TYPICAL_NOISE_FLOOR: Sample = 1e-3; - -/// constant source, only works on a single span -pub struct ToMono { - inner: S, - input_channel_count: ChannelCount, - connected_channels: ChannelCount, - /// running mean of second channel 'volume' - means: [f32; MAX_CHANNELS], -} -impl ToMono { - fn new(input: S) -> Self { - let channels = input - .channels() - .min(const { NonZero::::new(MAX_CHANNELS as u16).unwrap() }); - if channels < input.channels() { - warn!("Ignoring input channels {}..", channels.get()); - } - - Self { - connected_channels: channels, - input_channel_count: channels, - inner: input, - means: [TYPICAL_NOISE_FLOOR; MAX_CHANNELS], - } - } -} - -impl Source for ToMono { - fn current_span_len(&self) -> Option { - None - } - - fn channels(&self) -> ChannelCount { - rodio::nz!(1) - } - - fn sample_rate(&self) -> SampleRate { - self.inner.sample_rate() - } - - fn total_duration(&self) -> Option { - self.inner.total_duration() - } -} - -fn update_mean(mean: &mut f32, sample: Sample) { - const HISTORY: f32 = 500.0; - *mean *= (HISTORY - 1.0) / HISTORY; - *mean += sample.abs() / HISTORY; -} - -impl Iterator for ToMono { - type Item = Sample; - - fn next(&mut self) -> Option { - let mut mono_sample = 0f32; - let mut active_channels = 0; - for channel in 0..self.input_channel_count.get() as usize { - let sample = self.inner.next()?; - mono_sample += sample; - - update_mean(&mut self.means[channel], sample); - if self.means[channel] > TYPICAL_NOISE_FLOOR / 10.0 { - active_channels += 1; - } - } - mono_sample /= self.connected_channels.get() as f32; - self.connected_channels = NonZero::new(active_channels).unwrap_or(nz!(1)); - - Some(mono_sample) - } } -/// constant source, only works on a single span pub struct TakeSamples { inner: S, left_to_take: usize, @@ -307,7 +147,6 @@ impl Source for TakeSamples { } } -/// constant source, only works on a single span #[derive(Debug)] struct ReplayQueue { inner: ArrayQueue>, @@ -354,7 +193,6 @@ impl ReplayQueue { } } -/// constant source, only works on a single span pub struct ProcessBuffer where S: Source + Sized, @@ -422,7 +260,6 @@ where } } -/// constant source, only works on a single span pub struct InspectBuffer where S: Source + Sized, @@ -487,7 +324,6 @@ where } } -/// constant source, only works on a single span #[derive(Debug)] pub struct Replayable { inner: S, @@ -539,7 +375,6 @@ impl Source for Replayable { } } -/// constant source, only works on a single span #[derive(Debug)] pub struct Replay { rx: Arc, diff --git a/crates/denoise/src/engine.rs b/crates/denoise/src/engine.rs index be0548c689e3b902342cd1cb6d6d8e29351e8be4..5196b70b5ba02f665385c022a0dfa9cd22c1db9c 100644 --- a/crates/denoise/src/engine.rs +++ b/crates/denoise/src/engine.rs @@ -138,13 +138,13 @@ impl Engine { const SPECTRUM_INPUT: &str = "input_2"; const MEMORY_INPUT: &str = "input_3"; - let spectrum = + let memory_input = Tensor::from_slice::<_, f32>(&self.in_magnitude, (1, 1, FFT_OUT_SIZE), &Device::Cpu) .expect("the in magnitude has enough elements to fill the Tensor"); let inputs = HashMap::from([ - (SPECTRUM_INPUT.to_string(), spectrum), - (MEMORY_INPUT.to_string(), self.spectral_memory.clone()), + (MEMORY_INPUT.to_string(), memory_input), + (SPECTRUM_INPUT.to_string(), self.spectral_memory.clone()), ]); inputs } diff --git a/crates/denoise/src/lib.rs b/crates/denoise/src/lib.rs index f6cbf0fadf1f216cc6168c2b249f807b557869af..1422c81a4b915d571d35585447165c04d3695b73 100644 --- a/crates/denoise/src/lib.rs +++ b/crates/denoise/src/lib.rs @@ -84,7 +84,7 @@ 
impl Denoiser { .spawn(move || { run_neural_denoiser(denoised_tx, input_rx); }) - .expect("Should be ablet to spawn threads"); + .unwrap(); Ok(Self { inner: source, diff --git a/crates/livekit_client/src/livekit_client.rs b/crates/livekit_client/src/livekit_client.rs index 04e669869ddbf64ffd92cbcad4bf927bfec55cb5..45e929cb2ec0bebf054497632d614af1975f6397 100644 --- a/crates/livekit_client/src/livekit_client.rs +++ b/crates/livekit_client/src/livekit_client.rs @@ -1,6 +1,6 @@ use std::sync::Arc; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result}; use audio::AudioSettings; use collections::HashMap; use futures::{SinkExt, channel::mpsc}; @@ -12,10 +12,7 @@ use settings::Settings; mod playback; -use crate::{ - LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication, - livekit_client::playback::Speaker, -}; +use crate::{LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication}; pub use playback::AudioStream; pub(crate) use playback::{RemoteVideoFrame, play_remote_video_track}; @@ -135,20 +132,11 @@ impl Room { track: &RemoteAudioTrack, cx: &mut App, ) -> Result { - let speaker: Speaker = - serde_urlencoded::from_str(&track.0.name()).unwrap_or_else(|_| Speaker { - name: track.0.name(), - is_staff: false, - legacy_audio_compatible: true, - }); - if AudioSettings::get_global(cx).rodio_audio { info!("Using experimental.rodio_audio audio pipeline for output"); - playback::play_remote_audio_track(&track.0, speaker, cx) - } else if speaker.legacy_audio_compatible { - Ok(self.playback.play_remote_audio_track(&track.0)) + playback::play_remote_audio_track(&track.0, cx) } else { - Err(anyhow!("Client version too old to play audio in call")) + Ok(self.playback.play_remote_audio_track(&track.0)) } } } diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index b4cd68e08e4a88f9cb248e3b7ac64fbfca4c39de..df8b5ea54fb1ce11bf871faa912757bbff1fd7f9 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -1,6 +1,6 @@ use anyhow::{Context as _, Result}; -use audio::{AudioSettings, CHANNEL_COUNT, LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE, SAMPLE_RATE}; +use audio::{AudioSettings, CHANNEL_COUNT, SAMPLE_RATE}; use cpal::traits::{DeviceTrait, StreamTrait as _}; use futures::channel::mpsc::UnboundedSender; use futures::{Stream, StreamExt as _}; @@ -43,17 +43,12 @@ pub(crate) struct AudioStack { pub(crate) fn play_remote_audio_track( track: &livekit::track::RemoteAudioTrack, - speaker: Speaker, cx: &mut gpui::App, ) -> Result { - let stream = source::LiveKitStream::new( - cx.background_executor(), - track, - speaker.legacy_audio_compatible, - ); - let stop_handle = Arc::new(AtomicBool::new(false)); let stop_handle_clone = stop_handle.clone(); + let stream = source::LiveKitStream::new(cx.background_executor(), track); + let stream = stream .stoppable() .periodic_access(Duration::from_millis(50), move |s| { @@ -62,6 +57,10 @@ pub(crate) fn play_remote_audio_track( } }); + let speaker: Speaker = serde_urlencoded::from_str(&track.name()).unwrap_or_else(|_| Speaker { + name: track.name(), + is_staff: false, + }); audio::Audio::play_voip_stream(stream, speaker.name, speaker.is_staff, cx) .context("Could not play audio")?; @@ -153,32 +152,17 @@ impl AudioStack { is_staff: bool, cx: &AsyncApp, ) -> Result<(crate::LocalAudioTrack, AudioStream)> { - let legacy_audio_compatible = - AudioSettings::try_read_global(cx, |setting| 
setting.legacy_audio_compatible) - .unwrap_or_default(); - - let source = if legacy_audio_compatible { - NativeAudioSource::new( - // n.b. this struct's options are always ignored, noise cancellation is provided by apm. - AudioSourceOptions::default(), - LEGACY_SAMPLE_RATE.get(), - LEGACY_CHANNEL_COUNT.get().into(), - 10, - ) - } else { - NativeAudioSource::new( - // n.b. this struct's options are always ignored, noise cancellation is provided by apm. - AudioSourceOptions::default(), - SAMPLE_RATE.get(), - CHANNEL_COUNT.get().into(), - 10, - ) - }; + let source = NativeAudioSource::new( + // n.b. this struct's options are always ignored, noise cancellation is provided by apm. + AudioSourceOptions::default(), + SAMPLE_RATE.get(), + CHANNEL_COUNT.get().into(), + 10, + ); let track_name = serde_urlencoded::to_string(Speaker { name: user_name, is_staff, - legacy_audio_compatible, }) .context("Could not encode user information in track name")?; @@ -202,32 +186,22 @@ impl AudioStack { let capture_task = if rodio_pipeline { info!("Using experimental.rodio_audio audio pipeline"); let voip_parts = audio::VoipParts::new(cx)?; - // Audio needs to run real-time and should never be paused. That is - // why we are using a normal std::thread and not a background task + // Audio needs to run real-time and should never be paused. That is why we are using a + // normal std::thread and not a background task thread::Builder::new() - .name("MicrophoneToLivekit".to_string()) + .name("AudioCapture".to_string()) .spawn(move || { // microphone is non send on mac - let microphone = match audio::Audio::open_microphone(voip_parts) { - Ok(m) => m, - Err(e) => { - log::error!("Could not open microphone: {e}"); - return; - } - }; + let microphone = audio::Audio::open_microphone(voip_parts)?; send_to_livekit(frame_tx, microphone); + Ok::<(), anyhow::Error>(()) }) - .expect("should be able to spawn threads"); + .unwrap(); Task::ready(Ok(())) } else { self.executor.spawn(async move { - Self::capture_input( - apm, - frame_tx, - LEGACY_SAMPLE_RATE.get(), - LEGACY_CHANNEL_COUNT.get().into(), - ) - .await + Self::capture_input(apm, frame_tx, SAMPLE_RATE.get(), CHANNEL_COUNT.get().into()) + .await }) }; @@ -415,30 +389,25 @@ impl AudioStack { } #[derive(Serialize, Deserialize)] -pub struct Speaker { - pub name: String, - pub is_staff: bool, - pub legacy_audio_compatible: bool, +struct Speaker { + name: String, + is_staff: bool, } fn send_to_livekit(frame_tx: UnboundedSender>, mut microphone: impl Source) { use cpal::Sample; - let sample_rate = microphone.sample_rate().get(); - let num_channels = microphone.channels().get() as u32; - let buffer_size = sample_rate / 100 * num_channels; - loop { let sampled: Vec<_> = microphone .by_ref() - .take(buffer_size as usize) + .take(audio::BUFFER_SIZE) .map(|s| s.to_sample()) .collect(); if frame_tx .unbounded_send(AudioFrame { - sample_rate, - num_channels, - samples_per_channel: sampled.len() as u32 / num_channels, + sample_rate: SAMPLE_RATE.get(), + num_channels: CHANNEL_COUNT.get() as u32, + samples_per_channel: sampled.len() as u32 / CHANNEL_COUNT.get() as u32, data: Cow::Owned(sampled), }) .is_err() diff --git a/crates/livekit_client/src/livekit_client/playback/source.rs b/crates/livekit_client/src/livekit_client/playback/source.rs index cde4b19fda2e053346ad535e7c75b2abda60431a..f605b3d517cd816491f0eceadce5ac778ef75d21 100644 --- a/crates/livekit_client/src/livekit_client/playback/source.rs +++ b/crates/livekit_client/src/livekit_client/playback/source.rs @@ -3,19 +3,17 @@ use 
std::num::NonZero; use futures::StreamExt; use libwebrtc::{audio_stream::native::NativeAudioStream, prelude::AudioFrame}; use livekit::track::RemoteAudioTrack; -use rodio::{ - ChannelCount, SampleRate, Source, buffer::SamplesBuffer, conversions::SampleTypeConverter, -}; +use rodio::{Source, buffer::SamplesBuffer, conversions::SampleTypeConverter, nz}; -use audio::{CHANNEL_COUNT, LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE, SAMPLE_RATE}; +use audio::{CHANNEL_COUNT, SAMPLE_RATE}; fn frame_to_samplesbuffer(frame: AudioFrame) -> SamplesBuffer { let samples = frame.data.iter().copied(); let samples = SampleTypeConverter::<_, _>::new(samples); let samples: Vec = samples.collect(); SamplesBuffer::new( - NonZero::new(frame.num_channels as u16).expect("zero channels is nonsense"), - NonZero::new(frame.sample_rate).expect("samplerate zero is nonsense"), + nz!(2), // frame always has two channels + NonZero::new(frame.sample_rate).expect("audio frame sample rate is nonzero"), samples, ) } @@ -24,26 +22,14 @@ pub struct LiveKitStream { // shared_buffer: SharedBuffer, inner: rodio::queue::SourcesQueueOutput, _receiver_task: gpui::Task<()>, - channel_count: ChannelCount, - sample_rate: SampleRate, } impl LiveKitStream { - pub fn new( - executor: &gpui::BackgroundExecutor, - track: &RemoteAudioTrack, - legacy: bool, - ) -> Self { - let (channel_count, sample_rate) = if legacy { - (LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE) - } else { - (CHANNEL_COUNT, SAMPLE_RATE) - }; - + pub fn new(executor: &gpui::BackgroundExecutor, track: &RemoteAudioTrack) -> Self { let mut stream = NativeAudioStream::new( track.rtc_track(), - sample_rate.get() as i32, - channel_count.get().into(), + SAMPLE_RATE.get() as i32, + CHANNEL_COUNT.get().into(), ); let (queue_input, queue_output) = rodio::queue::queue(true); // spawn rtc stream @@ -59,8 +45,6 @@ impl LiveKitStream { LiveKitStream { _receiver_task: receiver_task, inner: queue_output, - sample_rate, - channel_count, } } } @@ -79,11 +63,17 @@ impl Source for LiveKitStream { } fn channels(&self) -> rodio::ChannelCount { - self.channel_count + // This must be hardcoded because the playback source assumes constant + // sample rate and channel count. The queue upon which this is build + // will however report different counts and rates. Even though we put in + // only items with our (constant) CHANNEL_COUNT & SAMPLE_RATE this will + // play silence on one channel and at 44100 which is not what our + // constants are. + CHANNEL_COUNT } fn sample_rate(&self) -> rodio::SampleRate { - self.sample_rate + SAMPLE_RATE // see comment on channels } fn total_duration(&self) -> Option { diff --git a/crates/settings/src/settings_content.rs b/crates/settings/src/settings_content.rs index b47755be58445e8ba335c6ea64416265d176fc17..43402cae0e6c723b4cc2e94f28c1ba7d0c61c928 100644 --- a/crates/settings/src/settings_content.rs +++ b/crates/settings/src/settings_content.rs @@ -291,43 +291,21 @@ pub enum TitleBarVisibility { #[derive(Clone, PartialEq, Default, Serialize, Deserialize, JsonSchema, MergeFrom, Debug)] pub struct AudioSettingsContent { /// Opt into the new audio system. - /// - /// You need to rejoin a call for this setting to apply - #[serde(rename = "experimental.rodio_audio")] - pub rodio_audio: Option, // default is false - /// Requires 'rodio_audio: true' - /// - /// Automatically increase or decrease you microphone's volume. This affects how - /// loud you sound to others. 
-    ///
-    /// Recommended: off (default)
-    /// Microphones are too quite in zed, until everyone is on experimental
-    /// audio and has auto speaker volume on this will make you very loud
-    /// compared to other speakers.
-    #[serde(rename = "experimental.auto_microphone_volume")]
-    pub auto_microphone_volume: Option,
+    #[serde(rename = "experimental.rodio_audio", default)]
+    pub rodio_audio: Option,
     /// Requires 'rodio_audio: true'
     ///
-    /// Automatically increate or decrease the volume of other call members.
-    /// This only affects how things sound for you.
-    #[serde(rename = "experimental.auto_speaker_volume")]
-    pub auto_speaker_volume: Option,
+    /// Use the new audio system's automatic gain control for your microphone.
+    /// This affects how loud you sound to others.
+    #[serde(rename = "experimental.control_input_volume", default)]
+    pub control_input_volume: Option,
     /// Requires 'rodio_audio: true'
     ///
-    /// Remove background noises. Works great for typing, cars, dogs, AC. Does
-    /// not work well on music.
-    #[serde(rename = "experimental.denoise")]
-    pub denoise: Option,
-    /// Requires 'rodio_audio: true'
-    ///
-    /// Use audio parameters compatible with the previous versions of
-    /// experimental audio and non-experimental audio. When this is false you
-    /// will sound strange to anyone not on the latest experimental audio. In
-    /// the future we will migrate by setting this to false
-    ///
-    /// You need to rejoin a call for this setting to apply
-    #[serde(rename = "experimental.legacy_audio_compatible")]
-    pub legacy_audio_compatible: Option,
+    /// Use the new audio system's automatic gain control on everyone in the
+    /// call. This makes call members who are too quiet louder and those who are
+    /// too loud quieter. This only affects how things sound for you.
+    #[serde(rename = "experimental.control_output_volume", default)]
+    pub control_output_volume: Option,
 }

 /// Control what info is collected by Zed.
diff --git a/tooling/workspace-hack/Cargo.toml b/tooling/workspace-hack/Cargo.toml index b50854abd55af883af1e97eac4afd51dbb31df3b..ec9629685d8366864b92a6160ece623450f72b0c 100644 --- a/tooling/workspace-hack/Cargo.toml +++ b/tooling/workspace-hack/Cargo.toml @@ -90,6 +90,7 @@ mime_guess = { version = "2" } miniz_oxide = { version = "0.8", features = ["simd"] } nom = { version = "7" } num-bigint = { version = "0.4" } +num-complex = { version = "0.4", features = ["bytemuck"] } num-integer = { version = "0.1", features = ["i128"] } num-iter = { version = "0.1", default-features = false, features = ["i128", "std"] } num-rational = { version = "0.4", features = ["num-bigint-std"] } @@ -228,6 +229,7 @@ mime_guess = { version = "2" } miniz_oxide = { version = "0.8", features = ["simd"] } nom = { version = "7" } num-bigint = { version = "0.4" } +num-complex = { version = "0.4", features = ["bytemuck"] } num-integer = { version = "0.1", features = ["i128"] } num-iter = { version = "0.1", default-features = false, features = ["i128", "std"] } num-rational = { version = "0.4", features = ["num-bigint-std"] } @@ -306,6 +308,7 @@ hyper-rustls = { version = "0.27", default-features = false, features = ["http1" livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } naga = { version = "25", features = ["msl-out", "wgsl-in"] } nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } +num = { version = "0.4" } objc2 = { version = "0.6" } objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } @@ -335,6 +338,7 @@ hyper-rustls = { version = "0.27", default-features = false, features = ["http1" livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } naga = { version = "25", features = ["msl-out", "wgsl-in"] } nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } +num = { version = "0.4" } objc2 = { version = "0.6" } objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } @@ -365,6 +369,7 @@ hyper-rustls = { version = "0.27", default-features = false, features = ["http1" livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } naga = { version = "25", features = ["msl-out", "wgsl-in"] } nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", 
features = ["fs", "pthread", "signal", "user"] } +num = { version = "0.4" } objc2 = { version = "0.6" } objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } @@ -394,6 +399,7 @@ hyper-rustls = { version = "0.27", default-features = false, features = ["http1" livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } naga = { version = "25", features = ["msl-out", "wgsl-in"] } nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } +num = { version = "0.4" } objc2 = { version = "0.6" } objc2-core-foundation = { version = "0.3", default-features = false, features = ["CFArray", "CFCGTypes", "CFData", "CFDate", "CFDictionary", "CFRunLoop", "CFString", "CFURL", "objc2", "std"] } objc2-foundation = { version = "0.3", default-features = false, features = ["NSArray", "NSAttributedString", "NSBundle", "NSCoder", "NSData", "NSDate", "NSDictionary", "NSEnumerator", "NSError", "NSGeometry", "NSNotification", "NSNull", "NSObjCRuntime", "NSObject", "NSProcessInfo", "NSRange", "NSRunLoop", "NSString", "NSURL", "NSUndoManager", "NSValue", "objc2-core-foundation", "std"] } @@ -436,7 +442,6 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -478,7 +483,6 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -518,7 +522,6 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", 
features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -560,7 +563,6 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -585,6 +587,7 @@ getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-f getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } +num = { version = "0.4" } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } ring = { version = "0.17", features = ["std"] } rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "fs", "net"] } @@ -610,6 +613,7 @@ getrandom-468e82937335b1c9 = { package = "getrandom", version = "0.3", default-f getrandom-6f8ce4dd05d13bba = { package = "getrandom", version = "0.2", default-features = false, features = ["js", "rdrand"] } hyper-rustls = { version = "0.27", default-features = false, features = ["http1", "http2", "native-tokio", "ring", "tls12"] } livekit-runtime = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d" } +num = { version = "0.4" } proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } ring = { version = "0.17", features = ["std"] } @@ -651,7 +655,6 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", "user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } @@ -693,7 +696,6 @@ nix-1f5adca70f036a62 = { package = "nix", version = "0.28", features = ["fs", "m nix-b73a96c0a5f6a7d9 = { package = "nix", version = "0.29", features = ["fs", "pthread", "signal", 
"user"] } nix-fa1f6196edfd7249 = { package = "nix", version = "0.30", features = ["fs", "socket", "uio", "user"] } num-bigint-dig = { version = "0.8", features = ["i128", "prime", "zeroize"] } -num-complex = { version = "0.4", features = ["bytemuck"] } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } From 94fcbb400b875aaaccab5e730c3a7c72dd660ef5 Mon Sep 17 00:00:00 2001 From: Dimas Ari <_@inchidi.dev> Date: Fri, 19 Sep 2025 21:36:36 +0700 Subject: [PATCH 14/58] docs: Update invalid property in a configuration example (#38466) Just install Zed for the first time and got a warning from the first config example i copied from docs. Great design btw, immediately able to see that this is a well thought out app. seems like i'll stick with zed and make it my new dev 'sanctuary'. Release Notes: - N/A --- docs/src/visual-customization.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 55f2dfe9b4d40d46a640520a99952964712c640e..6798542d14e448feec86cdd8cb8e6a8f61a4cc78 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -227,7 +227,7 @@ TBD: Centered layout related settings "git": { "inline_blame": { "enabled": true, // Show/hide inline blame - "delay": 0, // Show after delay (ms) + "delay_ms": 0, // Show after delay (ms) "min_column": 0, // Minimum column to inline display blame "padding": 7, // Padding between code and inline blame (em) "show_commit_summary": false // Show/hide commit summary From b6944d0bae3c53bff64ce31d2609135eb0807eb7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E9=80=83=E7=94=9F=E8=88=B1?= Date: Fri, 19 Sep 2025 22:43:25 +0800 Subject: [PATCH 15/58] docs: Fix duplicate postgresql package and punctuation error (#38478) Found duplicate `postgresql` package in installation command. Uncertain whether it should be `postgresql-contrib` or `postgresql-client`, but neither appears necessary. Release Notes: - N/A --- docs/src/development/local-collaboration.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/src/development/local-collaboration.md b/docs/src/development/local-collaboration.md index eb7f3dfc43dc29ee3d25de3dbc373f5f925ba2af..87363a4269ac32ac39598efef640b80384d1f44a 100644 --- a/docs/src/development/local-collaboration.md +++ b/docs/src/development/local-collaboration.md @@ -48,17 +48,17 @@ You can install these dependencies natively or run them under Docker. - Follow the steps in the [collab README](https://github.com/zed-industries/zed/blob/main/crates/collab/README.md) to configure the Postgres database for integration tests -Alternatively, if you have [Docker](https://www.docker.com/) installed you can bring up all the `collab` dependencies using Docker Compose: +Alternatively, if you have [Docker](https://www.docker.com/) installed you can bring up all the `collab` dependencies using Docker Compose. ### Linux 1. 
Install [Postgres](https://www.postgresql.org/download/linux/) ```sh - sudo apt-get install postgresql postgresql # Ubuntu/Debian - sudo pacman -S postgresql # Arch Linux - sudo dnf install postgresql postgresql-server # RHEL/Fedora - sudo zypper install postgresql postgresql-server # OpenSUSE + sudo apt-get install postgresql # Ubuntu/Debian + sudo pacman -S postgresql # Arch Linux + sudo dnf install postgresql postgresql-server # RHEL/Fedora + sudo zypper install postgresql postgresql-server # OpenSUSE ``` 2. Install [Livekit](https://github.com/livekit/livekit-cli) From 154b01c5fe55192d5f319c1858556f3246f49952 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 19 Sep 2025 17:05:39 +0200 Subject: [PATCH 16/58] Dismiss agent panel when `disable_ai` is toggled to `true` (#38461) Closes https://github.com/zed-industries/zed/issues/38331 This fixes an issue where we would not dismiss the panel once the user toggled the setting, leaving them in an awkward state where closing the panel would become hard. Also takes care of one more check for the `Fix with assistant` action and consolidates some of the `AgentSettings` and `DisableAiSetting` checks into one method to make the code more readable. Release Notes: - N/A --- Cargo.lock | 1 + crates/agent_settings/Cargo.toml | 1 + crates/agent_settings/src/agent_settings.rs | 5 +++ crates/agent_ui/src/agent_panel.rs | 44 ++++++++++++++++++--- crates/agent_ui/src/inline_assistant.rs | 8 ++-- crates/git_ui/src/git_panel.rs | 22 ++++------- crates/zed/src/zed/quick_action_bar.rs | 18 +++------ 7 files changed, 61 insertions(+), 38 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3acfed9bd7cfa8bc2742bb4f006c38a4f65a1f0e..ed1e9bef3f1797201dd791a7b4616509bbbc5036 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -337,6 +337,7 @@ dependencies = [ "gpui", "language_model", "paths", + "project", "schemars 1.0.1", "serde", "serde_json", diff --git a/crates/agent_settings/Cargo.toml b/crates/agent_settings/Cargo.toml index 8af76053c2aabead30413c98e482ed97dbdbc361..a8b457a9dddb1f8932d015f895e6d2064944bfe9 100644 --- a/crates/agent_settings/Cargo.toml +++ b/crates/agent_settings/Cargo.toml @@ -19,6 +19,7 @@ convert_case.workspace = true fs.workspace = true gpui.workspace = true language_model.workspace = true +project.workspace = true schemars.workspace = true serde.workspace = true settings.workspace = true diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index e416ce73e5451e840af8c36e8ffee301bacc79b3..d862cacee18ea53f81cdc91981b22f5531f2d75e 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -5,6 +5,7 @@ use std::sync::Arc; use collections::IndexMap; use gpui::{App, Pixels, px}; use language_model::LanguageModel; +use project::DisableAiSettings; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ @@ -53,6 +54,10 @@ pub struct AgentSettings { } impl AgentSettings { + pub fn enabled(&self, cx: &App) -> bool { + self.enabled && !DisableAiSettings::get_global(cx).disable_ai + } + pub fn temperature_for_model(model: &Arc, cx: &App) -> Option { let settings = Self::get_global(cx); for setting in settings.model_parameters.iter().rev() { diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index ee9ac73b2ee23a8b4326ddbc4e60c345ef4a3526..ba71fd84ab5b9d666256afeb0a2c5677aac9adb1 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -1,4 +1,4 @@ -use 
std::ops::{Not, Range}; +use std::ops::Range; use std::path::Path; use std::rc::Rc; use std::sync::Arc; @@ -662,6 +662,43 @@ impl AgentPanel { ) }); + let mut old_disable_ai = false; + cx.observe_global_in::(window, move |panel, window, cx| { + let disable_ai = DisableAiSettings::get_global(cx).disable_ai; + if old_disable_ai != disable_ai { + let agent_panel_id = cx.entity_id(); + let agent_panel_visible = panel + .workspace + .update(cx, |workspace, cx| { + let agent_dock_position = panel.position(window, cx); + let agent_dock = workspace.dock_at_position(agent_dock_position); + let agent_panel_focused = agent_dock + .read(cx) + .active_panel() + .is_some_and(|panel| panel.panel_id() == agent_panel_id); + + let active_panel_visible = agent_dock + .read(cx) + .visible_panel() + .is_some_and(|panel| panel.panel_id() == agent_panel_id); + + if agent_panel_focused { + cx.dispatch_action(&ToggleFocus); + } + + active_panel_visible + }) + .unwrap_or_default(); + + if agent_panel_visible { + cx.emit(PanelEvent::Close); + } + + old_disable_ai = disable_ai; + } + }) + .detach(); + Self { active_view, workspace, @@ -674,11 +711,9 @@ impl AgentPanel { prompt_store, configuration: None, configuration_subscription: None, - inline_assist_context_store, previous_view: None, history_store: history_store.clone(), - new_thread_menu_handle: PopoverMenuHandle::default(), agent_panel_menu_handle: PopoverMenuHandle::default(), assistant_navigation_menu_handle: PopoverMenuHandle::default(), @@ -703,7 +738,6 @@ impl AgentPanel { if workspace .panel::(cx) .is_some_and(|panel| panel.read(cx).enabled(cx)) - && !DisableAiSettings::get_global(cx).disable_ai { workspace.toggle_panel_focus::(window, cx); } @@ -1499,7 +1533,7 @@ impl Panel for AgentPanel { } fn enabled(&self, cx: &App) -> bool { - DisableAiSettings::get_global(cx).disable_ai.not() && AgentSettings::get_global(cx).enabled + AgentSettings::get_global(cx).enabled(cx) } fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool { diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 79e092b709dd2778c89a79e1d6ce36802c853eb6..98e7276dc4fd3f94b01df82219f116a07cafa304 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -144,8 +144,7 @@ impl InlineAssistant { let Some(terminal_panel) = workspace.read(cx).panel::(cx) else { return; }; - let enabled = !DisableAiSettings::get_global(cx).disable_ai - && AgentSettings::get_global(cx).enabled; + let enabled = AgentSettings::get_global(cx).enabled(cx); terminal_panel.update(cx, |terminal_panel, cx| { terminal_panel.set_assistant_enabled(enabled, cx) }); @@ -257,8 +256,7 @@ impl InlineAssistant { window: &mut Window, cx: &mut Context, ) { - let settings = AgentSettings::get_global(cx); - if !settings.enabled || DisableAiSettings::get_global(cx).disable_ai { + if !AgentSettings::get_global(cx).enabled(cx) { return; } @@ -1788,7 +1786,7 @@ impl CodeActionProvider for AssistantCodeActionProvider { _: &mut Window, cx: &mut App, ) -> Task>> { - if !AgentSettings::get_global(cx).enabled { + if !AgentSettings::get_global(cx).enabled(cx) { return Task::ready(Ok(Vec::new())); } diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 76671eba7b577e86d5049add743e965d11acd6c4..47dcc68d2137f75666ae04d2b8ffe4e87cb478f8 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -46,7 +46,7 @@ use panel::{ panel_icon_button, }; use project::{ - DisableAiSettings, Fs, Project, ProjectPath, + 
Fs, Project, ProjectPath, git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId}, }; use serde::{Deserialize, Serialize}; @@ -405,15 +405,11 @@ impl GitPanel { let scroll_handle = UniformListScrollHandle::new(); - let mut assistant_enabled = AgentSettings::get_global(cx).enabled; - let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; + let mut was_ai_enabled = AgentSettings::get_global(cx).enabled(cx); let _settings_subscription = cx.observe_global::(move |_, cx| { - let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; - if assistant_enabled != AgentSettings::get_global(cx).enabled - || was_ai_disabled != is_ai_disabled - { - assistant_enabled = AgentSettings::get_global(cx).enabled; - was_ai_disabled = is_ai_disabled; + let is_ai_enabled = AgentSettings::get_global(cx).enabled(cx); + if was_ai_enabled != is_ai_enabled { + was_ai_enabled = is_ai_enabled; cx.notify(); } }); @@ -1739,10 +1735,7 @@ impl GitPanel { /// Generates a commit message using an LLM. pub fn generate_commit_message(&mut self, cx: &mut Context) { - if !self.can_commit() - || DisableAiSettings::get_global(cx).disable_ai - || !agent_settings::AgentSettings::get_global(cx).enabled - { + if !self.can_commit() || !AgentSettings::get_global(cx).enabled(cx) { return; } @@ -2996,8 +2989,7 @@ impl GitPanel { &self, cx: &Context, ) -> Option { - if !agent_settings::AgentSettings::get_global(cx).enabled - || DisableAiSettings::get_global(cx).disable_ai + if !agent_settings::AgentSettings::get_global(cx).enabled(cx) || LanguageModelRegistry::read_global(cx) .commit_message_model() .is_none() diff --git a/crates/zed/src/zed/quick_action_bar.rs b/crates/zed/src/zed/quick_action_bar.rs index a6f85000e9b2fd2b853880a9045984938b6a7445..df1a417f5815753698a18b077d69c81c5b7ba3ed 100644 --- a/crates/zed/src/zed/quick_action_bar.rs +++ b/crates/zed/src/zed/quick_action_bar.rs @@ -15,7 +15,6 @@ use gpui::{ FocusHandle, Focusable, InteractiveElement, ParentElement, Render, Styled, Subscription, WeakEntity, Window, anchored, deferred, point, }; -use project::DisableAiSettings; use project::project_settings::DiagnosticSeverity; use search::{BufferSearchBar, buffer_search}; use settings::{Settings, SettingsStore}; @@ -48,20 +47,15 @@ impl QuickActionBar { workspace: &Workspace, cx: &mut Context, ) -> Self { - let mut was_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; - let mut was_agent_enabled = AgentSettings::get_global(cx).enabled; + let mut was_agent_enabled = AgentSettings::get_global(cx).enabled(cx); let mut was_agent_button = AgentSettings::get_global(cx).button; let ai_settings_subscription = cx.observe_global::(move |_, cx| { - let is_ai_disabled = DisableAiSettings::get_global(cx).disable_ai; let agent_settings = AgentSettings::get_global(cx); + let is_agent_enabled = agent_settings.enabled(cx); - if was_ai_disabled != is_ai_disabled - || was_agent_enabled != agent_settings.enabled - || was_agent_button != agent_settings.button - { - was_ai_disabled = is_ai_disabled; - was_agent_enabled = agent_settings.enabled; + if was_agent_enabled != is_agent_enabled || was_agent_button != agent_settings.button { + was_agent_enabled = is_agent_enabled; was_agent_button = agent_settings.button; cx.notify(); } @@ -597,9 +591,7 @@ impl Render for QuickActionBar { .children(self.render_preview_button(self.workspace.clone(), cx)) .children(search_button) .when( - AgentSettings::get_global(cx).enabled - && AgentSettings::get_global(cx).button - && !DisableAiSettings::get_global(cx).disable_ai, + 
AgentSettings::get_global(cx).enabled(cx) && AgentSettings::get_global(cx).button, |bar| bar.child(assistant_button), ) .children(code_actions_dropdown) From 0f4bdca9e9742bc2acea8b72497e55f3deb4805a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20=C4=8Ce=C5=A1pivo?= Date: Fri, 19 Sep 2025 17:17:35 +0200 Subject: [PATCH 17/58] Update icon theme fallback to use default theme (#38485) https://github.com/zed-industries/zed/pull/38367 introduced panic: ``` thread 'main' panicked at crates/theme/src/settings.rs:812:18: called `Option::unwrap()` on a `None` value ``` In this PR I restored fallback logic from the original code - before settings refactor. Release Notes: - N/A --- crates/theme/src/settings.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index e8ae1eed3cd7ca49ec946645160b98732be83884..b2c19ae3ed0d6e3dbf622c9e125bad7eeccf0a6e 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -808,7 +808,7 @@ impl settings::Settings for ThemeSettings { theme_overrides: HashMap::default(), active_icon_theme: themes .get_icon_theme(icon_theme_selection.icon_theme(*system_appearance)) - .ok() + .or_else(|_| themes.default_icon_theme()) .unwrap(), icon_theme_selection: Some(icon_theme_selection), ui_density: content.ui_density.unwrap_or_default().into(), From 4743fe84151f8a014591f65a60da06529245abbc Mon Sep 17 00:00:00 2001 From: Dino Date: Fri, 19 Sep 2025 16:50:33 +0100 Subject: [PATCH 18/58] vim: Fix regression in surround behavior (#38344) Fix an issue introduced in https://github.com/zed-industries/zed/pull/37321 where vim's surround wouldn't work as expected when replacing quotes with non-quotes, with whitespace always being added, regardless of whether the opening or closing bracket was used. This is not the intended, or previous, behavior, where only the opening bracket would trigger whitespace to be added. Closes #38169 Release Notes: - Fixed regression in vim's surround plugin that ignored whether the opening or closing bracket was being used when replacing quotes, so space would always be added --- crates/vim/src/surrounds.rs | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/crates/vim/src/surrounds.rs b/crates/vim/src/surrounds.rs index 8b3359c8f08046cf995db077a9a5ff0d36a97b95..5e25b08dd8656887b2013df52a5e7d62fce5dbe0 100644 --- a/crates/vim/src/surrounds.rs +++ b/crates/vim/src/surrounds.rs @@ -241,21 +241,15 @@ impl Vim { }, }; - // Determines whether space should be added/removed after + // Determines whether space should be added after // and before the surround pairs. - // For example, using `cs{[` will add a space before and - // after the pair, while using `cs{]` will not, notice the - // use of the closing bracket instead of the opening bracket - // on the target object. - // In the case of quotes, the opening and closing is the - // same, so no space will ever be added or removed. 
- let surround = match target { - Object::Quotes - | Object::BackQuotes - | Object::AnyQuotes - | Object::MiniQuotes - | Object::DoubleQuotes => true, - _ => pair.end != surround_alias((*text).as_ref()), + // Space is only added in the following cases: + // - new surround is not quote and is opening bracket (({[<) + // - new surround is quote and original was also quote + let surround = if pair.start != pair.end { + pair.end != surround_alias((*text).as_ref()) + } else { + will_replace_pair.start == will_replace_pair.end }; let (display_map, selections) = editor.selections.all_adjusted_display(cx); @@ -1241,6 +1235,15 @@ mod test { "}, Mode::Normal, ); + + // test quote to bracket spacing. + cx.set_state(indoc! {"'ˇfoobar'"}, Mode::Normal); + cx.simulate_keystrokes("c s ' {"); + cx.assert_state(indoc! {"ˇ{ foobar }"}, Mode::Normal); + + cx.set_state(indoc! {"'ˇfoobar'"}, Mode::Normal); + cx.simulate_keystrokes("c s ' }"); + cx.assert_state(indoc! {"ˇ{foobar}"}, Mode::Normal); } #[gpui::test] From df6f0bc2a7b768b16c7c02c5e6effeed4145ac44 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 19 Sep 2025 18:11:19 +0200 Subject: [PATCH 19/58] Fix markdown list in `bump-zed-minor-versions` (#38515) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This fixes a small markdown issue in the `bump-zed-minor-versions` script that bugged me for too long 😅 Release Notes: - N/A --- script/bump-zed-minor-versions | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/script/bump-zed-minor-versions b/script/bump-zed-minor-versions index 10535ce79b12f1820986fcbaa4062def0c9ec856..536dbb6244c7e9f99c4085ca95f667e43dd67ac3 100755 --- a/script/bump-zed-minor-versions +++ b/script/bump-zed-minor-versions @@ -104,7 +104,7 @@ Prepared new Zed versions locally. You will need to push the branches and open a ${prev_minor_branch_name} \\ ${bump_main_branch_name} - echo -e "Release Notes:\n\n-N/A" | gh pr create \\ + echo -e "Release Notes:\n\n- N/A" | gh pr create \\ --title "Bump Zed to v${major}.${next_minor}" \\ --body-file "-" \\ --base main \\ From b9188e0fd38a9738b4891b4e414cd41c77d6fa35 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Fri, 19 Sep 2025 18:38:22 +0200 Subject: [PATCH 20/58] collab: Fix screen share aspect ratio on non-Mac platforms (#38517) It was just a bunch of finnickery around UI layout. It affected Linux too. Release Notes: * Fixed aspect ratio of peer screen share when using Linux/Windows builds. 
--- Cargo.lock | 1 + crates/livekit_client/Cargo.toml | 1 + crates/livekit_client/src/remote_video_track_view.rs | 4 +++- 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/Cargo.lock b/Cargo.lock index ed1e9bef3f1797201dd791a7b4616509bbbc5036..8fd76300f7507a284375e12e1275724972bebe7f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10163,6 +10163,7 @@ dependencies = [ "simplelog", "smallvec", "tokio-tungstenite 0.26.2", + "ui", "util", "workspace-hack", ] diff --git a/crates/livekit_client/Cargo.toml b/crates/livekit_client/Cargo.toml index 80e4960c0df31f6a3d8115bd4bd66c0de09b76f0..2400092c1c154b8d6a4ee24f43c0556a26dc532e 100644 --- a/crates/livekit_client/Cargo.toml +++ b/crates/livekit_client/Cargo.toml @@ -41,6 +41,7 @@ serde_urlencoded.workspace = true settings.workspace = true smallvec.workspace = true tokio-tungstenite.workspace = true +ui.workspace = true util.workspace = true workspace-hack.workspace = true diff --git a/crates/livekit_client/src/remote_video_track_view.rs b/crates/livekit_client/src/remote_video_track_view.rs index 9073b8729a1d72ef59fe6ed77fd727cdf6acae00..189806f2138e401e62ad46336e95d8468e3b3732 100644 --- a/crates/livekit_client/src/remote_video_track_view.rs +++ b/crates/livekit_client/src/remote_video_track_view.rs @@ -97,8 +97,10 @@ impl Render for RemoteVideoTrackView { self.previous_rendered_frame = Some(current_rendered_frame) } self.current_rendered_frame = Some(latest_frame.clone()); - return gpui::img(latest_frame.clone()) + use gpui::ParentElement; + return ui::h_flex() .size_full() + .child(gpui::img(latest_frame.clone()).size_full()) .into_any_element(); } From 30a29ab34ea7ca2c03f2d6e4f35596d01c7cf3e7 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 19 Sep 2025 10:38:39 -0600 Subject: [PATCH 21/58] Fix server settings (#38477) In the settings refactor I'd assumed server settings were like project settings. This is not the case, they are in fact the normal user settings; but just read from the server. Release Notes: - N/A --- Cargo.lock | 1 + crates/editor/Cargo.toml | 1 + crates/editor/src/editor.rs | 8 ++-- crates/project/src/project_settings.rs | 5 +-- .../remote_server/src/remote_editing_tests.rs | 7 +-- crates/settings/src/settings_content.rs | 7 --- crates/settings/src/settings_store.rs | 44 ++----------------- 7 files changed, 13 insertions(+), 60 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8fd76300f7507a284375e12e1275724972bebe7f..f38ea8f740e87643b063ecc358899e6ba0b0fd10 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5295,6 +5295,7 @@ dependencies = [ "url", "util", "uuid", + "vim_mode_setting", "workspace", "workspace-hack", "zed_actions", diff --git a/crates/editor/Cargo.toml b/crates/editor/Cargo.toml index be06cc04dfc7ee3f080e8d995783abb819e95842..52b3fa2affeca1ceb87485fb1242fe40b34f8f57 100644 --- a/crates/editor/Cargo.toml +++ b/crates/editor/Cargo.toml @@ -89,6 +89,7 @@ ui.workspace = true url.workspace = true util.workspace = true uuid.workspace = true +vim_mode_setting.workspace = true workspace.workspace = true zed_actions.workspace = true workspace-hack.workspace = true diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index d8501f2104a00183be68dc461f9128a600227aa6..4084f61bb4a44d591aa544a622fa8888f56a5c57 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -21678,12 +21678,10 @@ impl Editor { } } -// todo(settings_refactor) this should not be! 
fn vim_enabled(cx: &App) -> bool { - cx.global::() - .raw_user_settings() - .and_then(|settings| settings.content.vim_mode) - == Some(true) + vim_mode_setting::VimModeSetting::try_get(cx) + .map(|vim_mode| vim_mode.0) + .unwrap_or(false) } fn process_completion_for_edit( diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index c95c20a3352bb067e492874f6f650d38f04671b2..369d445b2051f463e862483f4afd1b8c444bb9ea 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -770,12 +770,9 @@ impl SettingsObserver { envelope: TypedEnvelope, cx: AsyncApp, ) -> anyhow::Result<()> { - let new_settings = serde_json::from_str(&envelope.payload.contents).with_context(|| { - format!("deserializing {} user settings", envelope.payload.contents) - })?; cx.update_global(|settings_store: &mut SettingsStore, cx| { settings_store - .set_raw_user_settings(new_settings, cx) + .set_user_settings(&envelope.payload.contents, cx) .context("setting new user settings")?; anyhow::Ok(()) })??; diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index cb486732c0a0a63e7f6d5d5aed7fe0499ef98b80..16b6e49063ad091967d88943024167bb246f8e2c 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -1797,8 +1797,8 @@ async fn test_remote_external_agent_server( pretty_assertions::assert_eq!(names, ["gemini", "claude"]); server_cx.update_global::(|settings_store, cx| { settings_store - .set_raw_server_settings( - Some(json!({ + .set_server_settings( + &json!({ "agent_servers": { "foo": { "command": "foo-cli", @@ -1808,7 +1808,8 @@ async fn test_remote_external_agent_server( } } } - })), + }) + .to_string(), cx, ) .unwrap(); diff --git a/crates/settings/src/settings_content.rs b/crates/settings/src/settings_content.rs index 43402cae0e6c723b4cc2e94f28c1ba7d0c61c928..38bff4d6a1428f017bcd65be3d27e945aebccabd 100644 --- a/crates/settings/src/settings_content.rs +++ b/crates/settings/src/settings_content.rs @@ -166,13 +166,6 @@ impl SettingsContent { } } -#[skip_serializing_none] -#[derive(Debug, Default, Serialize, Deserialize, JsonSchema, MergeFrom)] -pub struct ServerSettingsContent { - #[serde(flatten)] - pub project: ProjectSettingsContent, -} - #[skip_serializing_none] #[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize, JsonSchema, MergeFrom)] pub struct UserSettingsContent { diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index dc703e50f1de43aee8059e144dc4cb0815b3472d..a575182a4144d99bf3c3c7f29f649735ea8b8891 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -36,8 +36,7 @@ use crate::{ merge_from::MergeFrom, parse_json_with_comments, replace_value_in_json_text, settings_content::{ - ExtensionsSettingsContent, ProjectSettingsContent, ServerSettingsContent, SettingsContent, - UserSettingsContent, + ExtensionsSettingsContent, ProjectSettingsContent, SettingsContent, UserSettingsContent, }, update_value_in_json_text, }; @@ -327,33 +326,6 @@ impl SettingsStore { self.user_settings.as_ref() } - /// Replaces current settings with the values from the given JSON. 
- pub fn set_raw_user_settings( - &mut self, - new_settings: UserSettingsContent, - cx: &mut App, - ) -> Result<()> { - self.user_settings = Some(new_settings); - self.recompute_values(None, cx)?; - Ok(()) - } - - /// Replaces current settings with the values from the given JSON. - pub fn set_raw_server_settings( - &mut self, - new_settings: Option, - cx: &mut App, - ) -> Result<()> { - // Rewrite the server settings into a content type - self.server_settings = new_settings - .map(|settings| settings.to_string()) - .and_then(|str| parse_json_with_comments::(&str).ok()) - .map(Box::new); - - self.recompute_values(None, cx)?; - Ok(()) - } - /// Get the configured settings profile names. pub fn configured_settings_profiles(&self) -> impl Iterator { self.user_settings @@ -361,11 +333,6 @@ impl SettingsStore { .flat_map(|settings| settings.profiles.keys().map(|k| k.as_str())) } - /// Access the raw JSON value of the default settings. - pub fn raw_default_settings(&self) -> &SettingsContent { - &self.default_settings - } - #[cfg(any(test, feature = "test-support"))] pub fn test(cx: &mut App) -> Self { Self::new(cx, &crate::test_settings()) @@ -621,19 +588,14 @@ impl SettingsStore { server_settings_content: &str, cx: &mut App, ) -> Result<()> { - let settings: Option = if server_settings_content.is_empty() { + let settings: Option = if server_settings_content.is_empty() { None } else { parse_json_with_comments(server_settings_content)? }; // Rewrite the server settings into a content type - self.server_settings = settings.map(|settings| { - Box::new(SettingsContent { - project: settings.project, - ..Default::default() - }) - }); + self.server_settings = settings.map(|settings| Box::new(settings)); self.recompute_values(None, cx)?; Ok(()) From be7575536ea72342ce4792e62c32c81e62377df4 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 19 Sep 2025 10:51:21 -0600 Subject: [PATCH 22/58] Fix theme overrides (#38512) Release Notes: - N/A --- crates/theme/src/settings.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index b2c19ae3ed0d6e3dbf622c9e125bad7eeccf0a6e..04b8bd3dd7c597b8730c03f1ed8ac9fdb83929d6 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -804,8 +804,8 @@ impl settings::Settings for ThemeSettings { .or(themes.get(&zed_default_dark().name)) .unwrap(), theme_selection: Some(theme_selection), - experimental_theme_overrides: None, - theme_overrides: HashMap::default(), + experimental_theme_overrides: content.experimental_theme_overrides.clone(), + theme_overrides: content.theme_overrides.clone(), active_icon_theme: themes .get_icon_theme(icon_theme_selection.icon_theme(*system_appearance)) .or_else(|_| themes.default_icon_theme()) From 1afbfcb832496061d77c50280953e669dbdbcfc0 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 19 Sep 2025 11:14:31 -0600 Subject: [PATCH 23/58] git: Docs-based workaround for GitHub/git auth confusion (#38479) Closes #ISSUE Release Notes: - git: Added a link to Github's authentication help if you end up in Zed trying to type a password in for https auth --- crates/git_ui/src/askpass_modal.rs | 43 ++++++++++++++++++++++++++---- 1 file changed, 38 insertions(+), 5 deletions(-) diff --git a/crates/git_ui/src/askpass_modal.rs b/crates/git_ui/src/askpass_modal.rs index 149833ad3535bb69ba35e199ece5166e194745a9..1705ad6732ef57095a7e550a6c27978596a6b11e 100644 --- a/crates/git_ui/src/askpass_modal.rs +++ b/crates/git_ui/src/askpass_modal.rs @@ -2,9 
+2,10 @@ use editor::Editor; use futures::channel::oneshot; use gpui::{AppContext, DismissEvent, Entity, EventEmitter, Focusable, Styled}; use ui::{ - ActiveTheme, App, Context, DynamicSpacing, Headline, HeadlineSize, Icon, IconName, IconSize, - InteractiveElement, IntoElement, ParentElement, Render, SharedString, StyledExt, - StyledTypography, Window, div, h_flex, v_flex, + ActiveTheme, AnyElement, App, Button, Clickable, Color, Context, DynamicSpacing, Headline, + HeadlineSize, Icon, IconName, IconSize, InteractiveElement, IntoElement, Label, LabelCommon, + LabelSize, ParentElement, Render, SharedString, StyledExt, StyledTypography, Window, div, + h_flex, v_flex, }; use workspace::ModalView; @@ -33,7 +34,7 @@ impl AskPassModal { ) -> Self { let editor = cx.new(|cx| { let mut editor = Editor::single_line(window, cx); - if prompt.contains("yes/no") { + if prompt.contains("yes/no") || prompt.contains("Username") { editor.set_masked(false, cx); } else { editor.set_masked(true, cx); @@ -58,6 +59,36 @@ impl AskPassModal { } cx.emit(DismissEvent); } + + fn render_hint(&mut self, cx: &mut Context) -> Option { + let color = cx.theme().status().info_background; + if (self.prompt.contains("Password") || self.prompt.contains("Username")) + && self.prompt.contains("github.com") + { + return Some( + div() + .p_2() + .bg(color) + .border_t_1() + .border_color(cx.theme().status().info_border) + .child( + h_flex().gap_2() + .child( + Icon::new(IconName::Github).size(IconSize::Small) + ) + .child( + Label::new("You may need to configure git for Github.") + .size(LabelSize::Small), + ) + .child(Button::new("learn-more", "Learn more").color(Color::Accent).label_size(LabelSize::Small).on_click(|_, _, cx| { + cx.open_url("https://docs.github.com/en/get-started/git-basics/set-up-git#authenticating-with-github-from-git") + })), + ) + .into_any_element(), + ); + } + None + } } impl Render for AskPassModal { @@ -68,9 +99,9 @@ impl Render for AskPassModal { .on_action(cx.listener(Self::confirm)) .elevation_2(cx) .size_full() - .font_buffer(cx) .child( h_flex() + .font_buffer(cx) .px(DynamicSpacing::Base12.rems(cx)) .pt(DynamicSpacing::Base08.rems(cx)) .pb(DynamicSpacing::Base04.rems(cx)) @@ -86,6 +117,7 @@ impl Render for AskPassModal { ) .child( div() + .font_buffer(cx) .text_buffer(cx) .py_2() .px_3() @@ -97,5 +129,6 @@ impl Render for AskPassModal { .child(self.prompt.clone()) .child(self.editor.clone()), ) + .children(self.render_hint(cx)) } } From 4e316c683bc31794833ce64a706b18319371f72f Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Fri, 19 Sep 2025 14:07:02 -0500 Subject: [PATCH 24/58] macos: Fix panic when `NSWindow::screen` returns `nil` (#38524) Closes #ISSUE Release Notes: - mac: Fixed an issue where Zed would panic if the workspace window was previously off screen --- crates/gpui/src/platform/mac/window.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index 1230a704062ba835bceb5db5d2ecf05b688e34df..e8b42c57b8239f53118487f51bd194178c3c21c0 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -513,10 +513,11 @@ impl MacWindowState { fn bounds(&self) -> Bounds { let mut window_frame = unsafe { NSWindow::frame(self.native_window) }; - let screen_frame = unsafe { - let screen = NSWindow::screen(self.native_window); - NSScreen::frame(screen) - }; + let screen = unsafe { NSWindow::screen(self.native_window) }; + if screen == nil { + return 
Bounds::new(point(px(0.), px(0.)), crate::DEFAULT_WINDOW_SIZE); + } + let screen_frame = unsafe { NSScreen::frame(screen) }; // Flip the y coordinate to be top-left origin window_frame.origin.y = From de75e2d9f6142b6e667dff357de0444ff56e326b Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Fri, 19 Sep 2025 16:48:52 -0400 Subject: [PATCH 25/58] extension_host: Expand supported extension API range to include v0.7.0 (#38529) This PR updates the version range for v0.6.0 of the extension API to include v0.7.0. Since we bumped the `zed_extension_api` crate's version to v0.7.0, we need to expand this range in order for Zed clients to be able to install extensions built against v0.7.0 of `zed_extension_api`. Currently no extensions that target `zed_extension_api@0.7.0` can be installed. Release Notes: - N/A --- crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs index e879ed0cb01f70f24a9b2b52438e1ff7d405f2d6..be4f3ca71a3f392965488bd2d30eab556d8fb300 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs @@ -35,7 +35,7 @@ use util::{archive::extract_zip, fs::make_file_executable, maybe}; use wasmtime::component::{Linker, Resource}; pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 6, 0); -pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 6, 0); +pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 7, 0); wasmtime::component::bindgen!({ async: true, From 89520ea2216e7a148e1d6c280a0a0cd8daf52648 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Sat, 20 Sep 2025 00:15:01 +0200 Subject: [PATCH 26/58] chore: Bump alacritty_terminal to 0.25.1-rc1 (#38505) Release Notes: - N/A --------- Co-authored-by: Dave Waggoner --- Cargo.lock | 41 +-- Cargo.toml | 2 +- crates/terminal/src/terminal.rs | 2 + crates/terminal/src/terminal_hyperlinks.rs | 294 ++++++--------------- tooling/workspace-hack/Cargo.toml | 40 +-- 5 files changed, 123 insertions(+), 256 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f38ea8f740e87643b063ecc358899e6ba0b0fd10..548ff152066745344b65c75b0be80db71c6f7f5e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -498,8 +498,9 @@ dependencies = [ [[package]] name = "alacritty_terminal" -version = "0.25.1-dev" -source = "git+https://github.com/zed-industries/alacritty.git?branch=add-hush-login-flag#828457c9ff1f7ea0a0469337cc8a37ee3a1b0590" +version = "0.25.1-rc1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cb5f4f1ef69bdb8b2095ddd14b09dd74ee0303aae8bd5372667a54cff689a1b" dependencies = [ "base64 0.22.1", "bitflags 2.9.0", @@ -511,10 +512,11 @@ dependencies = [ "piper", "polling", "regex-automata", + "rustix 1.0.7", "rustix-openpty", "serde", "signal-hook", - "unicode-width 0.1.14", + "unicode-width 0.2.0", "vte", "windows-sys 0.59.0", ] @@ -8221,12 +8223,6 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" -[[package]] -name = "hermit-abi" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" - [[package]] name = "hermit-abi" version = "0.5.0" @@ -12822,17 +12818,16 @@ dependencies = [ [[package]] name = "polling" -version = "3.7.4" +version 
= "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" dependencies = [ "cfg-if", "concurrent-queue", - "hermit-abi 0.4.0", + "hermit-abi 0.5.0", "pin-project-lite", - "rustix 0.38.44", - "tracing", - "windows-sys 0.59.0", + "rustix 1.0.7", + "windows-sys 0.61.0", ] [[package]] @@ -14679,7 +14674,6 @@ checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ "bitflags 2.9.0", "errno 0.3.11", - "itoa", "libc", "linux-raw-sys 0.4.15", "windows-sys 0.59.0", @@ -14710,13 +14704,13 @@ dependencies = [ [[package]] name = "rustix-openpty" -version = "0.1.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a25c3aad9fc1424eb82c88087789a7d938e1829724f3e4043163baf0d13cfc12" +checksum = "1de16c7c59892b870a6336f185dc10943517f1327447096bbb7bb32cd85e2393" dependencies = [ "errno 0.3.11", "libc", - "rustix 0.38.44", + "rustix 1.0.7", ] [[package]] @@ -20039,6 +20033,15 @@ dependencies = [ "windows-targets 0.53.2", ] +[[package]] +name = "windows-sys" +version = "0.61.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e201184e40b2ede64bc2ea34968b28e33622acdbbf37104f0e4a33f7abe657aa" +dependencies = [ + "windows-link 0.2.0", +] + [[package]] name = "windows-targets" version = "0.42.2" diff --git a/Cargo.toml b/Cargo.toml index 6939fb4dd60cd443e07d16988f187d7074535de7..aa95b1f4a78fe2599bcccd3036c2ebb65761ada3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -439,7 +439,7 @@ zlog_settings = { path = "crates/zlog_settings" } agent-client-protocol = { version = "0.4.0", features = ["unstable"] } aho-corasick = "1.1" -alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", branch = "add-hush-login-flag" } +alacritty_terminal = "0.25.1-rc1" any_vec = "0.14" anyhow = "1.0.86" arrayvec = { version = "0.7.4", features = ["serde"] } diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 6bdeb9638a329c2384e538e27e13c21f02df7284..d1a4c8af9687c87a8c63b598262d0bdf797fada4 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -427,6 +427,8 @@ impl TerminalBuilder { working_directory: working_directory.clone(), drain_on_exit: true, env: env.clone().into_iter().collect(), + #[cfg(windows)] + escape_args: false, } }; diff --git a/crates/terminal/src/terminal_hyperlinks.rs b/crates/terminal/src/terminal_hyperlinks.rs index 2d3d356b4663a8aa271dda8d36d5fab720228527..25db02c5e84f692622a1c97ed891c886b02b26a9 100644 --- a/crates/terminal/src/terminal_hyperlinks.rs +++ b/crates/terminal/src/terminal_hyperlinks.rs @@ -79,8 +79,7 @@ pub(super) fn find_from_grid_point( Some((url, true, url_match)) } else if let Some(url_match) = regex_match_at(term, point, &mut regex_searches.url_regex) { let url = term.bounds_to_string(*url_match.start(), *url_match.end()); - let (sanitized_url, sanitized_match) = sanitize_url_punctuation(url, url_match, term); - Some((sanitized_url, true, sanitized_match)) + Some((url, true, url_match)) } else if let Some(python_match) = regex_match_at(term, point, &mut regex_searches.python_file_line_regex) { @@ -165,63 +164,6 @@ pub(super) fn find_from_grid_point( }) } -fn sanitize_url_punctuation( - url: String, - url_match: Match, - term: &Term, -) -> (String, Match) { - let mut sanitized_url = url; - let mut chars_trimmed = 0; - - // First, 
handle parentheses balancing using single traversal - let (open_parens, close_parens) = - sanitized_url - .chars() - .fold((0, 0), |(opens, closes), c| match c { - '(' => (opens + 1, closes), - ')' => (opens, closes + 1), - _ => (opens, closes), - }); - - // Trim unbalanced closing parentheses - if close_parens > open_parens { - let mut remaining_close = close_parens; - while sanitized_url.ends_with(')') && remaining_close > open_parens { - sanitized_url.pop(); - chars_trimmed += 1; - remaining_close -= 1; - } - } - - // Handle trailing periods - if sanitized_url.ends_with('.') { - let trailing_periods = sanitized_url - .chars() - .rev() - .take_while(|&c| c == '.') - .count(); - - if trailing_periods > 1 { - sanitized_url.truncate(sanitized_url.len() - trailing_periods); - chars_trimmed += trailing_periods; - } else if trailing_periods == 1 - && let Some(second_last_char) = sanitized_url.chars().rev().nth(1) - && (second_last_char.is_alphanumeric() || second_last_char == '/') - { - sanitized_url.pop(); - chars_trimmed += 1; - } - } - - if chars_trimmed > 0 { - let new_end = url_match.end().sub(term, Boundary::Grid, chars_trimmed); - let sanitized_match = Match::new(*url_match.start(), new_end); - (sanitized_url, sanitized_match) - } else { - (sanitized_url, url_match) - } -} - fn is_path_surrounded_by_common_symbols(path: &str) -> bool { // Avoid detecting `[]` or `()` strings as paths, surrounded by common symbols path.len() > 2 @@ -261,8 +203,8 @@ mod tests { use super::*; use alacritty_terminal::{ event::VoidListener, - index::{Boundary, Column, Line, Point as AlacPoint}, - term::{Config, cell::Flags, search::Match, test::TermSize}, + index::{Boundary, Point as AlacPoint}, + term::{Config, cell::Flags, test::TermSize}, vte::ansi::Handler, }; use std::{cell::RefCell, ops::RangeInclusive, path::PathBuf}; @@ -291,91 +233,6 @@ mod tests { ); } - #[test] - fn test_url_parentheses_sanitization() { - // Test our sanitize_url_parentheses function directly - let test_cases = vec![ - // Cases that should be sanitized (unbalanced parentheses) - ("https://www.google.com/)", "https://www.google.com/"), - ("https://example.com/path)", "https://example.com/path"), - ("https://test.com/))", "https://test.com/"), - // Cases that should NOT be sanitized (balanced parentheses) - ( - "https://en.wikipedia.org/wiki/Example_(disambiguation)", - "https://en.wikipedia.org/wiki/Example_(disambiguation)", - ), - ("https://test.com/(hello)", "https://test.com/(hello)"), - ( - "https://example.com/path(1)(2)", - "https://example.com/path(1)(2)", - ), - // Edge cases - ("https://test.com/", "https://test.com/"), - ("https://example.com", "https://example.com"), - ]; - - for (input, expected) in test_cases { - // Create a minimal terminal for testing - let term = Term::new(Config::default(), &TermSize::new(80, 24), VoidListener); - - // Create a dummy match that spans the entire input - let start_point = AlacPoint::new(Line(0), Column(0)); - let end_point = AlacPoint::new(Line(0), Column(input.len())); - let dummy_match = Match::new(start_point, end_point); - - let (result, _) = sanitize_url_punctuation(input.to_string(), dummy_match, &term); - assert_eq!(result, expected, "Failed for input: {}", input); - } - } - - #[test] - fn test_url_periods_sanitization() { - // Test URLs with trailing periods (sentence punctuation) - let test_cases = vec![ - // Cases that should be sanitized (trailing periods likely punctuation) - ("https://example.com.", "https://example.com"), - ( - "https://github.com/zed-industries/zed.", 
- "https://github.com/zed-industries/zed", - ), - ( - "https://example.com/path/file.html.", - "https://example.com/path/file.html", - ), - ( - "https://example.com/file.pdf.", - "https://example.com/file.pdf", - ), - ("https://example.com:8080.", "https://example.com:8080"), - ("https://example.com..", "https://example.com"), - ( - "https://en.wikipedia.org/wiki/C.E.O.", - "https://en.wikipedia.org/wiki/C.E.O", - ), - // Cases that should NOT be sanitized (periods are part of URL structure) - ( - "https://example.com/v1.0/api", - "https://example.com/v1.0/api", - ), - ("https://192.168.1.1", "https://192.168.1.1"), - ("https://sub.domain.com", "https://sub.domain.com"), - ]; - - for (input, expected) in test_cases { - // Create a minimal terminal for testing - let term = Term::new(Config::default(), &TermSize::new(80, 24), VoidListener); - - // Create a dummy match that spans the entire input - let start_point = AlacPoint::new(Line(0), Column(0)); - let end_point = AlacPoint::new(Line(0), Column(input.len())); - let dummy_match = Match::new(start_point, end_point); - - // This test should initially fail since we haven't implemented period sanitization yet - let (result, _) = sanitize_url_punctuation(input.to_string(), dummy_match, &term); - assert_eq!(result, expected, "Failed for input: {}", input); - } - } - #[test] fn test_word_regex() { re_test( @@ -468,17 +325,6 @@ mod tests { ) } }; - ($($columns:literal),+; $($lines:expr),+; $hyperlink_kind:ident) => { { - use crate::terminal_hyperlinks::tests::line_cells_count; - - let test_lines = vec![$($lines),+]; - let total_cells = test_lines.iter().copied().map(line_cells_count).sum(); - - test_hyperlink!( - [ $($columns),+ ]; total_cells; test_lines.iter().copied(); $hyperlink_kind - ) - } }; - ([ $($columns:expr),+ ]; $total_cells:expr; $lines:expr; $hyperlink_kind:ident) => { { use crate::terminal_hyperlinks::tests::{ test_hyperlink, HyperlinkKind }; @@ -504,9 +350,6 @@ mod tests { /// macro_rules! 
test_path { ($($lines:literal),+) => { test_hyperlink!($($lines),+; Path) }; - ($($columns:literal),+; $($lines:literal),+) => { - test_hyperlink!($($columns),+; $($lines),+; Path) - }; } #[test] @@ -572,39 +415,52 @@ mod tests { test_path!("‹«/test/co👉ol.rs»(«1»,«618»)›::"); } + #[test] + fn quotes_and_brackets() { + test_path!("\"‹«/test/co👉ol.rs»:«4»›\""); + test_path!("'‹«/test/co👉ol.rs»:«4»›'"); + test_path!("`‹«/test/co👉ol.rs»:«4»›`"); + + test_path!("[‹«/test/co👉ol.rs»:«4»›]"); + test_path!("(‹«/test/co👉ol.rs»:«4»›)"); + test_path!("{‹«/test/co👉ol.rs»:«4»›}"); + test_path!("<‹«/test/co👉ol.rs»:«4»›>"); + + test_path!("[\"‹«/test/co👉ol.rs»:«4»›\"]"); + test_path!("'(‹«/test/co👉ol.rs»:«4»›)'"); + } + #[test] fn word_wide_chars() { // Rust paths - test_path!(4, 6, 12; "‹«/👉例/cool.rs»›"); - test_path!(4, 6, 12; "‹«/例👈/cool.rs»›"); - test_path!(4, 8, 16; "‹«/例/cool.rs»:«👉4»›"); - test_path!(4, 8, 16; "‹«/例/cool.rs»:«4»:«👉2»›"); + test_path!("‹«/👉例/cool.rs»›"); + test_path!("‹«/例👈/cool.rs»›"); + test_path!("‹«/例/cool.rs»:«👉4»›"); + test_path!("‹«/例/cool.rs»:«4»:«👉2»›"); // Cargo output - test_path!(4, 27, 30; " Compiling Cool (‹«/👉例/Cool»›)"); - test_path!(4, 27, 30; " Compiling Cool (‹«/例👈/Cool»›)"); + test_path!(" Compiling Cool (‹«/👉例/Cool»›)"); + test_path!(" Compiling Cool (‹«/例👈/Cool»›)"); // Python - test_path!(4, 11; "‹«👉例wesome.py»›"); - test_path!(4, 11; "‹«例👈wesome.py»›"); - test_path!(6, 17, 40; " ‹File \"«/👉例wesome.py»\", line «42»›: Wat?"); - test_path!(6, 17, 40; " ‹File \"«/例👈wesome.py»\", line «42»›: Wat?"); + test_path!("‹«👉例wesome.py»›"); + test_path!("‹«例👈wesome.py»›"); + test_path!(" ‹File \"«/👉例wesome.py»\", line «42»›: Wat?"); + test_path!(" ‹File \"«/例👈wesome.py»\", line «42»›: Wat?"); } #[test] fn non_word_wide_chars() { // Mojo diagnostic message - test_path!(4, 18, 38; " ‹File \"«/awe👉some.🔥»\", line «42»›: Wat?"); - test_path!(4, 18, 38; " ‹File \"«/awesome👉.🔥»\", line «42»›: Wat?"); - test_path!(4, 18, 38; " ‹File \"«/awesome.👉🔥»\", line «42»›: Wat?"); - test_path!(4, 18, 38; " ‹File \"«/awesome.🔥👈»\", line «42»›: Wat?"); + test_path!(" ‹File \"«/awe👉some.🔥»\", line «42»›: Wat?"); + test_path!(" ‹File \"«/awesome👉.🔥»\", line «42»›: Wat?"); + test_path!(" ‹File \"«/awesome.👉🔥»\", line «42»›: Wat?"); + test_path!(" ‹File \"«/awesome.🔥👈»\", line «42»›: Wat?"); } /// These likely rise to the level of being worth fixing. mod issues { #[test] - #[cfg_attr(not(target_os = "windows"), should_panic(expected = "Path = «例»"))] - #[cfg_attr(target_os = "windows", should_panic(expected = r#"Path = «C:\\例»"#))] // fn issue_alacritty_8586() { // Rust paths @@ -689,21 +545,13 @@ mod tests { /// Minor issues arguably not important enough to fix/workaround... mod nits { #[test] - #[cfg_attr( - not(target_os = "windows"), - should_panic(expected = "Path = «/test/cool.rs(4»") - )] - #[cfg_attr( - target_os = "windows", - should_panic(expected = r#"Path = «C:\\test\\cool.rs(4»"#) - )] fn alacritty_bugs_with_two_columns() { - test_path!(2; "‹«/👉test/cool.rs»(«4»)›"); - test_path!(2; "‹«/test/cool.rs»(«👉4»)›"); - test_path!(2; "‹«/test/cool.rs»(«4»,«👉2»)›"); + test_path!("‹«/👉test/cool.rs»(«4»)›"); + test_path!("‹«/test/cool.rs»(«👉4»)›"); + test_path!("‹«/test/cool.rs»(«4»,«👉2»)›"); // Python - test_path!(2; "‹«awe👉some.py»›"); + test_path!("‹«awe👉some.py»›"); } #[test] @@ -791,9 +639,6 @@ mod tests { /// macro_rules! 
test_file_iri { ($file_iri:literal) => { { test_hyperlink!(concat!("‹«👉", $file_iri, "»›"); FileIri) } }; - ($($columns:literal),+; $file_iri:literal) => { { - test_hyperlink!($($columns),+; concat!("‹«👉", $file_iri, "»›"); FileIri) - } }; } #[cfg(not(target_os = "windows"))] @@ -865,9 +710,6 @@ mod tests { /// macro_rules! test_iri { ($iri:literal) => { { test_hyperlink!(concat!("‹«👉", $iri, "»›"); Iri) } }; - ($($columns:literal),+; $iri:literal) => { { - test_hyperlink!($($columns),+; concat!("‹«👉", $iri, "»›"); Iri) - } }; } #[test] @@ -898,26 +740,26 @@ mod tests { #[test] fn wide_chars() { // In the order they appear in URL_REGEX, except 'file://' which is treated as a path - test_iri!(4, 20; "ipfs://例🏃🦀/cool.ipfs"); - test_iri!(4, 20; "ipns://例🏃🦀/cool.ipns"); - test_iri!(6, 20; "magnet://例🏃🦀/cool.git"); - test_iri!(4, 20; "mailto:someone@somewhere.here"); - test_iri!(4, 20; "gemini://somewhere.here"); - test_iri!(4, 20; "gopher://somewhere.here"); - test_iri!(4, 20; "http://例🏃🦀/cool/index.html"); - test_iri!(4, 20; "http://10.10.10.10:1111/cool.html"); - test_iri!(4, 20; "http://例🏃🦀/cool/index.html?amazing=1"); - test_iri!(4, 20; "http://例🏃🦀/cool/index.html#right%20here"); - test_iri!(4, 20; "http://例🏃🦀/cool/index.html?amazing=1#right%20here"); - test_iri!(4, 20; "https://例🏃🦀/cool/index.html"); - test_iri!(4, 20; "https://10.10.10.10:1111/cool.html"); - test_iri!(4, 20; "https://例🏃🦀/cool/index.html?amazing=1"); - test_iri!(4, 20; "https://例🏃🦀/cool/index.html#right%20here"); - test_iri!(4, 20; "https://例🏃🦀/cool/index.html?amazing=1#right%20here"); - test_iri!(4, 20; "news://例🏃🦀/cool.news"); - test_iri!(5, 20; "git://例/cool.git"); - test_iri!(5, 20; "ssh://user@somewhere.over.here:12345/例🏃🦀/cool.git"); - test_iri!(7, 20; "ftp://例🏃🦀/cool.ftp"); + test_iri!("ipfs://例🏃🦀/cool.ipfs"); + test_iri!("ipns://例🏃🦀/cool.ipns"); + test_iri!("magnet://例🏃🦀/cool.git"); + test_iri!("mailto:someone@somewhere.here"); + test_iri!("gemini://somewhere.here"); + test_iri!("gopher://somewhere.here"); + test_iri!("http://例🏃🦀/cool/index.html"); + test_iri!("http://10.10.10.10:1111/cool.html"); + test_iri!("http://例🏃🦀/cool/index.html?amazing=1"); + test_iri!("http://例🏃🦀/cool/index.html#right%20here"); + test_iri!("http://例🏃🦀/cool/index.html?amazing=1#right%20here"); + test_iri!("https://例🏃🦀/cool/index.html"); + test_iri!("https://10.10.10.10:1111/cool.html"); + test_iri!("https://例🏃🦀/cool/index.html?amazing=1"); + test_iri!("https://例🏃🦀/cool/index.html#right%20here"); + test_iri!("https://例🏃🦀/cool/index.html?amazing=1#right%20here"); + test_iri!("news://例🏃🦀/cool.news"); + test_iri!("git://例/cool.git"); + test_iri!("ssh://user@somewhere.over.here:12345/例🏃🦀/cool.git"); + test_iri!("ftp://例🏃🦀/cool.ftp"); } // There are likely more tests needed for IRI vs URI @@ -1006,6 +848,22 @@ mod tests { point } + fn end_point_from_prev_input_point( + term: &Term, + prev_input_point: AlacPoint, + ) -> AlacPoint { + if term + .grid() + .index(prev_input_point) + .flags + .contains(Flags::WIDE_CHAR) + { + prev_input_point.add(term, Boundary::Grid, 1) + } else { + prev_input_point + } + } + let mut hovered_grid_point: Option = None; let mut hyperlink_match = AlacPoint::default()..=AlacPoint::default(); let mut iri_or_path = String::default(); @@ -1040,7 +898,10 @@ mod tests { panic!("Should have been handled by char input") } CapturesState::Path(start_point) => { - iri_or_path = term.bounds_to_string(start_point, prev_input_point); + iri_or_path = term.bounds_to_string( + start_point, + end_point_from_prev_input_point(&term, 
prev_input_point), + ); CapturesState::RowScan } CapturesState::RowScan => CapturesState::Row(String::new()), @@ -1065,7 +926,8 @@ mod tests { panic!("Should have been handled by char input") } MatchState::Match(start_point) => { - hyperlink_match = start_point..=prev_input_point; + hyperlink_match = start_point + ..=end_point_from_prev_input_point(&term, prev_input_point); MatchState::Done } MatchState::Done => { diff --git a/tooling/workspace-hack/Cargo.toml b/tooling/workspace-hack/Cargo.toml index ec9629685d8366864b92a6160ece623450f72b0c..68fd84b32b64e15b0ea63ef851ec5aac457179c2 100644 --- a/tooling/workspace-hack/Cargo.toml +++ b/tooling/workspace-hack/Cargo.toml @@ -316,8 +316,8 @@ objc2-metal = { version = "0.3" } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "process"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "termios", "time"] } scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } @@ -347,8 +347,8 @@ object = { version = "0.36", default-features = false, features = ["archive", "r proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "process"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "termios", "time"] } scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } @@ -377,8 +377,8 @@ objc2-metal = { version = "0.3" } object = { version = "0.36", default-features = false, features = ["archive", "read_core", "unaligned", "write"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "process"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", 
default-features = false, features = ["event", "pipe", "process", "termios", "time"] } scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } @@ -408,8 +408,8 @@ object = { version = "0.36", default-features = false, features = ["archive", "r proc-macro2 = { version = "1", default-features = false, features = ["span-locations"] } prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "process"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "termios", "time"] } scopeguard = { version = "1" } security-framework = { version = "3", features = ["OSX_10_14"] } security-framework-sys = { version = "2", features = ["OSX_10_14"] } @@ -448,8 +448,8 @@ prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["pro quote = { version = "1" } rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "pty", "shm", "stdio", "system", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } scopeguard = { version = "1" } syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } @@ -488,8 +488,8 @@ proc-macro2 = { version = "1", default-features = false, features = ["span-locat prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "pty", "shm", "stdio", "system", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } 
@@ -528,8 +528,8 @@ prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["pro quote = { version = "1" } rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "pty", "shm", "stdio", "system", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } scopeguard = { version = "1" } syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } @@ -568,8 +568,8 @@ proc-macro2 = { version = "1", default-features = false, features = ["span-locat prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "pty", "shm", "stdio", "system", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } @@ -661,8 +661,8 @@ prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["pro quote = { version = "1" } rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "pty", "shm", "stdio", "system", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } scopeguard = { version = "1" } syn-f595c2ba2a3f28df = { package = "syn", version = "2", features = ["extra-traits", "fold", "full", "visit", "visit-mut"] } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } @@ -701,8 +701,8 @@ proc-macro2 = { version = "1", default-features = false, features = ["span-locat prost-5ef9efb8ec2df382 = { package = "prost", version = "0.12", features = ["prost-derive"] } 
rand-274715c4dabd11b0 = { package = "rand", version = "0.9" } ring = { version = "0.17", features = ["std"] } -rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "pty", "shm", "stdio", "system", "termios", "time"] } -rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["process", "termios", "time"] } +rustix-d585fab2519d2d1 = { package = "rustix", version = "0.38", features = ["event", "mm", "net", "param", "pipe", "process", "shm", "system"] } +rustix-dff4ba8e3ae991db = { package = "rustix", version = "1", default-features = false, features = ["event", "pipe", "process", "pty", "stdio", "termios", "time"] } scopeguard = { version = "1" } sync_wrapper = { version = "1", default-features = false, features = ["futures"] } tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "ring"] } From 8df616e28bb34e8dab899747acae118788af4c1b Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Fri, 19 Sep 2025 15:55:32 -0700 Subject: [PATCH 27/58] Suppress the 'Agent Thread Started' event when initializing the panel (#38535) Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 9 ++++++++- crates/gpui/src/app.rs | 14 ++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index ba71fd84ab5b9d666256afeb0a2c5677aac9adb1..ca6a5fb2f6c216e7886394da069c93e5029a5ed0 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -408,6 +408,7 @@ impl ActiveView { pub struct AgentPanel { workspace: WeakEntity, + loading: bool, user_store: Entity, project: Entity, fs: Arc, @@ -513,6 +514,7 @@ impl AgentPanel { cx, ) }); + panel.as_mut(cx).loading = true; if let Some(serialized_panel) = serialized_panel { panel.update(cx, |panel, cx| { panel.width = serialized_panel.width.map(|w| w.round()); @@ -527,6 +529,7 @@ impl AgentPanel { panel.new_agent_thread(AgentType::NativeAgent, window, cx); }); } + panel.as_mut(cx).loading = false; panel })?; @@ -726,6 +729,7 @@ impl AgentPanel { acp_history, acp_history_store, selected_agent: AgentType::default(), + loading: false, } } @@ -857,6 +861,7 @@ impl AgentPanel { agent: crate::ExternalAgent, } + let loading = self.loading; let history = self.acp_history_store.clone(); cx.spawn_in(window, async move |this, cx| { @@ -898,7 +903,9 @@ impl AgentPanel { } }; - telemetry::event!("Agent Thread Started", agent = ext_agent.name()); + if !loading { + telemetry::event!("Agent Thread Started", agent = ext_agent.name()); + } let server = ext_agent.server(fs, history); diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index e6c3e3b8deea9b82514b5ac932c4f204fa081e14..07ff04e32abc19dbe681ab6214d06469fe7917ff 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -2401,6 +2401,20 @@ impl<'a, T: 'static> std::borrow::BorrowMut for GpuiBorrow<'a, T> { } } +impl<'a, T: 'static> std::ops::Deref for GpuiBorrow<'a, T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + self.inner.as_ref().unwrap() + } +} + +impl<'a, T: 'static> std::ops::DerefMut for GpuiBorrow<'a, T> { + fn deref_mut(&mut self) -> &mut T { + self.inner.as_mut().unwrap() + } +} + impl<'a, T> Drop for GpuiBorrow<'a, T> { fn drop(&mut self) { let lease = self.inner.take().unwrap(); From be77682a3fd3ace184a9059cc7b212e3ea4891d3 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Sat, 20 Sep 2025 04:40:22 +0530 Subject: [PATCH 
28/58] editor: Fix adding extraneous closing tags within TSX (#38534) --- .../src/session/running/console.rs | 6 +- crates/editor/src/editor.rs | 60 ++++----- crates/editor/src/editor_tests.rs | 116 +++++++++++++++--- crates/editor/src/hover_links.rs | 2 +- crates/editor/src/items.rs | 7 +- crates/language/src/buffer.rs | 39 ++++-- crates/language/src/language.rs | 15 +++ crates/language/src/text_diff.rs | 5 +- crates/languages/src/javascript/config.toml | 3 + crates/languages/src/tsx/config.toml | 3 + crates/multi_buffer/src/multi_buffer.rs | 28 ++--- crates/project/src/lsp_command.rs | 11 +- crates/vim/src/vim.rs | 7 +- 13 files changed, 218 insertions(+), 84 deletions(-) diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index 92c5ace8f0128e47db08c6b772376679213ffbe1..cf7b59f2fe96bb031fc1ed1a5d7ae4005dd37eb9 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -12,7 +12,7 @@ use gpui::{ Action as _, AppContext, Context, Corner, Entity, FocusHandle, Focusable, HighlightStyle, Hsla, Render, Subscription, Task, TextStyle, WeakEntity, actions, }; -use language::{Anchor, Buffer, CodeLabel, TextBufferSnapshot, ToOffset}; +use language::{Anchor, Buffer, CharScopeContext, CodeLabel, TextBufferSnapshot, ToOffset}; use menu::{Confirm, SelectNext, SelectPrevious}; use project::{ Completion, CompletionDisplayOptions, CompletionResponse, @@ -575,7 +575,9 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider { return false; } - let classifier = snapshot.char_classifier_at(position).for_completion(true); + let classifier = snapshot + .char_classifier_at(position) + .scope_context(Some(CharScopeContext::Completion)); if trigger_in_words && classifier.is_word(char) { return true; } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 4084f61bb4a44d591aa544a622fa8888f56a5c57..8b0fc5512731eff70b1e9ac41b6bfe16a65babfa 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -121,10 +121,10 @@ use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy}; use itertools::{Either, Itertools}; use language::{ AutoindentMode, BlockCommentConfig, BracketMatch, BracketPair, Buffer, BufferRow, - BufferSnapshot, Capability, CharClassifier, CharKind, CodeLabel, CursorShape, DiagnosticEntry, - DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind, IndentSize, - Language, OffsetRangeExt, Point, Runnable, RunnableRange, Selection, SelectionGoal, TextObject, - TransactionId, TreeSitterOptions, WordsQuery, + BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape, + DiagnosticEntry, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind, + IndentSize, Language, OffsetRangeExt, Point, Runnable, RunnableRange, Selection, SelectionGoal, + TextObject, TransactionId, TreeSitterOptions, WordsQuery, language_settings::{ self, InlayHintSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode, all_language_settings, language_settings, @@ -3123,7 +3123,8 @@ impl Editor { let position_matches = start_offset == completion_position.to_offset(buffer); let continue_showing = if position_matches { if self.snippet_stack.is_empty() { - buffer.char_kind_before(start_offset, true) == Some(CharKind::Word) + buffer.char_kind_before(start_offset, Some(CharScopeContext::Completion)) + == Some(CharKind::Word) } else { // Snippet choices can be shown even when the 
cursor is in whitespace. // Dismissing the menu with actions like backspace is handled by @@ -3551,7 +3552,7 @@ impl Editor { let position = display_map .clip_point(position, Bias::Left) .to_offset(&display_map, Bias::Left); - let (range, _) = buffer.surrounding_word(position, false); + let (range, _) = buffer.surrounding_word(position, None); start = buffer.anchor_before(range.start); end = buffer.anchor_before(range.end); mode = SelectMode::Word(start..end); @@ -3711,10 +3712,10 @@ impl Editor { .to_offset(&display_map, Bias::Left); let original_range = original_range.to_offset(buffer); - let head_offset = if buffer.is_inside_word(offset, false) + let head_offset = if buffer.is_inside_word(offset, None) || original_range.contains(&offset) { - let (word_range, _) = buffer.surrounding_word(offset, false); + let (word_range, _) = buffer.surrounding_word(offset, None); if word_range.start < original_range.start { word_range.start } else { @@ -4244,7 +4245,7 @@ impl Editor { let is_word_char = text.chars().next().is_none_or(|char| { let classifier = snapshot .char_classifier_at(start_anchor.to_offset(&snapshot)) - .ignore_punctuation(true); + .scope_context(Some(CharScopeContext::LinkedEdit)); classifier.is_word(char) }); @@ -5101,7 +5102,8 @@ impl Editor { fn completion_query(buffer: &MultiBufferSnapshot, position: impl ToOffset) -> Option { let offset = position.to_offset(buffer); - let (word_range, kind) = buffer.surrounding_word(offset, true); + let (word_range, kind) = + buffer.surrounding_word(offset, Some(CharScopeContext::Completion)); if offset > word_range.start && kind == Some(CharKind::Word) { Some( buffer @@ -5571,7 +5573,7 @@ impl Editor { } = buffer_position; let (word_replace_range, word_to_exclude) = if let (word_range, Some(CharKind::Word)) = - buffer_snapshot.surrounding_word(buffer_position, false) + buffer_snapshot.surrounding_word(buffer_position, None) { let word_to_exclude = buffer_snapshot .text_for_range(word_range.clone()) @@ -6787,8 +6789,8 @@ impl Editor { } let snapshot = cursor_buffer.read(cx).snapshot(); - let (start_word_range, _) = snapshot.surrounding_word(cursor_buffer_position, false); - let (end_word_range, _) = snapshot.surrounding_word(tail_buffer_position, false); + let (start_word_range, _) = snapshot.surrounding_word(cursor_buffer_position, None); + let (end_word_range, _) = snapshot.surrounding_word(tail_buffer_position, None); if start_word_range != end_word_range { self.document_highlights_task.take(); self.clear_background_highlights::(cx); @@ -11440,7 +11442,7 @@ impl Editor { let selection_is_empty = selection.is_empty(); let (start, end) = if selection_is_empty { - let (word_range, _) = buffer.surrounding_word(selection.start, false); + let (word_range, _) = buffer.surrounding_word(selection.start, None); (word_range.start, word_range.end) } else { ( @@ -14206,8 +14208,8 @@ impl Editor { start_offset + query_match.start()..start_offset + query_match.end(); if !select_next_state.wordwise - || (!buffer.is_inside_word(offset_range.start, false) - && !buffer.is_inside_word(offset_range.end, false)) + || (!buffer.is_inside_word(offset_range.start, None) + && !buffer.is_inside_word(offset_range.end, None)) { // TODO: This is n^2, because we might check all the selections if !selections @@ -14271,7 +14273,7 @@ impl Editor { if only_carets { for selection in &mut selections { - let (word_range, _) = buffer.surrounding_word(selection.start, false); + let (word_range, _) = buffer.surrounding_word(selection.start, None); selection.start = 
word_range.start; selection.end = word_range.end; selection.goal = SelectionGoal::None; @@ -14356,8 +14358,8 @@ impl Editor { }; if !select_next_state.wordwise - || (!buffer.is_inside_word(offset_range.start, false) - && !buffer.is_inside_word(offset_range.end, false)) + || (!buffer.is_inside_word(offset_range.start, None) + && !buffer.is_inside_word(offset_range.end, None)) { new_selections.push(offset_range.start..offset_range.end); } @@ -14431,8 +14433,8 @@ impl Editor { end_offset - query_match.end()..end_offset - query_match.start(); if !select_prev_state.wordwise - || (!buffer.is_inside_word(offset_range.start, false) - && !buffer.is_inside_word(offset_range.end, false)) + || (!buffer.is_inside_word(offset_range.start, None) + && !buffer.is_inside_word(offset_range.end, None)) { next_selected_range = Some(offset_range); break; @@ -14490,7 +14492,7 @@ impl Editor { if only_carets { for selection in &mut selections { - let (word_range, _) = buffer.surrounding_word(selection.start, false); + let (word_range, _) = buffer.surrounding_word(selection.start, None); selection.start = word_range.start; selection.end = word_range.end; selection.goal = SelectionGoal::None; @@ -14968,11 +14970,10 @@ impl Editor { if let Some((node, _)) = buffer.syntax_ancestor(old_range.clone()) { // manually select word at selection if ["string_content", "inline"].contains(&node.kind()) { - let (word_range, _) = buffer.surrounding_word(old_range.start, false); + let (word_range, _) = buffer.surrounding_word(old_range.start, None); // ignore if word is already selected if !word_range.is_empty() && old_range != word_range { - let (last_word_range, _) = - buffer.surrounding_word(old_range.end, false); + let (last_word_range, _) = buffer.surrounding_word(old_range.end, None); // only select word if start and end point belongs to same word if word_range == last_word_range { selected_larger_node = true; @@ -22545,7 +22546,8 @@ fn snippet_completions( let mut is_incomplete = false; let mut completions: Vec = Vec::new(); for (scope, snippets) in scopes.into_iter() { - let classifier = CharClassifier::new(Some(scope)).for_completion(true); + let classifier = + CharClassifier::new(Some(scope)).scope_context(Some(CharScopeContext::Completion)); let mut last_word = chars .chars() .take_while(|c| classifier.is_word(*c)) @@ -22766,7 +22768,9 @@ impl CompletionProvider for Entity { if !menu_is_open && !snapshot.settings_at(position, cx).show_completions_on_input { return false; } - let classifier = snapshot.char_classifier_at(position).for_completion(true); + let classifier = snapshot + .char_classifier_at(position) + .scope_context(Some(CharScopeContext::Completion)); if trigger_in_words && classifier.is_word(char) { return true; } @@ -22879,7 +22883,7 @@ impl SemanticsProvider for Entity { // Fallback on using TreeSitter info to determine identifier range buffer.read_with(cx, |buffer, _| { let snapshot = buffer.snapshot(); - let (range, kind) = snapshot.surrounding_word(position, false); + let (range, kind) = snapshot.surrounding_word(position, None); if kind != Some(CharKind::Word) { return None; } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index f18187d558f1cb90e137d06591ec5b2ecb7b1654..05742cd00bb834550ee20377ff46da6649272f43 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -13,6 +13,7 @@ use crate::{ }, }; use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind}; +use collections::HashMap; use 
futures::StreamExt; use gpui::{ BackgroundExecutor, DismissEvent, Rgba, SemanticVersion, TestAppContext, UpdateGlobal, @@ -23773,6 +23774,28 @@ async fn test_hide_mouse_context_menu_on_modal_opened(cx: &mut TestAppContext) { }); } +fn set_linked_edit_ranges( + opening: (Point, Point), + closing: (Point, Point), + editor: &mut Editor, + cx: &mut Context, +) { + let Some((buffer, _)) = editor + .buffer + .read(cx) + .text_anchor_for_position(editor.selections.newest_anchor().start, cx) + else { + panic!("Failed to get buffer for selection position"); + }; + let buffer = buffer.read(cx); + let buffer_id = buffer.remote_id(); + let opening_range = buffer.anchor_before(opening.0)..buffer.anchor_after(opening.1); + let closing_range = buffer.anchor_before(closing.0)..buffer.anchor_after(closing.1); + let mut linked_ranges = HashMap::default(); + linked_ranges.insert(buffer_id, vec![(opening_range, vec![closing_range])]); + editor.linked_edit_ranges = LinkedEditingRanges(linked_ranges); +} + #[gpui::test] async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) { init_test(cx, |_| {}); @@ -23851,22 +23874,12 @@ async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) { editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| { selections.select_ranges([Point::new(0, 3)..Point::new(0, 3)]); }); - let Some((buffer, _)) = editor - .buffer - .read(cx) - .text_anchor_for_position(editor.selections.newest_anchor().start, cx) - else { - panic!("Failed to get buffer for selection position"); - }; - let buffer = buffer.read(cx); - let buffer_id = buffer.remote_id(); - let opening_range = - buffer.anchor_before(Point::new(0, 1))..buffer.anchor_after(Point::new(0, 3)); - let closing_range = - buffer.anchor_before(Point::new(0, 6))..buffer.anchor_after(Point::new(0, 8)); - let mut linked_ranges = HashMap::default(); - linked_ranges.insert(buffer_id, vec![(opening_range, vec![closing_range])]); - editor.linked_edit_ranges = LinkedEditingRanges(linked_ranges); + set_linked_edit_ranges( + (Point::new(0, 1), Point::new(0, 3)), + (Point::new(0, 6), Point::new(0, 8)), + editor, + cx, + ); }); let mut completion_handle = fake_server.set_request_handler::(move |_, _| async move { @@ -23910,6 +23923,77 @@ async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_linked_edits_on_typing_punctuation(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let language = Arc::new(Language::new( + LanguageConfig { + name: "TSX".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["tsx".to_string()], + ..LanguageMatcher::default() + }, + brackets: BracketPairConfig { + pairs: vec![BracketPair { + start: "<".into(), + end: ">".into(), + close: true, + ..Default::default() + }], + ..Default::default() + }, + linked_edit_characters: HashSet::from_iter(['.']), + ..Default::default() + }, + Some(tree_sitter_typescript::LANGUAGE_TSX.into()), + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + // Test typing > does not extend linked pair + cx.set_state(""); + cx.update_editor(|editor, _, cx| { + set_linked_edit_ranges( + (Point::new(0, 1), Point::new(0, 4)), + (Point::new(0, 11), Point::new(0, 14)), + editor, + cx, + ); + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input(">", window, cx); + }); + cx.assert_editor_state("
ˇ
"); + + // Test typing . do extend linked pair + cx.set_state(""); + cx.update_editor(|editor, _, cx| { + set_linked_edit_ranges( + (Point::new(0, 1), Point::new(0, 9)), + (Point::new(0, 12), Point::new(0, 20)), + editor, + cx, + ); + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input(".", window, cx); + }); + cx.assert_editor_state(""); + cx.update_editor(|editor, _, cx| { + set_linked_edit_ranges( + (Point::new(0, 1), Point::new(0, 10)), + (Point::new(0, 13), Point::new(0, 21)), + editor, + cx, + ); + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input("V", window, cx); + }); + cx.assert_editor_state(""); +} + #[gpui::test] async fn test_invisible_worktree_servers(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index d5a3f17822ff7f0f2324414aeaa9819b8605f53b..2b91f8cb1ca4c515d2f09997f07b42d611b4baaf 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -627,7 +627,7 @@ pub fn show_link_definition( TriggerPoint::Text(trigger_anchor) => { // If no symbol range returned from language server, use the surrounding word. let (offset_range, _) = - snapshot.surrounding_word(*trigger_anchor, false); + snapshot.surrounding_word(*trigger_anchor, None); RangeInEditor::Text( snapshot.anchor_before(offset_range.start) ..snapshot.anchor_after(offset_range.end), diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index bf21d6b461e6fdc082fdd1431f13b8daae730824..a1b311a3ac3b8ed330fee0f015c41d327efe342d 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -17,8 +17,8 @@ use gpui::{ ParentElement, Pixels, SharedString, Styled, Task, WeakEntity, Window, point, }; use language::{ - Bias, Buffer, BufferRow, CharKind, DiskState, LocalFile, Point, SelectionGoal, - proto::serialize_anchor as serialize_text_anchor, + Bias, Buffer, BufferRow, CharKind, CharScopeContext, DiskState, LocalFile, Point, + SelectionGoal, proto::serialize_anchor as serialize_text_anchor, }; use lsp::DiagnosticSeverity; use project::{ @@ -1573,7 +1573,8 @@ impl SearchableItem for Editor { } SeedQuerySetting::Selection => String::new(), SeedQuerySetting::Always => { - let (range, kind) = snapshot.surrounding_word(selection.start, true); + let (range, kind) = + snapshot.surrounding_word(selection.start, Some(CharScopeContext::Completion)); if kind == Some(CharKind::Word) { let text: String = snapshot.text_for_range(range).collect(); if !text.trim().is_empty() { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1a7fca79f64c2c253117a3acde8c4d7519a9c282..d5d83da47bc18a4fd15f59df2ddb2238ceb768d4 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -546,6 +546,23 @@ pub enum CharKind { Word, } +/// Context for character classification within a specific scope. +#[derive(Copy, Clone, Eq, PartialEq, Debug)] +pub enum CharScopeContext { + /// Character classification for completion queries. + /// + /// This context treats certain characters as word constituents that would + /// normally be considered punctuation, such as '-' in Tailwind classes + /// ("bg-yellow-100") or '.' in import paths ("foo.ts"). + Completion, + /// Character classification for linked edits. + /// + /// This context handles characters that should be treated as part of + /// identifiers during linked editing operations, such as '.' in JSX + /// component names like ``. 
+ LinkedEdit, +} + /// A runnable is a set of data about a region that could be resolved into a task pub struct Runnable { pub tags: SmallVec<[RunnableTag; 1]>, @@ -3449,16 +3466,14 @@ impl BufferSnapshot { pub fn surrounding_word( &self, start: T, - for_completion: bool, + scope_context: Option, ) -> (Range, Option) { let mut start = start.to_offset(self); let mut end = start; let mut next_chars = self.chars_at(start).take(128).peekable(); let mut prev_chars = self.reversed_chars_at(start).take(128).peekable(); - let classifier = self - .char_classifier_at(start) - .for_completion(for_completion); + let classifier = self.char_classifier_at(start).scope_context(scope_context); let word_kind = cmp::max( prev_chars.peek().copied().map(|c| classifier.kind(c)), next_chars.peek().copied().map(|c| classifier.kind(c)), @@ -5212,7 +5227,7 @@ pub(crate) fn contiguous_ranges( #[derive(Default, Debug)] pub struct CharClassifier { scope: Option, - for_completion: bool, + scope_context: Option, ignore_punctuation: bool, } @@ -5220,14 +5235,14 @@ impl CharClassifier { pub fn new(scope: Option) -> Self { Self { scope, - for_completion: false, + scope_context: None, ignore_punctuation: false, } } - pub fn for_completion(self, for_completion: bool) -> Self { + pub fn scope_context(self, scope_context: Option) -> Self { Self { - for_completion, + scope_context, ..self } } @@ -5257,10 +5272,10 @@ impl CharClassifier { } if let Some(scope) = &self.scope { - let characters = if self.for_completion { - scope.completion_query_characters() - } else { - scope.word_characters() + let characters = match self.scope_context { + Some(CharScopeContext::Completion) => scope.completion_query_characters(), + Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(), + None => scope.word_characters(), }; if let Some(characters) = characters && characters.contains(&c) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 2af5657ea776ddd85bf9495d3c1f32c2d0c69ac2..3e9f3bf1bd0cb4719f5442e1b1bd9e357ac9efca 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -780,6 +780,9 @@ pub struct LanguageConfig { /// A list of characters that Zed should treat as word characters for completion queries. #[serde(default)] pub completion_query_characters: HashSet, + /// A list of characters that Zed should treat as word characters for linked edit operations. + #[serde(default)] + pub linked_edit_characters: HashSet, /// A list of preferred debuggers for this language. #[serde(default)] pub debuggers: IndexSet, @@ -916,6 +919,8 @@ pub struct LanguageConfigOverride { #[serde(default)] pub completion_query_characters: Override>, #[serde(default)] + pub linked_edit_characters: Override>, + #[serde(default)] pub opt_into_language_servers: Vec, #[serde(default)] pub prefer_label_for_snippet: Option, @@ -974,6 +979,7 @@ impl Default for LanguageConfig { hidden: false, jsx_tag_auto_close: None, completion_query_characters: Default::default(), + linked_edit_characters: Default::default(), debuggers: Default::default(), } } @@ -2011,6 +2017,15 @@ impl LanguageScope { ) } + /// Returns a list of language-specific characters that are considered part of + /// identifiers during linked editing operations. 
+ pub fn linked_edit_characters(&self) -> Option<&HashSet> { + Override::as_option( + self.config_override().map(|o| &o.linked_edit_characters), + Some(&self.language.config.linked_edit_characters), + ) + } + /// Returns whether to prefer snippet `label` over `new_text` to replace text when /// completion is accepted. /// diff --git a/crates/language/src/text_diff.rs b/crates/language/src/text_diff.rs index 11d8a070d213852f0a98078f2ed8c76c9cced47b..5a74362d7d3cb2404cc67ed32595a06efd291ca4 100644 --- a/crates/language/src/text_diff.rs +++ b/crates/language/src/text_diff.rs @@ -1,4 +1,4 @@ -use crate::{CharClassifier, CharKind, LanguageScope}; +use crate::{CharClassifier, CharKind, CharScopeContext, LanguageScope}; use anyhow::{Context, anyhow}; use imara_diff::{ Algorithm, UnifiedDiffBuilder, diff, @@ -181,7 +181,8 @@ fn diff_internal( } fn tokenize(text: &str, language_scope: Option) -> impl Iterator { - let classifier = CharClassifier::new(language_scope).for_completion(true); + let classifier = + CharClassifier::new(language_scope).scope_context(Some(CharScopeContext::Completion)); let mut chars = text.char_indices(); let mut prev = None; let mut start_ix = 0; diff --git a/crates/languages/src/javascript/config.toml b/crates/languages/src/javascript/config.toml index 128eac0e4dda2b5b437c494e862970c23a8df3a1..3bac37aa13ed34c18d1fb8e4f70e0905938e5213 100644 --- a/crates/languages/src/javascript/config.toml +++ b/crates/languages/src/javascript/config.toml @@ -30,6 +30,9 @@ close_tag_node_name = "jsx_closing_element" jsx_element_node_name = "jsx_element" tag_name_node_name = "identifier" +[overrides.default] +linked_edit_characters = ["."] + [overrides.element] line_comments = { remove = true } block_comment = { start = "{/* ", prefix = "", end = "*/}", tab_size = 0 } diff --git a/crates/languages/src/tsx/config.toml b/crates/languages/src/tsx/config.toml index b5ef5bd56df2097bc920f02b87d07e4118d7b0d1..d0a4eb6532db621d741df2fbc99125e1c037ccdf 100644 --- a/crates/languages/src/tsx/config.toml +++ b/crates/languages/src/tsx/config.toml @@ -29,6 +29,9 @@ jsx_element_node_name = "jsx_element" tag_name_node_name = "identifier" tag_name_node_name_alternates = ["member_expression"] +[overrides.default] +linked_edit_characters = ["."] + [overrides.element] line_comments = { remove = true } block_comment = { start = "{/*", prefix = "", end = "*/}", tab_size = 0 } diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 2ceeffc89061aa429727142a1659a392d6374b09..c79bc03489be89ad00d10392c520fe13e7748a60 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -17,10 +17,10 @@ use gpui::{App, AppContext as _, Context, Entity, EntityId, EventEmitter, Task}; use itertools::Itertools; use language::{ AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier, - CharKind, Chunk, CursorShape, DiagnosticEntry, DiskState, File, IndentGuideSettings, - IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point, - PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId, - TreeSitterOptions, Unclipped, + CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntry, DiskState, File, + IndentGuideSettings, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, + OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, + ToPoint as _, TransactionId, TreeSitterOptions, 
Unclipped, language_settings::{LanguageSettings, language_settings}, }; @@ -4204,11 +4204,15 @@ impl MultiBufferSnapshot { self.diffs.values().any(|diff| !diff.is_empty()) } - pub fn is_inside_word(&self, position: T, for_completion: bool) -> bool { + pub fn is_inside_word( + &self, + position: T, + scope_context: Option, + ) -> bool { let position = position.to_offset(self); let classifier = self .char_classifier_at(position) - .for_completion(for_completion); + .scope_context(scope_context); let next_char_kind = self.chars_at(position).next().map(|c| classifier.kind(c)); let prev_char_kind = self .reversed_chars_at(position) @@ -4220,16 +4224,14 @@ impl MultiBufferSnapshot { pub fn surrounding_word( &self, start: T, - for_completion: bool, + scope_context: Option, ) -> (Range, Option) { let mut start = start.to_offset(self); let mut end = start; let mut next_chars = self.chars_at(start).peekable(); let mut prev_chars = self.reversed_chars_at(start).peekable(); - let classifier = self - .char_classifier_at(start) - .for_completion(for_completion); + let classifier = self.char_classifier_at(start).scope_context(scope_context); let word_kind = cmp::max( prev_chars.peek().copied().map(|c| classifier.kind(c)), @@ -4258,12 +4260,10 @@ impl MultiBufferSnapshot { pub fn char_kind_before( &self, start: T, - for_completion: bool, + scope_context: Option, ) -> Option { let start = start.to_offset(self); - let classifier = self - .char_classifier_at(start) - .for_completion(for_completion); + let classifier = self.char_classifier_at(start).scope_context(scope_context); self.reversed_chars_at(start) .next() .map(|ch| classifier.kind(ch)) diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index a960e1183dd46537ef3aee829cd9753b28001480..5ec6e502bd85a25b6755c6994feff7a3062c919c 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -16,8 +16,8 @@ use collections::{HashMap, HashSet}; use futures::future; use gpui::{App, AsyncApp, Entity, Task}; use language::{ - Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind, OffsetRangeExt, PointUtf16, - ToOffset, ToPointUtf16, Transaction, Unclipped, + Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind, CharScopeContext, + OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped, language_settings::{InlayHintKind, LanguageSettings, language_settings}, point_from_lsp, point_to_lsp, proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version}, @@ -350,7 +350,7 @@ impl LspCommand for PrepareRename { } Some(lsp::PrepareRenameResponse::DefaultBehavior { .. 
}) => { let snapshot = buffer.snapshot(); - let (range, _) = snapshot.surrounding_word(self.position, false); + let (range, _) = snapshot.surrounding_word(self.position, None); let range = snapshot.anchor_after(range.start)..snapshot.anchor_before(range.end); Ok(PrepareRenameResponse::Success(range)) } @@ -2293,7 +2293,10 @@ impl LspCommand for GetCompletions { range_for_token .get_or_insert_with(|| { let offset = self.position.to_offset(&snapshot); - let (range, kind) = snapshot.surrounding_word(offset, true); + let (range, kind) = snapshot.surrounding_word( + offset, + Some(CharScopeContext::Completion), + ); let range = if kind == Some(CharKind::Word) { range } else { diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 9e7fb4a564751335db7ba6fe2afe61563ea0f161..c7fb8ffa35ea090296f137b11f08379db968ce3d 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -30,7 +30,9 @@ use gpui::{ Render, Subscription, Task, WeakEntity, Window, actions, }; use insert::{NormalBefore, TemporaryNormal}; -use language::{CharKind, CursorShape, Point, Selection, SelectionGoal, TransactionId}; +use language::{ + CharKind, CharScopeContext, CursorShape, Point, Selection, SelectionGoal, TransactionId, +}; pub use mode_indicator::ModeIndicator; use motion::Motion; use normal::search::SearchSubmit; @@ -1347,7 +1349,8 @@ impl Vim { let selection = editor.selections.newest::(cx); let snapshot = &editor.snapshot(window, cx).buffer_snapshot; - let (range, kind) = snapshot.surrounding_word(selection.start, true); + let (range, kind) = + snapshot.surrounding_word(selection.start, Some(CharScopeContext::Completion)); if kind == Some(CharKind::Word) { let text: String = snapshot.text_for_range(range).collect(); if !text.trim().is_empty() { From 782058647db4f1b25d6c04c85728faac557e59cf Mon Sep 17 00:00:00 2001 From: Nia Date: Sat, 20 Sep 2025 09:04:32 +0200 Subject: [PATCH 29/58] tests: Add an automatic perf profiler (#38543) Add an auto-profiler for our tests, to hopefully allow better triage of performance impacts resulting from code changes. Comprehensive usage docs are in the code. Currently, it uses hyperfine under the hood and prints markdown to the command line for all crates with relevant tests enabled. We may want to expand this to allow outputting json in the future to allow e.g. automatically comparing the difference between two runs on different commits, and in general a lot of functionality could be added (maybe measuring memory usage?). It's enabled (mostly as an example) on two tests inside `gpui` and a bunch of those inside `vim`. I'd have happily used `cargo bench`, but that's nightly-only. 
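For orientation, a minimal sketch of how a test opts in, mirroring the attribute docs and the `vim` tests annotated below (the function names here are illustrative):

```rust
use util_macros::perf;

// Iteration count is auto-determined: the harness raises it until a run
// clears the noise cutoff, then passes the count to the test via an env var.
#[perf]
fn expensive_computation_test() {
    // Test body goes here.
}

// Fixed iteration count, for tests that should not be repeated in-process;
// the profiler may still restart the test binary as often as it likes.
#[perf(iterations = 1)]
#[gpui::test]
async fn oneshot_test(_cx: &mut gpui::TestAppContext) {
    // Test body goes here.
}
```

Profiling is then driven through the new cargo alias: build the harness once with `cargo build --bin perf --workspace --release` (hyperfine must be on the PATH), then run `cargo perf-test -p $CRATE` to get the Markdown table for that crate's perf-marked tests.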
Release Notes: - N/A --- .cargo/config.toml | 1 + Cargo.lock | 9 ++ Cargo.toml | 2 +- crates/gpui/Cargo.toml | 1 + crates/gpui/src/style.rs | 6 +- crates/util_macros/Cargo.toml | 3 + crates/util_macros/src/util_macros.rs | 147 +++++++++++++++++++- crates/vim/Cargo.toml | 1 + crates/vim/src/test.rs | 25 ++++ tooling/perf/Cargo.toml | 11 ++ tooling/perf/LICENSE-APACHE | 1 + tooling/perf/src/main.rs | 191 ++++++++++++++++++++++++++ 12 files changed, 393 insertions(+), 5 deletions(-) create mode 100644 tooling/perf/Cargo.toml create mode 120000 tooling/perf/LICENSE-APACHE create mode 100644 tooling/perf/src/main.rs diff --git a/.cargo/config.toml b/.cargo/config.toml index 717c5e18c8d294bacf65207bc6b8ecb7dba1b152..74d34226af09c11b56faa6722e00afa218c924f5 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -4,6 +4,7 @@ rustflags = ["-C", "symbol-mangling-version=v0", "--cfg", "tokio_unstable"] [alias] xtask = "run --package xtask --" +perf-test = ["test", "--profile", "release-fast", "--lib", "--bins", "--tests", "--config", "target.'cfg(true)'.runner='target/release/perf'", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]"] [target.x86_64-unknown-linux-gnu] linker = "clang" diff --git a/Cargo.lock b/Cargo.lock index 548ff152066745344b65c75b0be80db71c6f7f5e..84ae8e613365ef3976970e61dfc7b03aaf969062 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7924,6 +7924,7 @@ dependencies = [ "unicode-segmentation", "usvg", "util", + "util_macros", "uuid", "waker-fn", "wayland-backend", @@ -12163,6 +12164,13 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "perf" +version = "0.1.0" +dependencies = [ + "workspace-hack", +] + [[package]] name = "pest" version = "2.8.0" @@ -18727,6 +18735,7 @@ dependencies = [ "tokio", "ui", "util", + "util_macros", "vim_mode_setting", "workspace", "workspace-hack", diff --git a/Cargo.toml b/Cargo.toml index aa95b1f4a78fe2599bcccd3036c2ebb65761ada3..ad07429243817b27b4c09fc651b50de820183a9d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -220,7 +220,7 @@ members = [ # "tooling/workspace-hack", - "tooling/xtask", + "tooling/xtask", "tooling/perf", ] default-members = ["crates/zed"] diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index ac1bdf85cb478064db42b3dccde8e44adee72fdd..2919fecabf050a011109b2abfe69394a0ead2e67 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -110,6 +110,7 @@ resvg = { version = "0.45.0", default-features = false, features = [ "memmap-fonts", ] } usvg = { version = "0.45.0", default-features = false } +util_macros.workspace = true schemars.workspace = true seahash = "4.1" semantic_version.workspace = true diff --git a/crates/gpui/src/style.rs b/crates/gpui/src/style.rs index 78bca5a4993271883c555fe05366a7c9a0c472ac..8afb4e4eb8af70a78c1cd4fc0176a7fe3baf3c3e 100644 --- a/crates/gpui/src/style.rs +++ b/crates/gpui/src/style.rs @@ -1300,7 +1300,9 @@ mod tests { use super::*; - #[test] + use util_macros::perf; + + #[perf] fn test_basic_highlight_style_combination() { let style_a = HighlightStyle::default(); let style_b = HighlightStyle::default(); @@ -1385,7 +1387,7 @@ mod tests { ); } - #[test] + #[perf] fn test_combine_highlights() { assert_eq!( combine_highlights( diff --git a/crates/util_macros/Cargo.toml b/crates/util_macros/Cargo.toml index 996eefcb303ee5959f0e7fa920f1a91a509407eb..45145a68f6a7d54d759d932c3dc851d14f4939d9 100644 --- a/crates/util_macros/Cargo.toml +++ 
b/crates/util_macros/Cargo.toml @@ -17,3 +17,6 @@ doctest = false quote.workspace = true syn.workspace = true workspace-hack.workspace = true + +[features] +perf-enabled = [] diff --git a/crates/util_macros/src/util_macros.rs b/crates/util_macros/src/util_macros.rs index 9d0b06ab10a7454d6c0d19fd54722fd98db4ac25..d3f05afdecbca8cb3b4c8685054d3828e6c702fd 100644 --- a/crates/util_macros/src/util_macros.rs +++ b/crates/util_macros/src/util_macros.rs @@ -1,8 +1,9 @@ #![cfg_attr(not(target_os = "windows"), allow(unused))] +#![allow(clippy::test_attr_in_doctest)] use proc_macro::TokenStream; -use quote::quote; -use syn::{LitStr, parse_macro_input}; +use quote::{ToTokens, quote}; +use syn::{ItemFn, LitStr, parse_macro_input, parse_quote}; /// A macro used in tests for cross-platform path string literals in tests. On Windows it replaces /// `/` with `\\` and adds `C:` to the beginning of absolute paths. On other platforms, the path is @@ -87,3 +88,145 @@ pub fn line_endings(input: TokenStream) -> TokenStream { #text }) } + +/// Inner data for the perf macro. +struct PerfArgs { + /// How many times to loop a test before rerunning the test binary. + /// If left empty, the test harness will auto-determine this value. + iterations: Option, +} + +impl syn::parse::Parse for PerfArgs { + fn parse(input: syn::parse::ParseStream) -> syn::Result { + if input.is_empty() { + return Ok(PerfArgs { iterations: None }); + } + + let mut iterations = None; + // In principle we only have one possible argument, but leave this as + // a loop in case we expand this in the future. + for meta in + syn::punctuated::Punctuated::::parse_terminated(input)? + { + match &meta { + syn::Meta::NameValue(meta_name_value) => { + if meta_name_value.path.is_ident("iterations") { + iterations = Some(meta_name_value.value.clone()); + } else { + return Err(syn::Error::new_spanned( + &meta_name_value.path, + "unexpected argument, expected 'iterations'", + )); + } + } + _ => { + return Err(syn::Error::new_spanned( + meta, + "expected name-value argument like 'iterations = 1'", + )); + } + } + } + + Ok(PerfArgs { iterations }) + } +} + +/// Marks a test as perf-sensitive, to be triaged when checking the performance +/// of a build. This also automatically applies `#[test]`. +/// +/// By default, the number of iterations when profiling this test is auto-determined. +/// If this needs to be overwritten, pass the desired iteration count to the macro +/// as a parameter (`#[perf(iterations = n)]`). Note that the actual profiler may still +/// run the test an arbitrary number times; this flag just sets the number of executions +/// before the process is restarted and global state is reset. +/// +/// # Usage notes +/// This should probably not be applied to tests that do any significant fs IO, as +/// locks on files may not be released in time when repeating a test many times. This +/// might lead to spurious failures. +/// +/// # Examples +/// ```rust +/// use util_macros::perf; +/// +/// #[perf] +/// fn expensive_computation_test() { +/// // Test goes here. +/// } +/// ``` +/// +/// This also works with `#[gpui::test]`s, though in most cases it shouldn't +/// be used with automatic iterations. +/// ```rust,ignore +/// use util_macros::perf; +/// +/// #[perf(iterations = 1)] +/// #[gpui::test] +/// fn oneshot_test(_cx: &mut gpui::TestAppContext) { +/// // Test goes here. 
+/// } +/// ``` +#[proc_macro_attribute] +pub fn perf(our_attr: TokenStream, input: TokenStream) -> TokenStream { + // If any of the below constants are changed, make sure to also update the perf + // profiler to match! + + /// The suffix on tests marked with `#[perf]`. + const SUF_NORMAL: &str = "__ZED_PERF"; + /// The suffix on tests marked with `#[perf(iterations = n)]`. + const SUF_FIXED: &str = "__ZED_PERF_FIXEDITER"; + /// The env var in which we pass the iteration count to our tests. + const ITER_ENV_VAR: &str = "ZED_PERF_ITER"; + + let iter_count = parse_macro_input!(our_attr as PerfArgs).iterations; + + let ItemFn { + mut attrs, + vis, + mut sig, + block, + } = parse_macro_input!(input as ItemFn); + attrs.push(parse_quote!(#[test])); + attrs.push(parse_quote!(#[allow(non_snake_case)])); + + let block: Box = if cfg!(perf_enabled) { + // Make the ident obvious when calling, for the test parser. + let mut new_ident = sig.ident.to_string(); + if iter_count.is_some() { + new_ident.push_str(SUF_FIXED); + } else { + new_ident.push_str(SUF_NORMAL); + } + + let new_ident = syn::Ident::new(&new_ident, sig.ident.span()); + sig.ident = new_ident; + // If we have a preset iteration count, just use that. + if let Some(iter_count) = iter_count { + parse_quote!({ + for _ in 0..#iter_count { + #block + } + }) + } else { + // Otherwise, the perf harness will pass us the value in an env var. + parse_quote!({ + let iter_count = std::env::var(#ITER_ENV_VAR).unwrap().parse::().unwrap(); + for _ in 0..iter_count { + #block + } + }) + } + } else { + block + }; + + ItemFn { + attrs, + vis, + sig, + block, + } + .into_token_stream() + .into() +} diff --git a/crates/vim/Cargo.toml b/crates/vim/Cargo.toml index abe92dd58594f05d8cf71dbde4fb129aafa26a03..a76d1f7ddc7b619ac231cd163a0721439255889a 100644 --- a/crates/vim/Cargo.toml +++ b/crates/vim/Cargo.toml @@ -46,6 +46,7 @@ theme.workspace = true tokio = { version = "1.15", features = ["full"], optional = true } ui.workspace = true util.workspace = true +util_macros.workspace = true vim_mode_setting.workspace = true workspace.workspace = true zed_actions.workspace = true diff --git a/crates/vim/src/test.rs b/crates/vim/src/test.rs index 2256c2577ecd282f690ee7b3afe9e2b21b6e8788..03adfc8af15cf92f7ee6c4c857c0f154e2c969f3 100644 --- a/crates/vim/src/test.rs +++ b/crates/vim/src/test.rs @@ -25,6 +25,9 @@ use search::BufferSearchBar; use crate::{PushSneak, PushSneakBackward, insert::NormalBefore, motion, state::Mode}; +use util_macros::perf; + +#[perf] #[gpui::test] async fn test_initially_disabled(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, false).await; @@ -44,6 +47,7 @@ async fn test_neovim(cx: &mut gpui::TestAppContext) { cx.assert_editor_state("ˇtest"); } +#[perf] #[gpui::test] async fn test_toggle_through_settings(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -80,6 +84,7 @@ async fn test_toggle_through_settings(cx: &mut gpui::TestAppContext) { assert_eq!(cx.mode(), Mode::Normal); } +#[perf] #[gpui::test] async fn test_cancel_selection(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -104,6 +109,7 @@ async fn test_cancel_selection(cx: &mut gpui::TestAppContext) { cx.assert_editor_state("The quick brown fox juˇmps over the lazy dog"); } +#[perf] #[gpui::test] async fn test_buffer_search(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -132,6 +138,7 @@ async fn test_buffer_search(cx: &mut gpui::TestAppContext) { }) } +#[perf] 
#[gpui::test] async fn test_count_down(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -143,6 +150,7 @@ async fn test_count_down(cx: &mut gpui::TestAppContext) { cx.assert_editor_state("aa\nbb\ncc\ndd\neˇe"); } +#[perf] #[gpui::test] async fn test_end_of_document_710(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -157,6 +165,7 @@ async fn test_end_of_document_710(cx: &mut gpui::TestAppContext) { cx.assert_editor_state("aˇa\nbb\ncc"); } +#[perf] #[gpui::test] async fn test_end_of_line_with_times(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -175,6 +184,7 @@ async fn test_end_of_line_with_times(cx: &mut gpui::TestAppContext) { cx.assert_editor_state("aa\nbb\ncˇc"); } +#[perf] #[gpui::test] async fn test_indent_outdent(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -241,6 +251,7 @@ async fn test_escape_command_palette(cx: &mut gpui::TestAppContext) { cx.assert_state("aˇbc\n", Mode::Insert); } +#[perf] #[gpui::test] async fn test_escape_cancels(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -251,6 +262,7 @@ async fn test_escape_cancels(cx: &mut gpui::TestAppContext) { cx.assert_state("aˇbc", Mode::Normal); } +#[perf] #[gpui::test] async fn test_selection_on_search(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -289,6 +301,7 @@ async fn test_selection_on_search(cx: &mut gpui::TestAppContext) { cx.assert_state(indoc! {"aa\nbb\nˇcc\ncc\ncc\n"}, Mode::Normal); } +#[perf] #[gpui::test] async fn test_word_characters(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new_typescript(cx).await; @@ -315,6 +328,7 @@ async fn test_word_characters(cx: &mut gpui::TestAppContext) { ) } +#[perf] #[gpui::test] async fn test_kebab_case(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new_html(cx).await; @@ -821,6 +835,7 @@ async fn test_paragraphs_dont_wrap(cx: &mut gpui::TestAppContext) { two"}); } +#[perf] #[gpui::test] async fn test_select_all_issue_2170(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -881,6 +896,7 @@ fn assert_pending_input(cx: &mut VimTestContext, expected: &str) { }); } +#[perf] #[gpui::test] async fn test_jk_multi(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -972,6 +988,7 @@ async fn test_comma_w(cx: &mut gpui::TestAppContext) { .assert_eq("hellˇo hello\nhello hello"); } +#[perf] #[gpui::test] async fn test_completion_menu_scroll_aside(cx: &mut TestAppContext) { let mut cx = VimTestContext::new_typescript(cx).await; @@ -1053,6 +1070,7 @@ async fn test_completion_menu_scroll_aside(cx: &mut TestAppContext) { }); } +#[perf] #[gpui::test] async fn test_rename(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new_typescript(cx).await; @@ -1088,6 +1106,7 @@ async fn test_rename(cx: &mut gpui::TestAppContext) { cx.assert_state("const afterˇ = 2; console.log(after)", Mode::Normal) } +#[perf(iterations = 1)] #[gpui::test] async fn test_remap(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -1210,6 +1229,7 @@ async fn test_undo(cx: &mut gpui::TestAppContext) { 3"}); } +#[perf] #[gpui::test] async fn test_mouse_selection(cx: &mut TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -1385,6 +1405,7 @@ async fn test_dw_eol(cx: &mut gpui::TestAppContext) { .assert_eq("twelve ˇtwelve char\ntwelve char"); } 
+#[perf] #[gpui::test] async fn test_toggle_comments(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -1476,6 +1497,7 @@ async fn test_find_multibyte(cx: &mut gpui::TestAppContext) { .assert_eq(r#""#); } +#[perf] #[gpui::test] async fn test_sneak(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -1695,6 +1717,7 @@ async fn test_ctrl_w_override(cx: &mut gpui::TestAppContext) { cx.shared_state().await.assert_eq("ˇ"); } +#[perf] #[gpui::test] async fn test_visual_indent_count(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -1850,6 +1873,7 @@ async fn test_ctrl_o_dot(cx: &mut gpui::TestAppContext) { cx.shared_state().await.assert_eq("hellˇllo world."); } +#[perf(iterations = 1)] #[gpui::test] async fn test_folded_multibuffer_excerpts(cx: &mut gpui::TestAppContext) { VimTestContext::init(cx); @@ -2150,6 +2174,7 @@ async fn test_paragraph_multi_delete(cx: &mut gpui::TestAppContext) { cx.shared_state().await.assert_eq(indoc! {"ˇ"}); } +#[perf] #[gpui::test] async fn test_multi_cursor_replay(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; diff --git a/tooling/perf/Cargo.toml b/tooling/perf/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..f5013a82836b9888d94fb39fa18f0efa00e1b0ce --- /dev/null +++ b/tooling/perf/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "perf" +version = "0.1.0" +publish.workspace = true +edition.workspace = true + +[lints] +workspace = true + +[dependencies] +workspace-hack.workspace = true diff --git a/tooling/perf/LICENSE-APACHE b/tooling/perf/LICENSE-APACHE new file mode 120000 index 0000000000000000000000000000000000000000..1cd601d0a3affae83854be02a0afdec3b7a9ec4d --- /dev/null +++ b/tooling/perf/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/tooling/perf/src/main.rs b/tooling/perf/src/main.rs new file mode 100644 index 0000000000000000000000000000000000000000..a119811aba76afccc16dbef48e4dbee576b46fdc --- /dev/null +++ b/tooling/perf/src/main.rs @@ -0,0 +1,191 @@ +#![warn(clippy::all, clippy::pedantic, clippy::undocumented_unsafe_blocks)] +#![cfg_attr(release, deny(warnings))] + +//! Perf profiler for Zed tests. Outputs timings of tests marked with the `#[perf]` +//! attribute to stdout in Markdown. See the documentation of `util_macros::perf` +//! for usage details on the actual attribute. +//! +//! # Setup +//! Make sure `hyperfine` is installed and in the shell path, then run +//! `cargo build --bin perf --workspace --release` to build the profiler. +//! +//! # Usage +//! Calling this tool rebuilds everything with some cfg flags set for the perf +//! proc macro *and* enables optimisations (`release-fast` profile), so expect it +//! to take a little while. +//! +//! To test an individual crate, run: +//! ```sh +//! cargo perf-test -p $CRATE +//! ``` +//! +//! To test everything (which will be **VERY SLOW**), run: +//! ```sh +//! cargo perf-test --workspace +//! ``` +//! +//! # Notes +//! This should probably not be called manually unless you're working on the profiler +//! itself; use the `cargo perf-test` alias (after building this crate) instead. + +use std::{ + process::{Command, Stdio}, + time::{Duration, Instant}, +}; + +/// How many iterations to attempt the first time a test is run. +const DEFAULT_ITER_COUNT: usize = 12; +/// Multiplier for the iteration count when a test doesn't pass the noise cutoff. 
+const ITER_COUNT_MUL: usize = 4; +/// How long a test must have run to be assumed to be reliable-ish. +const NOISE_CUTOFF: Duration = Duration::from_millis(250); + +// If any of the below constants are changed, make sure to also update the perf +// proc macro to match! + +/// The suffix on tests marked with `#[perf]`. +const SUF_NORMAL: &str = "__ZED_PERF"; +/// The suffix on tests marked with `#[perf(iterations = n)]`. +const SUF_FIXED: &str = "__ZED_PERF_FIXEDITER"; +/// The env var in which we pass the iteration count to our tests. +const ITER_ENV_VAR: &str = "ZED_PERF_ITER"; + +#[allow(clippy::too_many_lines)] +fn main() { + // We get passed the test we need to run as the 1st argument after our own name. + let test_bin = std::env::args().nth(1).unwrap(); + let mut cmd = Command::new(&test_bin); + // --format=json is nightly-only :( + cmd.args(["--list", "--format=terse"]); + let out = cmd + .output() + .expect("FATAL: Could not run test binary {test_bin}"); + assert!( + out.status.success(), + "FATAL: Cannot do perf check - test binary {test_bin} returned an error" + ); + // Parse the test harness output to look for tests we care about. + let stdout = String::from_utf8_lossy(&out.stdout); + let mut test_list: Vec<_> = stdout + .lines() + .filter_map(|line| { + // This should split only in two; e.g., + // "app::test::test_arena: test" => "app::test::test_arena:", "test" + let line: Vec<_> = line.split_whitespace().collect(); + match line[..] { + // Final byte of t_name is ":", which we need to ignore. + [t_name, kind] => (kind == "test").then(|| &t_name[..t_name.len() - 1]), + _ => None, + } + }) + // Exclude tests that aren't marked for perf triage based on suffix. + .filter(|t_name| t_name.ends_with(SUF_NORMAL) || t_name.ends_with(SUF_FIXED)) + .collect(); + + // Pulling itertools just for .dedup() would be quite a big dependency that's + // not used elsewhere, so do this on the vec instead. + test_list.sort_unstable(); + test_list.dedup(); + + if !test_list.is_empty() { + // Print the markdown header which matches hyperfine's result. + // TODO: Support exporting JSON also. + println!( + "| Command | Mean [ms] | Min [ms] | Max [ms] | Iterations | Iter/sec |\n|:---|---:|---:|---:|---:|---:|" + ); + } + + // Spawn and profile an instance of each perf-sensitive test, via hyperfine. + for t_name in test_list { + // Pretty-print the stripped name for the test. + let t_name_normal = t_name.replace(SUF_FIXED, "").replace(SUF_NORMAL, ""); + // Time test execution to see how many iterations we need to do in order + // to account for random noise. This is skipped for tests with fixed + // iteration counts. + let final_iter_count = if t_name.ends_with(SUF_FIXED) { + None + } else { + let mut iter_count = DEFAULT_ITER_COUNT; + loop { + let mut cmd = Command::new(&test_bin); + cmd.args([t_name, "--exact"]); + cmd.env(ITER_ENV_VAR, format!("{iter_count}")); + // Don't let the child muck up our stdin/out/err. + cmd.stdin(Stdio::null()); + cmd.stdout(Stdio::null()); + cmd.stderr(Stdio::null()); + let pre = Instant::now(); + // Discard the output beyond ensuring success. + let out = cmd.spawn().unwrap().wait(); + let post = Instant::now(); + if !out.unwrap().success() { + println!( + "| {t_name_normal} (ERRORED IN TRIAGE) | N/A | N/A | N/A | {iter_count} | N/A |" + ); + return; + } + if post - pre > NOISE_CUTOFF { + break Some(iter_count); + } else if let Some(c) = iter_count.checked_mul(ITER_COUNT_MUL) { + iter_count = c; + } else { + // This should almost never happen, but maybe..? 
+ eprintln!( + "WARNING: Running nearly usize::MAX iterations of test {t_name_normal}" + ); + break Some(iter_count); + } + } + }; + + // Now profile! + let mut perf_cmd = Command::new("hyperfine"); + // Warm up the cache and print markdown output to stdout. + perf_cmd.args([ + "--style", + "none", + "--warmup", + "1", + "--export-markdown", + "-", + &format!("{test_bin} {t_name}"), + ]); + if let Some(final_iter_count) = final_iter_count { + perf_cmd.env(ITER_ENV_VAR, format!("{final_iter_count}")); + } + let p_out = perf_cmd.output().unwrap(); + let fin_iter = match final_iter_count { + Some(i) => &format!("{i}"), + None => "(preset)", + }; + if p_out.status.success() { + let output = String::from_utf8_lossy(&p_out.stdout); + // Strip the name of the test binary from the table (and the space after it!) + // + our extraneous test bits + the "Relative" column (which is always at the end and "1.00"). + let output = output + .replace(&format!("{test_bin} "), "") + .replace(SUF_FIXED, "") + .replace(SUF_NORMAL, "") + .replace(" 1.00 |", ""); + // Can't use .last() since we have a trailing newline. Sigh. + let fin = output.lines().nth(3).unwrap(); + + // Calculate how many iterations this does per second, for easy comparison. + let ms = fin + .split_whitespace() + .nth(3) + .unwrap() + .parse::() + .unwrap(); + let mul_fac = 1000.0 / ms; + let iter_sec = match final_iter_count { + #[allow(clippy::cast_precision_loss)] + Some(c) => &format!("{:.1}", mul_fac * c as f64), + None => "(unknown)", + }; + println!("{fin} {fin_iter} | {iter_sec} |"); + } else { + println!("{t_name_normal} (ERRORED) | N/A | N/A | N/A | {fin_iter} | N/A |"); + } + } +} From ffa23d25e315eda8079664fb8fe2d8618003ef4b Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Sat, 20 Sep 2025 11:23:02 -0400 Subject: [PATCH 30/58] Fix formatting in workspace `Cargo.toml` (#38563) This PR fixes some formatting issues in the workspace `Cargo.toml`. Release Notes: - N/A --- Cargo.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index ad07429243817b27b4c09fc651b50de820183a9d..fd08fbb3f971e84d27e469bd79888531366109c4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -219,8 +219,9 @@ members = [ # Tooling # + "tooling/perf", "tooling/workspace-hack", - "tooling/xtask", "tooling/perf", + "tooling/xtask", ] default-members = ["crates/zed"] From 1d1bbf01a931720febd3fb6a1a10b27e27c06179 Mon Sep 17 00:00:00 2001 From: Vitaly Slobodin Date: Sat, 20 Sep 2025 19:29:12 +0200 Subject: [PATCH 31/58] docs: Mention `herb` LSP for Ruby language (#38351) Hi! This pull request mentions [the `herb` LSP](https://herb-tools.dev) for `HTML/ERB` language that the Ruby extension supports. Thanks! Release Notes: - N/A --------- Co-authored-by: Finn Evers --- docs/src/languages/ruby.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/src/languages/ruby.md b/docs/src/languages/ruby.md index ef4b026db1db85ccf9104fdd3522ea27d2e1b50f..bcab5333d7fc3216eb6475acfe47b51013010afe 100644 --- a/docs/src/languages/ruby.md +++ b/docs/src/languages/ruby.md @@ -9,6 +9,7 @@ Ruby support is available through the [Ruby extension](https://github.com/zed-ex - [ruby-lsp](https://github.com/Shopify/ruby-lsp) - [solargraph](https://github.com/castwide/solargraph) - [rubocop](https://github.com/rubocop/rubocop) + - [Herb](https://herb-tools.dev) - Debug Adapter: [`rdbg`](https://github.com/ruby/debug) The Ruby extension also provides support for ERB files. 
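Since `Herb` ships enabled by default for `HTML/ERB`, a project that wants to opt out would presumably use the same `language_servers` override pattern this page already uses for the other servers — a sketch, assuming `herb` is the server's registered name:

```json
{
  "languages": {
    "HTML/ERB": {
      "language_servers": ["!herb", "..."]
    }
  }
}
```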
@@ -27,6 +28,7 @@ In addition to these two language servers, Zed also supports: - [rubocop](https://github.com/rubocop/rubocop) which is a static code analyzer and linter for Ruby. Under the hood, it's also used by Zed as a language server, but its functionality is complimentary to that of solargraph and ruby-lsp. - [sorbet](https://sorbet.org/) which is a static type checker for Ruby with a custom gradual type system. - [steep](https://github.com/soutaro/steep) which is a static type checker for Ruby that leverages Ruby Signature (RBS). +- [Herb](https://herb-tools.dev) which is a language server for ERB files. When configuring a language server, it helps to open the LSP Logs window using the 'dev: Open Language Server Logs' command. You can then choose the corresponding language instance to see any logged information. @@ -238,6 +240,10 @@ To enable Steep, add `\"steep\"` to the `language_servers` list for Ruby in your } ``` +## Setting up Herb + +`Herb` is enabled by default for the `HTML/ERB` language. + ## Using the Tailwind CSS Language Server with Ruby It's possible to use the [Tailwind CSS Language Server](https://github.com/tailwindlabs/tailwindcss-intellisense/tree/HEAD/packages/tailwindcss-language-server#readme) in Ruby and ERB files. From f5c2e4b49e5126d29cbee05733072f7ed769304e Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Sun, 21 Sep 2025 02:01:55 +0800 Subject: [PATCH 32/58] vim: Remove duplicate bracket pair (#38560) remove depulicate code, this same with line: 556-562 Release Notes: - N/A --- crates/vim/src/surrounds.rs | 7 ------- 1 file changed, 7 deletions(-) diff --git a/crates/vim/src/surrounds.rs b/crates/vim/src/surrounds.rs index 5e25b08dd8656887b2013df52a5e7d62fce5dbe0..3ce4e6a2e94b1090714a81195c5562374efb95eb 100644 --- a/crates/vim/src/surrounds.rs +++ b/crates/vim/src/surrounds.rs @@ -602,13 +602,6 @@ fn all_support_surround_pair() -> Vec { surround: true, newline: false, }, - BracketPair { - start: "{".into(), - end: "}".into(), - close: true, - surround: true, - newline: false, - }, BracketPair { start: "<".into(), end: ">".into(), From 18df6a81b420914507515e7ba3c19665e4e11197 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Sat, 20 Sep 2025 14:14:55 -0400 Subject: [PATCH 33/58] acp: Fix spawning login task (#38567) Reverts #38175, which is not correct, since in fact we do need to pre-quote the command and arguments for the shell when using `SpawnInTerminal` (although we should probably change the API so that this isn't necessary). Then, applies the same fix as #38565 to fix the root cause of being unable to spawn the login task on macOS, or in any case where the command/args contain spaces. Release Notes: - Fixed being unable to login with Claude Code or Gemini using the terminal. 
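For context, a minimal standalone sketch of the quoting this fix relies on, assuming the `shlex` crate this patch adds to `agent_ui`'s dependencies (not the editor code path itself; the path literal is borrowed from the test removed below, and error handling is simplified):

```rust
fn main() {
    // A realistic macOS argument containing a space.
    let arg = "/Users/test/Library/Application Support/Zed/cli.js";
    // `shlex::try_quote` leaves plain words untouched and quotes anything the
    // shell would otherwise split or reinterpret; it only fails on NUL bytes.
    let quoted = shlex::try_quote(arg).expect("no NUL bytes in the path");
    // Joined into the task's command line as `node <arg> /login`, the path
    // now survives the shell's word splitting.
    println!("node {} /login", quoted);
}
```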
--- Cargo.lock | 1 + crates/agent_ui/Cargo.toml | 1 + crates/agent_ui/src/acp/thread_view.rs | 34 ++++++++++++-------------- 3 files changed, 17 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 84ae8e613365ef3976970e61dfc7b03aaf969062..6293b0cc2da475ef5f2282a039c123c76d31e1c7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -419,6 +419,7 @@ dependencies = [ "serde_json", "serde_json_lenient", "settings", + "shlex", "smol", "streaming_diff", "task", diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 47d9f6d6a27a2ad5102e831094912208e66a9b43..028db95c10a8c7a319bb05927dcabd0564a14683 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -80,6 +80,7 @@ serde.workspace = true serde_json.workspace = true serde_json_lenient.workspace = true settings.workspace = true +shlex.workspace = true smol.workspace = true streaming_diff.workspace = true task.workspace = true diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index cd72be9b184ded0d53125bfd569da89acff59a48..8658e2c285997c18ece2b9783c25fbcaa614dc83 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -9,7 +9,7 @@ use agent_client_protocol::{self as acp, PromptCapabilities}; use agent_servers::{AgentServer, AgentServerDelegate}; use agent_settings::{AgentProfileId, AgentSettings, CompletionMode}; use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer}; -use anyhow::{Result, anyhow, bail}; +use anyhow::{Context as _, Result, anyhow, bail}; use arrayvec::ArrayVec; use audio::{Audio, Sound}; use buffer_diff::BufferDiff; @@ -1582,6 +1582,19 @@ impl AcpThreadView { window.spawn(cx, async move |cx| { let mut task = login.clone(); + task.command = task + .command + .map(|command| anyhow::Ok(shlex::try_quote(&command)?.to_string())) + .transpose()?; + task.args = task + .args + .iter() + .map(|arg| { + Ok(shlex::try_quote(arg) + .context("Failed to quote argument")? 
+ .to_string()) + }) + .collect::>>()?; task.full_label = task.label.clone(); task.id = task::TaskId(format!("external-agent-{}-login", task.label)); task.command_label = task.label.clone(); @@ -1591,7 +1604,7 @@ impl AcpThreadView { task.shell = shell; let terminal = terminal_panel.update_in(cx, |terminal_panel, window, cx| { - terminal_panel.spawn_task(&login, window, cx) + terminal_panel.spawn_task(&task, window, cx) })?; let terminal = terminal.await?; @@ -5669,23 +5682,6 @@ pub(crate) mod tests { }); } - #[gpui::test] - async fn test_spawn_external_agent_login_handles_spaces(cx: &mut TestAppContext) { - init_test(cx); - - // Verify paths with spaces aren't pre-quoted - let path_with_spaces = "/Users/test/Library/Application Support/Zed/cli.js"; - let login_task = task::SpawnInTerminal { - command: Some("node".to_string()), - args: vec![path_with_spaces.to_string(), "/login".to_string()], - ..Default::default() - }; - - // Args should be passed as-is, not pre-quoted - assert!(!login_task.args[0].starts_with('"')); - assert!(!login_task.args[0].starts_with('\'')); - } - #[gpui::test] async fn test_notification_for_tool_authorization(cx: &mut TestAppContext) { init_test(cx); From 839c216620af116459e2ba15e82f3df8c3597349 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Sat, 20 Sep 2025 22:10:47 +0200 Subject: [PATCH 34/58] terminal: Re-add sanitizing trailing periods in URL detection (#38569) I accidentally regressed this when bumping alacritty in https://github.com/zed-industries/zed/pull/38505 cc @davewa Release Notes: - N/A --- crates/terminal/src/terminal_hyperlinks.rs | 145 ++++++++++++++++++++- 1 file changed, 144 insertions(+), 1 deletion(-) diff --git a/crates/terminal/src/terminal_hyperlinks.rs b/crates/terminal/src/terminal_hyperlinks.rs index 25db02c5e84f692622a1c97ed891c886b02b26a9..3c20261988a7b30e124000bcdae7596c162d0853 100644 --- a/crates/terminal/src/terminal_hyperlinks.rs +++ b/crates/terminal/src/terminal_hyperlinks.rs @@ -79,7 +79,8 @@ pub(super) fn find_from_grid_point( Some((url, true, url_match)) } else if let Some(url_match) = regex_match_at(term, point, &mut regex_searches.url_regex) { let url = term.bounds_to_string(*url_match.start(), *url_match.end()); - Some((url, true, url_match)) + let (sanitized_url, sanitized_match) = sanitize_url_punctuation(url, url_match, term); + Some((sanitized_url, true, sanitized_match)) } else if let Some(python_match) = regex_match_at(term, point, &mut regex_searches.python_file_line_regex) { @@ -164,6 +165,63 @@ pub(super) fn find_from_grid_point( }) } +fn sanitize_url_punctuation( + url: String, + url_match: Match, + term: &Term, +) -> (String, Match) { + let mut sanitized_url = url; + let mut chars_trimmed = 0; + + // First, handle parentheses balancing using single traversal + let (open_parens, close_parens) = + sanitized_url + .chars() + .fold((0, 0), |(opens, closes), c| match c { + '(' => (opens + 1, closes), + ')' => (opens, closes + 1), + _ => (opens, closes), + }); + + // Trim unbalanced closing parentheses + if close_parens > open_parens { + let mut remaining_close = close_parens; + while sanitized_url.ends_with(')') && remaining_close > open_parens { + sanitized_url.pop(); + chars_trimmed += 1; + remaining_close -= 1; + } + } + + // Handle trailing periods + if sanitized_url.ends_with('.') { + let trailing_periods = sanitized_url + .chars() + .rev() + .take_while(|&c| c == '.') + .count(); + + if trailing_periods > 1 { + sanitized_url.truncate(sanitized_url.len() - trailing_periods); + chars_trimmed += 
trailing_periods; + } else if trailing_periods == 1 + && let Some(second_last_char) = sanitized_url.chars().rev().nth(1) + && (second_last_char.is_alphanumeric() || second_last_char == '/') + { + sanitized_url.pop(); + chars_trimmed += 1; + } + } + + if chars_trimmed > 0 { + let new_end = url_match.end().sub(term, Boundary::Grid, chars_trimmed); + let sanitized_match = Match::new(*url_match.start(), new_end); + (sanitized_url, sanitized_match) + } else { + (sanitized_url, url_match) + } +} + fn is_path_surrounded_by_common_symbols(path: &str) -> bool { // Avoid detecting `[]` or `()` strings as paths, surrounded by common symbols path.len() > 2 @@ -233,6 +291,91 @@ mod tests { ); } + #[test] + fn test_url_parentheses_sanitization() { + // Test our sanitize_url_parentheses function directly + let test_cases = vec![ + // Cases that should be sanitized (unbalanced parentheses) + ("https://www.google.com/)", "https://www.google.com/"), + ("https://example.com/path)", "https://example.com/path"), + ("https://test.com/))", "https://test.com/"), + // Cases that should NOT be sanitized (balanced parentheses) + ( + "https://en.wikipedia.org/wiki/Example_(disambiguation)", + "https://en.wikipedia.org/wiki/Example_(disambiguation)", + ), + ("https://test.com/(hello)", "https://test.com/(hello)"), + ( + "https://example.com/path(1)(2)", + "https://example.com/path(1)(2)", + ), + // Edge cases + ("https://test.com/", "https://test.com/"), + ("https://example.com", "https://example.com"), + ]; + + for (input, expected) in test_cases { + // Create a minimal terminal for testing + let term = Term::new(Config::default(), &TermSize::new(80, 24), VoidListener); + + // Create a dummy match that spans the entire input + let start_point = AlacPoint::new(Line(0), Column(0)); + let end_point = AlacPoint::new(Line(0), Column(input.len())); + let dummy_match = Match::new(start_point, end_point); + + let (result, _) = sanitize_url_punctuation(input.to_string(), dummy_match, &term); + assert_eq!(result, expected, "Failed for input: {}", input); + } + } + + #[test] + fn test_url_periods_sanitization() { + // Test URLs with trailing periods (sentence punctuation) + let test_cases = vec![ + // Cases that should be sanitized (trailing periods likely punctuation) + ("https://example.com.", "https://example.com"), + ( + "https://github.com/zed-industries/zed.", + "https://github.com/zed-industries/zed", + ), + ( + "https://example.com/path/file.html.", + "https://example.com/path/file.html", + ), + ( + "https://example.com/file.pdf.", + "https://example.com/file.pdf", + ), + ("https://example.com:8080.", "https://example.com:8080"), + ("https://example.com..", "https://example.com"), + ( + "https://en.wikipedia.org/wiki/C.E.O.", + "https://en.wikipedia.org/wiki/C.E.O", + ), + // Cases that should NOT be sanitized (periods are part of URL structure) + ( + "https://example.com/v1.0/api", + "https://example.com/v1.0/api", + ), + ("https://192.168.1.1", "https://192.168.1.1"), + ("https://sub.domain.com", "https://sub.domain.com"), + ]; + + for (input, expected) in test_cases { + // Create a minimal terminal for testing + let term = Term::new(Config::default(), &TermSize::new(80, 24), VoidListener); + + // Create a dummy match that spans the entire input + let start_point = AlacPoint::new(Line(0), Column(0)); + let end_point = AlacPoint::new(Line(0), Column(input.len())); + let dummy_match = Match::new(start_point, end_point); + + // This test should initially fail since we haven't implemented period sanitization yet + let 
(result, _) = sanitize_url_punctuation(input.to_string(), dummy_match, &term); + assert_eq!(result, expected, "Failed for input: {}", input); + } + } + #[test] fn test_word_regex() { re_test( From 11041ef3b0b9751b70890190601dc3e247ebde7b Mon Sep 17 00:00:00 2001 From: Nia Date: Sun, 21 Sep 2025 13:54:59 +0200 Subject: [PATCH 35/58] perf: Greatly expand profiler (#38584) Expands on #38543 (notably allows setting importance categories and weights on tests, and a lot of internal refactoring) because I couldn't help myself. Also allows exporting runs to json and comparing across them. See code for docs. Release Notes: - N/A --- .cargo/config.toml | 2 + .gitignore | 1 + Cargo.lock | 4 + Cargo.toml | 1 + crates/util_macros/Cargo.toml | 1 + crates/util_macros/src/util_macros.rs | 224 ++++++++------ tooling/perf/Cargo.toml | 23 +- tooling/perf/src/lib.rs | 413 +++++++++++++++++++++++++ tooling/perf/src/main.rs | 414 +++++++++++++++++++++----- 9 files changed, 917 insertions(+), 166 deletions(-) create mode 100644 tooling/perf/src/lib.rs diff --git a/.cargo/config.toml b/.cargo/config.toml index 74d34226af09c11b56faa6722e00afa218c924f5..9da793fc48b62f7f03cd1d36a505fa1e1ef2a45a 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -5,6 +5,7 @@ rustflags = ["-C", "symbol-mangling-version=v0", "--cfg", "tokio_unstable"] [alias] xtask = "run --package xtask --" perf-test = ["test", "--profile", "release-fast", "--lib", "--bins", "--tests", "--config", "target.'cfg(true)'.runner='target/release/perf'", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]"] +perf-compare = ["run", "--release", "-p", "perf", "--", "compare"] [target.x86_64-unknown-linux-gnu] linker = "clang" @@ -24,3 +25,4 @@ rustflags = [ [env] MACOSX_DEPLOYMENT_TARGET = "10.15.7" +CARGO_WORKSPACE_DIR = { value = "", relative = true } diff --git a/.gitignore b/.gitignore index 7b40c45adf614eb91f1676144e7b70a7b2a373f2..d248b1f7e5adf30cb286a1737c1cd4f72f0f5d20 100644 --- a/.gitignore +++ b/.gitignore @@ -20,6 +20,7 @@ .venv .vscode .wrangler +.perf-runs /assets/*licenses.* /crates/collab/seed.json /crates/theme/schemas/theme.json diff --git a/Cargo.lock b/Cargo.lock index 6293b0cc2da475ef5f2282a039c123c76d31e1c7..dbe2467499ad1c5d6f67c4de82546e2b560451bb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12169,6 +12169,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" name = "perf" version = "0.1.0" dependencies = [ + "collections", + "serde", + "serde_json", "workspace-hack", ] @@ -18586,6 +18589,7 @@ dependencies = [ name = "util_macros" version = "0.1.0" dependencies = [ + "perf", "quote", "syn 2.0.101", "workspace-hack", diff --git a/Cargo.toml b/Cargo.toml index fd08fbb3f971e84d27e469bd79888531366109c4..d4812908ac8292caf8371ce1d6dd9c9ee4042ca0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -356,6 +356,7 @@ outline = { path = "crates/outline" } outline_panel = { path = "crates/outline_panel" } panel = { path = "crates/panel" } paths = { path = "crates/paths" } +perf = { path = "tooling/perf" } picker = { path = "crates/picker" } plugin = { path = "crates/plugin" } plugin_macros = { path = "crates/plugin_macros" } diff --git a/crates/util_macros/Cargo.toml b/crates/util_macros/Cargo.toml index 45145a68f6a7d54d759d932c3dc851d14f4939d9..344331f1395ebb90251c4b08f92c57f28213fa4f 100644 --- a/crates/util_macros/Cargo.toml +++ b/crates/util_macros/Cargo.toml @@ -16,6 +16,7 @@ doctest = false [dependencies] quote.workspace = true syn.workspace = true +perf.workspace = true 
workspace-hack.workspace = true [features] diff --git a/crates/util_macros/src/util_macros.rs b/crates/util_macros/src/util_macros.rs index d3f05afdecbca8cb3b4c8685054d3828e6c702fd..69f6306133f490087b2cefeb71aeafab08b98a9a 100644 --- a/crates/util_macros/src/util_macros.rs +++ b/crates/util_macros/src/util_macros.rs @@ -1,6 +1,7 @@ #![cfg_attr(not(target_os = "windows"), allow(unused))] #![allow(clippy::test_attr_in_doctest)] +use perf::*; use proc_macro::TokenStream; use quote::{ToTokens, quote}; use syn::{ItemFn, LitStr, parse_macro_input, parse_quote}; @@ -90,68 +91,81 @@ pub fn line_endings(input: TokenStream) -> TokenStream { } /// Inner data for the perf macro. +#[derive(Default)] struct PerfArgs { - /// How many times to loop a test before rerunning the test binary. - /// If left empty, the test harness will auto-determine this value. + /// How many times to loop a test before rerunning the test binary. If left + /// empty, the test harness will auto-determine this value. iterations: Option, + /// How much this test's results should be weighed when comparing across runs. + /// If unspecified, defaults to `WEIGHT_DEFAULT` (50). + weight: Option, + /// How relevant a benchmark is to overall performance. See docs on the enum + /// for details. If unspecified, `Average` is selected. + importance: Importance, } -impl syn::parse::Parse for PerfArgs { - fn parse(input: syn::parse::ParseStream) -> syn::Result { - if input.is_empty() { - return Ok(PerfArgs { iterations: None }); - } - - let mut iterations = None; - // In principle we only have one possible argument, but leave this as - // a loop in case we expand this in the future. - for meta in - syn::punctuated::Punctuated::::parse_terminated(input)? - { - match &meta { - syn::Meta::NameValue(meta_name_value) => { - if meta_name_value.path.is_ident("iterations") { - iterations = Some(meta_name_value.value.clone()); - } else { - return Err(syn::Error::new_spanned( - &meta_name_value.path, - "unexpected argument, expected 'iterations'", - )); - } - } - _ => { - return Err(syn::Error::new_spanned( - meta, - "expected name-value argument like 'iterations = 1'", - )); - } - } +#[warn(clippy::all, clippy::pedantic)] +impl PerfArgs { + /// Parses attribute arguments into a `PerfArgs`. + fn parse_into(&mut self, meta: syn::meta::ParseNestedMeta) -> syn::Result<()> { + if meta.path.is_ident("iterations") { + self.iterations = Some(meta.value()?.parse()?); + } else if meta.path.is_ident("weight") { + self.weight = Some(meta.value()?.parse()?); + } else if meta.path.is_ident("critical") { + self.importance = Importance::Critical; + } else if meta.path.is_ident("important") { + self.importance = Importance::Important; + } else if meta.path.is_ident("average") { + // This shouldn't be specified manually, but oh well. + self.importance = Importance::Average; + } else if meta.path.is_ident("iffy") { + self.importance = Importance::Iffy; + } else if meta.path.is_ident("fluff") { + self.importance = Importance::Fluff; + } else { + return Err(syn::Error::new_spanned(meta.path, "unexpected identifier")); } - - Ok(PerfArgs { iterations }) + Ok(()) } } /// Marks a test as perf-sensitive, to be triaged when checking the performance /// of a build. This also automatically applies `#[test]`. /// +/// +/// # Usage +/// Applying this attribute to a test marks it as average importance by default. +/// There are 4 levels of importance (`Critical`, `Important`, `Average`, `Fluff`); +/// see the documentation on `Importance` for details. 
Add the importance as a +/// parameter to override the default (e.g. `#[perf(important)]`). +/// +/// Each test also has a weight factor. This is irrelevant on its own, but is considered +/// when comparing results across different runs. By default, this is set to 50; +/// pass `weight = n` as a parameter to override this. Note that this value is only +/// relevant within its importance category. +/// /// By default, the number of iterations when profiling this test is auto-determined. -/// If this needs to be overwritten, pass the desired iteration count to the macro -/// as a parameter (`#[perf(iterations = n)]`). Note that the actual profiler may still -/// run the test an arbitrary number times; this flag just sets the number of executions -/// before the process is restarted and global state is reset. +/// If this needs to be overwritten, pass the desired iteration count as a parameter +/// (`#[perf(iterations = n)]`). Note that the actual profiler may still run the test +/// an arbitrary number times; this flag just sets the number of executions before the +/// process is restarted and global state is reset. /// -/// # Usage notes -/// This should probably not be applied to tests that do any significant fs IO, as -/// locks on files may not be released in time when repeating a test many times. This -/// might lead to spurious failures. +/// This attribute should probably not be applied to tests that do any significant +/// disk IO, as locks on files may not be released in time when repeating a test many +/// times. This might lead to spurious failures. /// /// # Examples /// ```rust /// use util_macros::perf; /// /// #[perf] -/// fn expensive_computation_test() { +/// fn generic_test() { +/// // Test goes here. +/// } +/// +/// #[perf(fluff, weight = 30)] +/// fn cold_path_test() { /// // Test goes here. /// } /// ``` @@ -161,72 +175,108 @@ impl syn::parse::Parse for PerfArgs { /// ```rust,ignore /// use util_macros::perf; /// -/// #[perf(iterations = 1)] +/// #[perf(iterations = 1, critical)] /// #[gpui::test] /// fn oneshot_test(_cx: &mut gpui::TestAppContext) { /// // Test goes here. /// } /// ``` #[proc_macro_attribute] +#[warn(clippy::all, clippy::pedantic)] pub fn perf(our_attr: TokenStream, input: TokenStream) -> TokenStream { - // If any of the below constants are changed, make sure to also update the perf - // profiler to match! - - /// The suffix on tests marked with `#[perf]`. - const SUF_NORMAL: &str = "__ZED_PERF"; - /// The suffix on tests marked with `#[perf(iterations = n)]`. - const SUF_FIXED: &str = "__ZED_PERF_FIXEDITER"; - /// The env var in which we pass the iteration count to our tests. - const ITER_ENV_VAR: &str = "ZED_PERF_ITER"; - - let iter_count = parse_macro_input!(our_attr as PerfArgs).iterations; + let mut args = PerfArgs::default(); + let parser = syn::meta::parser(|meta| PerfArgs::parse_into(&mut args, meta)); + parse_macro_input!(our_attr with parser); let ItemFn { - mut attrs, + attrs: mut attrs_main, vis, - mut sig, + sig: mut sig_main, block, } = parse_macro_input!(input as ItemFn); - attrs.push(parse_quote!(#[test])); - attrs.push(parse_quote!(#[allow(non_snake_case)])); + attrs_main.push(parse_quote!(#[test])); + attrs_main.push(parse_quote!(#[allow(non_snake_case)])); + + let fns = if cfg!(perf_enabled) { + #[allow(clippy::wildcard_imports, reason = "We control the other side")] + use consts::*; - let block: Box = if cfg!(perf_enabled) { // Make the ident obvious when calling, for the test parser. 
- let mut new_ident = sig.ident.to_string(); - if iter_count.is_some() { - new_ident.push_str(SUF_FIXED); - } else { - new_ident.push_str(SUF_NORMAL); - } + // Also set up values for the second metadata-returning "test". + let mut new_ident_main = sig_main.ident.to_string(); + let mut new_ident_meta = new_ident_main.clone(); + new_ident_main.push_str(SUF_NORMAL); + new_ident_meta.push_str(SUF_MDATA); - let new_ident = syn::Ident::new(&new_ident, sig.ident.span()); - sig.ident = new_ident; - // If we have a preset iteration count, just use that. - if let Some(iter_count) = iter_count { - parse_quote!({ - for _ in 0..#iter_count { - #block - } - }) - } else { - // Otherwise, the perf harness will pass us the value in an env var. + let new_ident_main = syn::Ident::new(&new_ident_main, sig_main.ident.span()); + sig_main.ident = new_ident_main; + + // We don't want any nonsense if the original test had a weird signature. + let new_ident_meta = syn::Ident::new(&new_ident_meta, sig_main.ident.span()); + let sig_meta = parse_quote!(fn #new_ident_meta()); + let attrs_meta = parse_quote!(#[test] #[allow(non_snake_case)]); + + // Make the test loop as the harness instructs it to. + let block_main = { + // The perf harness will pass us the value in an env var. Even if we + // have a preset value, just do this to keep the code paths unified. parse_quote!({ let iter_count = std::env::var(#ITER_ENV_VAR).unwrap().parse::().unwrap(); for _ in 0..iter_count { #block } }) - } + }; + let importance = format!("{}", args.importance); + let block_meta = { + // This function's job is to just print some relevant info to stdout, + // based on the params this attr is passed. It's not an actual test. + // Since we use a custom attr set on our metadata fn, it shouldn't + // cause problems with xfail tests. + let q_iter = if let Some(iter) = args.iterations { + quote! { + println!("{} {} {}", #MDATA_LINE_PREF, #ITER_COUNT_LINE_NAME, #iter); + } + } else { + quote! {} + }; + let weight = args + .weight + .unwrap_or_else(|| parse_quote! { #WEIGHT_DEFAULT }); + parse_quote!({ + #q_iter + println!("{} {} {}", #MDATA_LINE_PREF, #WEIGHT_LINE_NAME, #weight); + println!("{} {} {}", #MDATA_LINE_PREF, #IMPORTANCE_LINE_NAME, #importance); + println!("{} {} {}", #MDATA_LINE_PREF, #VERSION_LINE_NAME, #MDATA_VER); + }) + }; + + vec![ + // The real test. + ItemFn { + attrs: attrs_main, + vis: vis.clone(), + sig: sig_main, + block: block_main, + }, + // The fake test. 
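+            // Only exists so the profiler can query weight/importance without timing anything.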
+ ItemFn { + attrs: attrs_meta, + vis, + sig: sig_meta, + block: block_meta, + }, + ] } else { - block + vec![ItemFn { + attrs: attrs_main, + vis, + sig: sig_main, + block, + }] }; - ItemFn { - attrs, - vis, - sig, - block, - } - .into_token_stream() - .into() + fns.into_iter() + .flat_map(|f| TokenStream::from(f.into_token_stream())) + .collect() } diff --git a/tooling/perf/Cargo.toml b/tooling/perf/Cargo.toml index f5013a82836b9888d94fb39fa18f0efa00e1b0ce..4766b58d8a760aa995dba7092d33c436559019c2 100644 --- a/tooling/perf/Cargo.toml +++ b/tooling/perf/Cargo.toml @@ -1,11 +1,30 @@ [package] name = "perf" version = "0.1.0" +description = "A tool for measuring Zed test performance, with too many Clippy lints" publish.workspace = true edition.workspace = true -[lints] -workspace = true +[lib] + +# Some personal lint preferences :3 +[lints.rust] +missing_docs = "warn" + +[lints.clippy] +needless_continue = "allow" # For a convenience macro +all = "warn" +pedantic = "warn" +style = "warn" +missing_docs_in_private_items = "warn" +as_underscore = "deny" +allow_attributes_without_reason = "deny" +let_underscore_must_use = "forbid" +undocumented_unsafe_blocks = "forbid" +missing_safety_doc = "forbid" [dependencies] +collections.workspace = true +serde.workspace = true +serde_json.workspace = true workspace-hack.workspace = true diff --git a/tooling/perf/src/lib.rs b/tooling/perf/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..30909f646b061895e10f5c860149e2370892ccd2 --- /dev/null +++ b/tooling/perf/src/lib.rs @@ -0,0 +1,413 @@ +//! Some constants and datatypes used in the Zed perf profiler. Should only be +//! consumed by the crate providing the matching macros. + +use collections::HashMap; +use serde::{Deserialize, Serialize}; +use std::time::Duration; + +pub mod consts { + //! Preset idenitifiers and constants so that the profiler and proc macro agree + //! on their communication protocol. + + /// The suffix on the actual test function. + pub const SUF_NORMAL: &str = "__ZED_PERF_FN"; + /// The suffix on an extra function which prints metadata about a test to stdout. + pub const SUF_MDATA: &str = "__ZED_PERF_MDATA"; + /// The env var in which we pass the iteration count to our tests. + pub const ITER_ENV_VAR: &str = "ZED_PERF_ITER"; + /// The prefix printed on all benchmark test metadata lines, to distinguish it from + /// possible output by the test harness itself. + pub const MDATA_LINE_PREF: &str = "ZED_MDATA_"; + /// The version number for the data returned from the test metadata function. + /// Increment on non-backwards-compatible changes. + pub const MDATA_VER: u32 = 0; + /// The default weight, if none is specified. + pub const WEIGHT_DEFAULT: u8 = 50; + /// How long a test must have run to be assumed to be reliable-ish. + pub const NOISE_CUTOFF: std::time::Duration = std::time::Duration::from_millis(250); + + /// Identifier for the iteration count of a test metadata. + pub const ITER_COUNT_LINE_NAME: &str = "iter_count"; + /// Identifier for the weight of a test metadata. + pub const WEIGHT_LINE_NAME: &str = "weight"; + /// Identifier for importance in test metadata. + pub const IMPORTANCE_LINE_NAME: &str = "importance"; + /// Identifier for the test metadata version. + pub const VERSION_LINE_NAME: &str = "version"; + + /// Where to save json run information. + pub const RUNS_DIR: &str = ".perf-runs"; +} + +/// How relevant a benchmark is. 
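+/// The profiler triages everything at `Iffy` or above by default; `Fluff` benchmarks are skipped unless explicitly requested.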
+#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)] +pub enum Importance { + /// Regressions shouldn't be accepted without good reason. + Critical = 4, + /// Regressions should be paid extra attention. + Important = 3, + /// No extra attention should be paid to regressions, but they might still + /// be indicative of something happening. + #[default] + Average = 2, + /// Unclear if regressions are likely to be meaningful, but still worth keeping + /// an eye on. Lowest level that's checked by default by the profiler. + Iffy = 1, + /// Regressions are likely to be spurious or don't affect core functionality. + /// Only relevant if a lot of them happen, or as supplemental evidence for a + /// higher-importance benchmark regressing. Not checked by default. + Fluff = 0, +} + +impl std::fmt::Display for Importance { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Importance::Critical => f.write_str("critical"), + Importance::Important => f.write_str("important"), + Importance::Average => f.write_str("average"), + Importance::Iffy => f.write_str("iffy"), + Importance::Fluff => f.write_str("fluff"), + } + } +} + +/// Why or when did this test fail? +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum FailKind { + /// Failed while triaging it to determine the iteration count. + Triage, + /// Failed while profiling it. + Profile, + /// Failed due to an incompatible version for the test. + VersionMismatch, + /// Skipped due to filters applied on the perf run. + Skipped, +} + +impl std::fmt::Display for FailKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + FailKind::Triage => f.write_str("failed in triage"), + FailKind::Profile => f.write_str("failed while profiling"), + FailKind::VersionMismatch => f.write_str("test version mismatch"), + FailKind::Skipped => f.write_str("skipped"), + } + } +} + +/// Information about a given perf test. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct TestMdata { + /// A version number for when the test was generated. If this is greater + /// than the version this test handler expects, one of the following will + /// happen in an unspecified manner: + /// - The test is skipped silently. + /// - The handler exits with an error message indicating the version mismatch + /// or inability to parse the metadata. + /// + /// INVARIANT: If `version` <= `MDATA_VER`, this tool *must* be able to + /// correctly parse the output of this test. + pub version: u32, + /// How many iterations to pass this test, if this is preset. + pub iterations: Option, + /// The importance of this particular test. See the docs on `Importance` for + /// details. + pub importance: Importance, + /// The weight of this particular test within its importance category. Used + /// when comparing across runs. + pub weight: u8, +} + +/// The actual timings of a test, as measured by Hyperfine. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Timings { + /// Mean runtime for `self.iter_total` runs of this test. + pub mean: Duration, + /// Standard deviation for the above. + pub stddev: Duration, +} + +impl Timings { + /// How many iterations does this test seem to do per second? + #[expect( + clippy::cast_precision_loss, + reason = "We only care about a couple sig figs anyways" + )] + #[must_use] + pub fn iters_per_sec(&self, total_iters: usize) -> f64 { + (1000. 
/ self.mean.as_millis() as f64) * total_iters as f64 + } +} + +/// Aggregate output of all tests run by this handler. +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +pub struct Output { + /// A list of test outputs. Format is `(test_name, iter_count, timings)`. + /// The latter being set indicates the test succeeded. + /// + /// INVARIANT: If the test succeeded, the second field is `Some(mdata)` and + /// `mdata.iterations` is `Some(_)`. + tests: Vec<(String, Option, Result)>, +} + +impl Output { + /// Instantiates an empty "output". Useful for merging. + #[must_use] + pub fn blank() -> Self { + Output { tests: Vec::new() } + } + + /// Reports a success and adds it to this run's `Output`. + pub fn success( + &mut self, + name: impl AsRef, + mut mdata: TestMdata, + iters: usize, + timings: Timings, + ) { + mdata.iterations = Some(iters); + self.tests + .push((name.as_ref().to_string(), Some(mdata), Ok(timings))); + } + + /// Reports a failure and adds it to this run's `Output`. If this test was tried + /// with some number of iterations (i.e. this was not a version mismatch or skipped + /// test), it should be reported also. + /// + /// Using the `fail!()` macro is usually more convenient. + pub fn failure( + &mut self, + name: impl AsRef, + mut mdata: Option, + attempted_iters: Option, + kind: FailKind, + ) { + if let Some(ref mut mdata) = mdata { + mdata.iterations = attempted_iters; + } + self.tests + .push((name.as_ref().to_string(), mdata, Err(kind))); + } + + /// True if no tests executed this run. + #[must_use] + pub fn is_empty(&self) -> bool { + self.tests.is_empty() + } + + /// Sorts the runs in the output in the order that we want it printed. + pub fn sort(&mut self) { + self.tests.sort_unstable_by(|a, b| match (a, b) { + // Tests where we got no metadata go at the end. + ((_, Some(_), _), (_, None, _)) => std::cmp::Ordering::Greater, + ((_, None, _), (_, Some(_), _)) => std::cmp::Ordering::Less, + // Then sort by importance, then weight. + ((_, Some(a_mdata), _), (_, Some(b_mdata), _)) => { + let c = a_mdata.importance.cmp(&b_mdata.importance); + if matches!(c, std::cmp::Ordering::Equal) { + a_mdata.weight.cmp(&b_mdata.weight) + } else { + c + } + } + // Lastly by name. + ((a_name, ..), (b_name, ..)) => a_name.cmp(b_name), + }); + } + + /// Merges the output of two runs, appending a prefix to the results of the new run. + /// To be used in conjunction with `Output::blank()`, or else only some tests will have + /// a prefix set. + pub fn merge(&mut self, other: Self, pref_other: impl AsRef) { + self.tests = std::mem::take(&mut self.tests) + .into_iter() + .chain(other.tests.into_iter().map(|(name, md, tm)| { + let mut new_name = "crates/".to_string(); + new_name.push_str(pref_other.as_ref()); + new_name.push_str("::"); + new_name.push_str(&name); + (new_name, md, tm) + })) + .collect(); + } + + /// Evaluates the performance of `self` against `baseline`. The latter is taken + /// as the comparison point, i.e. a positive resulting `PerfReport` means that + /// `self` performed better. + /// + /// # Panics + /// `self` and `baseline` are assumed to have the iterations field on all + /// `TestMdata`s set to `Some(_)` if the `TestMdata` is present itself. + #[must_use] + pub fn compare_perf(self, baseline: Self) -> PerfReport { + let self_categories = self.collapse(); + let mut other_categories = baseline.collapse(); + + let deltas = self_categories + .into_iter() + .filter_map(|(cat, self_data)| { + // Only compare categories where both meow + // runs have data. 
/ + let mut other_data = other_categories.remove(&cat)?; + let mut max = 0.; + let mut min = 0.; + + // Running totals for averaging out tests. + let mut r_total_numerator = 0.; + let mut r_total_denominator = 0; + // Yeah this is O(n^2), but realistically it'll hardly be a bottleneck. + for (name, (s_timings, s_iters, weight)) in self_data { + // Only use the new weights if they conflict. + let Some((o_timings, o_iters, _)) = other_data.remove(&name) else { + continue; + }; + let shift = + (s_timings.iters_per_sec(s_iters) / o_timings.iters_per_sec(o_iters)) - 1.; + if shift > max { + max = shift; + } + if shift < min { + min = shift; + } + r_total_numerator += shift * f64::from(weight); + r_total_denominator += u32::from(weight); + } + let mean = r_total_numerator / f64::from(r_total_denominator); + // TODO: also aggregate standard deviation? that's harder to keep + // meaningful, though, since we dk which tests are correlated + Some((cat, PerfDelta { max, mean, min })) + }) + .collect(); + + PerfReport { deltas } + } + + /// Collapses the `PerfReport` into a `HashMap` of `Importance` <-> tests + /// each represented as a map of `name, (Timings, iterations, weight)`. + fn collapse(self) -> HashMap> { + let mut categories = HashMap::>::default(); + for entry in self.tests { + if let Some(mdata) = entry.1 + && let Ok(timings) = entry.2 + { + if let Some(handle) = categories.get_mut(&mdata.importance) { + handle.insert(entry.0, (timings, mdata.iterations.unwrap(), mdata.weight)); + } else { + let mut new = HashMap::default(); + new.insert(entry.0, (timings, mdata.iterations.unwrap(), mdata.weight)); + categories.insert(mdata.importance, new); + } + } + } + + categories + } +} + +impl std::fmt::Display for Output { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + // Don't print the header for an empty run. + if self.tests.is_empty() { + return Ok(()); + } + + // We want to print important tests at the top, then alphabetical. + let mut sorted = self.clone(); + sorted.sort(); + // Markdown header for making a nice little table :> + writeln!( + f, + "| Command | Iter/sec | Mean [ms] | SD [ms] | Iterations | Importance (weight) |", + )?; + writeln!(f, "|:---|---:|---:|---:|---:|---:|")?; + for (name, metadata, timings) in &sorted.tests { + match metadata { + Some(metadata) => match timings { + // Happy path. + Ok(timings) => { + // If the test succeeded, then metadata.iterations is Some(_). + writeln!( + f, + "| {} | {:.2} | {} | {:.2} | {} | {} ({}) |", + name, + timings.iters_per_sec(metadata.iterations.unwrap()), + { + // Very small mean runtimes will give inaccurate + // results. Should probably also penalise weight. + let mean = timings.mean.as_secs_f64() * 1000.; + if mean < consts::NOISE_CUTOFF.as_secs_f64() * 1000. / 8. { + format!("{mean:.2} (unreliable)") + } else { + format!("{mean:.2}") + } + }, + timings.stddev.as_secs_f64() * 1000., + metadata.iterations.unwrap(), + metadata.importance, + metadata.weight, + )?; + } + // We have (some) metadata, but the test errored. + Err(err) => writeln!( + f, + "| ({}) {} | N/A | N/A | N/A | {} | {} ({}) |", + err, + name, + metadata + .iterations + .map_or_else(|| "N/A".to_owned(), |i| format!("{i}")), + metadata.importance, + metadata.weight + )?, + }, + // No metadata, couldn't even parse the test output. 
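+                // Keep the row so the failure stays visible; every measured column becomes N/A.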
+ None => writeln!( + f, + "| ({}) {} | N/A | N/A | N/A | N/A | N/A |", + timings.as_ref().unwrap_err(), + name + )?, + } + } + writeln!(f)?; + Ok(()) + } +} + +/// The difference in performance between two runs within a given importance +/// category. +struct PerfDelta { + /// The biggest improvement / least bad regression. + max: f64, + /// The weighted average change in test times. + mean: f64, + /// The worst regression / smallest improvement. + min: f64, +} + +/// Shim type for reporting all performance deltas across importance categories. +pub struct PerfReport { + /// Inner (group, diff) pairing. + deltas: HashMap, +} + +impl std::fmt::Display for PerfReport { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if self.deltas.is_empty() { + return write!(f, "(no matching tests)"); + } + let sorted = self.deltas.iter().collect::>(); + writeln!(f, "| Category | Max | Mean | Min |")?; + // We don't want to print too many newlines at the end, so handle newlines + // a little jankily like this. + write!(f, "|:---|---:|---:|---:|")?; + for (cat, delta) in sorted.into_iter().rev() { + write!( + f, + "\n| {cat} | {:.3} | {:.3} | {:.3} |", + delta.max, delta.mean, delta.min + )?; + } + Ok(()) + } +} diff --git a/tooling/perf/src/main.rs b/tooling/perf/src/main.rs index a119811aba76afccc16dbef48e4dbee576b46fdc..2610adc66f88dfa675df975219f5b2937011e81b 100644 --- a/tooling/perf/src/main.rs +++ b/tooling/perf/src/main.rs @@ -1,18 +1,15 @@ -#![warn(clippy::all, clippy::pedantic, clippy::undocumented_unsafe_blocks)] -#![cfg_attr(release, deny(warnings))] - //! Perf profiler for Zed tests. Outputs timings of tests marked with the `#[perf]` //! attribute to stdout in Markdown. See the documentation of `util_macros::perf` //! for usage details on the actual attribute. //! //! # Setup //! Make sure `hyperfine` is installed and in the shell path, then run -//! `cargo build --bin perf --workspace --release` to build the profiler. +//! `cargo build -p perf --release` to build the profiler. //! //! # Usage -//! Calling this tool rebuilds everything with some cfg flags set for the perf -//! proc macro *and* enables optimisations (`release-fast` profile), so expect it -//! to take a little while. +//! Calling this tool rebuilds the targeted crate(s) with some cfg flags set for the +//! perf proc macro *and* enables optimisations (`release-fast` profile), so expect +//! it to take a little while. //! //! To test an individual crate, run: //! ```sh @@ -24,37 +21,221 @@ //! cargo perf-test --workspace //! ``` //! +//! Some command-line parameters are also recognised by this profiler. To filter +//! out all tests below a certain importance (e.g. `important`), run: +//! ```sh +//! cargo perf-test $WHATEVER -- --important +//! ``` +//! +//! Similarly, to skip outputting progress to the command line, pass `-- --quiet`. +//! These flags can be combined. +//! +//! ## Comparing runs +//! Passing `--json=ident` will save per-crate run files in `.perf-runs`, e.g. +//! `cargo perf-test -p gpui -- --json=blah` will result in `.perf-runs/blah.gpui.json` +//! being created (unless no tests were run). These results can be automatically +//! compared. To do so, run `cargo perf-compare new-ident old-ident`. +//! +//! NB: All files matching `.perf-runs/ident.*.json` will be considered when +//! doing this comparison, so ensure there aren't leftover files in your `.perf-runs` +//! directory that might match that! +//! //! # Notes //! 
This should probably not be called manually unless you're working on the profiler //! itself; use the `cargo perf-test` alias (after building this crate) instead. +#[allow(clippy::wildcard_imports, reason = "Our crate")] +use perf::*; + use std::{ + fs::OpenOptions, + io::Write, + path::{Path, PathBuf}, process::{Command, Stdio}, time::{Duration, Instant}, }; /// How many iterations to attempt the first time a test is run. -const DEFAULT_ITER_COUNT: usize = 12; +const DEFAULT_ITER_COUNT: usize = 3; /// Multiplier for the iteration count when a test doesn't pass the noise cutoff. const ITER_COUNT_MUL: usize = 4; -/// How long a test must have run to be assumed to be reliable-ish. -const NOISE_CUTOFF: Duration = Duration::from_millis(250); -// If any of the below constants are changed, make sure to also update the perf -// proc macro to match! +/// Report a failure into the output and skip an iteration. +macro_rules! fail { + ($output:ident, $name:expr, $kind:expr) => {{ + $output.failure($name, None, None, $kind); + continue; + }}; + ($output:ident, $name:expr, $mdata:expr, $kind:expr) => {{ + $output.failure($name, Some($mdata), None, $kind); + continue; + }}; + ($output:ident, $name:expr, $mdata:expr, $count:expr, $kind:expr) => {{ + $output.failure($name, Some($mdata), Some($count), $kind); + continue; + }}; +} -/// The suffix on tests marked with `#[perf]`. -const SUF_NORMAL: &str = "__ZED_PERF"; -/// The suffix on tests marked with `#[perf(iterations = n)]`. -const SUF_FIXED: &str = "__ZED_PERF_FIXEDITER"; -/// The env var in which we pass the iteration count to our tests. -const ITER_ENV_VAR: &str = "ZED_PERF_ITER"; +/// How does this perf run return its output? +enum OutputKind<'a> { + /// Print markdown to the terminal. + Markdown, + /// Save JSON to a file. + Json(&'a Path), +} -#[allow(clippy::too_many_lines)] +/// Runs a given metadata-returning function from a test handler, parsing its +/// output into a `TestMdata`. +fn parse_mdata(test_bin: &str, mdata_fn: &str) -> Result { + let mut cmd = Command::new(test_bin); + cmd.args([mdata_fn, "--exact", "--nocapture"]); + let out = cmd + .output() + .expect("FATAL: Could not run test binary {test_bin}"); + assert!(out.status.success()); + let stdout = String::from_utf8_lossy(&out.stdout); + let mut version = None; + let mut iterations = None; + let mut importance = Importance::default(); + let mut weight = consts::WEIGHT_DEFAULT; + for line in stdout + .lines() + .filter_map(|l| l.strip_prefix(consts::MDATA_LINE_PREF)) + { + let mut items = line.split_whitespace(); + // For v0, we know the ident always comes first, then one field. + match items.next().unwrap() { + consts::VERSION_LINE_NAME => { + let v = items.next().unwrap().parse::().unwrap(); + if v > consts::MDATA_VER { + return Err(FailKind::VersionMismatch); + } + version = Some(v); + } + consts::ITER_COUNT_LINE_NAME => { + iterations = Some(items.next().unwrap().parse::().unwrap()); + } + consts::IMPORTANCE_LINE_NAME => { + importance = match items.next().unwrap() { + "critical" => Importance::Critical, + "important" => Importance::Important, + "average" => Importance::Average, + "iffy" => Importance::Iffy, + "fluff" => Importance::Fluff, + _ => unreachable!(), + }; + } + consts::WEIGHT_LINE_NAME => { + weight = items.next().unwrap().parse::().unwrap(); + } + _ => unreachable!(), + } + } + + Ok(TestMdata { + version: version.unwrap(), + // Iterations may be determined by us and thus left unspecified. 
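+        // (`Output::success` overwrites this with the iteration count actually used.)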
+ iterations, + // In principle this should always be set, but just for the sake of + // stability allow the potentially-breaking change of not reporting the + // importance without erroring. Maybe we want to change this. + importance, + // Same with weight. + weight, + }) +} + +/// Compares the perf results of two profiles as per the arguments passed in. +fn compare_profiles(args: &[String]) { + let ident_new = args.first().expect("FATAL: missing identifier for new run"); + let ident_old = args.get(1).expect("FATAL: missing identifier for old run"); + // TODO: move this to a constant also tbh + let wspace_dir = std::env::var("CARGO_WORKSPACE_DIR").unwrap(); + let runs_dir = PathBuf::from(&wspace_dir).join(consts::RUNS_DIR); + + // Use the blank outputs initially, so we can merge into these with prefixes. + let mut outputs_new = Output::blank(); + let mut outputs_old = Output::blank(); + + for e in runs_dir.read_dir().unwrap() { + let Ok(entry) = e else { + continue; + }; + let Ok(metadata) = entry.metadata() else { + continue; + }; + if metadata.is_file() { + let Ok(name) = entry.file_name().into_string() else { + continue; + }; + + // A little helper to avoid code duplication. Reads the `output` from + // a json file, then merges it into what we have so far. + let read_into = |output: &mut Output| { + let mut elems = name.split('.').skip(1); + let prefix = elems.next().unwrap(); + assert_eq!("json", elems.next().unwrap()); + assert!(elems.next().is_none()); + let handle = OpenOptions::new().read(true).open(entry.path()).unwrap(); + let o_other: Output = serde_json::from_reader(handle).unwrap(); + output.merge(o_other, prefix); + }; + + if name.starts_with(ident_old) { + read_into(&mut outputs_old); + } else if name.starts_with(ident_new) { + read_into(&mut outputs_new); + } + } + } + + let res = outputs_new.compare_perf(outputs_old); + println!("{res}"); +} + +#[expect(clippy::too_many_lines, reason = "This will be split up soon!")] fn main() { + let args = std::env::args().collect::>(); // We get passed the test we need to run as the 1st argument after our own name. - let test_bin = std::env::args().nth(1).unwrap(); - let mut cmd = Command::new(&test_bin); + let test_bin = args + .get(1) + .expect("FATAL: No test binary or command; this shouldn't be manually invoked!"); + + // We're being asked to compare two results, not run the profiler. + if test_bin == "compare" { + compare_profiles(&args[2..]); + return; + } + + // Whether to skip printing some information to stderr. + let mut quiet = false; + // Minimum test importance we care about this run. + let mut thresh = Importance::Iffy; + // Where to print the output of this run. 
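+    // Markdown on stdout by default; `--json=ident` redirects results into `.perf-runs`.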
+ let mut out_kind = OutputKind::Markdown; + + for arg in args.iter().skip(2) { + match arg.as_str() { + "--critical" => thresh = Importance::Critical, + "--important" => thresh = Importance::Important, + "--average" => thresh = Importance::Average, + "--iffy" => thresh = Importance::Iffy, + "--fluff" => thresh = Importance::Fluff, + "--quiet" => quiet = true, + s if s.starts_with("--json") => { + out_kind = OutputKind::Json(Path::new( + s.strip_prefix("--json=") + .expect("FATAL: Invalid json parameter; pass --json=filename"), + )); + } + _ => (), + } + } + if !quiet { + eprintln!("Starting perf check"); + } + + let mut cmd = Command::new(test_bin); // --format=json is nightly-only :( cmd.args(["--list", "--format=terse"]); let out = cmd @@ -64,6 +245,9 @@ fn main() { out.status.success(), "FATAL: Cannot do perf check - test binary {test_bin} returned an error" ); + if !quiet { + eprintln!("Test binary ran successfully; starting profile..."); + } // Parse the test harness output to look for tests we care about. let stdout = String::from_utf8_lossy(&out.stdout); let mut test_list: Vec<_> = stdout @@ -79,37 +263,62 @@ fn main() { } }) // Exclude tests that aren't marked for perf triage based on suffix. - .filter(|t_name| t_name.ends_with(SUF_NORMAL) || t_name.ends_with(SUF_FIXED)) + .filter(|t_name| { + t_name.ends_with(consts::SUF_NORMAL) || t_name.ends_with(consts::SUF_MDATA) + }) .collect(); // Pulling itertools just for .dedup() would be quite a big dependency that's - // not used elsewhere, so do this on the vec instead. + // not used elsewhere, so do this on a vec instead. test_list.sort_unstable(); test_list.dedup(); - if !test_list.is_empty() { - // Print the markdown header which matches hyperfine's result. - // TODO: Support exporting JSON also. - println!( - "| Command | Mean [ms] | Min [ms] | Max [ms] | Iterations | Iter/sec |\n|:---|---:|---:|---:|---:|---:|" - ); - } + let len = test_list.len(); + + // Tests should come in pairs with their mdata fn! + assert!( + len.is_multiple_of(2), + "Malformed tests in test binary {test_bin}" + ); + + let mut output = Output::default(); // Spawn and profile an instance of each perf-sensitive test, via hyperfine. - for t_name in test_list { - // Pretty-print the stripped name for the test. - let t_name_normal = t_name.replace(SUF_FIXED, "").replace(SUF_NORMAL, ""); + // Each test is a pair of (test, metadata-returning-fn), so grab both. We also + // know the list is sorted. + for (idx, t_pair) in test_list.chunks_exact(2).enumerate() { + if !quiet { + eprint!("\rProfiling test {}/{}", idx + 1, len / 2); + } + // Be resilient against changes to these constants. + let (t_name, t_mdata) = if consts::SUF_NORMAL < consts::SUF_MDATA { + (t_pair[0], t_pair[1]) + } else { + (t_pair[1], t_pair[0]) + }; + // Pretty-printable stripped name for the test. + let t_name_pretty = t_name.replace(consts::SUF_NORMAL, ""); + + // Get the metadata this test reports for us. + let t_mdata = match parse_mdata(test_bin, t_mdata) { + Ok(mdata) => mdata, + Err(err) => fail!(output, t_name_pretty, err), + }; + + if t_mdata.importance < thresh { + fail!(output, t_name_pretty, t_mdata, FailKind::Skipped); + } + // Time test execution to see how many iterations we need to do in order // to account for random noise. This is skipped for tests with fixed // iteration counts. 
- let final_iter_count = if t_name.ends_with(SUF_FIXED) { - None - } else { + let mut errored = false; + let final_iter_count = t_mdata.iterations.unwrap_or_else(|| { let mut iter_count = DEFAULT_ITER_COUNT; loop { - let mut cmd = Command::new(&test_bin); + let mut cmd = Command::new(test_bin); cmd.args([t_name, "--exact"]); - cmd.env(ITER_ENV_VAR, format!("{iter_count}")); + cmd.env(consts::ITER_ENV_VAR, format!("{iter_count}")); // Don't let the child muck up our stdin/out/err. cmd.stdin(Stdio::null()); cmd.stdout(Stdio::null()); @@ -119,28 +328,32 @@ fn main() { let out = cmd.spawn().unwrap().wait(); let post = Instant::now(); if !out.unwrap().success() { - println!( - "| {t_name_normal} (ERRORED IN TRIAGE) | N/A | N/A | N/A | {iter_count} | N/A |" - ); - return; + errored = true; + break iter_count; } - if post - pre > NOISE_CUTOFF { - break Some(iter_count); + if post - pre > consts::NOISE_CUTOFF { + break iter_count; } else if let Some(c) = iter_count.checked_mul(ITER_COUNT_MUL) { iter_count = c; } else { // This should almost never happen, but maybe..? eprintln!( - "WARNING: Running nearly usize::MAX iterations of test {t_name_normal}" + "WARNING: Running nearly usize::MAX iterations of test {t_name_pretty}" ); - break Some(iter_count); + break iter_count; } } - }; + }); + + // Don't profile failing tests. + if errored { + fail!(output, t_name_pretty, t_mdata, FailKind::Triage); + } // Now profile! let mut perf_cmd = Command::new("hyperfine"); // Warm up the cache and print markdown output to stdout. + // TODO: json perf_cmd.args([ "--style", "none", @@ -150,42 +363,89 @@ fn main() { "-", &format!("{test_bin} {t_name}"), ]); - if let Some(final_iter_count) = final_iter_count { - perf_cmd.env(ITER_ENV_VAR, format!("{final_iter_count}")); - } + perf_cmd.env(consts::ITER_ENV_VAR, format!("{final_iter_count}")); let p_out = perf_cmd.output().unwrap(); - let fin_iter = match final_iter_count { - Some(i) => &format!("{i}"), - None => "(preset)", - }; if p_out.status.success() { - let output = String::from_utf8_lossy(&p_out.stdout); - // Strip the name of the test binary from the table (and the space after it!) - // + our extraneous test bits + the "Relative" column (which is always at the end and "1.00"). - let output = output - .replace(&format!("{test_bin} "), "") - .replace(SUF_FIXED, "") - .replace(SUF_NORMAL, "") - .replace(" 1.00 |", ""); + let cmd_output = String::from_utf8_lossy(&p_out.stdout); // Can't use .last() since we have a trailing newline. Sigh. - let fin = output.lines().nth(3).unwrap(); + let results_line = cmd_output.lines().nth(3).unwrap(); + // Grab the values out of the pretty-print. + // TODO: Parse json instead. + let mut res_iter = results_line.split_whitespace(); + // Durations are given in milliseconds, so account for that. + let mean = + Duration::from_secs_f64(res_iter.nth(4).unwrap().parse::().unwrap() / 1000.); + let stddev = + Duration::from_secs_f64(res_iter.nth(1).unwrap().parse::().unwrap() / 1000.); - // Calculate how many iterations this does per second, for easy comparison. - let ms = fin - .split_whitespace() - .nth(3) + output.success( + t_name_pretty, + t_mdata, + final_iter_count, + Timings { mean, stddev }, + ); + } else { + fail!( + output, + t_name_pretty, + t_mdata, + final_iter_count, + FailKind::Profile + ); + } + } + if !quiet { + if output.is_empty() { + eprintln!("Nothing to do."); + } else { + // If stdout and stderr are on the same terminal, move us after the + // output from above. 
+ eprintln!(); + } + } + + // No need making an empty json file on every empty test bin. + if output.is_empty() { + return; + } + + match out_kind { + OutputKind::Markdown => print!("{output}"), + OutputKind::Json(user_path) => { + let wspace_dir = std::env::var("CARGO_WORKSPACE_DIR").unwrap(); + let runs_dir = PathBuf::from(&wspace_dir).join(consts::RUNS_DIR); + std::fs::create_dir_all(&runs_dir).unwrap(); + assert!( + !user_path.to_string_lossy().is_empty(), + "FATAL: Empty filename specified!" + ); + // Get the test binary's crate's name; a path like + // target/release-fast/deps/gpui-061ff76c9b7af5d7 + // would be reduced to just "gpui". + let test_bin_stripped = Path::new(test_bin) + .file_name() + .unwrap() + .to_str() .unwrap() - .parse::() + .rsplit_once('-') + .unwrap() + .0; + let mut file_path = runs_dir.join(user_path); + file_path + .as_mut_os_string() + .push(format!(".{test_bin_stripped}.json")); + let mut out_file = OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(&file_path) .unwrap(); - let mul_fac = 1000.0 / ms; - let iter_sec = match final_iter_count { - #[allow(clippy::cast_precision_loss)] - Some(c) => &format!("{:.1}", mul_fac * c as f64), - None => "(unknown)", - }; - println!("{fin} {fin_iter} | {iter_sec} |"); - } else { - println!("{t_name_normal} (ERRORED) | N/A | N/A | N/A | {fin_iter} | N/A |"); + out_file + .write_all(&serde_json::to_vec(&output).unwrap()) + .unwrap(); + if !quiet { + eprintln!("JSON output written to {}", file_path.display()); + } } } } From 891a06c2940b7aa441aac047a98d0dce86fb39a0 Mon Sep 17 00:00:00 2001 From: Remy Suen Date: Sun, 21 Sep 2025 20:18:17 -0400 Subject: [PATCH 36/58] docs: Small grammar fix to use a possessive pronoun (#38610) > Your extension can define it's own debug locators > Your extension can define it is own debug locators The sentence above does not make sense after expanding "it's". We should instead be using the possessive "its" in this scenario. Release Notes: - N/A Signed-off-by: Remy Suen --- docs/src/extensions/debugger-extensions.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/extensions/debugger-extensions.md b/docs/src/extensions/debugger-extensions.md index 4412bf8b9aa576e736b5b6dc25c5f4bc48100b18..fa33c25732631ca98910f0b19275ab32475955ff 100644 --- a/docs/src/extensions/debugger-extensions.md +++ b/docs/src/extensions/debugger-extensions.md @@ -65,7 +65,7 @@ Put another way, it is supposed to answer the question: "Given a program, a list Zed offers an automatic way to create debug scenarios with _debug locators_. A locator locates the debug target and figures out how to spawn a debug session for it. Thanks to locators, we can automatically convert existing user tasks (e.g. `cargo run`) and convert them into debug scenarios (e.g. `cargo build` followed by spawning a debugger with `target/debug/my_program` as the program to debug). -> Your extension can define it's own debug locators even if it does not expose a debug adapter. We strongly recommend doing so when your extension already exposes language tasks, as it allows users to spawn a debug session without having to manually configure the debug adapter. +> Your extension can define its own debug locators even if it does not expose a debug adapter. We strongly recommend doing so when your extension already exposes language tasks, as it allows users to spawn a debug session without having to manually configure the debug adapter. 
Locators can (but don't have to) be agnostic to the debug adapter they are used with. They are simply responsible for locating the debug target and figuring out how to spawn a debug session for it. This allows for a more flexible and extensible debugging experience. From 271771c742001157c0568c2cf40f4b184120fbe4 Mon Sep 17 00:00:00 2001 From: Miao Date: Mon, 22 Sep 2025 17:06:54 +0800 Subject: [PATCH 37/58] =?UTF-8?q?editor:=20Prevent=20non=E2=80=91boundary?= =?UTF-8?q?=20highlight=20indices=20in=20UTF=E2=80=918=20(#38510)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #38359 Release Notes: - Use byte offsets for highlights; fix UTF‑8 crash --- crates/debugger_ui/src/new_process_modal.rs | 1 - .../src/highlighted_match_with_paths.rs | 49 ++++++++++++++++--- crates/recent_projects/src/recent_projects.rs | 25 +++++----- crates/tasks_ui/src/modal.rs | 1 - 4 files changed, 52 insertions(+), 24 deletions(-) diff --git a/crates/debugger_ui/src/new_process_modal.rs b/crates/debugger_ui/src/new_process_modal.rs index f1fa4738e30e5ed24e7815b61571b03e5a16252e..a25c02c1b5f72f1e85f532fcee244f0165a8a48e 100644 --- a/crates/debugger_ui/src/new_process_modal.rs +++ b/crates/debugger_ui/src/new_process_modal.rs @@ -1514,7 +1514,6 @@ impl PickerDelegate for DebugDelegate { let highlighted_location = HighlightedMatch { text: hit.string.clone(), highlight_positions: hit.positions.clone(), - char_count: hit.string.chars().count(), color: Color::Default, }; diff --git a/crates/picker/src/highlighted_match_with_paths.rs b/crates/picker/src/highlighted_match_with_paths.rs index 255e0150e8d6d9684b4f5b1315d4975f037ace48..6e91b997da2dab2ac61befd2f596e6f3a4207c85 100644 --- a/crates/picker/src/highlighted_match_with_paths.rs +++ b/crates/picker/src/highlighted_match_with_paths.rs @@ -10,36 +10,36 @@ pub struct HighlightedMatchWithPaths { pub struct HighlightedMatch { pub text: String, pub highlight_positions: Vec, - pub char_count: usize, pub color: Color, } impl HighlightedMatch { pub fn join(components: impl Iterator, separator: &str) -> Self { - let mut char_count = 0; - let separator_char_count = separator.chars().count(); + // Track a running byte offset and insert separators between parts. + let mut first = true; + let mut byte_offset = 0; let mut text = String::new(); let mut highlight_positions = Vec::new(); for component in components { - if char_count != 0 { + if !first { text.push_str(separator); - char_count += separator_char_count; + byte_offset += separator.len(); } + first = false; highlight_positions.extend( component .highlight_positions .iter() - .map(|position| position + char_count), + .map(|position| position + byte_offset), ); text.push_str(&component.text); - char_count += component.text.chars().count(); + byte_offset += component.text.len(); } Self { text, highlight_positions, - char_count, color: Color::Default, } } @@ -73,3 +73,36 @@ impl RenderOnce for HighlightedMatchWithPaths { }) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn join_offsets_positions_by_bytes_not_chars() { + // "αβγ" is 3 Unicode scalar values, 6 bytes in UTF-8. 
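+        // A char-based join would offset the right-hand highlights by 3 instead of 6, landing them mid-codepoint.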
+ let left_text = "αβγ".to_string(); + let right_text = "label".to_string(); + let left = HighlightedMatch { + text: left_text, + highlight_positions: vec![], + color: Color::Default, + }; + let right = HighlightedMatch { + text: right_text, + highlight_positions: vec![0, 1], + color: Color::Default, + }; + let joined = HighlightedMatch::join([left, right].into_iter(), ""); + + assert!( + joined + .highlight_positions + .iter() + .all(|&p| joined.text.is_char_boundary(p)), + "join produced non-boundary positions {:?} for text {:?}", + joined.highlight_positions, + joined.text + ); + } +} diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index 2b011638218dd58b758f3af2e46836614e1c6780..ad7270d98c2597d77a71945c4aed97374cc6d8da 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -463,8 +463,7 @@ impl PickerDelegate for RecentProjectsDelegate { .map(|path| { let highlighted_text = highlights_for_path(path.as_ref(), &hit.positions, path_start_offset); - - path_start_offset += highlighted_text.1.char_count; + path_start_offset += highlighted_text.1.text.len(); highlighted_text }) .unzip(); @@ -590,34 +589,33 @@ fn highlights_for_path( path_start_offset: usize, ) -> (Option, HighlightedMatch) { let path_string = path.to_string_lossy(); - let path_char_count = path_string.chars().count(); + let path_text = path_string.to_string(); + let path_byte_len = path_text.len(); // Get the subset of match highlight positions that line up with the given path. // Also adjusts them to start at the path start let path_positions = match_positions .iter() .copied() .skip_while(|position| *position < path_start_offset) - .take_while(|position| *position < path_start_offset + path_char_count) + .take_while(|position| *position < path_start_offset + path_byte_len) .map(|position| position - path_start_offset) .collect::>(); // Again subset the highlight positions to just those that line up with the file_name // again adjusted to the start of the file_name let file_name_text_and_positions = path.file_name().map(|file_name| { - let text = file_name.to_string_lossy(); - let char_count = text.chars().count(); - let file_name_start = path_char_count - char_count; + let file_name_text = file_name.to_string_lossy().to_string(); + let file_name_start_byte = path_byte_len - file_name_text.len(); let highlight_positions = path_positions .iter() .copied() - .skip_while(|position| *position < file_name_start) - .take_while(|position| *position < file_name_start + char_count) - .map(|position| position - file_name_start) + .skip_while(|position| *position < file_name_start_byte) + .take_while(|position| *position < file_name_start_byte + file_name_text.len()) + .map(|position| position - file_name_start_byte) .collect::>(); HighlightedMatch { - text: text.to_string(), + text: file_name_text, highlight_positions, - char_count, color: Color::Default, } }); @@ -625,9 +623,8 @@ fn highlights_for_path( ( file_name_text_and_positions, HighlightedMatch { - text: path_string.to_string(), + text: path_text, highlight_positions: path_positions, - char_count: path_char_count, color: Color::Default, }, ) diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs index 3522e9522a6d32d729e7f0dca6731b2052f63f94..3b669e5a4d88405d32c77d88abf336c4c65f30c0 100644 --- a/crates/tasks_ui/src/modal.rs +++ b/crates/tasks_ui/src/modal.rs @@ -482,7 +482,6 @@ impl PickerDelegate for TasksModalDelegate { let 
highlighted_location = HighlightedMatch { text: hit.string.clone(), highlight_positions: hit.positions.clone(), - char_count: hit.string.chars().count(), color: Color::Default, }; let icon = match source_kind { From 79620454d0ba0fb81306e80dccfe77a7652d94fa Mon Sep 17 00:00:00 2001 From: Matheus Date: Mon, 22 Sep 2025 07:14:04 -0300 Subject: [PATCH 38/58] Docs: change format_on_save value from false to "off" (#38615) Found this outdated piece of information in the docs while trying to disable it myself, this PR simply changes `false` to `"off"`. Release Notes: - N/A --- docs/src/languages/python.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/languages/python.md b/docs/src/languages/python.md index faca1185768d09b5dcb6e485c146b0e1973cf870..98eca1fcc9d43747aaf45085db5ed831f8d0b25f 100644 --- a/docs/src/languages/python.md +++ b/docs/src/languages/python.md @@ -198,7 +198,7 @@ You can disable format-on-save for Python files in your `settings.json`: { "languages": { "Python": { - "format_on_save": false + "format_on_save": "off" } } } From a2c71d3d207c963f54a622f77a745d943075c02a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 22 Sep 2025 13:20:46 +0200 Subject: [PATCH 39/58] text: Assert text anchor offset validity on construction (#38441) Attempt to aid debugging some utf8 indexing issues Release Notes: - N/A Co-authored-by: Mikayla Maki --- .../editor/src/highlight_matching_bracket.rs | 4 +- crates/language/src/buffer.rs | 6 +- crates/multi_buffer/src/multi_buffer.rs | 3 +- crates/rope/src/rope.rs | 167 ++++++++++++++++++ crates/text/src/text.rs | 21 +++ 5 files changed, 195 insertions(+), 6 deletions(-) diff --git a/crates/editor/src/highlight_matching_bracket.rs b/crates/editor/src/highlight_matching_bracket.rs index aa4e616924ad6bd47627bfd95e9a5c58587afc25..aa1647cac0ba3e9d7644871faeac783e44501c57 100644 --- a/crates/editor/src/highlight_matching_bracket.rs +++ b/crates/editor/src/highlight_matching_bracket.rs @@ -29,7 +29,9 @@ pub fn refresh_matching_bracket_highlights( if (editor.cursor_shape == CursorShape::Block || editor.cursor_shape == CursorShape::Hollow) && head < snapshot.buffer_snapshot.len() { - tail += 1; + if let Some(tail_ch) = snapshot.buffer_snapshot.chars_at(tail).next() { + tail += tail_ch.len_utf8(); + } } if let Some((opening_range, closing_range)) = snapshot diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index d5d83da47bc18a4fd15f59df2ddb2238ceb768d4..1f5c35f8df9288182b294b8e52a9cd7bdb531124 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -4121,8 +4121,7 @@ impl BufferSnapshot { range: Range, ) -> impl Iterator + '_ { // Find bracket pairs that *inclusively* contain the given range. 
- let range = range.start.to_offset(self).saturating_sub(1) - ..self.len().min(range.end.to_offset(self) + 1); + let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self); self.all_bracket_ranges(range) .filter(|pair| !pair.newline_only) } @@ -4131,8 +4130,7 @@ impl BufferSnapshot { &self, range: Range, ) -> impl Iterator, DebuggerTextObject)> + '_ { - let range = range.start.to_offset(self).saturating_sub(1) - ..self.len().min(range.end.to_offset(self) + 1); + let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self); let mut matches = self.syntax.matches_with_options( range.clone(), diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index c79bc03489be89ad00d10392c520fe13e7748a60..8d72e919a4802c1875c956fc3e6d72fc0e9a2ade 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -5259,7 +5259,8 @@ impl MultiBufferSnapshot { } else { Anchor::max() }; - // TODO this is a hack, remove it + + // TODO this is a hack, because all APIs should be able to handle ExcerptId::min and max. if let Some((excerpt_id, _, _)) = self.as_singleton() { anchor.excerpt_id = *excerpt_id; } diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 9c98985989c2ac0fdcd5a39342dd9911d64dd01a..d802f972168348b15f003e1fd2fa567c1f9d75bb 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -30,6 +30,76 @@ impl Rope { Self::default() } + pub fn is_char_boundary(&self, offset: usize) -> bool { + if self.chunks.is_empty() { + return offset == 0; + } + let mut cursor = self.chunks.cursor::(&()); + cursor.seek(&offset, Bias::Left); + let chunk_offset = offset - cursor.start(); + cursor + .item() + .map(|chunk| chunk.text.is_char_boundary(chunk_offset)) + .unwrap_or(false) + } + + pub fn floor_char_boundary(&self, index: usize) -> usize { + if index >= self.len() { + self.len() + } else { + #[inline] + pub(crate) const fn is_utf8_char_boundary(u8: u8) -> bool { + // This is bit magic equivalent to: b < 128 || b >= 192 + (u8 as i8) >= -0x40 + } + + let mut cursor = self.chunks.cursor::(&()); + cursor.seek(&index, Bias::Left); + let chunk_offset = index - cursor.start(); + let lower_idx = cursor.item().map(|chunk| { + let lower_bound = chunk_offset.saturating_sub(3); + chunk + .text + .as_bytes() + .get(lower_bound..=chunk_offset) + .map(|it| { + let new_idx = it + .iter() + .rposition(|&b| is_utf8_char_boundary(b)) + .unwrap_or(0); + lower_bound + new_idx + }) + .unwrap_or(chunk.text.len()) + }); + lower_idx.map_or_else(|| self.len(), |idx| cursor.start() + idx) + } + } + + pub fn ceil_char_boundary(&self, index: usize) -> usize { + if index > self.len() { + self.len() + } else { + #[inline] + pub(crate) const fn is_utf8_char_boundary(u8: u8) -> bool { + // This is bit magic equivalent to: b < 128 || b >= 192 + (u8 as i8) >= -0x40 + } + + let mut cursor = self.chunks.cursor::(&()); + cursor.seek(&index, Bias::Left); + let chunk_offset = index - cursor.start(); + let upper_idx = cursor.item().map(|chunk| { + let upper_bound = Ord::min(chunk_offset + 4, chunk.text.len()); + chunk.text.as_bytes()[chunk_offset..upper_bound] + .iter() + .position(|&b| is_utf8_char_boundary(b)) + .map_or(upper_bound, |pos| pos + chunk_offset) + }); + + upper_idx.map_or_else(|| self.len(), |idx| cursor.start() + idx) + } + } + pub fn append(&mut self, rope: Rope) { if let Some(chunk) = rope.chunks.first() && (self @@ -2069,6 +2139,103 @@ mod tests { 
assert!(!rope.reversed_chunks_in_range(0..0).equals_str("foo")); } + #[test] + fn test_is_char_boundary() { + let fixture = "地"; + let rope = Rope::from("地"); + for b in 0..=fixture.len() { + assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b)); + } + let fixture = ""; + let rope = Rope::from(""); + for b in 0..=fixture.len() { + assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b)); + } + let fixture = "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"; + let rope = Rope::from("🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"); + for b in 0..=fixture.len() { + assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b)); + } + } + + #[test] + fn test_floor_char_boundary() { + // polyfill of str::floor_char_boundary + fn floor_char_boundary(str: &str, index: usize) -> usize { + if index >= str.len() { + str.len() + } else { + let lower_bound = index.saturating_sub(3); + let new_index = str.as_bytes()[lower_bound..=index] + .iter() + .rposition(|b| (*b as i8) >= -0x40); + + lower_bound + new_index.unwrap() + } + } + + let fixture = "地"; + let rope = Rope::from("地"); + for b in 0..=fixture.len() { + assert_eq!( + rope.floor_char_boundary(b), + floor_char_boundary(&fixture, b) + ); + } + + let fixture = ""; + let rope = Rope::from(""); + for b in 0..=fixture.len() { + assert_eq!( + rope.floor_char_boundary(b), + floor_char_boundary(&fixture, b) + ); + } + + let fixture = "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"; + let rope = Rope::from("🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"); + for b in 0..=fixture.len() { + assert_eq!( + rope.floor_char_boundary(b), + floor_char_boundary(&fixture, b) + ); + } + } + + #[test] + fn test_ceil_char_boundary() { + // polyfill of str::ceil_char_boundary + fn ceil_char_boundary(str: &str, index: usize) -> usize { + if index > str.len() { + str.len() + } else { + let upper_bound = Ord::min(index + 4, str.len()); + str.as_bytes()[index..upper_bound] + .iter() + .position(|b| (*b as i8) >= -0x40) + .map_or(upper_bound, |pos| pos + index) + } + } + + let fixture = "地"; + let rope = Rope::from("地"); + for b in 0..=fixture.len() { + assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b)); + } + + let fixture = ""; + let rope = Rope::from(""); + for b in 0..=fixture.len() { + assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b)); + } + + let fixture = "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"; + let rope = Rope::from("🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"); + for b in 0..=fixture.len() { + assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b)); + } + } + fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize { while !text.is_char_boundary(offset) { match bias { diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 590c30c8a73c13180e4d09dda1b3a071ef46ad7f..bbf9e2e3812238eebbdf92c63bd3b23819a9dac0 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2400,6 +2400,17 @@ impl BufferSnapshot { } else if bias == Bias::Right && offset == self.len() { Anchor::MAX } else { + if !self.visible_text.is_char_boundary(offset) { + // find the character + let char_start = self.visible_text.floor_char_boundary(offset); + // `char_start` must be less than len and a char boundary + let ch = self.visible_text.chars_at(char_start).next().unwrap(); + let char_range = char_start..char_start + ch.len_utf8(); + panic!( + "byte index {} is not a char boundary; it is inside {:?} (bytes {:?})", + offset, ch, char_range, + ); + } let mut fragment_cursor = self.fragments.cursor::(&None); 
fragment_cursor.seek(&offset, bias); let fragment = fragment_cursor.item().unwrap(); @@ -3065,6 +3076,16 @@ impl operation_queue::Operation for Operation { pub trait ToOffset { fn to_offset(&self, snapshot: &BufferSnapshot) -> usize; + fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize { + snapshot + .visible_text + .ceil_char_boundary(self.to_offset(snapshot) + 1) + } + fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize { + snapshot + .visible_text + .floor_char_boundary(self.to_offset(snapshot).saturating_sub(1)) + } } impl ToOffset for Point { From 50bd8bc2551a287cca389c1177eb1db570e83a0d Mon Sep 17 00:00:00 2001 From: Justin Su Date: Mon, 22 Sep 2025 07:29:46 -0400 Subject: [PATCH 40/58] docs: Add instructions for setting up `fish_indent` for fish (#38414) Release Notes: - N/A --- docs/src/languages/fish.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/docs/src/languages/fish.md b/docs/src/languages/fish.md index 969beee2209b7f066b01fa703a7912260cf8b05e..ad2148d807baeb73241206ab5538ddaffdc789ce 100644 --- a/docs/src/languages/fish.md +++ b/docs/src/languages/fish.md @@ -4,3 +4,28 @@ Fish language support in Zed is provided by the community-maintained [Fish exten Report issues to: [https://github.com/hasit/zed-fish/issues](https://github.com/hasit/zed-fish/issues) - Tree-sitter: [ram02z/tree-sitter-fish](https://github.com/ram02z/tree-sitter-fish) + +### Formatting + +Zed supports auto-formatting fish code using external tools like [`fish_indent`](https://fishshell.com/docs/current/cmds/fish_indent.html), which is included with fish. + +1. Ensure `fish_indent` is available in your path and check the version: + +```sh +which fish_indent +fish_indent --version +``` + +2. Configure Zed to automatically format fish code with `fish_indent`: + +```json + "languages": { + "Fish": { + "formatter": { + "external": { + "command": "fish_indent" + } + } + } + }, +``` From 55dc9ff7ca77009d0c342ae6377b0d1d7413e32c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 22 Sep 2025 13:45:23 +0200 Subject: [PATCH 41/58] text: Implement `Rope::clip_offset` in terms of the new utf8 boundary methods (#38630) Release Notes: - N/A --- .../src/assistant_context.rs | 2 +- crates/language/src/buffer.rs | 4 ++-- crates/project/src/git_store/conflict_set.rs | 4 ++-- crates/rope/src/rope.rs | 24 ++++--------------- crates/text/src/text.rs | 4 +++- 5 files changed, 12 insertions(+), 26 deletions(-) diff --git a/crates/assistant_context/src/assistant_context.rs b/crates/assistant_context/src/assistant_context.rs index 12eda0954a2e1cca9ddc7df9816b8f5a37d0ce10..23aeabbc8929e6a3874c2fbbf74b8f9729860481 100644 --- a/crates/assistant_context/src/assistant_context.rs +++ b/crates/assistant_context/src/assistant_context.rs @@ -2445,7 +2445,7 @@ impl AssistantContext { .message_anchors .get(next_message_ix) .map_or(buffer.len(), |message| { - buffer.clip_offset(message.start.to_offset(buffer) - 1, Bias::Left) + buffer.clip_offset(message.start.to_previous_offset(buffer), Bias::Left) }); Some(self.insert_message_at_offset(offset, role, status, cx)) } else { diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1f5c35f8df9288182b294b8e52a9cd7bdb531124..311ef4d55b947888cd7fbc6706a9bd581f2dd27d 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -4198,8 +4198,8 @@ impl BufferSnapshot { range: Range, options: TreeSitterOptions, ) -> impl Iterator, TextObject)> + '_ { - let range = 
range.start.to_offset(self).saturating_sub(1) - ..self.len().min(range.end.to_offset(self) + 1); + let range = + range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self)); let mut matches = self.syntax diff --git a/crates/project/src/git_store/conflict_set.rs b/crates/project/src/git_store/conflict_set.rs index 2bcfc75b32da3c5a4860cc72f3266bff38f022e3..067af17820e58264006d0227cfb0f3c13069fcf9 100644 --- a/crates/project/src/git_store/conflict_set.rs +++ b/crates/project/src/git_store/conflict_set.rs @@ -344,8 +344,8 @@ mod tests { assert_eq!(conflicts_in_range.len(), 1); // Test with a range that doesn't include any conflicts - let range = buffer.anchor_after(first_conflict_end.to_offset(&buffer) + 1) - ..buffer.anchor_before(second_conflict_start.to_offset(&buffer) - 1); + let range = buffer.anchor_after(first_conflict_end.to_next_offset(&buffer)) + ..buffer.anchor_before(second_conflict_start.to_previous_offset(&buffer)); let conflicts_in_range = conflict_snapshot.conflicts_in_range(range, &snapshot); assert_eq!(conflicts_in_range.len(), 0); } diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index d802f972168348b15f003e1fd2fa567c1f9d75bb..cb2e103f76fa636571526c71afcbb3358542b083 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -459,26 +459,10 @@ impl Rope { }) } - pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize { - let mut cursor = self.chunks.cursor::(&()); - cursor.seek(&offset, Bias::Left); - if let Some(chunk) = cursor.item() { - let mut ix = offset - cursor.start(); - while !chunk.text.is_char_boundary(ix) { - match bias { - Bias::Left => { - ix -= 1; - offset -= 1; - } - Bias::Right => { - ix += 1; - offset += 1; - } - } - } - offset - } else { - self.summary().len + pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize { + match bias { + Bias::Left => self.floor_char_boundary(offset), + Bias::Right => self.ceil_char_boundary(offset), } } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index bbf9e2e3812238eebbdf92c63bd3b23819a9dac0..a38693ed934855acd0e0c6ff726c7835a1aa057e 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2128,7 +2128,7 @@ impl BufferSnapshot { let row_end_offset = if row >= self.max_point().row { self.len() } else { - Point::new(row + 1, 0).to_offset(self) - 1 + Point::new(row + 1, 0).to_previous_offset(self) }; (row_end_offset - row_start_offset) as u32 } @@ -3076,11 +3076,13 @@ impl operation_queue::Operation for Operation { pub trait ToOffset { fn to_offset(&self, snapshot: &BufferSnapshot) -> usize; + /// Turns this point into the next offset in the buffer that comes after this, respecting utf8 boundaries. fn to_next_offset(&self, snapshot: &BufferSnapshot) -> usize { snapshot .visible_text .ceil_char_boundary(self.to_offset(snapshot) + 1) } + /// Turns this point into the previous offset in the buffer that comes before this, respecting utf8 boundaries. 
fn to_previous_offset(&self, snapshot: &BufferSnapshot) -> usize { snapshot .visible_text From 8bac1bee7a6e6b3c08873e9233b2515c3a6fd386 Mon Sep 17 00:00:00 2001 From: localcc Date: Mon, 22 Sep 2025 13:46:29 +0200 Subject: [PATCH 42/58] Disable subpixel shifting for y axis on Windows (#38440) Release Notes: - N/A --------- Co-authored-by: Jakub Konka --- crates/gpui/src/platform/linux/text_system.rs | 6 +++--- crates/gpui/src/platform/mac/text_system.rs | 4 ++-- crates/gpui/src/platform/windows/direct_write.rs | 11 +++++------ crates/gpui/src/text_system.rs | 8 +++++++- crates/gpui/src/window.rs | 9 +++++---- 5 files changed, 22 insertions(+), 16 deletions(-) diff --git a/crates/gpui/src/platform/linux/text_system.rs b/crates/gpui/src/platform/linux/text_system.rs index f66a2e71d49f39c0e82770e23aa8eca752970daf..3f045f9406f2ce1f06b55367600547843cdb69e3 100644 --- a/crates/gpui/src/platform/linux/text_system.rs +++ b/crates/gpui/src/platform/linux/text_system.rs @@ -1,6 +1,6 @@ use crate::{ Bounds, DevicePixels, Font, FontFeatures, FontId, FontMetrics, FontRun, FontStyle, FontWeight, - GlyphId, LineLayout, Pixels, PlatformTextSystem, Point, RenderGlyphParams, SUBPIXEL_VARIANTS, + GlyphId, LineLayout, Pixels, PlatformTextSystem, Point, RenderGlyphParams, SUBPIXEL_VARIANTS_X, ShapedGlyph, ShapedRun, SharedString, Size, point, size, }; use anyhow::{Context as _, Ok, Result}; @@ -276,7 +276,7 @@ impl CosmicTextSystemState { let font = &self.loaded_fonts[params.font_id.0].font; let subpixel_shift = params .subpixel_variant - .map(|v| v as f32 / (SUBPIXEL_VARIANTS as f32 * params.scale_factor)); + .map(|v| v as f32 / (SUBPIXEL_VARIANTS_X as f32 * params.scale_factor)); let image = self .swash_cache .get_image( @@ -311,7 +311,7 @@ impl CosmicTextSystemState { let font = &self.loaded_fonts[params.font_id.0].font; let subpixel_shift = params .subpixel_variant - .map(|v| v as f32 / (SUBPIXEL_VARIANTS as f32 * params.scale_factor)); + .map(|v| v as f32 / (SUBPIXEL_VARIANTS_X as f32 * params.scale_factor)); let mut image = self .swash_cache .get_image( diff --git a/crates/gpui/src/platform/mac/text_system.rs b/crates/gpui/src/platform/mac/text_system.rs index 7f765fbaac80e27f8db4b9c4f2d00de90e991a9a..67ece1f153fb4ea73a12259d2d55409059aadb40 100644 --- a/crates/gpui/src/platform/mac/text_system.rs +++ b/crates/gpui/src/platform/mac/text_system.rs @@ -1,7 +1,7 @@ use crate::{ Bounds, DevicePixels, Font, FontFallbacks, FontFeatures, FontId, FontMetrics, FontRun, FontStyle, FontWeight, GlyphId, LineLayout, Pixels, PlatformTextSystem, Point, - RenderGlyphParams, Result, SUBPIXEL_VARIANTS, ShapedGlyph, ShapedRun, SharedString, Size, + RenderGlyphParams, Result, SUBPIXEL_VARIANTS_X, ShapedGlyph, ShapedRun, SharedString, Size, point, px, size, swap_rgba_pa_to_bgra, }; use anyhow::anyhow; @@ -395,7 +395,7 @@ impl MacTextSystemState { let subpixel_shift = params .subpixel_variant - .map(|v| v as f32 / SUBPIXEL_VARIANTS as f32); + .map(|v| v as f32 / SUBPIXEL_VARIANTS_X as f32); cx.set_allows_font_smoothing(true); cx.set_text_drawing_mode(CGTextDrawingMode::CGTextFill); cx.set_gray_fill_color(0.0, 1.0); diff --git a/crates/gpui/src/platform/windows/direct_write.rs b/crates/gpui/src/platform/windows/direct_write.rs index df3161bf079a8eb0cb04908e586f5d344519821e..e187fc4b09176906102a1bf8fe50b410aae3cb2b 100644 --- a/crates/gpui/src/platform/windows/direct_write.rs +++ b/crates/gpui/src/platform/windows/direct_write.rs @@ -723,11 +723,10 @@ impl DirectWriteState { dx: 0.0, dy: 0.0, }; - let subpixel_shift = params - 
.subpixel_variant - .map(|v| v as f32 / SUBPIXEL_VARIANTS as f32); - let baseline_origin_x = subpixel_shift.x / params.scale_factor; - let baseline_origin_y = subpixel_shift.y / params.scale_factor; + let baseline_origin_x = + params.subpixel_variant.x as f32 / SUBPIXEL_VARIANTS_X as f32 / params.scale_factor; + let baseline_origin_y = + params.subpixel_variant.y as f32 / SUBPIXEL_VARIANTS_Y as f32 / params.scale_factor; let mut rendering_mode = DWRITE_RENDERING_MODE1::default(); let mut grid_fit_mode = DWRITE_GRID_FIT_MODE::default(); @@ -859,7 +858,7 @@ impl DirectWriteState { let bitmap_size = glyph_bounds.size; let subpixel_shift = params .subpixel_variant - .map(|v| v as f32 / SUBPIXEL_VARIANTS as f32); + .map(|v| v as f32 / SUBPIXEL_VARIANTS_X as f32); let baseline_origin_x = subpixel_shift.x / params.scale_factor; let baseline_origin_y = subpixel_shift.y / params.scale_factor; diff --git a/crates/gpui/src/text_system.rs b/crates/gpui/src/text_system.rs index 4d4087f45d4093c239218f96f015d153fa77dc10..efa4ad032a66ce92a71cbd82be6ed4a63d527858 100644 --- a/crates/gpui/src/text_system.rs +++ b/crates/gpui/src/text_system.rs @@ -41,7 +41,13 @@ pub struct FontId(pub usize); #[derive(Hash, PartialEq, Eq, Clone, Copy, Debug)] pub struct FontFamilyId(pub usize); -pub(crate) const SUBPIXEL_VARIANTS: u8 = 4; +pub(crate) const SUBPIXEL_VARIANTS_X: u8 = 4; + +pub(crate) const SUBPIXEL_VARIANTS_Y: u8 = if cfg!(target_os = "windows") { + 1 +} else { + SUBPIXEL_VARIANTS_X +}; /// The GPUI text rendering sub system. pub struct TextSystem { diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 307197acca7cd3eba7f69e7731288449a96ad35a..d9bf27dca1253fa0a5286148ea64a03c3a5bac37 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -11,8 +11,8 @@ use crate::{ MouseMoveEvent, MouseUpEvent, Path, Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, Point, PolychromeSprite, PromptButton, PromptLevel, Quad, Render, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Replay, ResizeEdge, - SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS, ScaledPixels, Scene, Shadow, SharedString, Size, - StrikethroughStyle, Style, SubscriberSet, Subscription, SystemWindowTab, + SMOOTH_SVG_SCALE_FACTOR, SUBPIXEL_VARIANTS_X, SUBPIXEL_VARIANTS_Y, ScaledPixels, Scene, Shadow, + SharedString, Size, StrikethroughStyle, Style, SubscriberSet, Subscription, SystemWindowTab, SystemWindowTabController, TabHandles, TaffyLayoutEngine, Task, TextStyle, TextStyleRefinement, TransformationMatrix, Underline, UnderlineStyle, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControls, WindowDecorations, WindowOptions, WindowParams, WindowTextSystem, @@ -2944,9 +2944,10 @@ impl Window { let element_opacity = self.element_opacity(); let scale_factor = self.scale_factor(); let glyph_origin = origin.scale(scale_factor); + let subpixel_variant = Point { - x: (glyph_origin.x.0.fract() * SUBPIXEL_VARIANTS as f32).floor() as u8, - y: (glyph_origin.y.0.fract() * SUBPIXEL_VARIANTS as f32).floor() as u8, + x: (glyph_origin.x.0.fract() * SUBPIXEL_VARIANTS_X as f32).floor() as u8, + y: (glyph_origin.y.0.fract() * SUBPIXEL_VARIANTS_Y as f32).floor() as u8, }; let params = RenderGlyphParams { font_id, From 1bbf98aea6f335e791f19d8f76ba8a5f0510937f Mon Sep 17 00:00:00 2001 From: strygwyr <13832826+dstrygwyr@users.noreply.github.com> Date: Mon, 22 Sep 2025 19:35:43 +0700 Subject: [PATCH 43/58] Fix arrow function detection in TypeScript/JavaScript outline 
(#38411) Closes #35102 https://github.com/user-attachments/assets/3c946d6c-0acd-4cfe-8cb3-61eb6d20f808 Release Notes: - TypeScript/JavaScript: symbol outline now includes closures nested within functions. --- crates/languages/src/javascript/outline.scm | 22 +++++++++++++++++++++ crates/languages/src/tsx/outline.scm | 22 +++++++++++++++++++++ crates/languages/src/typescript/outline.scm | 22 +++++++++++++++++++++ 3 files changed, 66 insertions(+) diff --git a/crates/languages/src/javascript/outline.scm b/crates/languages/src/javascript/outline.scm index ca16c27a27be3e1e09ced16cd2eef7aa28345f9e..8a58b6dc1b04b0b36f6155055bd8439bc2ddeca2 100644 --- a/crates/languages/src/javascript/outline.scm +++ b/crates/languages/src/javascript/outline.scm @@ -116,4 +116,26 @@ ) ) @item +; Arrow functions in variable declarations (anywhere in the tree, including nested in functions) +(lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (_) @name + value: (arrow_function)) @item) + +; Async arrow functions in variable declarations +(lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (_) @name + value: (arrow_function + "async" @context)) @item) + +; Named function expressions in variable declarations +(lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (_) @name + value: (function_expression)) @item) + (comment) @annotation diff --git a/crates/languages/src/tsx/outline.scm b/crates/languages/src/tsx/outline.scm index f4261b9697d376f517b717bc942387190e0b6dde..72efff9ab74155c626bd7b91ad1cf8b580431492 100644 --- a/crates/languages/src/tsx/outline.scm +++ b/crates/languages/src/tsx/outline.scm @@ -124,4 +124,26 @@ ) ) @item +; Arrow functions in variable declarations (anywhere in the tree, including nested in functions) +(lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (_) @name + value: (arrow_function)) @item) + +; Async arrow functions in variable declarations +(lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (_) @name + value: (arrow_function + "async" @context)) @item) + +; Named function expressions in variable declarations +(lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (_) @name + value: (function_expression)) @item) + (comment) @annotation diff --git a/crates/languages/src/typescript/outline.scm b/crates/languages/src/typescript/outline.scm index f4261b9697d376f517b717bc942387190e0b6dde..72efff9ab74155c626bd7b91ad1cf8b580431492 100644 --- a/crates/languages/src/typescript/outline.scm +++ b/crates/languages/src/typescript/outline.scm @@ -124,4 +124,26 @@ ) ) @item +; Arrow functions in variable declarations (anywhere in the tree, including nested in functions) +(lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (_) @name + value: (arrow_function)) @item) + +; Async arrow functions in variable declarations +(lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (_) @name + value: (arrow_function + "async" @context)) @item) + +; Named function expressions in variable declarations +(lexical_declaration + ["let" "const"] @context + (variable_declarator + name: (_) @name + value: (function_expression)) @item) + (comment) @annotation From e0028fbef26947298a8b44a27f560fd36b04a131 Mon Sep 17 00:00:00 2001 From: Bartosz Kaszubowski Date: Mon, 22 Sep 2025 14:56:37 +0200 Subject: [PATCH 44/58] git_ui: Remove duplicated/unused tooltips (#38439) Release Notes: - N/A --- 
crates/git_ui/src/commit_modal.rs | 4 ---- crates/git_ui/src/git_panel.rs | 4 ---- 2 files changed, 8 deletions(-) diff --git a/crates/git_ui/src/commit_modal.rs b/crates/git_ui/src/commit_modal.rs index a2f84726543af50312dc24d0fcd9e0486b51d9c5..b7088f27e918e92ac669de87158f8b810488e956 100644 --- a/crates/git_ui/src/commit_modal.rs +++ b/crates/git_ui/src/commit_modal.rs @@ -368,10 +368,6 @@ impl CommitModal { .icon_color(Color::Placeholder) .color(Color::Muted) .icon_position(IconPosition::Start) - .tooltip(Tooltip::for_action_title( - "Switch Branch", - &zed_actions::git::Branch, - )) .on_click(cx.listener(|_, _, window, cx| { window.dispatch_action(zed_actions::git::Branch.boxed_clone(), cx); })) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 47dcc68d2137f75666ae04d2b8ffe4e87cb478f8..f8df51357da99909b28e871a8aa6202328d2412d 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -4575,10 +4575,6 @@ impl RenderOnce for PanelRepoFooter { .size(ButtonSize::None) .label_size(LabelSize::Small) .truncate(true) - .tooltip(Tooltip::for_action_title( - "Switch Branch", - &zed_actions::git::Switch, - )) .on_click(|_, window, cx| { window.dispatch_action(zed_actions::git::Switch.boxed_clone(), cx); }); From fbe06238e45c2fec6955c70b86eb26fd6a864266 Mon Sep 17 00:00:00 2001 From: Xiaobo Liu Date: Mon, 22 Sep 2025 21:12:19 +0800 Subject: [PATCH 45/58] cli: Refactor URL prefix checks (#38375) use slice apply to prefix. Release Notes: - N/A --------- Signed-off-by: Xiaobo Liu --- crates/cli/src/main.rs | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/crates/cli/src/main.rs b/crates/cli/src/main.rs index d0d30f72d7aea7d7f6cf0355caf12b1f2a36eedb..b1a7ebbd8c445e9eb09d572bff42b81dc995dea1 100644 --- a/crates/cli/src/main.rs +++ b/crates/cli/src/main.rs @@ -20,6 +20,8 @@ use util::paths::PathWithPosition; #[cfg(any(target_os = "linux", target_os = "freebsd"))] use std::io::IsTerminal; +const URL_PREFIX: [&'static str; 5] = ["zed://", "http://", "https://", "file://", "ssh://"]; + struct Detect; trait InstalledApp { @@ -310,12 +312,7 @@ fn main() -> Result<()> { let wsl = None; for path in args.paths_with_position.iter() { - if path.starts_with("zed://") - || path.starts_with("http://") - || path.starts_with("https://") - || path.starts_with("file://") - || path.starts_with("ssh://") - { + if URL_PREFIX.iter().any(|&prefix| path.starts_with(prefix)) { urls.push(path.to_string()); } else if path == "-" && args.paths_with_position.len() == 1 { let file = NamedTempFile::new()?; From b97843ea02994f029db9cf740fae89ead4417c55 Mon Sep 17 00:00:00 2001 From: Ilija Tovilo Date: Mon, 22 Sep 2025 16:52:33 +0200 Subject: [PATCH 46/58] Add quick "Edit debug.json" button to debugger control strip (#38600) This button already exists in the main menu, as well as the "New Session" view in the debugger panel. However, this view disappears after starting the debugging session. This PR adds the same button to the debugger control strip that remains accessible. This is convenient for people editing their debug.json frequently. Site-node: I feel like the `Cog` icon would be more appropriate, but I picked `Code` to stay consistent with the "New Session" view. 
Before: image After: image Release Notes: - Added "Edit debug.json" button to debugger control strip --- crates/debugger_ui/src/debugger_panel.rs | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index f1a1b4dc738f82f729832c60648392af8b9921ed..787bca01acb204a4a50b18a34f3567137f92aa0e 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -625,6 +625,15 @@ impl DebugPanel { }) }; + let edit_debug_json_button = || { + IconButton::new("debug-edit-debug-json", IconName::Code) + .icon_size(IconSize::Small) + .on_click(|_, window, cx| { + window.dispatch_action(zed_actions::OpenProjectDebugTasks.boxed_clone(), cx); + }) + .tooltip(Tooltip::text("Edit debug.json")) + }; + let documentation_button = || { IconButton::new("debug-open-documentation", IconName::CircleHelp) .icon_size(IconSize::Small) @@ -899,8 +908,9 @@ impl DebugPanel { ) .when(is_side, |this| { this.child(new_session_button()) - .child(logs_button()) + .child(edit_debug_json_button()) .child(documentation_button()) + .child(logs_button()) }), ) .child( @@ -951,8 +961,9 @@ impl DebugPanel { )) .when(!is_side, |this| { this.child(new_session_button()) - .child(logs_button()) + .child(edit_debug_json_button()) .child(documentation_button()) + .child(logs_button()) }), ), ), From dccbb47fbca1fc7fca283d0fffc0aa9eee6e1f68 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 22 Sep 2025 08:56:15 -0600 Subject: [PATCH 47/58] Use a consistent default for window scaling (#38527) (And make it 2, because most macs have retina screens) Release Notes: - N/A --- crates/gpui/src/platform/mac/window.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs index e8b42c57b8239f53118487f51bd194178c3c21c0..d5b1ea900d81adf83448960f485ed767c5549716 100644 --- a/crates/gpui/src/platform/mac/window.rs +++ b/crates/gpui/src/platform/mac/window.rs @@ -1566,7 +1566,7 @@ fn get_scale_factor(native_window: id) -> f32 { let factor = unsafe { let screen: id = msg_send![native_window, screen]; if screen.is_null() { - return 1.0; + return 2.0; } NSScreen::backingScaleFactor(screen) as f32 }; From 4e6e424fd78f2c024eff88efaaca4878379a5472 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 22 Sep 2025 17:07:40 +0200 Subject: [PATCH 48/58] acp: Support model selection for ACP agents (#38652) It requires the agent to implement the (still unstable) model selection API. Will allow us to test it out before stabilizing. 
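For illustration only (not part of this change), here is a minimal sketch of how a caller might drive the new per-session selector. The helper name `switch_model` and the exact `use` paths are assumptions; the trait methods follow `acp_thread::AgentConnection::model_selector` and `acp_thread::AgentModelSelector` as updated in the diff below:

```rust
use std::rc::Rc;

use agent_client_protocol as acp;
use anyhow::Result;
use gpui::{App, Task};

// Hypothetical helper, not part of this patch: select a model for one session.
fn switch_model(
    connection: &Rc<dyn acp_thread::AgentConnection>,
    session_id: &acp::SessionId,
    model_id: acp::ModelId,
    cx: &mut App,
) -> Option<Task<Result<()>>> {
    // Agents that don't implement the (still unstable) model selection API
    // return None here, so the UI can simply hide the selector for them.
    let selector = connection.model_selector(session_id)?;
    // The returned task resolves once the agent has switched models.
    Some(selector.select_model(model_id, cx))
}
```

A caller would typically detach the returned task (e.g. with `detach_and_log_err`), which is what the model picker delegate in this patch does.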
Release Notes: - N/A --- Cargo.lock | 9 +- Cargo.toml | 2 +- crates/acp_thread/src/connection.rs | 51 +++--- crates/agent2/src/agent.rs | 119 ++++++++------ crates/agent2/src/tests/mod.rs | 29 ++-- crates/agent_servers/src/acp.rs | 103 +++++++++++- crates/agent_ui/src/acp/model_selector.rs | 154 +++++++++++------- .../src/acp/model_selector_popover.rs | 4 +- crates/agent_ui/src/acp/thread_view.rs | 32 ++-- crates/picker/Cargo.toml | 1 + crates/picker/src/picker.rs | 67 +++++++- crates/ui/src/components/context_menu.rs | 6 +- tooling/workspace-hack/Cargo.toml | 4 +- 13 files changed, 391 insertions(+), 190 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dbe2467499ad1c5d6f67c4de82546e2b560451bb..e51968b0262a91d3a1ed78a10656e75b9d0d4523 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -195,9 +195,9 @@ dependencies = [ [[package]] name = "agent-client-protocol" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2526e80463b9742afed4829aedd6ae5632d6db778c6cc1fecb80c960c3521b" +checksum = "00e33b9f4bd34d342b6f80b7156d3a37a04aeec16313f264001e52d6a9118600" dependencies = [ "anyhow", "async-broadcast", @@ -4932,7 +4932,7 @@ dependencies = [ "libc", "option-ext", "redox_users 0.5.0", - "windows-sys 0.60.2", + "windows-sys 0.61.0", ] [[package]] @@ -12677,6 +12677,7 @@ dependencies = [ "schemars 1.0.1", "serde", "serde_json", + "theme", "ui", "workspace", "workspace-hack", @@ -20853,7 +20854,7 @@ dependencies = [ "windows-sys 0.48.0", "windows-sys 0.52.0", "windows-sys 0.59.0", - "windows-sys 0.60.2", + "windows-sys 0.61.0", "winnow", "zeroize", "zvariant", diff --git a/Cargo.toml b/Cargo.toml index d4812908ac8292caf8371ce1d6dd9c9ee4042ca0..fd552c6e9d117bd03b251f231dee8294b02ba928 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -439,7 +439,7 @@ zlog_settings = { path = "crates/zlog_settings" } # External crates # -agent-client-protocol = { version = "0.4.0", features = ["unstable"] } +agent-client-protocol = { version = "0.4.2", features = ["unstable"] } aho-corasick = "1.1" alacritty_terminal = "0.25.1-rc1" any_vec = "0.14" diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index 10c9dd22b6ec476f17fabeae7f6bd4f1a9672db7..fe66f954370f8118d054ee56f1e9f68f2de7e6f4 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -68,7 +68,7 @@ pub trait AgentConnection { /// /// If the agent does not support model selection, returns [None]. /// This allows sharing the selector in UI components. - fn model_selector(&self) -> Option> { + fn model_selector(&self, _session_id: &acp::SessionId) -> Option> { None } @@ -177,61 +177,48 @@ pub trait AgentModelSelector: 'static { /// If the session doesn't exist or the model is invalid, it returns an error. /// /// # Parameters - /// - `session_id`: The ID of the session (thread) to apply the model to. /// - `model`: The model to select (should be one from [list_models]). /// - `cx`: The GPUI app context. /// /// # Returns /// A task resolving to `Ok(())` on success or an error. - fn select_model( - &self, - session_id: acp::SessionId, - model_id: AgentModelId, - cx: &mut App, - ) -> Task>; + fn select_model(&self, model_id: acp::ModelId, cx: &mut App) -> Task>; /// Retrieves the currently selected model for a specific session (thread). /// /// # Parameters - /// - `session_id`: The ID of the session (thread) to query. /// - `cx`: The GPUI app context. 
/// /// # Returns /// A task resolving to the selected model (always set) or an error (e.g., session not found). - fn selected_model( - &self, - session_id: &acp::SessionId, - cx: &mut App, - ) -> Task>; + fn selected_model(&self, cx: &mut App) -> Task>; /// Whenever the model list is updated the receiver will be notified. - fn watch(&self, cx: &mut App) -> watch::Receiver<()>; -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct AgentModelId(pub SharedString); - -impl std::ops::Deref for AgentModelId { - type Target = SharedString; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -impl fmt::Display for AgentModelId { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) + /// Optional for agents that don't update their model list. + fn watch(&self, _cx: &mut App) -> Option> { + None } } #[derive(Debug, Clone, PartialEq, Eq)] pub struct AgentModelInfo { - pub id: AgentModelId, + pub id: acp::ModelId, pub name: SharedString, + pub description: Option, pub icon: Option, } +impl From for AgentModelInfo { + fn from(info: acp::ModelInfo) -> Self { + Self { + id: info.model_id, + name: info.name.into(), + description: info.description.map(|desc| desc.into()), + icon: None, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct AgentModelGroupName(pub SharedString); diff --git a/crates/agent2/src/agent.rs b/crates/agent2/src/agent.rs index 86fb50242c64917248df5c620782af066e639b54..36ab1be9ef79221b530258c4fdd55be2ac1e8b29 100644 --- a/crates/agent2/src/agent.rs +++ b/crates/agent2/src/agent.rs @@ -56,7 +56,7 @@ struct Session { pub struct LanguageModels { /// Access language model by ID - models: HashMap>, + models: HashMap>, /// Cached list for returning language model information model_list: acp_thread::AgentModelList, refresh_models_rx: watch::Receiver<()>, @@ -132,10 +132,7 @@ impl LanguageModels { self.refresh_models_rx.clone() } - pub fn model_from_id( - &self, - model_id: &acp_thread::AgentModelId, - ) -> Option> { + pub fn model_from_id(&self, model_id: &acp::ModelId) -> Option> { self.models.get(model_id).cloned() } @@ -146,12 +143,13 @@ impl LanguageModels { acp_thread::AgentModelInfo { id: Self::model_id(model), name: model.name().0, + description: None, icon: Some(provider.icon()), } } - fn model_id(model: &Arc) -> acp_thread::AgentModelId { - acp_thread::AgentModelId(format!("{}/{}", model.provider_id().0, model.id().0).into()) + fn model_id(model: &Arc) -> acp::ModelId { + acp::ModelId(format!("{}/{}", model.provider_id().0, model.id().0).into()) } fn authenticate_all_language_model_providers(cx: &mut App) -> Task<()> { @@ -836,10 +834,15 @@ impl NativeAgentConnection { } } -impl AgentModelSelector for NativeAgentConnection { +struct NativeAgentModelSelector { + session_id: acp::SessionId, + connection: NativeAgentConnection, +} + +impl acp_thread::AgentModelSelector for NativeAgentModelSelector { fn list_models(&self, cx: &mut App) -> Task> { log::debug!("NativeAgentConnection::list_models called"); - let list = self.0.read(cx).models.model_list.clone(); + let list = self.connection.0.read(cx).models.model_list.clone(); Task::ready(if list.is_empty() { Err(anyhow::anyhow!("No models available")) } else { @@ -847,24 +850,24 @@ impl AgentModelSelector for NativeAgentConnection { }) } - fn select_model( - &self, - session_id: acp::SessionId, - model_id: acp_thread::AgentModelId, - cx: &mut App, - ) -> Task> { - log::debug!("Setting model for session {}: {}", session_id, model_id); + fn select_model(&self, model_id: acp::ModelId, 
cx: &mut App) -> Task> { + log::debug!( + "Setting model for session {}: {}", + self.session_id, + model_id + ); let Some(thread) = self + .connection .0 .read(cx) .sessions - .get(&session_id) + .get(&self.session_id) .map(|session| session.thread.clone()) else { return Task::ready(Err(anyhow!("Session not found"))); }; - let Some(model) = self.0.read(cx).models.model_from_id(&model_id) else { + let Some(model) = self.connection.0.read(cx).models.model_from_id(&model_id) else { return Task::ready(Err(anyhow!("Invalid model ID {}", model_id))); }; @@ -872,33 +875,32 @@ impl AgentModelSelector for NativeAgentConnection { thread.set_model(model.clone(), cx); }); - update_settings_file(self.0.read(cx).fs.clone(), cx, move |settings, _cx| { - let provider = model.provider_id().0.to_string(); - let model = model.id().0.to_string(); - settings - .agent - .get_or_insert_default() - .set_model(LanguageModelSelection { - provider: provider.into(), - model, - }); - }); + update_settings_file( + self.connection.0.read(cx).fs.clone(), + cx, + move |settings, _cx| { + let provider = model.provider_id().0.to_string(); + let model = model.id().0.to_string(); + settings + .agent + .get_or_insert_default() + .set_model(LanguageModelSelection { + provider: provider.into(), + model, + }); + }, + ); Task::ready(Ok(())) } - fn selected_model( - &self, - session_id: &acp::SessionId, - cx: &mut App, - ) -> Task> { - let session_id = session_id.clone(); - + fn selected_model(&self, cx: &mut App) -> Task> { let Some(thread) = self + .connection .0 .read(cx) .sessions - .get(&session_id) + .get(&self.session_id) .map(|session| session.thread.clone()) else { return Task::ready(Err(anyhow!("Session not found"))); @@ -915,8 +917,8 @@ impl AgentModelSelector for NativeAgentConnection { ))) } - fn watch(&self, cx: &mut App) -> watch::Receiver<()> { - self.0.read(cx).models.watch() + fn watch(&self, cx: &mut App) -> Option> { + Some(self.connection.0.read(cx).models.watch()) } } @@ -972,8 +974,11 @@ impl acp_thread::AgentConnection for NativeAgentConnection { Task::ready(Ok(())) } - fn model_selector(&self) -> Option> { - Some(Rc::new(self.clone()) as Rc) + fn model_selector(&self, session_id: &acp::SessionId) -> Option> { + Some(Rc::new(NativeAgentModelSelector { + session_id: session_id.clone(), + connection: self.clone(), + }) as Rc) } fn prompt( @@ -1196,9 +1201,7 @@ mod tests { use crate::HistoryEntryId; use super::*; - use acp_thread::{ - AgentConnection, AgentModelGroupName, AgentModelId, AgentModelInfo, MentionUri, - }; + use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelInfo, MentionUri}; use fs::FakeFs; use gpui::TestAppContext; use indoc::indoc; @@ -1292,7 +1295,25 @@ mod tests { .unwrap(), ); - let models = cx.update(|cx| connection.list_models(cx)).await.unwrap(); + // Create a thread/session + let acp_thread = cx + .update(|cx| { + Rc::new(connection.clone()).new_thread(project.clone(), Path::new("/a"), cx) + }) + .await + .unwrap(); + + let session_id = cx.update(|cx| acp_thread.read(cx).session_id().clone()); + + let models = cx + .update(|cx| { + connection + .model_selector(&session_id) + .unwrap() + .list_models(cx) + }) + .await + .unwrap(); let acp_thread::AgentModelList::Grouped(models) = models else { panic!("Unexpected model group"); @@ -1302,8 +1323,9 @@ mod tests { IndexMap::from_iter([( AgentModelGroupName("Fake".into()), vec![AgentModelInfo { - id: AgentModelId("fake/fake".into()), + id: acp::ModelId("fake/fake".into()), name: "Fake".into(), + description: None, icon: 
Some(ui::IconName::ZedAssistant), }] )]) @@ -1360,8 +1382,9 @@ mod tests { let session_id = cx.update(|cx| acp_thread.read(cx).session_id().clone()); // Select a model - let model_id = AgentModelId("fake/fake".into()); - cx.update(|cx| connection.select_model(session_id.clone(), model_id.clone(), cx)) + let selector = connection.model_selector(&session_id).unwrap(); + let model_id = acp::ModelId("fake/fake".into()); + cx.update(|cx| selector.select_model(model_id.clone(), cx)) .await .unwrap(); diff --git a/crates/agent2/src/tests/mod.rs b/crates/agent2/src/tests/mod.rs index c0f693afe6dc0decdce4447471191bd78cf345f1..2e63aa5856501f880fec94f7659b13be321b03b3 100644 --- a/crates/agent2/src/tests/mod.rs +++ b/crates/agent2/src/tests/mod.rs @@ -1850,8 +1850,18 @@ async fn test_agent_connection(cx: &mut TestAppContext) { .unwrap(); let connection = NativeAgentConnection(agent.clone()); + // Create a thread using new_thread + let connection_rc = Rc::new(connection.clone()); + let acp_thread = cx + .update(|cx| connection_rc.new_thread(project, cwd, cx)) + .await + .expect("new_thread should succeed"); + + // Get the session_id from the AcpThread + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + // Test model_selector returns Some - let selector_opt = connection.model_selector(); + let selector_opt = connection.model_selector(&session_id); assert!( selector_opt.is_some(), "agent2 should always support ModelSelector" @@ -1868,23 +1878,16 @@ async fn test_agent_connection(cx: &mut TestAppContext) { }; assert!(!listed_models.is_empty(), "should have at least one model"); assert_eq!( - listed_models[&AgentModelGroupName("Fake".into())][0].id.0, + listed_models[&AgentModelGroupName("Fake".into())][0] + .id + .0 + .as_ref(), "fake/fake" ); - // Create a thread using new_thread - let connection_rc = Rc::new(connection.clone()); - let acp_thread = cx - .update(|cx| connection_rc.new_thread(project, cwd, cx)) - .await - .expect("new_thread should succeed"); - - // Get the session_id from the AcpThread - let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); - // Test selected_model returns the default let model = cx - .update(|cx| selector.selected_model(&session_id, cx)) + .update(|cx| selector.selected_model(cx)) .await .expect("selected_model should succeed"); let model = cx diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index b8c75a01a2e2965c255e32bd3c0746b26d78ecab..b14c0467c58d3f41e32e602996560e2cc672d76a 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -44,6 +44,7 @@ pub struct AcpConnection { pub struct AcpSession { thread: WeakEntity, suppress_abort_err: bool, + models: Option>>, session_modes: Option>>, } @@ -264,6 +265,7 @@ impl AgentConnection for AcpConnection { })?; let modes = response.modes.map(|modes| Rc::new(RefCell::new(modes))); + let models = response.models.map(|models| Rc::new(RefCell::new(models))); if let Some(default_mode) = default_mode { if let Some(modes) = modes.as_ref() { @@ -326,10 +328,12 @@ impl AgentConnection for AcpConnection { ) })?; + let session = AcpSession { thread: thread.downgrade(), suppress_abort_err: false, - session_modes: modes + session_modes: modes, + models, }; sessions.borrow_mut().insert(session_id, session); @@ -450,6 +454,27 @@ impl AgentConnection for AcpConnection { } } + fn model_selector( + &self, + session_id: &acp::SessionId, + ) -> Option> { + let sessions = self.sessions.clone(); + let sessions_ref = 
sessions.borrow(); + let Some(session) = sessions_ref.get(session_id) else { + return None; + }; + + if let Some(models) = session.models.as_ref() { + Some(Rc::new(AcpModelSelector::new( + session_id.clone(), + self.connection.clone(), + models.clone(), + )) as _) + } else { + None + } + } + fn into_any(self: Rc) -> Rc { self } @@ -500,6 +525,82 @@ impl acp_thread::AgentSessionModes for AcpSessionModes { } } +struct AcpModelSelector { + session_id: acp::SessionId, + connection: Rc, + state: Rc>, +} + +impl AcpModelSelector { + fn new( + session_id: acp::SessionId, + connection: Rc, + state: Rc>, + ) -> Self { + Self { + session_id, + connection, + state, + } + } +} + +impl acp_thread::AgentModelSelector for AcpModelSelector { + fn list_models(&self, _cx: &mut App) -> Task> { + Task::ready(Ok(acp_thread::AgentModelList::Flat( + self.state + .borrow() + .available_models + .clone() + .into_iter() + .map(acp_thread::AgentModelInfo::from) + .collect(), + ))) + } + + fn select_model(&self, model_id: acp::ModelId, cx: &mut App) -> Task> { + let connection = self.connection.clone(); + let session_id = self.session_id.clone(); + let old_model_id; + { + let mut state = self.state.borrow_mut(); + old_model_id = state.current_model_id.clone(); + state.current_model_id = model_id.clone(); + }; + let state = self.state.clone(); + cx.foreground_executor().spawn(async move { + let result = connection + .set_session_model(acp::SetSessionModelRequest { + session_id, + model_id, + meta: None, + }) + .await; + + if result.is_err() { + state.borrow_mut().current_model_id = old_model_id; + } + + result?; + + Ok(()) + }) + } + + fn selected_model(&self, _cx: &mut App) -> Task> { + let state = self.state.borrow(); + Task::ready( + state + .available_models + .iter() + .find(|m| m.model_id == state.current_model_id) + .cloned() + .map(acp_thread::AgentModelInfo::from) + .ok_or_else(|| anyhow::anyhow!("Model not found")), + ) + } +} + struct ClientDelegate { sessions: Rc>>, cx: AsyncApp, diff --git a/crates/agent_ui/src/acp/model_selector.rs b/crates/agent_ui/src/acp/model_selector.rs index 95c0478aa3cf6b1ca78cf391a5bd734820c41454..381bdb01edec49e222c9bd9b3a97ce9ba21a9789 100644 --- a/crates/agent_ui/src/acp/model_selector.rs +++ b/crates/agent_ui/src/acp/model_selector.rs @@ -1,7 +1,6 @@ use std::{cmp::Reverse, rc::Rc, sync::Arc}; use acp_thread::{AgentModelInfo, AgentModelList, AgentModelSelector}; -use agent_client_protocol as acp; use anyhow::Result; use collections::IndexMap; use futures::FutureExt; @@ -10,20 +9,19 @@ use gpui::{Action, AsyncWindowContext, BackgroundExecutor, DismissEvent, Task, W use ordered_float::OrderedFloat; use picker::{Picker, PickerDelegate}; use ui::{ - AnyElement, App, Context, IntoElement, ListItem, ListItemSpacing, SharedString, Window, - prelude::*, rems, + AnyElement, App, Context, DocumentationAside, DocumentationEdge, DocumentationSide, + IntoElement, ListItem, ListItemSpacing, SharedString, Window, prelude::*, rems, }; use util::ResultExt; pub type AcpModelSelector = Picker; pub fn acp_model_selector( - session_id: acp::SessionId, selector: Rc, window: &mut Window, cx: &mut Context, ) -> AcpModelSelector { - let delegate = AcpModelPickerDelegate::new(session_id, selector, window, cx); + let delegate = AcpModelPickerDelegate::new(selector, window, cx); Picker::list(delegate, window, cx) .show_scrollbar(true) .width(rems(20.)) @@ -36,61 +34,63 @@ enum AcpModelPickerEntry { } pub struct AcpModelPickerDelegate { - session_id: acp::SessionId, selector: Rc, filtered_entries: Vec, 
models: Option, selected_index: usize, + selected_description: Option<(usize, SharedString)>, selected_model: Option, _refresh_models_task: Task<()>, } impl AcpModelPickerDelegate { fn new( - session_id: acp::SessionId, selector: Rc, window: &mut Window, cx: &mut Context, ) -> Self { - let mut rx = selector.watch(cx); - let refresh_models_task = cx.spawn_in(window, { - let session_id = session_id.clone(); - async move |this, cx| { - async fn refresh( - this: &WeakEntity>, - session_id: &acp::SessionId, - cx: &mut AsyncWindowContext, - ) -> Result<()> { - let (models_task, selected_model_task) = this.update(cx, |this, cx| { - ( - this.delegate.selector.list_models(cx), - this.delegate.selector.selected_model(session_id, cx), - ) - })?; - - let (models, selected_model) = futures::join!(models_task, selected_model_task); + let rx = selector.watch(cx); + let refresh_models_task = { + cx.spawn_in(window, { + async move |this, cx| { + async fn refresh( + this: &WeakEntity>, + cx: &mut AsyncWindowContext, + ) -> Result<()> { + let (models_task, selected_model_task) = this.update(cx, |this, cx| { + ( + this.delegate.selector.list_models(cx), + this.delegate.selector.selected_model(cx), + ) + })?; - this.update_in(cx, |this, window, cx| { - this.delegate.models = models.ok(); - this.delegate.selected_model = selected_model.ok(); - this.refresh(window, cx) - }) - } + let (models, selected_model) = + futures::join!(models_task, selected_model_task); - refresh(&this, &session_id, cx).await.log_err(); - while let Ok(()) = rx.recv().await { - refresh(&this, &session_id, cx).await.log_err(); + this.update_in(cx, |this, window, cx| { + this.delegate.models = models.ok(); + this.delegate.selected_model = selected_model.ok(); + this.refresh(window, cx) + }) + } + + refresh(&this, cx).await.log_err(); + if let Some(mut rx) = rx { + while let Ok(()) = rx.recv().await { + refresh(&this, cx).await.log_err(); + } + } } - } - }); + }) + }; Self { - session_id, selector, filtered_entries: Vec::new(), models: None, selected_model: None, selected_index: 0, + selected_description: None, _refresh_models_task: refresh_models_task, } } @@ -182,7 +182,7 @@ impl PickerDelegate for AcpModelPickerDelegate { self.filtered_entries.get(self.selected_index) { self.selector - .select_model(self.session_id.clone(), model_info.id.clone(), cx) + .select_model(model_info.id.clone(), cx) .detach_and_log_err(cx); self.selected_model = Some(model_info.clone()); let current_index = self.selected_index; @@ -233,31 +233,46 @@ impl PickerDelegate for AcpModelPickerDelegate { }; Some( - ListItem::new(ix) - .inset(true) - .spacing(ListItemSpacing::Sparse) - .toggle_state(selected) - .start_slot::(model_info.icon.map(|icon| { - Icon::new(icon) - .color(model_icon_color) - .size(IconSize::Small) - })) + div() + .id(("model-picker-menu-child", ix)) + .when_some(model_info.description.clone(), |this, description| { + this + .on_hover(cx.listener(move |menu, hovered, _, cx| { + if *hovered { + menu.delegate.selected_description = Some((ix, description.clone())); + } else if matches!(menu.delegate.selected_description, Some((id, _)) if id == ix) { + menu.delegate.selected_description = None; + } + cx.notify(); + })) + }) .child( - h_flex() - .w_full() - .pl_0p5() - .gap_1p5() - .w(px(240.)) - .child(Label::new(model_info.name.clone()).truncate()), + ListItem::new(ix) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .toggle_state(selected) + .start_slot::(model_info.icon.map(|icon| { + Icon::new(icon) + .color(model_icon_color) + 
.size(IconSize::Small) + })) + .child( + h_flex() + .w_full() + .pl_0p5() + .gap_1p5() + .w(px(240.)) + .child(Label::new(model_info.name.clone()).truncate()), + ) + .end_slot(div().pr_3().when(is_selected, |this| { + this.child( + Icon::new(IconName::Check) + .color(Color::Accent) + .size(IconSize::Small), + ) + })), ) - .end_slot(div().pr_3().when(is_selected, |this| { - this.child( - Icon::new(IconName::Check) - .color(Color::Accent) - .size(IconSize::Small), - ) - })) - .into_any_element(), + .into_any_element() ) } } @@ -292,6 +307,21 @@ impl PickerDelegate for AcpModelPickerDelegate { .into_any(), ) } + + fn documentation_aside( + &self, + _window: &mut Window, + _cx: &mut Context>, + ) -> Option { + self.selected_description.as_ref().map(|(_, description)| { + let description = description.clone(); + DocumentationAside::new( + DocumentationSide::Left, + DocumentationEdge::Bottom, + Rc::new(move |_| Label::new(description.clone()).into_any_element()), + ) + }) + } } fn info_list_to_picker_entries( @@ -371,6 +401,7 @@ async fn fuzzy_search( #[cfg(test)] mod tests { + use agent_client_protocol as acp; use gpui::TestAppContext; use super::*; @@ -383,8 +414,9 @@ mod tests { models .into_iter() .map(|model| acp_thread::AgentModelInfo { - id: acp_thread::AgentModelId(model.to_string().into()), + id: acp::ModelId(model.to_string().into()), name: model.to_string().into(), + description: None, icon: None, }) .collect::>(), diff --git a/crates/agent_ui/src/acp/model_selector_popover.rs b/crates/agent_ui/src/acp/model_selector_popover.rs index fa771c695ecf8175859d145b8d08d2cf3447a77a..55f530c81b1cead74fd4ec4f6cc29ececcf2bf7e 100644 --- a/crates/agent_ui/src/acp/model_selector_popover.rs +++ b/crates/agent_ui/src/acp/model_selector_popover.rs @@ -1,7 +1,6 @@ use std::rc::Rc; use acp_thread::AgentModelSelector; -use agent_client_protocol as acp; use gpui::{Entity, FocusHandle}; use picker::popover_menu::PickerPopoverMenu; use ui::{ @@ -20,7 +19,6 @@ pub struct AcpModelSelectorPopover { impl AcpModelSelectorPopover { pub(crate) fn new( - session_id: acp::SessionId, selector: Rc, menu_handle: PopoverMenuHandle, focus_handle: FocusHandle, @@ -28,7 +26,7 @@ impl AcpModelSelectorPopover { cx: &mut Context, ) -> Self { Self { - selector: cx.new(move |cx| acp_model_selector(session_id, selector, window, cx)), + selector: cx.new(move |cx| acp_model_selector(selector, window, cx)), menu_handle, focus_handle, } diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 8658e2c285997c18ece2b9783c25fbcaa614dc83..391486a68eca87e238f9efb88288bc970e3eb412 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -577,23 +577,21 @@ impl AcpThreadView { AgentDiff::set_active_thread(&workspace, thread.clone(), window, cx); - this.model_selector = - thread - .read(cx) - .connection() - .model_selector() - .map(|selector| { - cx.new(|cx| { - AcpModelSelectorPopover::new( - thread.read(cx).session_id().clone(), - selector, - PopoverMenuHandle::default(), - this.focus_handle(cx), - window, - cx, - ) - }) - }); + this.model_selector = thread + .read(cx) + .connection() + .model_selector(thread.read(cx).session_id()) + .map(|selector| { + cx.new(|cx| { + AcpModelSelectorPopover::new( + selector, + PopoverMenuHandle::default(), + this.focus_handle(cx), + window, + cx, + ) + }) + }); let mode_selector = thread .read(cx) diff --git a/crates/picker/Cargo.toml b/crates/picker/Cargo.toml index 
d785cb5b3a96502165b10e2bf0def0d8bf66cd67..23c867b6f30aa64d5916e8939d836dda27ebf6c9 100644 --- a/crates/picker/Cargo.toml +++ b/crates/picker/Cargo.toml @@ -22,6 +22,7 @@ gpui.workspace = true menu.workspace = true schemars.workspace = true serde.workspace = true +theme.workspace = true ui.workspace = true workspace.workspace = true workspace-hack.workspace = true diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index 8816fb5424ff25788cec9cb602d2960ab753c135..247fcbdd875ffc2e52d90d9b1309f874c508e588 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -18,11 +18,12 @@ use head::Head; use schemars::JsonSchema; use serde::Deserialize; use std::{ops::Range, sync::Arc, time::Duration}; +use theme::ThemeSettings; use ui::{ - Color, Divider, Label, ListItem, ListItemSpacing, ScrollAxes, Scrollbars, WithScrollbar, - prelude::*, v_flex, + Color, Divider, DocumentationAside, DocumentationEdge, DocumentationSide, Label, ListItem, + ListItemSpacing, ScrollAxes, Scrollbars, WithScrollbar, prelude::*, utils::WithRemSize, v_flex, }; -use workspace::ModalView; +use workspace::{ModalView, item::Settings}; enum ElementContainer { List(ListState), @@ -222,6 +223,14 @@ pub trait PickerDelegate: Sized + 'static { ) -> Option { None } + + fn documentation_aside( + &self, + _window: &mut Window, + _cx: &mut Context>, + ) -> Option { + None + } } impl Focusable for Picker { @@ -781,8 +790,15 @@ impl ModalView for Picker {} impl Render for Picker { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let ui_font_size = ThemeSettings::get_global(cx).ui_font_size(cx); + let window_size = window.viewport_size(); + let rem_size = window.rem_size(); + let is_wide_window = window_size.width / rem_size > rems_from_px(800.).0; + + let aside = self.delegate.documentation_aside(window, cx); + let editor_position = self.delegate.editor_position(); - v_flex() + let menu = v_flex() .key_context("Picker") .size_full() .when_some(self.width, |el, width| el.w(width)) @@ -865,6 +881,47 @@ impl Render for Picker { } } Head::Empty(empty_head) => Some(div().child(empty_head.clone())), - }) + }); + + let Some(aside) = aside else { + return menu; + }; + + let render_aside = |aside: DocumentationAside, cx: &mut Context| { + WithRemSize::new(ui_font_size) + .occlude() + .elevation_2(cx) + .w_full() + .p_2() + .overflow_hidden() + .when(is_wide_window, |this| this.max_w_96()) + .when(!is_wide_window, |this| this.max_w_48()) + .child((aside.render)(cx)) + }; + + if is_wide_window { + div().relative().child(menu).child( + h_flex() + .absolute() + .when(aside.side == DocumentationSide::Left, |this| { + this.right_full().mr_1() + }) + .when(aside.side == DocumentationSide::Right, |this| { + this.left_full().ml_1() + }) + .when(aside.edge == DocumentationEdge::Top, |this| this.top_0()) + .when(aside.edge == DocumentationEdge::Bottom, |this| { + this.bottom_0() + }) + .child(render_aside(aside, cx)), + ) + } else { + v_flex() + .w_full() + .gap_1() + .justify_end() + .child(render_aside(aside, cx)) + .child(menu) + } } } diff --git a/crates/ui/src/components/context_menu.rs b/crates/ui/src/components/context_menu.rs index e57f02be915fdecec7a5af4894c6f4fdd72f48bc..7b61789b3c87d54ff231e1d635266d6502fb944f 100644 --- a/crates/ui/src/components/context_menu.rs +++ b/crates/ui/src/components/context_menu.rs @@ -180,9 +180,9 @@ pub enum DocumentationEdge { #[derive(Clone)] pub struct DocumentationAside { - side: DocumentationSide, - edge: DocumentationEdge, - render: Rc 
AnyElement>, + pub side: DocumentationSide, + pub edge: DocumentationEdge, + pub render: Rc AnyElement>, } impl DocumentationAside { diff --git a/tooling/workspace-hack/Cargo.toml b/tooling/workspace-hack/Cargo.toml index 68fd84b32b64e15b0ea63ef851ec5aac457179c2..342d675bf38c3f9233d3dee4f8eefd77bfbc7836 100644 --- a/tooling/workspace-hack/Cargo.toml +++ b/tooling/workspace-hack/Cargo.toml @@ -600,10 +600,10 @@ tower = { version = "0.5", default-features = false, features = ["timeout", "uti winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", "evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] } windows-core = { version = "0.61" } windows-numerics = { version = "0.2" } -windows-sys-4db8c43aad08e7ae = { package = "windows-sys", version = "0.60", features = ["Win32_Globalization", "Win32_System_Com", "Win32_UI_Shell"] } windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] } windows-sys-b21d60becc0929df = { package = "windows-sys", version = "0.52", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Wdk_System_IO", "Win32_Foundation", "Win32_Networking_WinSock", "Win32_Security_Authorization", "Win32_Storage_FileSystem", "Win32_System_Console", "Win32_System_IO", "Win32_System_Memory", "Win32_System_Pipes", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_WindowsProgramming"] } windows-sys-c8eced492e86ede7 = { package = "windows-sys", version = "0.48", features = ["Win32_Foundation", "Win32_Globalization", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] } +windows-sys-d4189bed749088b6 = { package = "windows-sys", version = "0.61", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_IO", "Win32_System_LibraryLoader", "Win32_System_Threading", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] } [target.x86_64-pc-windows-msvc.build-dependencies] codespan-reporting = { version = "0.12" } @@ -627,10 +627,10 @@ tower = { version = "0.5", default-features = false, features = ["timeout", "uti winapi = { version = "0.3", default-features = false, features = ["cfg", "commapi", "consoleapi", 
"evntrace", "fileapi", "handleapi", "impl-debug", "impl-default", "in6addr", "inaddr", "ioapiset", "knownfolders", "minwinbase", "minwindef", "namedpipeapi", "ntsecapi", "objbase", "processenv", "processthreadsapi", "shlobj", "std", "synchapi", "sysinfoapi", "timezoneapi", "winbase", "windef", "winerror", "winioctl", "winnt", "winreg", "winsock2", "winuser"] } windows-core = { version = "0.61" } windows-numerics = { version = "0.2" } -windows-sys-4db8c43aad08e7ae = { package = "windows-sys", version = "0.60", features = ["Win32_Globalization", "Win32_System_Com", "Win32_UI_Shell"] } windows-sys-73dcd821b1037cfd = { package = "windows-sys", version = "0.59", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_NetworkManagement_IpHelper", "Win32_Networking_WinSock", "Win32_Security_Authentication_Identity", "Win32_Security_Credentials", "Win32_Security_Cryptography", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Console", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Ioctl", "Win32_System_Kernel", "Win32_System_LibraryLoader", "Win32_System_Memory", "Win32_System_Performance", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Input_KeyboardAndMouse", "Win32_UI_Shell", "Win32_UI_WindowsAndMessaging"] } windows-sys-b21d60becc0929df = { package = "windows-sys", version = "0.52", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Wdk_System_IO", "Win32_Foundation", "Win32_Networking_WinSock", "Win32_Security_Authorization", "Win32_Storage_FileSystem", "Win32_System_Console", "Win32_System_IO", "Win32_System_Memory", "Win32_System_Pipes", "Win32_System_SystemServices", "Win32_System_Threading", "Win32_System_WindowsProgramming"] } windows-sys-c8eced492e86ede7 = { package = "windows-sys", version = "0.48", features = ["Win32_Foundation", "Win32_Globalization", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_Diagnostics_Debug", "Win32_System_IO", "Win32_System_Pipes", "Win32_System_Registry", "Win32_System_Threading", "Win32_System_Time", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] } +windows-sys-d4189bed749088b6 = { package = "windows-sys", version = "0.61", features = ["Wdk_Foundation", "Wdk_Storage_FileSystem", "Win32_Globalization", "Win32_Networking_WinSock", "Win32_Security", "Win32_Storage_FileSystem", "Win32_System_Com", "Win32_System_IO", "Win32_System_LibraryLoader", "Win32_System_Threading", "Win32_System_WindowsProgramming", "Win32_UI_Shell"] } [target.x86_64-unknown-linux-musl.dependencies] aes = { version = "0.8", default-features = false, features = ["zeroize"] } From d4fd59f0a299b021025b31d1872a4e7058858dbe Mon Sep 17 00:00:00 2001 From: Ran Benita Date: Mon, 22 Sep 2025 19:07:16 +0300 Subject: [PATCH 49/58] vim: Add support for `gt` and `gT` (#38570) Vim mode currently supports `gt` (go to next tab) and `gT` (go to previous tab) but not with count. Implement the expected behavior as defined by vim: - `gt` moves to tab `` - `gT` moves to previous tab `` times (with wraparound) Release Notes: - Improved vim `gt` and `gT` to support count, e.g. `5gt` - go to tab 5, `8gT` - go to 8th previous tab with wraparound. 
--- assets/keymaps/vim.json | 4 +- crates/vim/src/normal.rs | 132 ++++++++++++++++++++++++++++++++++++++- 2 files changed, 133 insertions(+), 3 deletions(-) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 590e84cf7fc10f7af5dd317bc114b75390414e4f..8f5f99e96f708dcc08cc1a9c1fcfc799d6ba43e7 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -95,8 +95,8 @@ "g g": "vim::StartOfDocument", "g h": "editor::Hover", "g B": "editor::BlameHover", - "g t": "pane::ActivateNextItem", - "g shift-t": "pane::ActivatePreviousItem", + "g t": "vim::GoToTab", + "g shift-t": "vim::GoToPreviousTab", "g d": "editor::GoToDefinition", "g shift-d": "editor::GoToDeclaration", "g y": "editor::GoToTypeDefinition", diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 7dfdb973c7603e9ef28bf757a9a716e729b72170..5d227ffd8bb6d92acc0546fcb9b9767962f6b417 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -28,7 +28,7 @@ use editor::Editor; use editor::{Anchor, SelectionEffects}; use editor::{Bias, ToPoint}; use editor::{display_map::ToDisplayPoint, movement}; -use gpui::{Context, Window, actions}; +use gpui::{Action, Context, Window, actions}; use language::{Point, SelectionGoal}; use log::error; use multi_buffer::MultiBufferRow; @@ -94,6 +94,10 @@ actions!( Redo, /// Undoes all changes to the most recently changed line. UndoLastLine, + /// Go to tab page (with count support). + GoToTab, + /// Go to previous tab page (with count support). + GoToPreviousTab, ] ); @@ -116,6 +120,8 @@ pub(crate) fn register(editor: &mut Editor, cx: &mut Context) { Vim::action(editor, cx, Vim::toggle_comments); Vim::action(editor, cx, Vim::paste); Vim::action(editor, cx, Vim::show_location); + Vim::action(editor, cx, Vim::go_to_tab); + Vim::action(editor, cx, Vim::go_to_previous_tab); Vim::action(editor, cx, |vim, _: &DeleteLeft, window, cx| { vim.record_current_action(cx); @@ -984,6 +990,54 @@ impl Vim { self.switch_mode(Mode::Insert, true, window, cx); } } + + fn go_to_tab(&mut self, _: &GoToTab, window: &mut Window, cx: &mut Context) { + let count = Vim::take_count(cx); + Vim::take_forced_motion(cx); + + if let Some(tab_index) = count { + // gt goes to tab (1-based). + let zero_based_index = tab_index.saturating_sub(1); + window.dispatch_action( + workspace::pane::ActivateItem(zero_based_index).boxed_clone(), + cx, + ); + } else { + // If no count is provided, go to the next tab. + window.dispatch_action(workspace::pane::ActivateNextItem.boxed_clone(), cx); + } + } + + fn go_to_previous_tab( + &mut self, + _: &GoToPreviousTab, + window: &mut Window, + cx: &mut Context, + ) { + let count = Vim::take_count(cx); + Vim::take_forced_motion(cx); + + if let Some(count) = count { + // gT with count goes back that many tabs with wraparound (not the same as gt!). + if let Some(workspace) = self.workspace(window) { + let pane = workspace.read(cx).active_pane().read(cx); + let item_count = pane.items().count(); + if item_count > 0 { + let current_index = pane.active_item_index(); + let target_index = (current_index as isize - count as isize) + .rem_euclid(item_count as isize) + as usize; + window.dispatch_action( + workspace::pane::ActivateItem(target_index).boxed_clone(), + cx, + ); + } + } + } else { + // No count provided, go to the previous tab. 
+ window.dispatch_action(workspace::pane::ActivatePreviousItem.boxed_clone(), cx); + } + } } #[cfg(test)] mod test { @@ -2119,4 +2173,80 @@ mod test { Mode::Normal, ); } + + #[gpui::test] + async fn test_go_to_tab_with_count(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + // Open 4 tabs. + cx.simulate_keystrokes(": tabnew"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes(": tabnew"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes(": tabnew"); + cx.simulate_keystrokes("enter"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.items(cx).count(), 4); + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 3); + }); + + cx.simulate_keystrokes("1 g t"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 0); + }); + + cx.simulate_keystrokes("3 g t"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 2); + }); + + cx.simulate_keystrokes("4 g t"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 3); + }); + + cx.simulate_keystrokes("1 g t"); + cx.simulate_keystrokes("g t"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 1); + }); + } + + #[gpui::test] + async fn test_go_to_previous_tab_with_count(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + // Open 4 tabs. + cx.simulate_keystrokes(": tabnew"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes(": tabnew"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes(": tabnew"); + cx.simulate_keystrokes("enter"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.items(cx).count(), 4); + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 3); + }); + + cx.simulate_keystrokes("2 g shift-t"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 1); + }); + + cx.simulate_keystrokes("g shift-t"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 0); + }); + + // Wraparound: gT from first tab should go to last. 
+ cx.simulate_keystrokes("g shift-t"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 3); + }); + + cx.simulate_keystrokes("6 g shift-t"); + cx.workspace(|workspace, _, cx| { + assert_eq!(workspace.active_pane().read(cx).active_item_index(), 1); + }); + } } From 9e64b7b911ea23d0758b7e5b8aad1e8c411fe72f Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Mon, 22 Sep 2025 18:12:35 +0200 Subject: [PATCH 50/58] terminal: Escape args in alacritty on Windows (#38650) Release Notes: - N/A --- crates/terminal/src/terminal.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index d1a4c8af9687c87a8c63b598262d0bdf797fada4..987e3272602763f93d350c07b10246707b0ea2ec 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -428,7 +428,7 @@ impl TerminalBuilder { drain_on_exit: true, env: env.clone().into_iter().collect(), #[cfg(windows)] - escape_args: false, + escape_args: true, } }; From 003163eb4f08e87eace4aaa40f41bdfb5a7c1f19 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 22 Sep 2025 10:22:37 -0600 Subject: [PATCH 51/58] Move my keybinding fixes to the right platform (#38654) In cffb883108ec07ec2f51446cb35eac19b89e625f I put the fixed keybindings on the wrong platform Release Notes: - Fix syntax node shortcuts --- assets/keymaps/default-macos.json | 2 ++ assets/keymaps/default-windows.json | 2 -- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 3d5887de75bf13218985c33ab37b8b54ca9ea0a1..4cf25c3b71047b7eb19791cee91f062d5720fe61 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -550,6 +550,8 @@ "cmd-ctrl-left": "editor::SelectSmallerSyntaxNode", // Shrink selection "cmd-ctrl-right": "editor::SelectLargerSyntaxNode", // Expand selection "cmd-ctrl-up": "editor::SelectPreviousSyntaxNode", // Move selection up + "ctrl-shift-right": "editor::SelectLargerSyntaxNode", // Expand selection (VSCode version) + "ctrl-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink selection (VSCode version) "cmd-ctrl-down": "editor::SelectNextSyntaxNode", // Move selection down "cmd-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand "cmd-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 9165840d695af73a41aedded9b8037ffbce8ccbf..5608d01379de75e612270da243c846d4da8d775f 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -497,8 +497,6 @@ "shift-alt-down": "editor::DuplicateLineDown", "shift-alt-right": "editor::SelectLargerSyntaxNode", // Expand selection "shift-alt-left": "editor::SelectSmallerSyntaxNode", // Shrink selection - "ctrl-shift-right": "editor::SelectLargerSyntaxNode", // Expand selection (VSCode version) - "ctrl-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink selection (VSCode version) "ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection "ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word "ctrl-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand From c88fdaf02d82160a5d321c28f238320c08e965e5 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Mon, 22 Sep 2025 12:33:12 -0400 Subject: [PATCH 52/58] Implement Markdown link embedding on paste (#38639) This PR adds automatic markdown URL embedding on paste when you are in text associated with the Markdown language and you have a valid URL in your clipboard. This is the default behavior in VS Code and GitHub, when pasting a URL in Markdown. It works in both singleton buffers and multi buffers. One thing that is a bit unfortunate is that, previously, `do_paste` used to simply call `Editor::insert()`, in the case of pasting content that was copied from an external application, and now, we are duplicating some of `insert()`'s logic in place, in order to have control over transforming the edits before they are inserted. Release Notes: - Added automatic Markdown URL embedding on paste. --------- Co-authored-by: Cole Miller <53574922+cole-miller@users.noreply.github.com> --- crates/editor/src/editor.rs | 112 +++++++++++++--- crates/editor/src/editor_tests.rs | 211 ++++++++++++++++++++++++++++++ 2 files changed, 307 insertions(+), 16 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 8b0fc5512731eff70b1e9ac41b6bfe16a65babfa..fbf70322b890ab9a2a3c1f9e915a5debae2e4e64 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12452,13 +12452,14 @@ impl Editor { return; } - let clipboard_text = Cow::Borrowed(text); + let clipboard_text = Cow::Borrowed(text.as_str()); self.transact(window, cx, |this, window, cx| { let had_active_edit_prediction = this.has_active_edit_prediction(); + let old_selections = this.selections.all::(cx); + let cursor_offset = this.selections.last::(cx).head(); if let Some(mut clipboard_selections) = clipboard_selections { - let old_selections = this.selections.all::(cx); let all_selections_were_entire_line = clipboard_selections.iter().all(|s| s.is_entire_line); let first_selection_indent_column = @@ -12466,7 +12467,6 @@ impl Editor { if clipboard_selections.len() != old_selections.len() { clipboard_selections.drain(..); } - let cursor_offset = this.selections.last::(cx).head(); let mut auto_indent_on_paste = true; this.buffer.update(cx, |buffer, cx| { @@ -12489,22 +12489,36 @@ impl Editor { start_offset = end_offset + 1; original_indent_column = Some(clipboard_selection.first_line_indent); } else { - to_insert = clipboard_text.as_str(); + to_insert = &*clipboard_text; entire_line = all_selections_were_entire_line; original_indent_column = first_selection_indent_column } - // If the corresponding selection was empty when this slice of the - // clipboard text was written, then the entire line containing the - // selection was copied. If this selection is also currently empty, - // then paste the line before the current line of the buffer. - let range = if selection.is_empty() && handle_entire_lines && entire_line { - let column = selection.start.to_point(&snapshot).column as usize; - let line_start = selection.start - column; - line_start..line_start - } else { - selection.range() - }; + let (range, to_insert) = + if selection.is_empty() && handle_entire_lines && entire_line { + // If the corresponding selection was empty when this slice of the + // clipboard text was written, then the entire line containing the + // selection was copied. If this selection is also currently empty, + // then paste the line before the current line of the buffer.
+ let column = selection.start.to_point(&snapshot).column as usize; + let line_start = selection.start - column; + (line_start..line_start, Cow::Borrowed(to_insert)) + } else { + let language = snapshot.language_at(selection.head()); + let range = selection.range(); + if let Some(language) = language + && language.name() == "Markdown".into() + { + edit_for_markdown_paste( + &snapshot, + range, + to_insert, + url::Url::parse(to_insert).ok(), + ) + } else { + (range, Cow::Borrowed(to_insert)) + } + }; edits.push((range, to_insert)); original_indent_columns.push(original_indent_column); @@ -12527,7 +12541,53 @@ impl Editor { let selections = this.selections.all::(cx); this.change_selections(Default::default(), window, cx, |s| s.select(selections)); } else { - this.insert(&clipboard_text, window, cx); + let url = url::Url::parse(&clipboard_text).ok(); + + let auto_indent_mode = if !clipboard_text.is_empty() { + Some(AutoindentMode::Block { + original_indent_columns: Vec::new(), + }) + } else { + None + }; + + let selection_anchors = this.buffer.update(cx, |buffer, cx| { + let snapshot = buffer.snapshot(cx); + + let anchors = old_selections + .iter() + .map(|s| { + let anchor = snapshot.anchor_after(s.head()); + s.map(|_| anchor) + }) + .collect::>(); + + let mut edits = Vec::new(); + + for selection in old_selections.iter() { + let language = snapshot.language_at(selection.head()); + let range = selection.range(); + + let (edit_range, edit_text) = if let Some(language) = language + && language.name() == "Markdown".into() + { + edit_for_markdown_paste(&snapshot, range, &clipboard_text, url.clone()) + } else { + (range, clipboard_text.clone()) + }; + + edits.push((edit_range, edit_text)); + } + + drop(snapshot); + buffer.edit(edits, auto_indent_mode, cx); + + anchors + }); + + this.change_selections(Default::default(), window, cx, |s| { + s.select_anchors(selection_anchors); + }); } let trigger_in_words = @@ -21679,6 +21739,26 @@ impl Editor { } } +fn edit_for_markdown_paste<'a>( + buffer: &MultiBufferSnapshot, + range: Range, + to_insert: &'a str, + url: Option, +) -> (Range, Cow<'a, str>) { + if url.is_none() { + return (range, Cow::Borrowed(to_insert)); + }; + + let old_text = buffer.text_for_range(range.clone()).collect::(); + + let new_text = if range.is_empty() || url::Url::parse(&old_text).is_ok() { + Cow::Borrowed(to_insert) + } else { + Cow::Owned(format!("[{old_text}]({to_insert})")) + }; + (range, new_text) +} + fn vim_enabled(cx: &App) -> bool { vim_mode_setting::VimModeSetting::try_get(cx) .map(|vim_mode| vim_mode.0) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 05742cd00bb834550ee20377ff46da6649272f43..9f888702f99b0b916d35625806c18e53043d0101 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -25974,6 +25974,217 @@ let result = variable * 2;", ); } +#[gpui::test] +async fn test_paste_url_from_other_app_creates_markdown_link_over_selected_text( + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let url = "https://zed.dev"; + + let markdown_language = Arc::new(Language::new( + LanguageConfig { + name: "Markdown".into(), + ..LanguageConfig::default() + }, + None, + )); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + cx.set_state("Hello, «editorˇ».\nZed is «ˇgreat» (see this link: ˇ)"); + + cx.update_editor(|editor, window, cx| { + cx.write_to_clipboard(ClipboardItem::new_string(url.to_string())); + 
editor.paste(&Paste, window, cx); + }); + + cx.assert_editor_state(&format!( + "Hello, [editor]({url})ˇ.\nZed is [great]({url})ˇ (see this link: {url}ˇ)" + )); +} + +#[gpui::test] +async fn test_paste_url_from_zed_copy_creates_markdown_link_over_selected_text( + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let url = "https://zed.dev"; + + let markdown_language = Arc::new(Language::new( + LanguageConfig { + name: "Markdown".into(), + ..LanguageConfig::default() + }, + None, + )); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + cx.set_state(&format!( + "Hello, editor.\nZed is great (see this link: )\n«{url}ˇ»" + )); + + cx.update_editor(|editor, window, cx| { + editor.copy(&Copy, window, cx); + }); + + cx.set_state(&format!( + "Hello, «editorˇ».\nZed is «ˇgreat» (see this link: ˇ)\n{url}" + )); + + cx.update_editor(|editor, window, cx| { + editor.paste(&Paste, window, cx); + }); + + cx.assert_editor_state(&format!( + "Hello, [editor]({url})ˇ.\nZed is [great]({url})ˇ (see this link: {url}ˇ)\n{url}" + )); +} + +#[gpui::test] +async fn test_paste_url_from_other_app_replaces_existing_url_without_creating_markdown_link( + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let url = "https://zed.dev"; + + let markdown_language = Arc::new(Language::new( + LanguageConfig { + name: "Markdown".into(), + ..LanguageConfig::default() + }, + None, + )); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + cx.set_state("Please visit zed's homepage: «https://www.apple.comˇ»"); + + cx.update_editor(|editor, window, cx| { + cx.write_to_clipboard(ClipboardItem::new_string(url.to_string())); + editor.paste(&Paste, window, cx); + }); + + cx.assert_editor_state(&format!("Please visit zed's homepage: {url}ˇ")); +} + +#[gpui::test] +async fn test_paste_plain_text_from_other_app_replaces_selection_without_creating_markdown_link( + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let text = "Awesome"; + + let markdown_language = Arc::new(Language::new( + LanguageConfig { + name: "Markdown".into(), + ..LanguageConfig::default() + }, + None, + )); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + cx.set_state("Hello, «editorˇ».\nZed is «ˇgreat»"); + + cx.update_editor(|editor, window, cx| { + cx.write_to_clipboard(ClipboardItem::new_string(text.to_string())); + editor.paste(&Paste, window, cx); + }); + + cx.assert_editor_state(&format!("Hello, {text}ˇ.\nZed is {text}ˇ")); +} + +#[gpui::test] +async fn test_paste_url_from_other_app_without_creating_markdown_link_in_non_markdown_language( + cx: &mut gpui::TestAppContext, +) { + init_test(cx, |_| {}); + + let url = "https://zed.dev"; + + let markdown_language = Arc::new(Language::new( + LanguageConfig { + name: "Rust".into(), + ..LanguageConfig::default() + }, + None, + )); + + let mut cx = EditorTestContext::new(cx).await; + cx.update_buffer(|buffer, cx| buffer.set_language(Some(markdown_language), cx)); + cx.set_state("// Hello, «editorˇ».\n// Zed is «ˇgreat» (see this link: ˇ)"); + + cx.update_editor(|editor, window, cx| { + cx.write_to_clipboard(ClipboardItem::new_string(url.to_string())); + editor.paste(&Paste, window, cx); + }); + + cx.assert_editor_state(&format!( + "// Hello, {url}ˇ.\n// Zed is {url}ˇ (see this link: {url}ˇ)" + )); +} + +#[gpui::test] +async fn 
test_paste_url_from_other_app_creates_markdown_link_selectively_in_multi_buffer( + cx: &mut TestAppContext, +) { + init_test(cx, |_| {}); + + let url = "https://zed.dev"; + + let markdown_language = Arc::new(Language::new( + LanguageConfig { + name: "Markdown".into(), + ..LanguageConfig::default() + }, + None, + )); + + let (editor, cx) = cx.add_window_view(|window, cx| { + let multi_buffer = MultiBuffer::build_multi( + [ + ("this will embed -> link", vec![Point::row_range(0..1)]), + ("this will replace -> link", vec![Point::row_range(0..1)]), + ], + cx, + ); + let mut editor = Editor::new(EditorMode::full(), multi_buffer.clone(), None, window, cx); + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges(vec![ + Point::new(0, 19)..Point::new(0, 23), + Point::new(1, 21)..Point::new(1, 25), + ]) + }); + let first_buffer_id = multi_buffer + .read(cx) + .excerpt_buffer_ids() + .into_iter() + .next() + .unwrap(); + let first_buffer = multi_buffer.read(cx).buffer(first_buffer_id).unwrap(); + first_buffer.update(cx, |buffer, cx| { + buffer.set_language(Some(markdown_language.clone()), cx); + }); + + editor + }); + let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await; + + cx.update_editor(|editor, window, cx| { + cx.write_to_clipboard(ClipboardItem::new_string(url.to_string())); + editor.paste(&Paste, window, cx); + }); + + cx.assert_editor_state(&format!( + "this will embed -> [link]({url})ˇ\nthis will replace -> {url}ˇ" + )); +} + #[track_caller] fn extract_color_inlays(editor: &Editor, cx: &App) -> Vec { editor From a0514af58955a21401b4c10918a45c9c241a4a74 Mon Sep 17 00:00:00 2001 From: Miao Date: Tue, 23 Sep 2025 00:56:40 +0800 Subject: [PATCH 53/58] editor: Make buffer search bar capture CopyPath & CopyRelativePath actions (#38645) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #38495 Cause: - When the Find input is focused, CopyPath/CopyRelativePath were handled by the editor and stopped during the bubble phase, preventing BufferSearchBar from relaying to the file-backed editor. Release Notes: - Fixes “Workspace: Copy Relative Path” not copying while the Find bar is focused. 
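To make the flow easier to follow, here is a self-contained toy model of the dispatch described above (plain Rust, not gpui; the types and method names are illustrative): the find field used to swallow `CopyPath`, whereas now it propagates and the search bar relays the action to the file-backed editor.

```rust
// Toy model of the bubble-phase flow behind this fix; not gpui code, and the
// names are illustrative. Before: the pathless find-field editor handled
// CopyPath and the action stopped there. After: it propagates, and the search
// bar relays the action to the file-backed editor it is attached to.
#[derive(Debug, PartialEq)]
enum Outcome {
    Copied(String), // stands in for cx.write_to_clipboard(...)
    Propagated,     // stands in for cx.propagate()
}

struct Editor {
    path: Option<String>,
}

impl Editor {
    fn copy_path(&self) -> Outcome {
        match &self.path {
            Some(path) => Outcome::Copied(path.clone()),
            None => Outcome::Propagated, // the new else-branch added in this patch
        }
    }
}

struct BufferSearchBar {
    active_searchable_item: Editor,
}

impl BufferSearchBar {
    // Mirrors the new on_action listeners: relay CopyPath/CopyRelativePath to
    // the active searchable item instead of letting the action die at the
    // find field.
    fn copy_path(&self, find_field: &Editor) -> Outcome {
        match find_field.copy_path() {
            Outcome::Propagated => self.active_searchable_item.copy_path(),
            handled => handled,
        }
    }
}

fn main() {
    let find_field = Editor { path: None };
    let bar = BufferSearchBar {
        active_searchable_item: Editor {
            path: Some("crates/editor/src/editor.rs".into()),
        },
    };
    assert_eq!(
        bar.copy_path(&find_field),
        Outcome::Copied("crates/editor/src/editor.rs".into())
    );
}
```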
--- crates/editor/src/editor.rs | 4 ++++ crates/search/src/buffer_search.rs | 12 +++++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index fbf70322b890ab9a2a3c1f9e915a5debae2e4e64..38ebdb4909b051d96700447617b392a7741714a7 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -19308,6 +19308,8 @@ impl Editor { && let Some(path) = path.to_str() { cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); + } else { + cx.propagate(); } } @@ -19321,6 +19323,8 @@ impl Editor { && let Some(path) = path.to_str() { cx.write_to_clipboard(ClipboardItem::new_string(path.to_string())); + } else { + cx.propagate(); } } diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index dd096cde851b21983be80c5ce64b78338f54d78e..925d390cb3eb5489025818e4826aba691ac1bfa8 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -28,7 +28,7 @@ use schemars::JsonSchema; use serde::Deserialize; use settings::Settings; use std::sync::Arc; -use zed_actions::outline::ToggleOutline; +use zed_actions::{outline::ToggleOutline, workspace::CopyPath, workspace::CopyRelativePath}; use ui::{ BASE_REM_SIZE_IN_PX, IconButton, IconButtonShape, IconName, Tooltip, h_flex, prelude::*, @@ -425,6 +425,16 @@ impl Render for BufferSearchBar { active_searchable_item.relay_action(Box::new(ToggleOutline), window, cx); } })) + .on_action(cx.listener(|this, _: &CopyPath, window, cx| { + if let Some(active_searchable_item) = &mut this.active_searchable_item { + active_searchable_item.relay_action(Box::new(CopyPath), window, cx); + } + })) + .on_action(cx.listener(|this, _: &CopyRelativePath, window, cx| { + if let Some(active_searchable_item) = &mut this.active_searchable_item { + active_searchable_item.relay_action(Box::new(CopyRelativePath), window, cx); + } + })) .when(replacement, |this| { this.on_action(cx.listener(Self::toggle_replace)) .when(in_replace, |this| { From d4adb515539193e617752df144c42baa9d8a4f03 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Mon, 22 Sep 2025 14:59:24 -0400 Subject: [PATCH 54/58] languages: Update package.json and tsconfig.json schemas (#38655) Closes: https://github.com/zed-industries/zed/issues/34382 - Add support for `tsconfig.*.json` not just `tsconfig.json`. - Updated JSON schemas to [SchemaStore/schemastore@281aa4a](https://github.com/SchemaStore/schemastore/tree/281aa4aa4ac21385814423f86a54d1b8ccfc17a1) (2025-09-21) - [tsconfig.json](https://github.com/SchemaStore/schemastore/commits/master/src/schemas/json/tsconfig.json) @ [281aa4a](https://raw.githubusercontent.com/SchemaStore/schemastore/281aa4aa4ac21385814423f86a54d1b8ccfc17a1/src/schemas/json/tsconfig.json) - [package.json](https://github.com/SchemaStore/schemastore/commits/master/src/schemas/json/package.json) @ [281aa4a](https://raw.githubusercontent.com/SchemaStore/schemastore/281aa4aa4ac21385814423f86a54d1b8ccfc17a1/src/schemas/json/package.json) See also: - [discord thread](https://discord.com/channels/869392257814519848/1419298937290096760) - https://github.com/zed-industries/zed/issues/21994#issuecomment-3319321308 Release Notes: - Updated package.json and tsconfig.json schemas to newest release (2025-09-21). Match `tsconfig.*.json` too. 
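For context (the file names below are examples, not taken from the patch), the `tsconfig*.json` glob now covers the common variant configs, and the same effect can be reproduced per-user through the existing `file_types` setting that the diff below extends:

```jsonc
// Examples of files the new `tsconfig*.json` pattern matches:
//   tsconfig.json
//   tsconfig.base.json
//   tsconfig.build.json
//   tsconfig.node.json
// Illustrative user-level override in settings.json, using the same
// `file_types` mechanism the default settings use:
{
  "file_types": {
    "JSONC": ["tsconfig*.json"]
  }
}
```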
--- assets/settings/default.json | 2 +- crates/languages/src/json.rs | 2 +- .../languages/src/json/schemas/package.json | 150 +++++++++++- .../languages/src/json/schemas/tsconfig.json | 225 +++++------------- 4 files changed, 212 insertions(+), 167 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 091231521470ebec50cf1351a76063e9205a3d24..7730ba8cf63f94ddab0ecf6c1d989c9d66c590d4 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1514,7 +1514,7 @@ // } // "file_types": { - "JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json"], + "JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "**/.vscode/**/*.json", "tsconfig*.json"], "Shell Script": [".env.*"] }, // Settings for which version of Node.js and NPM to use when installing diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index 496b0389e6e331f5c1d694d3ad30b5abffbee106..7e698cbf095b5679aefda4230b2b5b08c24b0825 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -181,7 +181,7 @@ impl JsonLspAdapter { #[allow(unused_mut)] let mut schemas = serde_json::json!([ { - "fileMatch": ["tsconfig.json"], + "fileMatch": ["tsconfig*.json"], "schema":tsconfig_schema }, { diff --git a/crates/languages/src/json/schemas/package.json b/crates/languages/src/json/schemas/package.json index 664149eca92b81946420c98405219440c7be7c08..a24583fa8848891d661114291951d4df28f463fd 100644 --- a/crates/languages/src/json/schemas/package.json +++ b/crates/languages/src/json/schemas/package.json @@ -160,6 +160,11 @@ "$ref": "#/definitions/packageExportsEntryOrFallback", "description": "The module path that is resolved when this specifier is imported as an ECMAScript module using an `import` declaration or the dynamic `import(...)` function." }, + "module-sync": { + "$ref": "#/definitions/packageExportsEntryOrFallback", + "$comment": "https://nodejs.org/api/packages.html#conditional-exports#:~:text=%22module-sync%22", + "description": "The same as `import`, but can be used with require(esm) in Node 20+. This requires the files to not use any top-level awaits." + }, "node": { "$ref": "#/definitions/packageExportsEntryOrFallback", "description": "The module path that is resolved when this environment is Node.js." @@ -304,6 +309,33 @@ "required": [ "url" ] + }, + "devEngineDependency": { + "description": "Specifies requirements for development environment components such as operating systems, runtimes, or package managers. Used to ensure consistent development environments across the team.", + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string", + "description": "The name of the dependency, with allowed values depending on the parent field" + }, + "version": { + "type": "string", + "description": "The version range for the dependency" + }, + "onFail": { + "type": "string", + "enum": [ + "ignore", + "warn", + "error", + "download" + ], + "description": "What action to take if validation fails" + } + } } }, "type": "object", @@ -755,7 +787,7 @@ ] }, "resolutions": { - "description": "Resolutions is used to support selective version resolutions using yarn, which lets you define custom package versions or ranges inside your dependencies. For npm, use overrides instead. 
See: https://classic.yarnpkg.com/en/docs/selective-version-resolutions", + "description": "Resolutions is used to support selective version resolutions using yarn, which lets you define custom package versions or ranges inside your dependencies. For npm, use overrides instead. See: https://yarnpkg.com/configuration/manifest#resolutions", "type": "object" }, "overrides": { @@ -810,6 +842,82 @@ "type": "string" } }, + "devEngines": { + "description": "Define the runtime and package manager for developing the current project.", + "type": "object", + "properties": { + "os": { + "oneOf": [ + { + "$ref": "#/definitions/devEngineDependency" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/devEngineDependency" + } + } + ], + "description": "Specifies which operating systems are supported for development" + }, + "cpu": { + "oneOf": [ + { + "$ref": "#/definitions/devEngineDependency" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/devEngineDependency" + } + } + ], + "description": "Specifies which CPU architectures are supported for development" + }, + "libc": { + "oneOf": [ + { + "$ref": "#/definitions/devEngineDependency" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/devEngineDependency" + } + } + ], + "description": "Specifies which C standard libraries are supported for development" + }, + "runtime": { + "oneOf": [ + { + "$ref": "#/definitions/devEngineDependency" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/devEngineDependency" + } + } + ], + "description": "Specifies which JavaScript runtimes (like Node.js, Deno, Bun) are supported for development. Values should use WinterCG Runtime Keys (see https://runtime-keys.proposal.wintercg.org/)" + }, + "packageManager": { + "oneOf": [ + { + "$ref": "#/definitions/devEngineDependency" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/devEngineDependency" + } + } + ], + "description": "Specifies which package managers are supported for development" + } + } + }, "preferGlobal": { "type": "boolean", "description": "DEPRECATED: This option used to trigger an npm warning, but it will no longer warn. It is purely there for informational purposes. It is now recommended that you install any binaries as local devDependencies wherever possible." 
@@ -973,6 +1081,7 @@ "additionalProperties": false }, "peerDependencyRules": { + "type": "object", "properties": { "ignoreMissing": { "description": "pnpm will not print warnings about missing peer dependencies from this list.", @@ -1032,6 +1141,10 @@ "description": "When true, installation won't fail if some of the patches from the \"patchedDependencies\" field were not applied.", "type": "boolean" }, + "allowUnusedPatches": { + "description": "When true, installation won't fail if some of the patches from the \"patchedDependencies\" field were not applied.", + "type": "boolean" + }, "updateConfig": { "type": "object", "properties": { @@ -1122,6 +1235,41 @@ } }, "additionalProperties": false + }, + "stackblitz": { + "description": "Defines the StackBlitz configuration for the project.", + "type": "object", + "properties": { + "installDependencies": { + "description": "StackBlitz automatically installs npm dependencies when opening a project.", + "type": "boolean" + }, + "startCommand": { + "description": "A terminal command to be executed when opening the project, after installing npm dependencies.", + "type": [ + "string", + "boolean" + ] + }, + "compileTrigger": { + "description": "The compileTrigger option controls how file changes in the editor are written to the WebContainers in-memory filesystem. ", + "oneOf": [ + { + "type": "string", + "enum": [ + "auto", + "keystroke", + "save" + ] + } + ] + }, + "env": { + "description": "A map of default environment variables that will be set in each top-level shell process.", + "type": "object" + } + }, + "additionalProperties": false } }, "anyOf": [ diff --git a/crates/languages/src/json/schemas/tsconfig.json b/crates/languages/src/json/schemas/tsconfig.json index 4b9088725401e27dfc24c14d7c58acfae4355631..e734062d65b1f330495e96ea55c2e28388e5bcc8 100644 --- a/crates/languages/src/json/schemas/tsconfig.json +++ b/crates/languages/src/json/schemas/tsconfig.json @@ -1,5 +1,6 @@ { "$schema": "http://json-schema.org/draft-04/schema#", + "$comment": "Note that this schema uses 'null' in various places. The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058)", "allowTrailingCommas": true, "allOf": [ { @@ -49,7 +50,6 @@ "filesDefinition": { "properties": { "files": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "If no 'files' or 'include' property is present in a tsconfig.json, the compiler defaults to including all files in the containing directory and subdirectories except those specified by 'exclude'. When a 'files' property is specified, only those files and those specified by 'include' are included.", "type": ["array", "null"], "uniqueItems": true, @@ -62,7 +62,6 @@ "excludeDefinition": { "properties": { "exclude": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specifies a list of files to be excluded from compilation. The 'exclude' property only affects the files included via the 'include' property and not the 'files' property. Glob patterns require TypeScript version 2.0 or later.", "type": ["array", "null"], "uniqueItems": true, @@ -75,7 +74,6 @@ "includeDefinition": { "properties": { "include": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specifies a list of glob patterns that match files to be included in compilation. 
If no 'files' or 'include' property is present in a tsconfig.json, the compiler defaults to including all files in the containing directory and subdirectories except those specified by 'exclude'. Requires TypeScript version 2.0 or later.", "type": ["array", "null"], "uniqueItems": true, @@ -118,41 +116,35 @@ "buildOptions": { "properties": { "dry": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "~", "type": ["boolean", "null"], "default": false }, "force": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Build all projects, including those that appear to be up to date", "type": ["boolean", "null"], "default": false, "markdownDescription": "Build all projects, including those that appear to be up to date\n\nSee more: https://www.typescriptlang.org/tsconfig#force" }, "verbose": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable verbose logging", "type": ["boolean", "null"], "default": false, "markdownDescription": "Enable verbose logging\n\nSee more: https://www.typescriptlang.org/tsconfig#verbose" }, "incremental": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Save .tsbuildinfo files to allow for incremental compilation of projects.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Save .tsbuildinfo files to allow for incremental compilation of projects.\n\nSee more: https://www.typescriptlang.org/tsconfig#incremental" }, "assumeChangesOnlyAffectDirectDependencies": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Have recompiles in projects that use `incremental` and `watch` mode assume that changes within a file will only affect files directly depending on it.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Have recompiles in projects that use `incremental` and `watch` mode assume that changes within a file will only affect files directly depending on it.\n\nSee more: https://www.typescriptlang.org/tsconfig#assumeChangesOnlyAffectDirectDependencies" }, "traceResolution": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Log paths used during the `moduleResolution` process.", "type": ["boolean", "null"], "default": false, @@ -165,7 +157,6 @@ "watchOptionsDefinition": { "properties": { "watchOptions": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["object", "null"], "description": "Settings for the watch mode in TypeScript.", "properties": { @@ -174,31 +165,26 @@ "type": ["string", "null"] }, "watchFile": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify how the TypeScript watch mode works.", "type": ["string", "null"], "markdownDescription": "Specify how the TypeScript watch mode works.\n\nSee more: https://www.typescriptlang.org/tsconfig#watchFile" }, "watchDirectory": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify how directories are watched on systems that lack recursive file-watching functionality.", "type": ["string", "null"], 
"markdownDescription": "Specify how directories are watched on systems that lack recursive file-watching functionality.\n\nSee more: https://www.typescriptlang.org/tsconfig#watchDirectory" }, "fallbackPolling": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify what approach the watcher should use if the system runs out of native file watchers.", "type": ["string", "null"], "markdownDescription": "Specify what approach the watcher should use if the system runs out of native file watchers.\n\nSee more: https://www.typescriptlang.org/tsconfig#fallbackPolling" }, "synchronousWatchDirectory": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Synchronously call callbacks and update the state of directory watchers on platforms that don`t support recursive watching natively.", "type": ["boolean", "null"], "markdownDescription": "Synchronously call callbacks and update the state of directory watchers on platforms that don`t support recursive watching natively.\n\nSee more: https://www.typescriptlang.org/tsconfig#synchronousWatchDirectory" }, "excludeFiles": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Remove a list of files from the watch mode's processing.", "type": ["array", "null"], "uniqueItems": true, @@ -208,7 +194,6 @@ "markdownDescription": "Remove a list of files from the watch mode's processing.\n\nSee more: https://www.typescriptlang.org/tsconfig#excludeFiles" }, "excludeDirectories": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Remove a list of directories from the watch process.", "type": ["array", "null"], "uniqueItems": true, @@ -224,37 +209,31 @@ "compilerOptionsDefinition": { "properties": { "compilerOptions": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["object", "null"], "description": "Instructs the TypeScript compiler how to compile .ts files.", "properties": { "allowArbitraryExtensions": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable importing files with any extension, provided a declaration file is present.", "type": ["boolean", "null"], "markdownDescription": "Enable importing files with any extension, provided a declaration file is present.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowArbitraryExtensions" }, "allowImportingTsExtensions": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", - "description": "Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set.", + "description": "Allow imports to include TypeScript file extensions. Requires either '--noEmit' or '--emitDeclarationOnly' to be set.", "type": ["boolean", "null"], - "markdownDescription": "Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowImportingTsExtensions" + "markdownDescription": "Allow imports to include TypeScript file extensions. 
Requires either '--noEmit' or '--emitDeclarationOnly' to be set.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowImportingTsExtensions" }, "charset": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "No longer supported. In early versions, manually set the text encoding for reading files.", "type": ["string", "null"], "markdownDescription": "No longer supported. In early versions, manually set the text encoding for reading files.\n\nSee more: https://www.typescriptlang.org/tsconfig#charset" }, "composite": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable constraints that allow a TypeScript project to be used with project references.", "type": ["boolean", "null"], "default": true, "markdownDescription": "Enable constraints that allow a TypeScript project to be used with project references.\n\nSee more: https://www.typescriptlang.org/tsconfig#composite" }, "customConditions": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Conditions to set in addition to the resolver-specific defaults when resolving imports.", "type": ["array", "null"], "uniqueItems": true, @@ -264,52 +243,50 @@ "markdownDescription": "Conditions to set in addition to the resolver-specific defaults when resolving imports.\n\nSee more: https://www.typescriptlang.org/tsconfig#customConditions" }, "declaration": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Generate .d.ts files from TypeScript and JavaScript files in your project.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Generate .d.ts files from TypeScript and JavaScript files in your project.\n\nSee more: https://www.typescriptlang.org/tsconfig#declaration" }, "declarationDir": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the output directory for generated declaration files.", "type": ["string", "null"], "markdownDescription": "Specify the output directory for generated declaration files.\n\nSee more: https://www.typescriptlang.org/tsconfig#declarationDir" }, "diagnostics": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Output compiler performance information after building.", "type": ["boolean", "null"], "markdownDescription": "Output compiler performance information after building.\n\nSee more: https://www.typescriptlang.org/tsconfig#diagnostics" }, "disableReferencedProjectLoad": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Reduce the number of projects loaded automatically by TypeScript.", "type": ["boolean", "null"], "markdownDescription": "Reduce the number of projects loaded automatically by TypeScript.\n\nSee more: https://www.typescriptlang.org/tsconfig#disableReferencedProjectLoad" }, "noPropertyAccessFromIndexSignature": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enforces using indexed accessors for keys declared using an indexed type", "type": ["boolean", "null"], "markdownDescription": "Enforces using indexed accessors for keys declared using an indexed type\n\nSee more: 
https://www.typescriptlang.org/tsconfig#noPropertyAccessFromIndexSignature" }, "emitBOM": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files.\n\nSee more: https://www.typescriptlang.org/tsconfig#emitBOM" }, "emitDeclarationOnly": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Only output d.ts files and not JavaScript files.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Only output d.ts files and not JavaScript files.\n\nSee more: https://www.typescriptlang.org/tsconfig#emitDeclarationOnly" }, + "erasableSyntaxOnly": { + "description": "Do not allow runtime constructs that are not part of ECMAScript.", + "type": ["boolean", "null"], + "default": false, + "markdownDescription": "Do not allow runtime constructs that are not part of ECMAScript.\n\nSee more: https://www.typescriptlang.org/tsconfig#erasableSyntaxOnly" + }, "exactOptionalPropertyTypes": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Differentiate between undefined and not present when type checking", "type": ["boolean", "null"], "default": false, @@ -320,21 +297,18 @@ "type": ["boolean", "null"] }, "tsBuildInfoFile": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the folder for .tsbuildinfo incremental compilation files.", "default": ".tsbuildinfo", "type": ["string", "null"], "markdownDescription": "Specify the folder for .tsbuildinfo incremental compilation files.\n\nSee more: https://www.typescriptlang.org/tsconfig#tsBuildInfoFile" }, "inlineSourceMap": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Include sourcemap files inside the emitted JavaScript.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Include sourcemap files inside the emitted JavaScript.\n\nSee more: https://www.typescriptlang.org/tsconfig#inlineSourceMap" }, "inlineSources": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Include source code in the sourcemaps inside the emitted JavaScript.", "type": ["boolean", "null"], "default": false, @@ -351,76 +325,70 @@ ] }, "reactNamespace": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit.", "type": ["string", "null"], "default": "React", "markdownDescription": "Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit.\n\nSee more: https://www.typescriptlang.org/tsconfig#reactNamespace" }, "jsxFactory": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'", "type": ["string", "null"], "default": "React.createElement", "markdownDescription": "Specify the JSX factory function used when targeting React JSX emit, e.g. 
'React.createElement' or 'h'\n\nSee more: https://www.typescriptlang.org/tsconfig#jsxFactory" }, "jsxFragmentFactory": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'.", "type": ["string", "null"], "default": "React.Fragment", "markdownDescription": "Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'.\n\nSee more: https://www.typescriptlang.org/tsconfig#jsxFragmentFactory" }, "jsxImportSource": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx`.", "type": ["string", "null"], "default": "react", "markdownDescription": "Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx`.\n\nSee more: https://www.typescriptlang.org/tsconfig#jsxImportSource" }, "listFiles": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Print all of the files read during the compilation.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Print all of the files read during the compilation.\n\nSee more: https://www.typescriptlang.org/tsconfig#listFiles" }, "mapRoot": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the location where debugger should locate map files instead of generated locations.", "type": ["string", "null"], "markdownDescription": "Specify the location where debugger should locate map files instead of generated locations.\n\nSee more: https://www.typescriptlang.org/tsconfig#mapRoot" }, "module": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify what module code is generated.", "type": ["string", "null"], "anyOf": [ { "enum": [ - "CommonJS", - "AMD", - "System", - "UMD", - "ES6", - "ES2015", - "ES2020", - "ESNext", - "None", - "ES2022", - "Node16", - "NodeNext", - "Preserve" + "commonjs", + "amd", + "system", + "umd", + "es6", + "es2015", + "es2020", + "esnext", + "none", + "es2022", + "node16", + "node18", + "node20", + "nodenext", + "preserve" ] }, { - "pattern": "^([Cc][Oo][Mm][Mm][Oo][Nn][Jj][Ss]|[AaUu][Mm][Dd]|[Ss][Yy][Ss][Tt][Ee][Mm]|[Ee][Ss]([356]|20(1[567]|2[02])|[Nn][Ee][Xx][Tt])|[Nn][Oo][dD][Ee]16|[Nn][Oo][Dd][Ee][Nn][Ee][Xx][Tt]|[Nn][Oo][Nn][Ee]|[Pp][Rr][Ee][Ss][Ee][Rr][Vv][Ee])$" + "pattern": "^([Cc][Oo][Mm][Mm][Oo][Nn][Jj][Ss]|[AaUu][Mm][Dd]|[Ss][Yy][Ss][Tt][Ee][Mm]|[Ee][Ss]([356]|20(1[567]|2[02])|[Nn][Ee][Xx][Tt])|[Nn][Oo][dD][Ee]1[68]|[Nn][Oo][Dd][Ee][Nn][Ee][Xx][Tt]|[Nn][Oo][Nn][Ee]|[Pp][Rr][Ee][Ss][Ee][Rr][Vv][Ee])$" } ], "markdownDescription": "Specify what module code is generated.\n\nSee more: https://www.typescriptlang.org/tsconfig#module" }, "moduleResolution": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify how TypeScript looks up a file from a given module specifier.", "type": ["string", "null"], "anyOf": [ @@ -449,7 +417,6 @@ "markdownDescription": "Specify how TypeScript looks up a file from a given module specifier.\n\nSee more: 
https://www.typescriptlang.org/tsconfig#moduleResolution" }, "newLine": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Set the newline character for emitting files.", "type": ["string", "null"], "default": "lf", @@ -464,208 +431,191 @@ "markdownDescription": "Set the newline character for emitting files.\n\nSee more: https://www.typescriptlang.org/tsconfig#newLine" }, "noEmit": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable emitting file from a compilation.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable emitting file from a compilation.\n\nSee more: https://www.typescriptlang.org/tsconfig#noEmit" }, "noEmitHelpers": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable generating custom helper functions like `__extends` in compiled output.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable generating custom helper functions like `__extends` in compiled output.\n\nSee more: https://www.typescriptlang.org/tsconfig#noEmitHelpers" }, "noEmitOnError": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable emitting files if any type checking errors are reported.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable emitting files if any type checking errors are reported.\n\nSee more: https://www.typescriptlang.org/tsconfig#noEmitOnError" }, "noImplicitAny": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable error reporting for expressions and declarations with an implied `any` type..", "type": ["boolean", "null"], "markdownDescription": "Enable error reporting for expressions and declarations with an implied `any` type..\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitAny" }, "noImplicitThis": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable error reporting when `this` is given the type `any`.", "type": ["boolean", "null"], "markdownDescription": "Enable error reporting when `this` is given the type `any`.\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitThis" }, "noUnusedLocals": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable error reporting when a local variable isn't read.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Enable error reporting when a local variable isn't read.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUnusedLocals" }, "noUnusedParameters": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Raise an error when a function parameter isn't read", "type": ["boolean", "null"], "default": false, "markdownDescription": "Raise an error when a function parameter isn't read\n\nSee more: https://www.typescriptlang.org/tsconfig#noUnusedParameters" }, "noLib": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable including any library files, including the default lib.d.ts.", "type": ["boolean", "null"], "default": false, 
"markdownDescription": "Disable including any library files, including the default lib.d.ts.\n\nSee more: https://www.typescriptlang.org/tsconfig#noLib" }, "noResolve": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disallow `import`s, `require`s or ``s from expanding the number of files TypeScript should add to a project.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disallow `import`s, `require`s or ``s from expanding the number of files TypeScript should add to a project.\n\nSee more: https://www.typescriptlang.org/tsconfig#noResolve" }, "noStrictGenericChecks": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable strict checking of generic signatures in function types.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable strict checking of generic signatures in function types.\n\nSee more: https://www.typescriptlang.org/tsconfig#noStrictGenericChecks" }, + "out": { + "description": "DEPRECATED. Specify an output for the build. It is recommended to use `outFile` instead.", + "type": ["string", "null"], + "markdownDescription": "Specify an output for the build. It is recommended to use `outFile` instead.\n\nSee more: https://www.typescriptlang.org/tsconfig/#out" + }, "skipDefaultLibCheck": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Skip type checking .d.ts files that are included with TypeScript.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Skip type checking .d.ts files that are included with TypeScript.\n\nSee more: https://www.typescriptlang.org/tsconfig#skipDefaultLibCheck" }, "skipLibCheck": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Skip type checking all .d.ts files.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Skip type checking all .d.ts files.\n\nSee more: https://www.typescriptlang.org/tsconfig#skipLibCheck" }, "outFile": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output.", "type": ["string", "null"], "markdownDescription": "Specify a file that bundles all outputs into one JavaScript file. 
If `declaration` is true, also designates a file that bundles all .d.ts output.\n\nSee more: https://www.typescriptlang.org/tsconfig#outFile" }, "outDir": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify an output folder for all emitted files.", "type": ["string", "null"], "markdownDescription": "Specify an output folder for all emitted files.\n\nSee more: https://www.typescriptlang.org/tsconfig#outDir" }, "preserveConstEnums": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable erasing `const enum` declarations in generated code.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable erasing `const enum` declarations in generated code.\n\nSee more: https://www.typescriptlang.org/tsconfig#preserveConstEnums" }, "preserveSymlinks": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable resolving symlinks to their realpath. This correlates to the same flag in node.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable resolving symlinks to their realpath. This correlates to the same flag in node.\n\nSee more: https://www.typescriptlang.org/tsconfig#preserveSymlinks" }, "preserveValueImports": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Preserve unused imported values in the JavaScript output that would otherwise be removed", "type": ["boolean", "null"], "default": false, "markdownDescription": "Preserve unused imported values in the JavaScript output that would otherwise be removed\n\nSee more: https://www.typescriptlang.org/tsconfig#preserveValueImports" }, "preserveWatchOutput": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable wiping the console in watch mode", "type": ["boolean", "null"], "markdownDescription": "Disable wiping the console in watch mode\n\nSee more: https://www.typescriptlang.org/tsconfig#preserveWatchOutput" }, "pretty": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable color and formatting in output to make compiler errors easier to read", "type": ["boolean", "null"], "default": true, "markdownDescription": "Enable color and formatting in output to make compiler errors easier to read\n\nSee more: https://www.typescriptlang.org/tsconfig#pretty" }, "removeComments": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable emitting comments.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable emitting comments.\n\nSee more: https://www.typescriptlang.org/tsconfig#removeComments" }, + "rewriteRelativeImportExtensions": { + "description": "Rewrite '.ts', '.tsx', '.mts', and '.cts' file extensions in relative import paths to their JavaScript equivalent in output files.", + "type": ["boolean", "null"], + "default": false, + "markdownDescription": "Rewrite '.ts', '.tsx', '.mts', and '.cts' file extensions in relative import paths to their JavaScript equivalent in output files.\n\nSee more: https://www.typescriptlang.org/tsconfig#rewriteRelativeImportExtensions" + }, "rootDir": { - "$comment": "The value of 'null' is UNDOCUMENTED 
(https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the root folder within your source files.", "type": ["string", "null"], "markdownDescription": "Specify the root folder within your source files.\n\nSee more: https://www.typescriptlang.org/tsconfig#rootDir" }, "isolatedModules": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Ensure that each file can be safely transpiled without relying on other imports.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Ensure that each file can be safely transpiled without relying on other imports.\n\nSee more: https://www.typescriptlang.org/tsconfig#isolatedModules" }, "sourceMap": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Create source map files for emitted JavaScript files.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Create source map files for emitted JavaScript files.\n\nSee more: https://www.typescriptlang.org/tsconfig#sourceMap" }, "sourceRoot": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the root path for debuggers to find the reference source code.", "type": ["string", "null"], "markdownDescription": "Specify the root path for debuggers to find the reference source code.\n\nSee more: https://www.typescriptlang.org/tsconfig#sourceRoot" }, "suppressExcessPropertyErrors": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable reporting of excess property errors during the creation of object literals.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable reporting of excess property errors during the creation of object literals.\n\nSee more: https://www.typescriptlang.org/tsconfig#suppressExcessPropertyErrors" }, "suppressImplicitAnyIndexErrors": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Suppress `noImplicitAny` errors when indexing objects that lack index signatures.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Suppress `noImplicitAny` errors when indexing objects that lack index signatures.\n\nSee more: https://www.typescriptlang.org/tsconfig#suppressImplicitAnyIndexErrors" }, "stripInternal": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable emitting declarations that have `@internal` in their JSDoc comments.", "type": ["boolean", "null"], "markdownDescription": "Disable emitting declarations that have `@internal` in their JSDoc comments.\n\nSee more: https://www.typescriptlang.org/tsconfig#stripInternal" }, "target": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Set the JavaScript language version for emitted JavaScript and include compatible library declarations.", "type": ["string", "null"], - "default": "ES3", + "default": "es3", "anyOf": [ { "enum": [ - "ES3", - "ES5", - "ES6", - "ES2015", - "ES2016", - "ES2017", - "ES2018", - "ES2019", - "ES2020", - "ES2021", - "ES2022", - "ES2023", - "ES2024", - "ESNext" + "es3", + "es5", + "es6", + "es2015", + "es2016", + "es2017", + "es2018", + "es2019", + "es2020", + "es2021", + "es2022", + "es2023", + 
"es2024", + "esnext" ] }, { @@ -675,7 +625,6 @@ "markdownDescription": "Set the JavaScript language version for emitted JavaScript and include compatible library declarations.\n\nSee more: https://www.typescriptlang.org/tsconfig#target" }, "useUnknownInCatchVariables": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Default catch clause variables as `unknown` instead of `any`.", "type": ["boolean", "null"], "default": false, @@ -720,86 +669,72 @@ "default": "useFsEvents" }, "experimentalDecorators": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable experimental support for TC39 stage 2 draft decorators.", "type": ["boolean", "null"], "markdownDescription": "Enable experimental support for TC39 stage 2 draft decorators.\n\nSee more: https://www.typescriptlang.org/tsconfig#experimentalDecorators" }, "emitDecoratorMetadata": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Emit design-type metadata for decorated declarations in source files.", "type": ["boolean", "null"], "markdownDescription": "Emit design-type metadata for decorated declarations in source files.\n\nSee more: https://www.typescriptlang.org/tsconfig#emitDecoratorMetadata" }, "allowUnusedLabels": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable error reporting for unused labels.", "type": ["boolean", "null"], "markdownDescription": "Disable error reporting for unused labels.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowUnusedLabels" }, "noImplicitReturns": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable error reporting for codepaths that do not explicitly return in a function.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Enable error reporting for codepaths that do not explicitly return in a function.\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitReturns" }, "noUncheckedIndexedAccess": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Add `undefined` to a type when accessed using an index.", "type": ["boolean", "null"], "markdownDescription": "Add `undefined` to a type when accessed using an index.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUncheckedIndexedAccess" }, "noFallthroughCasesInSwitch": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable error reporting for fallthrough cases in switch statements.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Enable error reporting for fallthrough cases in switch statements.\n\nSee more: https://www.typescriptlang.org/tsconfig#noFallthroughCasesInSwitch" }, "noImplicitOverride": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Ensure overriding members in derived classes are marked with an override modifier.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Ensure overriding members in derived classes are marked with an override modifier.\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitOverride" }, "allowUnreachableCode": { - 
"$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable error reporting for unreachable code.", "type": ["boolean", "null"], "markdownDescription": "Disable error reporting for unreachable code.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowUnreachableCode" }, "forceConsistentCasingInFileNames": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Ensure that casing is correct in imports.", "type": ["boolean", "null"], "default": true, "markdownDescription": "Ensure that casing is correct in imports.\n\nSee more: https://www.typescriptlang.org/tsconfig#forceConsistentCasingInFileNames" }, "generateCpuProfile": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Emit a v8 CPU profile of the compiler run for debugging.", "type": ["string", "null"], "default": "profile.cpuprofile", "markdownDescription": "Emit a v8 CPU profile of the compiler run for debugging.\n\nSee more: https://www.typescriptlang.org/tsconfig#generateCpuProfile" }, "baseUrl": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the base directory to resolve non-relative module names.", "type": ["string", "null"], "markdownDescription": "Specify the base directory to resolve non-relative module names.\n\nSee more: https://www.typescriptlang.org/tsconfig#baseUrl" }, "paths": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify a set of entries that re-map imports to additional lookup locations.", "type": ["object", "null"], "additionalProperties": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["array", "null"], "uniqueItems": true, "items": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["string", "null"], "description": "Path mapping to be computed relative to baseUrl option." 
} @@ -807,11 +742,9 @@ "markdownDescription": "Specify a set of entries that re-map imports to additional lookup locations.\n\nSee more: https://www.typescriptlang.org/tsconfig#paths" }, "plugins": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify a list of language service plugins to include.", "type": ["array", "null"], "items": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["object", "null"], "properties": { "name": { @@ -823,7 +756,6 @@ "markdownDescription": "Specify a list of language service plugins to include.\n\nSee more: https://www.typescriptlang.org/tsconfig#plugins" }, "rootDirs": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Allow multiple folders to be treated as one when resolving modules.", "type": ["array", "null"], "uniqueItems": true, @@ -833,7 +765,6 @@ "markdownDescription": "Allow multiple folders to be treated as one when resolving modules.\n\nSee more: https://www.typescriptlang.org/tsconfig#rootDirs" }, "typeRoots": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify multiple folders that act like `./node_modules/@types`.", "type": ["array", "null"], "uniqueItems": true, @@ -843,7 +774,6 @@ "markdownDescription": "Specify multiple folders that act like `./node_modules/@types`.\n\nSee more: https://www.typescriptlang.org/tsconfig#typeRoots" }, "types": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify type package names to be included without being referenced in a source file.", "type": ["array", "null"], "uniqueItems": true, @@ -853,59 +783,50 @@ "markdownDescription": "Specify type package names to be included without being referenced in a source file.\n\nSee more: https://www.typescriptlang.org/tsconfig#types" }, "traceResolution": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable tracing of the name resolution process. Requires TypeScript version 2.0 or later.", "type": ["boolean", "null"], "default": false }, "allowJs": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Allow JavaScript files to be a part of your program. 
Use the `checkJS` option to get errors from these files.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowJs" }, "noErrorTruncation": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable truncating types in error messages.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable truncating types in error messages.\n\nSee more: https://www.typescriptlang.org/tsconfig#noErrorTruncation" }, "allowSyntheticDefaultImports": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Allow 'import x from y' when a module doesn't have a default export.", "type": ["boolean", "null"], "markdownDescription": "Allow 'import x from y' when a module doesn't have a default export.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowSyntheticDefaultImports" }, "noImplicitUseStrict": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable adding 'use strict' directives in emitted JavaScript files.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable adding 'use strict' directives in emitted JavaScript files.\n\nSee more: https://www.typescriptlang.org/tsconfig#noImplicitUseStrict" }, "listEmittedFiles": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Print the names of emitted files after a compilation.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Print the names of emitted files after a compilation.\n\nSee more: https://www.typescriptlang.org/tsconfig#listEmittedFiles" }, "disableSizeLimit": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Remove the 20mb cap on total source code size for JavaScript files in the TypeScript language server.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Remove the 20mb cap on total source code size for JavaScript files in the TypeScript language server.\n\nSee more: https://www.typescriptlang.org/tsconfig#disableSizeLimit" }, "lib": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify a set of bundled library declaration files that describe the target runtime environment.", "type": ["array", "null"], "uniqueItems": true, "items": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["string", "null"], "anyOf": [ { @@ -954,6 +875,7 @@ "ESNext.BigInt", "ESNext.Collection", "ESNext.Intl", + "ESNext.Iterator", "ESNext.Object", "ESNext.Promise", "ESNext.Regexp", @@ -1001,7 +923,9 @@ "ES2017.Date", "ES2023.Collection", "ESNext.Decorators", - "ESNext.Disposable" + "ESNext.Disposable", + "ESNext.Error", + "ESNext.Sharedmemory" ] }, { @@ -1056,26 +980,29 @@ }, "markdownDescription": "Specify a set of bundled library declaration files that describe the target runtime environment.\n\nSee more: https://www.typescriptlang.org/tsconfig#lib" }, + "libReplacement": { + "description": "Enable lib replacement.", + "type": ["boolean", "null"], + "default": true, + "markdownDescription": "Enable lib replacement.\n\nSee more: https://www.typescriptlang.org/tsconfig#libReplacement" + }, "moduleDetection": { "description": "Specify how TypeScript determine a file as 
module.", "enum": ["auto", "legacy", "force"] }, "strictNullChecks": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "When type checking, take into account `null` and `undefined`.", "type": ["boolean", "null"], "default": false, "markdownDescription": "When type checking, take into account `null` and `undefined`.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictNullChecks" }, "maxNodeModuleJsDepth": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`.", "type": ["number", "null"], "default": 0, "markdownDescription": "Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`.\n\nSee more: https://www.typescriptlang.org/tsconfig#maxNodeModuleJsDepth" }, "importHelpers": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Allow importing helper functions from tslib once per project, instead of including them per-file.", "type": ["boolean", "null"], "default": false, @@ -1087,104 +1014,89 @@ "enum": ["remove", "preserve", "error"] }, "alwaysStrict": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Ensure 'use strict' is always emitted.", "type": ["boolean", "null"], "markdownDescription": "Ensure 'use strict' is always emitted.\n\nSee more: https://www.typescriptlang.org/tsconfig#alwaysStrict" }, "strict": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable all strict type checking options.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Enable all strict type checking options.\n\nSee more: https://www.typescriptlang.org/tsconfig#strict" }, "strictBindCallApply": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Check that the arguments for `bind`, `call`, and `apply` methods match the original function.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Check that the arguments for `bind`, `call`, and `apply` methods match the original function.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictBindCallApply" }, "downlevelIteration": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Emit more compliant, but verbose and less performant JavaScript for iteration.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Emit more compliant, but verbose and less performant JavaScript for iteration.\n\nSee more: https://www.typescriptlang.org/tsconfig#downlevelIteration" }, "checkJs": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable error reporting in type-checked JavaScript files.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Enable error reporting in type-checked JavaScript files.\n\nSee more: https://www.typescriptlang.org/tsconfig#checkJs" }, "strictFunctionTypes": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "When assigning 
functions, check to ensure parameters and the return values are subtype-compatible.", "type": ["boolean", "null"], "default": false, "markdownDescription": "When assigning functions, check to ensure parameters and the return values are subtype-compatible.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictFunctionTypes" }, "strictPropertyInitialization": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Check for class properties that are declared but not set in the constructor.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Check for class properties that are declared but not set in the constructor.\n\nSee more: https://www.typescriptlang.org/tsconfig#strictPropertyInitialization" }, "esModuleInterop": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility.\n\nSee more: https://www.typescriptlang.org/tsconfig#esModuleInterop" }, "allowUmdGlobalAccess": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Allow accessing UMD globals from modules.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Allow accessing UMD globals from modules.\n\nSee more: https://www.typescriptlang.org/tsconfig#allowUmdGlobalAccess" }, "keyofStringsOnly": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Make keyof only return strings instead of string, numbers or symbols. Legacy option.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Make keyof only return strings instead of string, numbers or symbols. 
Legacy option.\n\nSee more: https://www.typescriptlang.org/tsconfig#keyofStringsOnly" }, "useDefineForClassFields": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Emit ECMAScript-standard-compliant class fields.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Emit ECMAScript-standard-compliant class fields.\n\nSee more: https://www.typescriptlang.org/tsconfig#useDefineForClassFields" }, "declarationMap": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Create sourcemaps for d.ts files.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Create sourcemaps for d.ts files.\n\nSee more: https://www.typescriptlang.org/tsconfig#declarationMap" }, "resolveJsonModule": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable importing .json files", "type": ["boolean", "null"], "default": false, "markdownDescription": "Enable importing .json files\n\nSee more: https://www.typescriptlang.org/tsconfig#resolveJsonModule" }, "resolvePackageJsonExports": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Use the package.json 'exports' field when resolving package imports.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Use the package.json 'exports' field when resolving package imports.\n\nSee more: https://www.typescriptlang.org/tsconfig#resolvePackageJsonExports" }, "resolvePackageJsonImports": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Use the package.json 'imports' field when resolving imports.", "type": ["boolean", "null"], "default": false, @@ -1195,7 +1107,6 @@ "type": ["boolean", "null"] }, "extendedDiagnostics": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Output more detailed compiler performance information after building.", "type": ["boolean", "null"], "default": false, @@ -1206,46 +1117,39 @@ "type": ["boolean", "null"] }, "disableSourceOfProjectReferenceRedirect": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable preferring source files instead of declaration files when referencing composite projects", "type": ["boolean", "null"], "markdownDescription": "Disable preferring source files instead of declaration files when referencing composite projects\n\nSee more: https://www.typescriptlang.org/tsconfig#disableSourceOfProjectReferenceRedirect" }, "disableSolutionSearching": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Opt a project out of multi-project reference checking when editing.", "type": ["boolean", "null"], "markdownDescription": "Opt a project out of multi-project reference checking when editing.\n\nSee more: https://www.typescriptlang.org/tsconfig#disableSolutionSearching" }, "verbatimModuleSyntax": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting.", "type": 
["boolean", "null"], "markdownDescription": "Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting.\n\nSee more: https://www.typescriptlang.org/tsconfig#verbatimModuleSyntax" }, "noCheck": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Disable full type checking (only critical parse and emit errors will be reported)", "type": ["boolean", "null"], "default": false, "markdownDescription": "Disable full type checking (only critical parse and emit errors will be reported)\n\nSee more: https://www.typescriptlang.org/tsconfig#noCheck" }, "isolatedDeclarations": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Require sufficient annotation on exports so other tools can trivially generate declaration files.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Require sufficient annotation on exports so other tools can trivially generate declaration files.\n\nSee more: https://www.typescriptlang.org/tsconfig#isolatedDeclarations" }, "noUncheckedSideEffectImports": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Check side effect imports.", "type": ["boolean", "null"], "default": false, "markdownDescription": "Check side effect imports.\n\nSee more: https://www.typescriptlang.org/tsconfig#noUncheckedSideEffectImports" }, "strictBuiltinIteratorReturn": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Built-in iterators are instantiated with a 'TReturn' type of 'undefined' instead of 'any'.", "type": ["boolean", "null"], "default": false, @@ -1258,18 +1162,15 @@ "typeAcquisitionDefinition": { "properties": { "typeAcquisition": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["object", "null"], "description": "Auto type (.d.ts) acquisition options for this project. Requires TypeScript version 2.1 or later.", "properties": { "enable": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Enable auto type acquisition", "type": ["boolean", "null"], "default": false }, "include": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specifies a list of type declarations to be included in auto type acquisition. Ex. [\"jquery\", \"lodash\"]", "type": ["array", "null"], "uniqueItems": true, @@ -1278,7 +1179,6 @@ } }, "exclude": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "description": "Specifies a list of type declarations to be excluded from auto type acquisition. Ex. [\"jquery\", \"lodash\"]", "type": ["array", "null"], "uniqueItems": true, @@ -1293,17 +1193,14 @@ "referencesDefinition": { "properties": { "references": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["array", "null"], "uniqueItems": true, "description": "Referenced projects. 
Requires TypeScript version 3.0 or later.", "items": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["object", "null"], "description": "Project reference.", "properties": { "path": { - "$comment": "The value of 'null' is UNDOCUMENTED (https://github.com/microsoft/TypeScript/pull/18058).", "type": ["string", "null"], "description": "Path to referenced tsconfig or to folder containing tsconfig." } From e602cfadd3a27ccd6ed7c3ea865661fd9a978195 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Mon, 22 Sep 2025 15:35:44 -0400 Subject: [PATCH 55/58] Restore user-defined ordering of profiles (#38665) This PR fixes a regression where settings profiles were no longer ordered in the same order that the user defined in their settings. Release Notes: - N/A --- crates/settings/src/settings_content.rs | 4 +- .../src/settings_profile_selector.rs | 38 +++++++++++++++++++ 2 files changed, 40 insertions(+), 2 deletions(-) diff --git a/crates/settings/src/settings_content.rs b/crates/settings/src/settings_content.rs index 38bff4d6a1428f017bcd65be3d27e945aebccabd..27c0976fb64dd4ecb473df937cf27f7a21ff3adc 100644 --- a/crates/settings/src/settings_content.rs +++ b/crates/settings/src/settings_content.rs @@ -16,7 +16,7 @@ pub use terminal::*; pub use theme::*; pub use workspace::*; -use collections::HashMap; +use collections::{HashMap, IndexMap}; use gpui::{App, SharedString}; use release_channel::ReleaseChannel; use schemars::JsonSchema; @@ -182,7 +182,7 @@ pub struct UserSettingsContent { pub linux: Option>, #[serde(default)] - pub profiles: HashMap, + pub profiles: IndexMap, } pub struct ExtensionsSettingsContent { diff --git a/crates/settings_profile_selector/src/settings_profile_selector.rs b/crates/settings_profile_selector/src/settings_profile_selector.rs index 0bba83beafb2ddc1c20767aefd15cc2095ca71ba..b2f01cf5c9a2b759eee3988762e43f07efc6952d 100644 --- a/crates/settings_profile_selector/src/settings_profile_selector.rs +++ b/crates/settings_profile_selector/src/settings_profile_selector.rs @@ -578,4 +578,42 @@ mod tests { assert_eq!(ThemeSettings::get_global(cx).buffer_font_size(cx).0, 10.0); }); } + + #[gpui::test] + async fn test_settings_profile_selector_is_in_user_configuration_order( + cx: &mut TestAppContext, + ) { + // Must be unique names (HashMap) + let profiles_json = json!({ + "z": {}, + "e": {}, + "d": {}, + " ": {}, + "r": {}, + "u": {}, + "l": {}, + "3": {}, + "s": {}, + "!": {}, + }); + let (workspace, cx) = init_test(profiles_json.clone(), cx).await; + + cx.dispatch_action(settings_profile_selector::Toggle); + let picker = active_settings_profile_picker(&workspace, cx); + + picker.read_with(cx, |picker, _| { + assert_eq!(picker.delegate.matches.len(), 11); + assert_eq!(picker.delegate.matches[0].string, display_name(&None)); + assert_eq!(picker.delegate.matches[1].string, "z"); + assert_eq!(picker.delegate.matches[2].string, "e"); + assert_eq!(picker.delegate.matches[3].string, "d"); + assert_eq!(picker.delegate.matches[4].string, " "); + assert_eq!(picker.delegate.matches[5].string, "r"); + assert_eq!(picker.delegate.matches[6].string, "u"); + assert_eq!(picker.delegate.matches[7].string, "l"); + assert_eq!(picker.delegate.matches[8].string, "3"); + assert_eq!(picker.delegate.matches[9].string, "s"); + assert_eq!(picker.delegate.matches[10].string, "!"); + }); + } } From 80dcabe95c8b708ed0057299df5ad135e54925fc Mon Sep 17 00:00:00 2001 From: Nia Date: Mon, 22 Sep 2025 22:37:51 +0200 Subject: [PATCH 56/58] perf: 
Better docs, internal refactors (#38664) Release Notes: - N/A --- .cargo/config.toml | 3 +- tooling/perf/Cargo.toml | 3 +- tooling/perf/src/lib.rs | 84 +++++--- tooling/perf/src/main.rs | 418 ++++++++++++++++++++++----------------- 4 files changed, 300 insertions(+), 208 deletions(-) diff --git a/.cargo/config.toml b/.cargo/config.toml index 9da793fc48b62f7f03cd1d36a505fa1e1ef2a45a..f4e1d6f79c810205d71531fef5e56401f7e0d095 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -4,7 +4,7 @@ rustflags = ["-C", "symbol-mangling-version=v0", "--cfg", "tokio_unstable"] [alias] xtask = "run --package xtask --" -perf-test = ["test", "--profile", "release-fast", "--lib", "--bins", "--tests", "--config", "target.'cfg(true)'.runner='target/release/perf'", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]"] +perf-test = ["test", "--profile", "release-fast", "--lib", "--bins", "--tests", "--config", "target.'cfg(true)'.runner='cargo run -p perf --release'", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]"] perf-compare = ["run", "--release", "-p", "perf", "--", "compare"] [target.x86_64-unknown-linux-gnu] @@ -25,4 +25,3 @@ rustflags = [ [env] MACOSX_DEPLOYMENT_TARGET = "10.15.7" -CARGO_WORKSPACE_DIR = { value = "", relative = true } diff --git a/tooling/perf/Cargo.toml b/tooling/perf/Cargo.toml index 4766b58d8a760aa995dba7092d33c436559019c2..2b5ae6571ac45a3540860e66d0956fb9e0b05a72 100644 --- a/tooling/perf/Cargo.toml +++ b/tooling/perf/Cargo.toml @@ -18,7 +18,8 @@ pedantic = "warn" style = "warn" missing_docs_in_private_items = "warn" as_underscore = "deny" -allow_attributes_without_reason = "deny" +allow_attributes = "deny" +allow_attributes_without_reason = "deny" # This covers `expect` also, since we deny `allow` let_underscore_must_use = "forbid" undocumented_unsafe_blocks = "forbid" missing_safety_doc = "forbid" diff --git a/tooling/perf/src/lib.rs b/tooling/perf/src/lib.rs index 30909f646b061895e10f5c860149e2370892ccd2..18fc2984a6951189be1afc2a1bb76950a4a838a4 100644 --- a/tooling/perf/src/lib.rs +++ b/tooling/perf/src/lib.rs @@ -3,7 +3,7 @@ use collections::HashMap; use serde::{Deserialize, Serialize}; -use std::time::Duration; +use std::{num::NonZero, time::Duration}; pub mod consts { //! Preset idenitifiers and constants so that the profiler and proc macro agree @@ -80,6 +80,8 @@ pub enum FailKind { Profile, /// Failed due to an incompatible version for the test. VersionMismatch, + /// Could not parse metadata for a test. + BadMetadata, /// Skipped due to filters applied on the perf run. Skipped, } @@ -87,9 +89,10 @@ pub enum FailKind { impl std::fmt::Display for FailKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - FailKind::Triage => f.write_str("failed in triage"), - FailKind::Profile => f.write_str("failed while profiling"), + FailKind::Triage => f.write_str("errored in triage"), + FailKind::Profile => f.write_str("errored while profiling"), FailKind::VersionMismatch => f.write_str("test version mismatch"), + FailKind::BadMetadata => f.write_str("bad test metadata"), FailKind::Skipped => f.write_str("skipped"), } } @@ -108,8 +111,9 @@ pub struct TestMdata { /// INVARIANT: If `version` <= `MDATA_VER`, this tool *must* be able to /// correctly parse the output of this test. pub version: u32, - /// How many iterations to pass this test, if this is preset. 
- pub iterations: Option, + /// How many iterations to pass this test if this is preset, or how many + /// iterations a test ended up running afterwards if determined at runtime. + pub iterations: Option>, /// The importance of this particular test. See the docs on `Importance` for /// details. pub importance: Importance, @@ -134,16 +138,20 @@ impl Timings { reason = "We only care about a couple sig figs anyways" )] #[must_use] - pub fn iters_per_sec(&self, total_iters: usize) -> f64 { - (1000. / self.mean.as_millis() as f64) * total_iters as f64 + pub fn iters_per_sec(&self, total_iters: NonZero) -> f64 { + (1000. / self.mean.as_millis() as f64) * total_iters.get() as f64 } } +/// Aggregate results, meant to be used for a given importance category. Each +/// test name corresponds to its benchmark results, iteration count, and weight. +type CategoryInfo = HashMap, u8)>; + /// Aggregate output of all tests run by this handler. #[derive(Clone, Debug, Default, Serialize, Deserialize)] pub struct Output { - /// A list of test outputs. Format is `(test_name, iter_count, timings)`. - /// The latter being set indicates the test succeeded. + /// A list of test outputs. Format is `(test_name, mdata, timings)`. + /// The latter being `Ok(_)` indicates the test succeeded. /// /// INVARIANT: If the test succeeded, the second field is `Some(mdata)` and /// `mdata.iterations` is `Some(_)`. @@ -162,7 +170,7 @@ impl Output { &mut self, name: impl AsRef, mut mdata: TestMdata, - iters: usize, + iters: NonZero, timings: Timings, ) { mdata.iterations = Some(iters); @@ -179,7 +187,7 @@ impl Output { &mut self, name: impl AsRef, mut mdata: Option, - attempted_iters: Option, + attempted_iters: Option>, kind: FailKind, ) { if let Some(ref mut mdata) = mdata { @@ -195,7 +203,7 @@ impl Output { self.tests.is_empty() } - /// Sorts the runs in the output in the order that we want it printed. + /// Sorts the runs in the output in the order that we want them printed. pub fn sort(&mut self) { self.tests.sort_unstable_by(|a, b| match (a, b) { // Tests where we got no metadata go at the end. @@ -218,16 +226,20 @@ impl Output { /// Merges the output of two runs, appending a prefix to the results of the new run. /// To be used in conjunction with `Output::blank()`, or else only some tests will have /// a prefix set. - pub fn merge(&mut self, other: Self, pref_other: impl AsRef) { + pub fn merge<'a>(&mut self, other: Self, pref_other: impl Into>) { + let pref = if let Some(pref) = pref_other.into() { + "crates/".to_string() + pref + "::" + } else { + String::new() + }; self.tests = std::mem::take(&mut self.tests) .into_iter() - .chain(other.tests.into_iter().map(|(name, md, tm)| { - let mut new_name = "crates/".to_string(); - new_name.push_str(pref_other.as_ref()); - new_name.push_str("::"); - new_name.push_str(&name); - (new_name, md, tm) - })) + .chain( + other + .tests + .into_iter() + .map(|(name, md, tm)| (pref.clone() + &name, md, tm)), + ) .collect(); } @@ -273,8 +285,8 @@ impl Output { r_total_denominator += u32::from(weight); } let mean = r_total_numerator / f64::from(r_total_denominator); - // TODO: also aggregate standard deviation? that's harder to keep - // meaningful, though, since we dk which tests are correlated + // TODO: also aggregate standard deviation? That's harder to keep + // meaningful, though, since we dk which tests are correlated. 
Some((cat, PerfDelta { max, mean, min })) }) .collect(); @@ -282,9 +294,9 @@ impl Output { PerfReport { deltas } } - /// Collapses the `PerfReport` into a `HashMap` of `Importance` <-> tests - /// each represented as a map of `name, (Timings, iterations, weight)`. - fn collapse(self) -> HashMap> { + /// Collapses the `PerfReport` into a `HashMap` over `Importance`, with + /// each importance category having its tests contained. + fn collapse(self) -> HashMap { let mut categories = HashMap::>::default(); for entry in self.tests { if let Some(mdata) = entry.1 @@ -402,10 +414,28 @@ impl std::fmt::Display for PerfReport { // a little jankily like this. write!(f, "|:---|---:|---:|---:|")?; for (cat, delta) in sorted.into_iter().rev() { + const SIGN_POS: &str = "↑"; + const SIGN_NEG: &str = "↓"; + const SIGN_NEUTRAL: &str = "±"; + + let prettify = |time: f64| { + let sign = if time > 0.05 { + SIGN_POS + } else if time < 0.05 && time > -0.05 { + SIGN_NEUTRAL + } else { + SIGN_NEG + }; + format!("{} {:.1}%", sign, time.abs() * 100.) + }; + + // Pretty-print these instead of just using the float display impl. write!( f, - "\n| {cat} | {:.3} | {:.3} | {:.3} |", - delta.max, delta.mean, delta.min + "\n| {cat} | {} | {} | {} |", + prettify(delta.max), + prettify(delta.mean), + prettify(delta.min) )?; } Ok(()) diff --git a/tooling/perf/src/main.rs b/tooling/perf/src/main.rs index 2610adc66f88dfa675df975219f5b2937011e81b..b960d2dce60023b677c7f6cde12e36a0d66d88ae 100644 --- a/tooling/perf/src/main.rs +++ b/tooling/perf/src/main.rs @@ -3,8 +3,7 @@ //! for usage details on the actual attribute. //! //! # Setup -//! Make sure `hyperfine` is installed and in the shell path, then run -//! `cargo build -p perf --release` to build the profiler. +//! Make sure `hyperfine` is installed and in the shell path. //! //! # Usage //! Calling this tool rebuilds the targeted crate(s) with some cfg flags set for the @@ -44,21 +43,25 @@ //! This should probably not be called manually unless you're working on the profiler //! itself; use the `cargo perf-test` alias (after building this crate) instead. -#[allow(clippy::wildcard_imports, reason = "Our crate")] -use perf::*; +use perf::{FailKind, Importance, Output, TestMdata, Timings, consts}; use std::{ fs::OpenOptions, io::Write, + num::NonZero, path::{Path, PathBuf}, process::{Command, Stdio}, + sync::atomic::{AtomicBool, Ordering}, time::{Duration, Instant}, }; /// How many iterations to attempt the first time a test is run. -const DEFAULT_ITER_COUNT: usize = 3; +const DEFAULT_ITER_COUNT: NonZero = NonZero::new(3).unwrap(); /// Multiplier for the iteration count when a test doesn't pass the noise cutoff. -const ITER_COUNT_MUL: usize = 4; +const ITER_COUNT_MUL: NonZero = NonZero::new(4).unwrap(); + +/// Do we keep stderr empty while running the tests? +static QUIET: AtomicBool = AtomicBool::new(false); /// Report a failure into the output and skip an iteration. macro_rules! fail { @@ -84,14 +87,59 @@ enum OutputKind<'a> { Json(&'a Path), } +impl OutputKind<'_> { + /// Logs the output of a run as per the `OutputKind`. + fn log(&self, output: &Output, t_bin: &str) { + match self { + OutputKind::Markdown => print!("{output}"), + OutputKind::Json(ident) => { + let wspace_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + let runs_dir = PathBuf::from(&wspace_dir).join(consts::RUNS_DIR); + std::fs::create_dir_all(&runs_dir).unwrap(); + assert!( + !ident.to_string_lossy().is_empty(), + "FATAL: Empty filename specified!" 
+ ); + // Get the test binary's crate's name; a path like + // target/release-fast/deps/gpui-061ff76c9b7af5d7 + // would be reduced to just "gpui". + let test_bin_stripped = Path::new(t_bin) + .file_name() + .unwrap() + .to_str() + .unwrap() + .rsplit_once('-') + .unwrap() + .0; + let mut file_path = runs_dir.join(ident); + file_path + .as_mut_os_string() + .push(format!(".{test_bin_stripped}.json")); + let mut out_file = OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(&file_path) + .unwrap(); + out_file + .write_all(&serde_json::to_vec(&output).unwrap()) + .unwrap(); + if !QUIET.load(Ordering::Relaxed) { + eprintln!("JSON output written to {}", file_path.display()); + } + } + } + } +} + /// Runs a given metadata-returning function from a test handler, parsing its /// output into a `TestMdata`. -fn parse_mdata(test_bin: &str, mdata_fn: &str) -> Result { - let mut cmd = Command::new(test_bin); +fn parse_mdata(t_bin: &str, mdata_fn: &str) -> Result { + let mut cmd = Command::new(t_bin); cmd.args([mdata_fn, "--exact", "--nocapture"]); let out = cmd .output() - .expect("FATAL: Could not run test binary {test_bin}"); + .expect("FATAL: Could not run test binary {t_bin}"); assert!(out.status.success()); let stdout = String::from_utf8_lossy(&out.stdout); let mut version = None; @@ -104,36 +152,53 @@ fn parse_mdata(test_bin: &str, mdata_fn: &str) -> Result { { let mut items = line.split_whitespace(); // For v0, we know the ident always comes first, then one field. - match items.next().unwrap() { + match items.next().ok_or(FailKind::BadMetadata)? { consts::VERSION_LINE_NAME => { - let v = items.next().unwrap().parse::().unwrap(); + let v = items + .next() + .ok_or(FailKind::BadMetadata)? + .parse::() + .map_err(|_| FailKind::BadMetadata)?; if v > consts::MDATA_VER { return Err(FailKind::VersionMismatch); } version = Some(v); } consts::ITER_COUNT_LINE_NAME => { - iterations = Some(items.next().unwrap().parse::().unwrap()); + // This should never be zero! + iterations = Some( + items + .next() + .ok_or(FailKind::BadMetadata)? + .parse::() + .map_err(|_| FailKind::BadMetadata)? + .try_into() + .map_err(|_| FailKind::BadMetadata)?, + ); } consts::IMPORTANCE_LINE_NAME => { - importance = match items.next().unwrap() { + importance = match items.next().ok_or(FailKind::BadMetadata)? { "critical" => Importance::Critical, "important" => Importance::Important, "average" => Importance::Average, "iffy" => Importance::Iffy, "fluff" => Importance::Fluff, - _ => unreachable!(), + _ => return Err(FailKind::BadMetadata), }; } consts::WEIGHT_LINE_NAME => { - weight = items.next().unwrap().parse::().unwrap(); + weight = items + .next() + .ok_or(FailKind::BadMetadata)? + .parse::() + .map_err(|_| FailKind::BadMetadata)?; } _ => unreachable!(), } } Ok(TestMdata { - version: version.unwrap(), + version: version.ok_or(FailKind::BadMetadata)?, // Iterations may be determined by us and thus left unspecified. 
iterations, // In principle this should always be set, but just for the sake of @@ -150,7 +215,7 @@ fn compare_profiles(args: &[String]) { let ident_new = args.first().expect("FATAL: missing identifier for new run"); let ident_old = args.get(1).expect("FATAL: missing identifier for old run"); // TODO: move this to a constant also tbh - let wspace_dir = std::env::var("CARGO_WORKSPACE_DIR").unwrap(); + let wspace_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); let runs_dir = PathBuf::from(&wspace_dir).join(consts::RUNS_DIR); // Use the blank outputs initially, so we can merge into these with prefixes. @@ -193,59 +258,22 @@ fn compare_profiles(args: &[String]) { println!("{res}"); } -#[expect(clippy::too_many_lines, reason = "This will be split up soon!")] -fn main() { - let args = std::env::args().collect::>(); - // We get passed the test we need to run as the 1st argument after our own name. - let test_bin = args - .get(1) - .expect("FATAL: No test binary or command; this shouldn't be manually invoked!"); - - // We're being asked to compare two results, not run the profiler. - if test_bin == "compare" { - compare_profiles(&args[2..]); - return; - } - - // Whether to skip printing some information to stderr. - let mut quiet = false; - // Minimum test importance we care about this run. - let mut thresh = Importance::Iffy; - // Where to print the output of this run. - let mut out_kind = OutputKind::Markdown; - - for arg in args.iter().skip(2) { - match arg.as_str() { - "--critical" => thresh = Importance::Critical, - "--important" => thresh = Importance::Important, - "--average" => thresh = Importance::Average, - "--iffy" => thresh = Importance::Iffy, - "--fluff" => thresh = Importance::Fluff, - "--quiet" => quiet = true, - s if s.starts_with("--json") => { - out_kind = OutputKind::Json(Path::new( - s.strip_prefix("--json=") - .expect("FATAL: Invalid json parameter; pass --json=filename"), - )); - } - _ => (), - } - } - if !quiet { - eprintln!("Starting perf check"); - } - - let mut cmd = Command::new(test_bin); +/// Runs a test binary, filtering out tests which aren't marked for perf triage +/// and giving back the list of tests we care about. +/// +/// The output of this is an iterator over `test_fn_name, test_mdata_name`. +fn get_tests(t_bin: &str) -> impl ExactSizeIterator { + let mut cmd = Command::new(t_bin); // --format=json is nightly-only :( cmd.args(["--list", "--format=terse"]); let out = cmd .output() - .expect("FATAL: Could not run test binary {test_bin}"); + .expect("FATAL: Could not run test binary {t_bin}"); assert!( out.status.success(), - "FATAL: Cannot do perf check - test binary {test_bin} returned an error" + "FATAL: Cannot do perf check - test binary {t_bin} returned an error" ); - if !quiet { + if !QUIET.load(Ordering::Relaxed) { eprintln!("Test binary ran successfully; starting profile..."); } // Parse the test harness output to look for tests we care about. @@ -273,34 +301,156 @@ fn main() { test_list.sort_unstable(); test_list.dedup(); - let len = test_list.len(); - // Tests should come in pairs with their mdata fn! assert!( - len.is_multiple_of(2), - "Malformed tests in test binary {test_bin}" + test_list.len().is_multiple_of(2), + "Malformed tests in test binary {t_bin}" ); + let out = test_list + .chunks_exact_mut(2) + .map(|pair| { + // Be resilient against changes to these constants. 
+ if consts::SUF_NORMAL < consts::SUF_MDATA { + (pair[0].to_owned(), pair[1].to_owned()) + } else { + (pair[1].to_owned(), pair[0].to_owned()) + } + }) + .collect::>(); + out.into_iter() +} + +/// Triage a test to determine the correct number of iterations that it should run. +/// Specifically, repeatedly runs the given test until its execution time exceeds +/// `thresh`, calling `step(iterations)` after every failed run to determine the new +/// iteration count. Returns `None` if the test errored or `step` returned `None`, +/// else `Some(iterations)`. +/// +/// # Panics +/// This will panic if `step(usize)` is not monotonically increasing. +fn triage_test( + t_bin: &str, + t_name: &str, + thresh: Duration, + mut step: impl FnMut(NonZero) -> Option>, +) -> Option> { + let mut iter_count = DEFAULT_ITER_COUNT; + loop { + let mut cmd = Command::new(t_bin); + cmd.args([t_name, "--exact"]); + cmd.env(consts::ITER_ENV_VAR, format!("{iter_count}")); + // Don't let the child muck up our stdin/out/err. + cmd.stdin(Stdio::null()); + cmd.stdout(Stdio::null()); + cmd.stderr(Stdio::null()); + let pre = Instant::now(); + // Discard the output beyond ensuring success. + let out = cmd.spawn().unwrap().wait(); + let post = Instant::now(); + if !out.unwrap().success() { + break None; + } + if post - pre > thresh { + break Some(iter_count); + } + let new = step(iter_count)?; + assert!( + new > iter_count, + "FATAL: step must be monotonically increasing" + ); + iter_count = new; + } +} + +/// Profiles a given test with hyperfine, returning the mean and standard deviation +/// for its runtime. If the test errors, returns `None` instead. +fn hyp_profile(t_bin: &str, t_name: &str, iterations: NonZero) -> Option { + let mut perf_cmd = Command::new("hyperfine"); + // Warm up the cache and print markdown output to stdout, which we parse. + perf_cmd.args([ + "--style", + "none", + "--warmup", + "1", + "--export-markdown", + "-", + &format!("{t_bin} {t_name}"), + ]); + perf_cmd.env(consts::ITER_ENV_VAR, format!("{iterations}")); + let p_out = perf_cmd.output().unwrap(); + if !p_out.status.success() { + return None; + } + + let cmd_output = String::from_utf8_lossy(&p_out.stdout); + // Can't use .last() since we have a trailing newline. Sigh. + let results_line = cmd_output.lines().nth(3).unwrap(); + // Grab the values out of the pretty-print. + // TODO: Parse json instead. + let mut res_iter = results_line.split_whitespace(); + // Durations are given in milliseconds, so account for that. + let mean = Duration::from_secs_f64(res_iter.nth(4).unwrap().parse::().unwrap() / 1000.); + let stddev = Duration::from_secs_f64(res_iter.nth(1).unwrap().parse::().unwrap() / 1000.); + + Some(Timings { mean, stddev }) +} + +fn main() { + let args = std::env::args().collect::>(); + // We get passed the test we need to run as the 1st argument after our own name. + let t_bin = args + .get(1) + .expect("FATAL: No test binary or command; this shouldn't be manually invoked!"); + + // We're being asked to compare two results, not run the profiler. + if t_bin == "compare" { + compare_profiles(&args[2..]); + return; + } + + // Minimum test importance we care about this run. + let mut thresh = Importance::Iffy; + // Where to print the output of this run. 
+ let mut out_kind = OutputKind::Markdown; + + for arg in args.iter().skip(2) { + match arg.as_str() { + "--critical" => thresh = Importance::Critical, + "--important" => thresh = Importance::Important, + "--average" => thresh = Importance::Average, + "--iffy" => thresh = Importance::Iffy, + "--fluff" => thresh = Importance::Fluff, + "--quiet" => QUIET.store(true, Ordering::Relaxed), + s if s.starts_with("--json") => { + out_kind = OutputKind::Json(Path::new( + s.strip_prefix("--json=") + .expect("FATAL: Invalid json parameter; pass --json=ident"), + )); + } + _ => (), + } + } + if !QUIET.load(Ordering::Relaxed) { + eprintln!("Starting perf check"); + } + let mut output = Output::default(); // Spawn and profile an instance of each perf-sensitive test, via hyperfine. // Each test is a pair of (test, metadata-returning-fn), so grab both. We also // know the list is sorted. - for (idx, t_pair) in test_list.chunks_exact(2).enumerate() { - if !quiet { - eprint!("\rProfiling test {}/{}", idx + 1, len / 2); + let i = get_tests(t_bin); + let len = i.len(); + for (idx, (ref t_name, ref t_mdata)) in i.enumerate() { + if !QUIET.load(Ordering::Relaxed) { + eprint!("\rProfiling test {}/{}", idx + 1, len); } - // Be resilient against changes to these constants. - let (t_name, t_mdata) = if consts::SUF_NORMAL < consts::SUF_MDATA { - (t_pair[0], t_pair[1]) - } else { - (t_pair[1], t_pair[0]) - }; // Pretty-printable stripped name for the test. let t_name_pretty = t_name.replace(consts::SUF_NORMAL, ""); // Get the metadata this test reports for us. - let t_mdata = match parse_mdata(test_bin, t_mdata) { + let t_mdata = match parse_mdata(t_bin, t_mdata) { Ok(mdata) => mdata, Err(err) => fail!(output, t_name_pretty, err), }; @@ -312,78 +462,28 @@ fn main() { // Time test execution to see how many iterations we need to do in order // to account for random noise. This is skipped for tests with fixed // iteration counts. - let mut errored = false; - let final_iter_count = t_mdata.iterations.unwrap_or_else(|| { - let mut iter_count = DEFAULT_ITER_COUNT; - loop { - let mut cmd = Command::new(test_bin); - cmd.args([t_name, "--exact"]); - cmd.env(consts::ITER_ENV_VAR, format!("{iter_count}")); - // Don't let the child muck up our stdin/out/err. - cmd.stdin(Stdio::null()); - cmd.stdout(Stdio::null()); - cmd.stderr(Stdio::null()); - let pre = Instant::now(); - // Discard the output beyond ensuring success. - let out = cmd.spawn().unwrap().wait(); - let post = Instant::now(); - if !out.unwrap().success() { - errored = true; - break iter_count; - } - if post - pre > consts::NOISE_CUTOFF { - break iter_count; - } else if let Some(c) = iter_count.checked_mul(ITER_COUNT_MUL) { - iter_count = c; + let final_iter_count = t_mdata.iterations.or_else(|| { + triage_test(t_bin, t_name, consts::NOISE_CUTOFF, |c| { + if let Some(c) = c.checked_mul(ITER_COUNT_MUL) { + Some(c) } else { // This should almost never happen, but maybe..? eprintln!( - "WARNING: Running nearly usize::MAX iterations of test {t_name_pretty}" + "WARNING: Ran nearly usize::MAX iterations of test {t_name_pretty}; skipping" ); - break iter_count; + None } - } + }) }); // Don't profile failing tests. - if errored { + let Some(final_iter_count) = final_iter_count else { fail!(output, t_name_pretty, t_mdata, FailKind::Triage); - } + }; // Now profile! - let mut perf_cmd = Command::new("hyperfine"); - // Warm up the cache and print markdown output to stdout. 
- // TODO: json - perf_cmd.args([ - "--style", - "none", - "--warmup", - "1", - "--export-markdown", - "-", - &format!("{test_bin} {t_name}"), - ]); - perf_cmd.env(consts::ITER_ENV_VAR, format!("{final_iter_count}")); - let p_out = perf_cmd.output().unwrap(); - if p_out.status.success() { - let cmd_output = String::from_utf8_lossy(&p_out.stdout); - // Can't use .last() since we have a trailing newline. Sigh. - let results_line = cmd_output.lines().nth(3).unwrap(); - // Grab the values out of the pretty-print. - // TODO: Parse json instead. - let mut res_iter = results_line.split_whitespace(); - // Durations are given in milliseconds, so account for that. - let mean = - Duration::from_secs_f64(res_iter.nth(4).unwrap().parse::().unwrap() / 1000.); - let stddev = - Duration::from_secs_f64(res_iter.nth(1).unwrap().parse::().unwrap() / 1000.); - - output.success( - t_name_pretty, - t_mdata, - final_iter_count, - Timings { mean, stddev }, - ); + if let Some(timings) = hyp_profile(t_bin, t_name, final_iter_count) { + output.success(t_name_pretty, t_mdata, final_iter_count, timings); } else { fail!( output, @@ -394,7 +494,7 @@ fn main() { ); } } - if !quiet { + if !QUIET.load(Ordering::Relaxed) { if output.is_empty() { eprintln!("Nothing to do."); } else { @@ -409,43 +509,5 @@ fn main() { return; } - match out_kind { - OutputKind::Markdown => print!("{output}"), - OutputKind::Json(user_path) => { - let wspace_dir = std::env::var("CARGO_WORKSPACE_DIR").unwrap(); - let runs_dir = PathBuf::from(&wspace_dir).join(consts::RUNS_DIR); - std::fs::create_dir_all(&runs_dir).unwrap(); - assert!( - !user_path.to_string_lossy().is_empty(), - "FATAL: Empty filename specified!" - ); - // Get the test binary's crate's name; a path like - // target/release-fast/deps/gpui-061ff76c9b7af5d7 - // would be reduced to just "gpui". - let test_bin_stripped = Path::new(test_bin) - .file_name() - .unwrap() - .to_str() - .unwrap() - .rsplit_once('-') - .unwrap() - .0; - let mut file_path = runs_dir.join(user_path); - file_path - .as_mut_os_string() - .push(format!(".{test_bin_stripped}.json")); - let mut out_file = OpenOptions::new() - .write(true) - .create(true) - .truncate(true) - .open(&file_path) - .unwrap(); - out_file - .write_all(&serde_json::to_vec(&output).unwrap()) - .unwrap(); - if !quiet { - eprintln!("JSON output written to {}", file_path.display()); - } - } - } + out_kind.log(&output, t_bin); } From e484f49ee8a3c6d9cd390beb65fdff57f05bac7d Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 22 Sep 2025 16:40:56 -0400 Subject: [PATCH 57/58] language_models: Treat a `block_reason` from Gemini as a refusal (#38670) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR updates the Gemini provider to treat a `prompt_feedback.block_reason` as a refusal, as Gemini does not seem to return a `stop_reason` to use in this case. Screenshot 2025-09-22 at 4 23 15 PM Previously this would just result in no feedback to the user. Release Notes: - Added an error message when a Gemini response contains a `block_reason`. 
---
 crates/language_models/src/provider/google.rs | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs
index 70a7a27defdfba609765710902845f921a8333ac..48712e33a7afe4f1b669b551e66423a8c8b2c995 100644
--- a/crates/language_models/src/provider/google.rs
+++ b/crates/language_models/src/provider/google.rs
@@ -612,6 +612,24 @@ impl GoogleEventMapper {
 convert_usage(&self.usage),
 )))
 }
+
+ if let Some(prompt_feedback) = event.prompt_feedback
+ && let Some(block_reason) = prompt_feedback.block_reason.as_deref()
+ {
+ self.stop_reason = match block_reason {
+ "SAFETY" | "OTHER" | "BLOCKLIST" | "PROHIBITED_CONTENT" | "IMAGE_SAFETY" => {
+ StopReason::Refusal
+ }
+ _ => {
+ log::error!("Unexpected Google block_reason: {block_reason}");
+ StopReason::Refusal
+ }
+ };
+ events.push(Ok(LanguageModelCompletionEvent::Stop(self.stop_reason)));
+
+ return events;
+ }
+
 if let Some(candidates) = event.candidates {
 for candidate in candidates {
 if let Some(finish_reason) = candidate.finish_reason.as_deref() {

From 46d19d8a47ca059913c2fcb1a06c6543a62b434c Mon Sep 17 00:00:00 2001
From: Jakub Konka
Date: Mon, 22 Sep 2025 23:03:37 +0200
Subject: [PATCH 58/58] helix: Fix helix-paste mode in line mode (#38663)

In particular,

* if the selection ends at the beginning of the next line, and the current line
  under the cursor is not empty, we paste at the selection's end.
* if however the current line under the cursor is empty, we need to move to the
  beginning of the next line to avoid pasting above the end of the current
  selection.

In addition, in line mode, we always move the cursor to the end of the inserted
text. Otherwise, while it looks fine visually, inserting/appending ends up in
the next logical line, which is not desirable.

Release Notes:

- N/A
---
 crates/vim/src/helix/paste.rs | 52 +++++++++++++++++++++++++++++------
 1 file changed, 43 insertions(+), 9 deletions(-)

diff --git a/crates/vim/src/helix/paste.rs b/crates/vim/src/helix/paste.rs
index ecfdaa499257ad91d8518f488be9a4d4dbb51f1c..957d459dac50892e8173f4f1ac12459277b6d6ae 100644
--- a/crates/vim/src/helix/paste.rs
+++ b/crates/vim/src/helix/paste.rs
@@ -84,13 +84,22 @@ impl Vim {
 let display_point = if line_mode {
 if action.before {
 movement::line_beginning(&display_map, sel.start, false)
- } else if sel.end.column() == 0 {
+ } else if sel.start.column() > 0
+ && sel.end.column() == 0
+ && sel.start != sel.end
+ {
 sel.end
 } else {
- movement::right(
- &display_map,
- movement::line_end(&display_map, sel.end, false),
- )
+ let point = movement::line_end(&display_map, sel.end, false);
+ if sel.end.column() == 0 && point.column() > 0 {
+ // If the selection ends at the beginning of the next line, and the current line
+ // under the cursor is not empty, we paste at the selection's end.
+ sel.end
+ } else {
+ // If however the current line under the cursor is empty, we need to move
+ // to the beginning of the next line to avoid pasting above the end of current selection.
+ movement::right(&display_map, point)
+ }
 }
 } else if action.before {
 sel.start
@@ -123,6 +132,12 @@ impl Vim {
 let offset = anchor.to_offset(&snapshot);
 if action.before {
 offset.saturating_sub(len)..offset
+ } else if line_mode {
+ // In line mode, we always move the cursor to the end of the inserted text.
+ // Otherwise, while it looks fine visually, inserting/appending ends up
+ // in the next logical line which is not desirable.
+ debug_assert!(len > 0); + offset..(offset + len - 1) } else { offset..(offset + len) } @@ -386,8 +401,8 @@ mod test { indoc! {" The quick brown fox jumps over - «n - ˇ»the lazy dog."}, + «nˇ» + the lazy dog."}, Mode::HelixNormal, ); @@ -405,8 +420,27 @@ mod test { indoc! {" The quick brown fox jumps over - «n - ˇ»the lazy dog."}, + «nˇ» + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.set_state( + indoc! {" + + The quick brown + fox jumps overˇ + the lazy dog."}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("x y up up p"); + cx.assert_state( + indoc! {" + + «fox jumps overˇ» + The quick brown + fox jumps over + the lazy dog."}, Mode::HelixNormal, ); }
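Taken together, the paste.rs changes above mean a line-mode paste selects everything it inserted except the final byte, so the cursor lands on the last pasted line instead of the next logical one. A standalone sketch of that range arithmetic follows (hypothetical helper, assuming the pasted text carries a trailing newline, as line-wise registers typically do):

use std::ops::Range;

// Illustrative only: the selection left behind after pasting `len` bytes at
// byte `offset`, mirroring the logic in the diff above.
fn pasted_range(offset: usize, len: usize, line_mode: bool, before: bool) -> Range<usize> {
    if before {
        // Pasting before: the range covers the `len` bytes just inserted ahead of `offset`.
        offset.saturating_sub(len)..offset
    } else if line_mode {
        debug_assert!(len > 0);
        // Stop one byte short of the trailing newline so the cursor stays on the
        // last inserted line instead of spilling onto the next logical line.
        offset..(offset + len - 1)
    } else {
        offset..(offset + len)
    }
}

fn main() {
    // Pasting the 4-byte line-wise register "fox\n" at byte 10 selects 10..13,
    // i.e. "fox" without its trailing newline.
    assert_eq!(pasted_range(10, 4, true, false), 10..13);
    // A character-wise paste keeps the full range.
    assert_eq!(pasted_range(10, 4, false, false), 10..14);
}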