From 0d70bcb88c605f3f5e6850eedecd59c4a8fba7ca Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 24 Jun 2025 09:11:03 -0300 Subject: [PATCH 01/56] agent: Allow to force uninstall extension if it provides more than the MCP server (#33279) --- crates/agent_ui/src/agent_configuration.rs | 47 ++++++++++++++++++++-- 1 file changed, 44 insertions(+), 3 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 1c12e51e2ddf4068f4f341089e3a084c5cf7a51f..e91a0f7ebe590d1f0480741f6eec3ebda220ccea 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -940,15 +940,56 @@ fn show_unable_to_uninstall_extension_with_context_server( id: ContextServerId, cx: &mut App, ) { + let workspace_handle = workspace.weak_handle(); + let context_server_id = id.clone(); + let status_toast = StatusToast::new( format!( - "Unable to uninstall the {} extension, as it provides more than just the MCP server.", + "The {} extension provides more than just the MCP server. Proceed to uninstall anyway?", id.0 ), cx, - |this, _cx| { + move |this, _cx| { + let workspace_handle = workspace_handle.clone(); + let context_server_id = context_server_id.clone(); + this.icon(ToastIcon::new(IconName::Warning).color(Color::Warning)) - .action("Dismiss", |_, _| {}) + .dismiss_button(true) + .action("Uninstall", move |_, _cx| { + if let Some((extension_id, _)) = + resolve_extension_for_context_server(&context_server_id, _cx) + { + ExtensionStore::global(_cx).update(_cx, |store, cx| { + store + .uninstall_extension(extension_id, cx) + .detach_and_log_err(cx); + }); + + workspace_handle + .update(_cx, |workspace, cx| { + let fs = workspace.app_state().fs.clone(); + cx.spawn({ + let context_server_id = context_server_id.clone(); + async move |_workspace_handle, cx| { + cx.update(|cx| { + update_settings_file::( + fs, + cx, + move |settings, _| { + settings + .context_servers + .remove(&context_server_id.0); + }, + ); + })?; + anyhow::Ok(()) + } + }) + .detach_and_log_err(cx); + }) + .log_err(); + } + }) }, ); From 94735aef69cb94426df578326198980633b36927 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 24 Jun 2025 11:02:06 -0300 Subject: [PATCH 02/56] Add support for Vercel as a language model provider (#33292) Vercel v0 is an OpenAI-compatible model, so this is mostly a dupe of the OpenAI provider files with some adaptations for v0, including going ahead and using the custom endpoint for the API URL field. Release Notes: - Added support for Vercel as a language model provider. 
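Since v0 speaks the OpenAI chat-completions protocol, the Vercel-specific pieces in this patch are mostly the default endpoint, the model catalog, and the settings override for the API URL. Below is a minimal, self-contained sketch of that endpoint/catalog resolution; the types are simplified illustrative stand-ins for the `VercelSettings` and `vercel::Model` definitions added in the diff, not the real Zed code.

```rust
use std::collections::BTreeMap;

// Stand-ins for the `vercel::Model` and `VercelSettings` types this patch adds
// (simplified; field names follow the diff, but this is not the real Zed code).

#[derive(Clone, Debug)]
enum Model {
    VZero,
    Custom { name: String, max_tokens: u64 },
}

impl Model {
    fn id(&self) -> &str {
        match self {
            Model::VZero => "v-0",
            Model::Custom { name, .. } => name,
        }
    }

    fn max_token_count(&self) -> u64 {
        match self {
            Model::VZero => 128_000,
            Model::Custom { max_tokens, .. } => *max_tokens,
        }
    }
}

#[derive(Default)]
struct VercelSettings {
    api_url: String,                      // empty string means "use the default endpoint"
    available_models: Vec<(String, u64)>, // (name, max_tokens) entries from settings
}

const VERCEL_API_URL: &str = "https://api.v0.dev/v1";

fn effective_api_url(settings: &VercelSettings) -> &str {
    if settings.api_url.is_empty() {
        VERCEL_API_URL
    } else {
        &settings.api_url
    }
}

/// Base models come from the enum; custom models from settings override them by id.
fn provided_models(settings: &VercelSettings) -> Vec<Model> {
    let mut models = BTreeMap::new();
    models.insert("v-0".to_string(), Model::VZero);
    for (name, max_tokens) in &settings.available_models {
        models.insert(
            name.clone(),
            Model::Custom { name: name.clone(), max_tokens: *max_tokens },
        );
    }
    models.into_values().collect()
}

fn main() {
    let settings = VercelSettings::default();
    println!("endpoint: {}/chat/completions", effective_api_url(&settings));
    for model in provided_models(&settings) {
        println!("{} (max {} tokens)", model.id(), model.max_token_count());
    }
}
```

Requests then go to `{api_url}/chat/completions` with a `Bearer` token, which is why the provider in the diff can reuse `open_ai::stream_completion` essentially unchanged.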
--- Cargo.lock | 15 + Cargo.toml | 2 + assets/icons/ai_v_zero.svg | 16 + crates/icons/src/icons.rs | 1 + crates/language_models/Cargo.toml | 1 + crates/language_models/src/language_models.rs | 5 + crates/language_models/src/provider.rs | 1 + crates/language_models/src/provider/vercel.rs | 867 ++++++++++++++++++ crates/language_models/src/settings.rs | 21 + crates/vercel/Cargo.toml | 26 + crates/vercel/LICENSE-GPL | 1 + crates/vercel/src/vercel.rs | 438 +++++++++ 12 files changed, 1394 insertions(+) create mode 100644 assets/icons/ai_v_zero.svg create mode 100644 crates/language_models/src/provider/vercel.rs create mode 100644 crates/vercel/Cargo.toml create mode 120000 crates/vercel/LICENSE-GPL create mode 100644 crates/vercel/src/vercel.rs diff --git a/Cargo.lock b/Cargo.lock index 922fed0ae45dfac97b4c50dce82fc540fa96cc15..70a05cf4aa2a47de3973dbf64d4b8c0430d06a2c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8985,6 +8985,7 @@ dependencies = [ "ui", "ui_input", "util", + "vercel", "workspace-hack", "zed_llm_client", ] @@ -17424,6 +17425,20 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +[[package]] +name = "vercel" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.31", + "http_client", + "schemars", + "serde", + "serde_json", + "strum 0.27.1", + "workspace-hack", +] + [[package]] name = "version-compare" version = "0.2.0" diff --git a/Cargo.toml b/Cargo.toml index 8de3ad9f74033d5e03825849579ef4a9801b30d3..da2ed94ac4496eb468b06681bae72e0d8a57fa1b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -165,6 +165,7 @@ members = [ "crates/ui_prompt", "crates/util", "crates/util_macros", + "crates/vercel", "crates/vim", "crates/vim_mode_setting", "crates/watch", @@ -375,6 +376,7 @@ ui_macros = { path = "crates/ui_macros" } ui_prompt = { path = "crates/ui_prompt" } util = { path = "crates/util" } util_macros = { path = "crates/util_macros" } +vercel = { path = "crates/vercel" } vim = { path = "crates/vim" } vim_mode_setting = { path = "crates/vim_mode_setting" } diff --git a/assets/icons/ai_v_zero.svg b/assets/icons/ai_v_zero.svg new file mode 100644 index 0000000000000000000000000000000000000000..26d09ea26ac12ea4095d5fae0026f54fd332a161 --- /dev/null +++ b/assets/icons/ai_v_zero.svg @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 182696919063a68fa33d8686527bff74c3b8ee6e..7e1d7db5753ff1517902880bda4c6c9e24cfe582 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -19,6 +19,7 @@ pub enum IconName { AiOllama, AiOpenAi, AiOpenRouter, + AiVZero, AiZed, ArrowCircle, ArrowDown, diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index c8b5e57b1a07c1abebec0e07e61d8b111771dce5..80412cb5d24910d2b8c1567025063102d1ccea41 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -40,6 +40,7 @@ mistral = { workspace = true, features = ["schemars"] } ollama = { workspace = true, features = ["schemars"] } open_ai = { workspace = true, features = ["schemars"] } open_router = { workspace = true, features = ["schemars"] } +vercel = { workspace = true, features = ["schemars"] } partial-json-fixer.workspace = true project.workspace = true proto.workspace = true diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 
0224da4e6b530224e3ed81ff27143b91e58a104c..78dbc33c51cf3e74fd641028b5f84099a7ddbef3 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -20,6 +20,7 @@ use crate::provider::mistral::MistralLanguageModelProvider; use crate::provider::ollama::OllamaLanguageModelProvider; use crate::provider::open_ai::OpenAiLanguageModelProvider; use crate::provider::open_router::OpenRouterLanguageModelProvider; +use crate::provider::vercel::VercelLanguageModelProvider; pub use crate::settings::*; pub fn init(user_store: Entity, client: Arc, fs: Arc, cx: &mut App) { @@ -77,5 +78,9 @@ fn register_language_model_providers( OpenRouterLanguageModelProvider::new(client.http_client(), cx), cx, ); + registry.register_provider( + VercelLanguageModelProvider::new(client.http_client(), cx), + cx, + ); registry.register_provider(CopilotChatLanguageModelProvider::new(cx), cx); } diff --git a/crates/language_models/src/provider.rs b/crates/language_models/src/provider.rs index 4f2ea9cc09f6266b2bcd1f3d3e3d9c9aeff7ba1f..6bc93bd3661e86fc2c8f9bacafaf2d4121e0f7a6 100644 --- a/crates/language_models/src/provider.rs +++ b/crates/language_models/src/provider.rs @@ -9,3 +9,4 @@ pub mod mistral; pub mod ollama; pub mod open_ai; pub mod open_router; +pub mod vercel; diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs new file mode 100644 index 0000000000000000000000000000000000000000..46063aceff17f9e779435e3b3d26c6507ca2c019 --- /dev/null +++ b/crates/language_models/src/provider/vercel.rs @@ -0,0 +1,867 @@ +use anyhow::{Context as _, Result, anyhow}; +use collections::{BTreeMap, HashMap}; +use credentials_provider::CredentialsProvider; + +use futures::Stream; +use futures::{FutureExt, StreamExt, future::BoxFuture}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window}; +use http_client::HttpClient; +use language_model::{ + AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, + LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, + LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, + LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent, + RateLimiter, Role, StopReason, +}; +use menu; +use open_ai::{ImageUrl, ResponseStreamEvent, stream_completion}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use settings::{Settings, SettingsStore}; +use std::pin::Pin; +use std::str::FromStr as _; +use std::sync::Arc; +use strum::IntoEnumIterator; +use vercel::Model; + +use ui::{ElevationIndex, List, Tooltip, prelude::*}; +use ui_input::SingleLineInput; +use util::ResultExt; + +use crate::{AllLanguageModelSettings, ui::InstructionListItem}; + +const PROVIDER_ID: &str = "vercel"; +const PROVIDER_NAME: &str = "Vercel"; + +#[derive(Default, Clone, Debug, PartialEq)] +pub struct VercelSettings { + pub api_url: String, + pub available_models: Vec, + pub needs_setting_migration: bool, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] +pub struct AvailableModel { + pub name: String, + pub display_name: Option, + pub max_tokens: u64, + pub max_output_tokens: Option, + pub max_completion_tokens: Option, +} + +pub struct VercelLanguageModelProvider { + http_client: Arc, + state: gpui::Entity, +} + +pub struct State { + api_key: Option, + api_key_from_env: bool, + _subscription: Subscription, +} + +const VERCEL_API_KEY_VAR: &str = 
"VERCEL_API_KEY"; + +impl State { + fn is_authenticated(&self) -> bool { + self.api_key.is_some() + } + + fn reset_api_key(&self, cx: &mut Context) -> Task> { + let credentials_provider = ::global(cx); + let settings = &AllLanguageModelSettings::get_global(cx).vercel; + let api_url = if settings.api_url.is_empty() { + vercel::VERCEL_API_URL.to_string() + } else { + settings.api_url.clone() + }; + cx.spawn(async move |this, cx| { + credentials_provider + .delete_credentials(&api_url, &cx) + .await + .log_err(); + this.update(cx, |this, cx| { + this.api_key = None; + this.api_key_from_env = false; + cx.notify(); + }) + }) + } + + fn set_api_key(&mut self, api_key: String, cx: &mut Context) -> Task> { + let credentials_provider = ::global(cx); + let settings = &AllLanguageModelSettings::get_global(cx).vercel; + let api_url = if settings.api_url.is_empty() { + vercel::VERCEL_API_URL.to_string() + } else { + settings.api_url.clone() + }; + cx.spawn(async move |this, cx| { + credentials_provider + .write_credentials(&api_url, "Bearer", api_key.as_bytes(), &cx) + .await + .log_err(); + this.update(cx, |this, cx| { + this.api_key = Some(api_key); + cx.notify(); + }) + }) + } + + fn authenticate(&self, cx: &mut Context) -> Task> { + if self.is_authenticated() { + return Task::ready(Ok(())); + } + + let credentials_provider = ::global(cx); + let settings = &AllLanguageModelSettings::get_global(cx).vercel; + let api_url = if settings.api_url.is_empty() { + vercel::VERCEL_API_URL.to_string() + } else { + settings.api_url.clone() + }; + cx.spawn(async move |this, cx| { + let (api_key, from_env) = if let Ok(api_key) = std::env::var(VERCEL_API_KEY_VAR) { + (api_key, true) + } else { + let (_, api_key) = credentials_provider + .read_credentials(&api_url, &cx) + .await? 
+ .ok_or(AuthenticateError::CredentialsNotFound)?; + ( + String::from_utf8(api_key).context("invalid {PROVIDER_NAME} API key")?, + false, + ) + }; + this.update(cx, |this, cx| { + this.api_key = Some(api_key); + this.api_key_from_env = from_env; + cx.notify(); + })?; + + Ok(()) + }) + } +} + +impl VercelLanguageModelProvider { + pub fn new(http_client: Arc, cx: &mut App) -> Self { + let state = cx.new(|cx| State { + api_key: None, + api_key_from_env: false, + _subscription: cx.observe_global::(|_this: &mut State, cx| { + cx.notify(); + }), + }); + + Self { http_client, state } + } + + fn create_language_model(&self, model: vercel::Model) -> Arc { + Arc::new(VercelLanguageModel { + id: LanguageModelId::from(model.id().to_string()), + model, + state: self.state.clone(), + http_client: self.http_client.clone(), + request_limiter: RateLimiter::new(4), + }) + } +} + +impl LanguageModelProviderState for VercelLanguageModelProvider { + type ObservableEntity = State; + + fn observable_entity(&self) -> Option> { + Some(self.state.clone()) + } +} + +impl LanguageModelProvider for VercelLanguageModelProvider { + fn id(&self) -> LanguageModelProviderId { + LanguageModelProviderId(PROVIDER_ID.into()) + } + + fn name(&self) -> LanguageModelProviderName { + LanguageModelProviderName(PROVIDER_NAME.into()) + } + + fn icon(&self) -> IconName { + IconName::AiVZero + } + + fn default_model(&self, _cx: &App) -> Option> { + Some(self.create_language_model(vercel::Model::default())) + } + + fn default_fast_model(&self, _cx: &App) -> Option> { + Some(self.create_language_model(vercel::Model::default_fast())) + } + + fn provided_models(&self, cx: &App) -> Vec> { + let mut models = BTreeMap::default(); + + // Add base models from vercel::Model::iter() + for model in vercel::Model::iter() { + if !matches!(model, vercel::Model::Custom { .. 
}) { + models.insert(model.id().to_string(), model); + } + } + + // Override with available models from settings + for model in &AllLanguageModelSettings::get_global(cx) + .vercel + .available_models + { + models.insert( + model.name.clone(), + vercel::Model::Custom { + name: model.name.clone(), + display_name: model.display_name.clone(), + max_tokens: model.max_tokens, + max_output_tokens: model.max_output_tokens, + max_completion_tokens: model.max_completion_tokens, + }, + ); + } + + models + .into_values() + .map(|model| self.create_language_model(model)) + .collect() + } + + fn is_authenticated(&self, cx: &App) -> bool { + self.state.read(cx).is_authenticated() + } + + fn authenticate(&self, cx: &mut App) -> Task> { + self.state.update(cx, |state, cx| state.authenticate(cx)) + } + + fn configuration_view(&self, window: &mut Window, cx: &mut App) -> AnyView { + cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) + .into() + } + + fn reset_credentials(&self, cx: &mut App) -> Task> { + self.state.update(cx, |state, cx| state.reset_api_key(cx)) + } +} + +pub struct VercelLanguageModel { + id: LanguageModelId, + model: vercel::Model, + state: gpui::Entity, + http_client: Arc, + request_limiter: RateLimiter, +} + +impl VercelLanguageModel { + fn stream_completion( + &self, + request: open_ai::Request, + cx: &AsyncApp, + ) -> BoxFuture<'static, Result>>> + { + let http_client = self.http_client.clone(); + let Ok((api_key, api_url)) = cx.read_entity(&self.state, |state, cx| { + let settings = &AllLanguageModelSettings::get_global(cx).vercel; + let api_url = if settings.api_url.is_empty() { + vercel::VERCEL_API_URL.to_string() + } else { + settings.api_url.clone() + }; + (state.api_key.clone(), api_url) + }) else { + return futures::future::ready(Err(anyhow!("App state dropped"))).boxed(); + }; + + let future = self.request_limiter.stream(async move { + let api_key = api_key.context("Missing Vercel API Key")?; + let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request); + let response = request.await?; + Ok(response) + }); + + async move { Ok(future.await?.boxed()) }.boxed() + } +} + +impl LanguageModel for VercelLanguageModel { + fn id(&self) -> LanguageModelId { + self.id.clone() + } + + fn name(&self) -> LanguageModelName { + LanguageModelName::from(self.model.display_name().to_string()) + } + + fn provider_id(&self) -> LanguageModelProviderId { + LanguageModelProviderId(PROVIDER_ID.into()) + } + + fn provider_name(&self) -> LanguageModelProviderName { + LanguageModelProviderName(PROVIDER_NAME.into()) + } + + fn supports_tools(&self) -> bool { + true + } + + fn supports_images(&self) -> bool { + false + } + + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { + match choice { + LanguageModelToolChoice::Auto => true, + LanguageModelToolChoice::Any => true, + LanguageModelToolChoice::None => true, + } + } + + fn telemetry_id(&self) -> String { + format!("vercel/{}", self.model.id()) + } + + fn max_token_count(&self) -> u64 { + self.model.max_token_count() + } + + fn max_output_tokens(&self) -> Option { + self.model.max_output_tokens() + } + + fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &App, + ) -> BoxFuture<'static, Result> { + count_vercel_tokens(request, self.model.clone(), cx) + } + + fn stream_completion( + &self, + request: LanguageModelRequest, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result< + futures::stream::BoxStream< + 'static, + Result, + >, + LanguageModelCompletionError, + >, + > { + let 
request = into_vercel(request, &self.model, self.max_output_tokens()); + let completions = self.stream_completion(request, cx); + async move { + let mapper = VercelEventMapper::new(); + Ok(mapper.map_stream(completions.await?).boxed()) + } + .boxed() + } +} + +pub fn into_vercel( + request: LanguageModelRequest, + model: &vercel::Model, + max_output_tokens: Option, +) -> open_ai::Request { + let stream = !model.id().starts_with("o1-"); + + let mut messages = Vec::new(); + for message in request.messages { + for content in message.content { + match content { + MessageContent::Text(text) | MessageContent::Thinking { text, .. } => { + add_message_content_part( + open_ai::MessagePart::Text { text: text }, + message.role, + &mut messages, + ) + } + MessageContent::RedactedThinking(_) => {} + MessageContent::Image(image) => { + add_message_content_part( + open_ai::MessagePart::Image { + image_url: ImageUrl { + url: image.to_base64_url(), + detail: None, + }, + }, + message.role, + &mut messages, + ); + } + MessageContent::ToolUse(tool_use) => { + let tool_call = open_ai::ToolCall { + id: tool_use.id.to_string(), + content: open_ai::ToolCallContent::Function { + function: open_ai::FunctionContent { + name: tool_use.name.to_string(), + arguments: serde_json::to_string(&tool_use.input) + .unwrap_or_default(), + }, + }, + }; + + if let Some(open_ai::RequestMessage::Assistant { tool_calls, .. }) = + messages.last_mut() + { + tool_calls.push(tool_call); + } else { + messages.push(open_ai::RequestMessage::Assistant { + content: None, + tool_calls: vec![tool_call], + }); + } + } + MessageContent::ToolResult(tool_result) => { + let content = match &tool_result.content { + LanguageModelToolResultContent::Text(text) => { + vec![open_ai::MessagePart::Text { + text: text.to_string(), + }] + } + LanguageModelToolResultContent::Image(image) => { + vec![open_ai::MessagePart::Image { + image_url: ImageUrl { + url: image.to_base64_url(), + detail: None, + }, + }] + } + }; + + messages.push(open_ai::RequestMessage::Tool { + content: content.into(), + tool_call_id: tool_result.tool_use_id.to_string(), + }); + } + } + } + } + + open_ai::Request { + model: model.id().into(), + messages, + stream, + stop: request.stop, + temperature: request.temperature.unwrap_or(1.0), + max_completion_tokens: max_output_tokens, + parallel_tool_calls: if model.supports_parallel_tool_calls() && !request.tools.is_empty() { + // Disable parallel tool calls, as the Agent currently expects a maximum of one per turn. + Some(false) + } else { + None + }, + tools: request + .tools + .into_iter() + .map(|tool| open_ai::ToolDefinition::Function { + function: open_ai::FunctionDefinition { + name: tool.name, + description: Some(tool.description), + parameters: Some(tool.input_schema), + }, + }) + .collect(), + tool_choice: request.tool_choice.map(|choice| match choice { + LanguageModelToolChoice::Auto => open_ai::ToolChoice::Auto, + LanguageModelToolChoice::Any => open_ai::ToolChoice::Required, + LanguageModelToolChoice::None => open_ai::ToolChoice::None, + }), + } +} + +fn add_message_content_part( + new_part: open_ai::MessagePart, + role: Role, + messages: &mut Vec, +) { + match (role, messages.last_mut()) { + (Role::User, Some(open_ai::RequestMessage::User { content })) + | ( + Role::Assistant, + Some(open_ai::RequestMessage::Assistant { + content: Some(content), + .. + }), + ) + | (Role::System, Some(open_ai::RequestMessage::System { content, .. 
})) => { + content.push_part(new_part); + } + _ => { + messages.push(match role { + Role::User => open_ai::RequestMessage::User { + content: open_ai::MessageContent::from(vec![new_part]), + }, + Role::Assistant => open_ai::RequestMessage::Assistant { + content: Some(open_ai::MessageContent::from(vec![new_part])), + tool_calls: Vec::new(), + }, + Role::System => open_ai::RequestMessage::System { + content: open_ai::MessageContent::from(vec![new_part]), + }, + }); + } + } +} + +pub struct VercelEventMapper { + tool_calls_by_index: HashMap, +} + +impl VercelEventMapper { + pub fn new() -> Self { + Self { + tool_calls_by_index: HashMap::default(), + } + } + + pub fn map_stream( + mut self, + events: Pin>>>, + ) -> impl Stream> + { + events.flat_map(move |event| { + futures::stream::iter(match event { + Ok(event) => self.map_event(event), + Err(error) => vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))], + }) + }) + } + + pub fn map_event( + &mut self, + event: ResponseStreamEvent, + ) -> Vec> { + let Some(choice) = event.choices.first() else { + return Vec::new(); + }; + + let mut events = Vec::new(); + if let Some(content) = choice.delta.content.clone() { + events.push(Ok(LanguageModelCompletionEvent::Text(content))); + } + + if let Some(tool_calls) = choice.delta.tool_calls.as_ref() { + for tool_call in tool_calls { + let entry = self.tool_calls_by_index.entry(tool_call.index).or_default(); + + if let Some(tool_id) = tool_call.id.clone() { + entry.id = tool_id; + } + + if let Some(function) = tool_call.function.as_ref() { + if let Some(name) = function.name.clone() { + entry.name = name; + } + + if let Some(arguments) = function.arguments.clone() { + entry.arguments.push_str(&arguments); + } + } + } + } + + match choice.finish_reason.as_deref() { + Some("stop") => { + events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn))); + } + Some("tool_calls") => { + events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| { + match serde_json::Value::from_str(&tool_call.arguments) { + Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: tool_call.id.clone().into(), + name: tool_call.name.as_str().into(), + is_input_complete: true, + input, + raw_input: tool_call.arguments.clone(), + }, + )), + Err(error) => Err(LanguageModelCompletionError::BadInputJson { + id: tool_call.id.into(), + tool_name: tool_call.name.as_str().into(), + raw_input: tool_call.arguments.into(), + json_parse_error: error.to_string(), + }), + } + })); + + events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse))); + } + Some(stop_reason) => { + log::error!("Unexpected Vercel stop_reason: {stop_reason:?}",); + events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn))); + } + None => {} + } + + events + } +} + +#[derive(Default)] +struct RawToolCall { + id: String, + name: String, + arguments: String, +} + +pub fn count_vercel_tokens( + request: LanguageModelRequest, + model: Model, + cx: &App, +) -> BoxFuture<'static, Result> { + cx.background_spawn(async move { + let messages = request + .messages + .into_iter() + .map(|message| tiktoken_rs::ChatCompletionRequestMessage { + role: match message.role { + Role::User => "user".into(), + Role::Assistant => "assistant".into(), + Role::System => "system".into(), + }, + content: Some(message.string_contents()), + name: None, + function_call: None, + }) + .collect::>(); + + match model { + Model::Custom { max_tokens, .. 
} => { + let model = if max_tokens >= 100_000 { + // If the max tokens is 100k or more, it is likely the o200k_base tokenizer from gpt4o + "gpt-4o" + } else { + // Otherwise fallback to gpt-4, since only cl100k_base and o200k_base are + // supported with this tiktoken method + "gpt-4" + }; + tiktoken_rs::num_tokens_from_messages(model, &messages) + } + // Map Vercel models to appropriate OpenAI models for token counting + // since Vercel uses OpenAI-compatible API + Model::VZero => { + // Vercel v0 is similar to GPT-4o, so use gpt-4o for token counting + tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages) + } + } + .map(|tokens| tokens as u64) + }) + .boxed() +} + +struct ConfigurationView { + api_key_editor: Entity, + state: gpui::Entity, + load_credentials_task: Option>, +} + +impl ConfigurationView { + fn new(state: gpui::Entity, window: &mut Window, cx: &mut Context) -> Self { + let api_key_editor = cx.new(|cx| { + SingleLineInput::new( + window, + cx, + "v1:0000000000000000000000000000000000000000000000000", + ) + .label("API key") + }); + + cx.observe(&state, |_, _, cx| { + cx.notify(); + }) + .detach(); + + let load_credentials_task = Some(cx.spawn_in(window, { + let state = state.clone(); + async move |this, cx| { + if let Some(task) = state + .update(cx, |state, cx| state.authenticate(cx)) + .log_err() + { + // We don't log an error, because "not signed in" is also an error. + let _ = task.await; + } + this.update(cx, |this, cx| { + this.load_credentials_task = None; + cx.notify(); + }) + .log_err(); + } + })); + + Self { + api_key_editor, + state, + load_credentials_task, + } + } + + fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + let api_key = self + .api_key_editor + .read(cx) + .editor() + .read(cx) + .text(cx) + .trim() + .to_string(); + + // Don't proceed if no API key is provided and we're not authenticated + if api_key.is_empty() && !self.state.read(cx).is_authenticated() { + return; + } + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(api_key, cx))? + .await + }) + .detach_and_log_err(cx); + + cx.notify(); + } + + fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context) { + self.api_key_editor.update(cx, |input, cx| { + input.editor.update(cx, |editor, cx| { + editor.set_text("", window, cx); + }); + }); + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state.update(cx, |state, cx| state.reset_api_key(cx))?.await + }) + .detach_and_log_err(cx); + + cx.notify(); + } + + fn should_render_editor(&self, cx: &mut Context) -> bool { + !self.state.read(cx).is_authenticated() + } +} + +impl Render for ConfigurationView { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let env_var_set = self.state.read(cx).api_key_from_env; + + let api_key_section = if self.should_render_editor(cx) { + v_flex() + .on_action(cx.listener(Self::save_api_key)) + .child(Label::new("To use Zed's agent with Vercel v0, you need to add an API key. Follow these steps:")) + .child( + List::new() + .child(InstructionListItem::new( + "Create one by visiting", + Some("Vercel v0's console"), + Some("https://v0.dev/chat/settings/keys"), + )) + .child(InstructionListItem::text_only( + "Paste your API key below and hit enter to start using the agent", + )), + ) + .child(self.api_key_editor.clone()) + .child( + Label::new(format!( + "You can also assign the {VERCEL_API_KEY_VAR} environment variable and restart Zed." 
+ )) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .child( + Label::new("Note that Vercel v0 is a custom OpenAI-compatible provider.") + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any() + } else { + h_flex() + .mt_1() + .p_1() + .justify_between() + .rounded_md() + .border_1() + .border_color(cx.theme().colors().border) + .bg(cx.theme().colors().background) + .child( + h_flex() + .gap_1() + .child(Icon::new(IconName::Check).color(Color::Success)) + .child(Label::new(if env_var_set { + format!("API key set in {VERCEL_API_KEY_VAR} environment variable.") + } else { + "API key configured.".to_string() + })), + ) + .child( + Button::new("reset-api-key", "Reset API Key") + .label_size(LabelSize::Small) + .icon(IconName::Undo) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .layer(ElevationIndex::ModalSurface) + .when(env_var_set, |this| { + this.tooltip(Tooltip::text(format!("To reset your API key, unset the {VERCEL_API_KEY_VAR} environment variable."))) + }) + .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))), + ) + .into_any() + }; + + if self.load_credentials_task.is_some() { + div().child(Label::new("Loading credentials…")).into_any() + } else { + v_flex().size_full().child(api_key_section).into_any() + } + } +} + +#[cfg(test)] +mod tests { + use gpui::TestAppContext; + use language_model::LanguageModelRequestMessage; + + use super::*; + + #[gpui::test] + fn tiktoken_rs_support(cx: &TestAppContext) { + let request = LanguageModelRequest { + thread_id: None, + prompt_id: None, + intent: None, + mode: None, + messages: vec![LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::Text("message".into())], + cache: false, + }], + tools: vec![], + tool_choice: None, + stop: vec![], + temperature: None, + }; + + // Validate that all models are supported by tiktoken-rs + for model in Model::iter() { + let count = cx + .executor() + .block(count_vercel_tokens( + request.clone(), + model, + &cx.app.borrow(), + )) + .unwrap(); + assert!(count > 0); + } + } +} diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index 92fe5895c9ba905d8184c80e2ce05dfa2fe37538..644e59d397dcab684d03a0026bb797dc04f5803c 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -20,6 +20,7 @@ use crate::provider::{ ollama::OllamaSettings, open_ai::OpenAiSettings, open_router::OpenRouterSettings, + vercel::VercelSettings, }; /// Initializes the language model settings. 
@@ -64,6 +65,7 @@ pub struct AllLanguageModelSettings { pub open_router: OpenRouterSettings, pub zed_dot_dev: ZedDotDevSettings, pub google: GoogleSettings, + pub vercel: VercelSettings, pub lmstudio: LmStudioSettings, pub deepseek: DeepSeekSettings, @@ -82,6 +84,7 @@ pub struct AllLanguageModelSettingsContent { pub zed_dot_dev: Option, pub google: Option, pub deepseek: Option, + pub vercel: Option, pub mistral: Option, } @@ -259,6 +262,12 @@ pub struct OpenAiSettingsContentV1 { pub available_models: Option>, } +#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] +pub struct VercelSettingsContent { + pub api_url: Option, + pub available_models: Option>, +} + #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] pub struct GoogleSettingsContent { pub api_url: Option, @@ -385,6 +394,18 @@ impl settings::Settings for AllLanguageModelSettings { &mut settings.openai.available_models, openai.as_ref().and_then(|s| s.available_models.clone()), ); + + // Vercel + let vercel = value.vercel.clone(); + merge( + &mut settings.vercel.api_url, + vercel.as_ref().and_then(|s| s.api_url.clone()), + ); + merge( + &mut settings.vercel.available_models, + vercel.as_ref().and_then(|s| s.available_models.clone()), + ); + merge( &mut settings.zed_dot_dev.available_models, value diff --git a/crates/vercel/Cargo.toml b/crates/vercel/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..c4e1e4f99d56830272944ddef0b00427754e0fdc --- /dev/null +++ b/crates/vercel/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "vercel" +version = "0.1.0" +edition.workspace = true +publish.workspace = true +license = "GPL-3.0-or-later" + +[lints] +workspace = true + +[lib] +path = "src/vercel.rs" + +[features] +default = [] +schemars = ["dep:schemars"] + +[dependencies] +anyhow.workspace = true +futures.workspace = true +http_client.workspace = true +schemars = { workspace = true, optional = true } +serde.workspace = true +serde_json.workspace = true +strum.workspace = true +workspace-hack.workspace = true diff --git a/crates/vercel/LICENSE-GPL b/crates/vercel/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/vercel/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/vercel/src/vercel.rs b/crates/vercel/src/vercel.rs new file mode 100644 index 0000000000000000000000000000000000000000..3195355bbc0a64dba6f51ebd0e4b0087df8680a0 --- /dev/null +++ b/crates/vercel/src/vercel.rs @@ -0,0 +1,438 @@ +use anyhow::{Context as _, Result, anyhow}; +use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream}; +use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::{convert::TryFrom, future::Future}; +use strum::EnumIter; + +pub const VERCEL_API_URL: &str = "https://api.v0.dev/v1"; + +fn is_none_or_empty, U>(opt: &Option) -> bool { + opt.as_ref().map_or(true, |v| v.as_ref().is_empty()) +} + +#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)] +#[serde(rename_all = "lowercase")] +pub enum Role { + User, + Assistant, + System, + Tool, +} + +impl TryFrom for Role { + type Error = anyhow::Error; + + fn try_from(value: String) -> Result { + match value.as_str() { + "user" => Ok(Self::User), + "assistant" => Ok(Self::Assistant), + "system" => Ok(Self::System), + "tool" => Ok(Self::Tool), + _ => 
anyhow::bail!("invalid role '{value}'"), + } + } +} + +impl From for String { + fn from(val: Role) -> Self { + match val { + Role::User => "user".to_owned(), + Role::Assistant => "assistant".to_owned(), + Role::System => "system".to_owned(), + Role::Tool => "tool".to_owned(), + } + } +} + +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] +pub enum Model { + #[serde(rename = "v-0")] + #[default] + VZero, + + #[serde(rename = "custom")] + Custom { + name: String, + /// The name displayed in the UI, such as in the assistant panel model dropdown menu. + display_name: Option, + max_tokens: u64, + max_output_tokens: Option, + max_completion_tokens: Option, + }, +} + +impl Model { + pub fn default_fast() -> Self { + Self::VZero + } + + pub fn from_id(id: &str) -> Result { + match id { + "v-0" => Ok(Self::VZero), + invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"), + } + } + + pub fn id(&self) -> &str { + match self { + Self::VZero => "v-0", + Self::Custom { name, .. } => name, + } + } + + pub fn display_name(&self) -> &str { + match self { + Self::VZero => "Vercel v0", + Self::Custom { + name, display_name, .. + } => display_name.as_ref().unwrap_or(name), + } + } + + pub fn max_token_count(&self) -> u64 { + match self { + Self::VZero => 128_000, + Self::Custom { max_tokens, .. } => *max_tokens, + } + } + + pub fn max_output_tokens(&self) -> Option { + match self { + Self::Custom { + max_output_tokens, .. + } => *max_output_tokens, + Self::VZero => Some(32_768), + } + } + + /// Returns whether the given model supports the `parallel_tool_calls` parameter. + /// + /// If the model does not support the parameter, do not pass it up, or the API will return an error. + pub fn supports_parallel_tool_calls(&self) -> bool { + match self { + Self::VZero => true, + Model::Custom { .. } => false, + } + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Request { + pub model: String, + pub messages: Vec, + pub stream: bool, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub max_completion_tokens: Option, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub stop: Vec, + pub temperature: f32, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub tool_choice: Option, + /// Whether to enable parallel function calling during tool use. 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub parallel_tool_calls: Option, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub tools: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum ToolChoice { + Auto, + Required, + None, + Other(ToolDefinition), +} + +#[derive(Clone, Deserialize, Serialize, Debug)] +#[serde(tag = "type", rename_all = "snake_case")] +pub enum ToolDefinition { + #[allow(dead_code)] + Function { function: FunctionDefinition }, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct FunctionDefinition { + pub name: String, + pub description: Option, + pub parameters: Option, +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +#[serde(tag = "role", rename_all = "lowercase")] +pub enum RequestMessage { + Assistant { + content: Option, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + tool_calls: Vec, + }, + User { + content: MessageContent, + }, + System { + content: MessageContent, + }, + Tool { + content: MessageContent, + tool_call_id: String, + }, +} + +#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] +#[serde(untagged)] +pub enum MessageContent { + Plain(String), + Multipart(Vec), +} + +impl MessageContent { + pub fn empty() -> Self { + MessageContent::Multipart(vec![]) + } + + pub fn push_part(&mut self, part: MessagePart) { + match self { + MessageContent::Plain(text) => { + *self = + MessageContent::Multipart(vec![MessagePart::Text { text: text.clone() }, part]); + } + MessageContent::Multipart(parts) if parts.is_empty() => match part { + MessagePart::Text { text } => *self = MessageContent::Plain(text), + MessagePart::Image { .. } => *self = MessageContent::Multipart(vec![part]), + }, + MessageContent::Multipart(parts) => parts.push(part), + } + } +} + +impl From> for MessageContent { + fn from(mut parts: Vec) -> Self { + if let [MessagePart::Text { text }] = parts.as_mut_slice() { + MessageContent::Plain(std::mem::take(text)) + } else { + MessageContent::Multipart(parts) + } + } +} + +#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] +#[serde(tag = "type")] +pub enum MessagePart { + #[serde(rename = "text")] + Text { text: String }, + #[serde(rename = "image_url")] + Image { image_url: ImageUrl }, +} + +#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] +pub struct ImageUrl { + pub url: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub detail: Option, +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +pub struct ToolCall { + pub id: String, + #[serde(flatten)] + pub content: ToolCallContent, +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +#[serde(tag = "type", rename_all = "lowercase")] +pub enum ToolCallContent { + Function { function: FunctionContent }, +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +pub struct FunctionContent { + pub name: String, + pub arguments: String, +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +pub struct ResponseMessageDelta { + pub role: Option, + pub content: Option, + #[serde(default, skip_serializing_if = "is_none_or_empty")] + pub tool_calls: Option>, +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +pub struct ToolCallChunk { + pub index: usize, + pub id: Option, + + // There is also an optional `type` field that would determine if a + // function is there. 
Sometimes this streams in with the `function` before + // it streams in the `type` + pub function: Option, +} + +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +pub struct FunctionChunk { + pub name: Option, + pub arguments: Option, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct Usage { + pub prompt_tokens: u32, + pub completion_tokens: u32, + pub total_tokens: u32, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct ChoiceDelta { + pub index: u32, + pub delta: ResponseMessageDelta, + pub finish_reason: Option, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(untagged)] +pub enum ResponseStreamResult { + Ok(ResponseStreamEvent), + Err { error: String }, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct ResponseStreamEvent { + pub model: String, + pub choices: Vec, + pub usage: Option, +} + +pub async fn stream_completion( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + request: Request, +) -> Result>> { + let uri = format!("{api_url}/chat/completions"); + let request_builder = HttpRequest::builder() + .method(Method::POST) + .uri(uri) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", api_key)); + + let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; + let mut response = client.send(request).await?; + if response.status().is_success() { + let reader = BufReader::new(response.into_body()); + Ok(reader + .lines() + .filter_map(|line| async move { + match line { + Ok(line) => { + let line = line.strip_prefix("data: ")?; + if line == "[DONE]" { + None + } else { + match serde_json::from_str(line) { + Ok(ResponseStreamResult::Ok(response)) => Some(Ok(response)), + Ok(ResponseStreamResult::Err { error }) => { + Some(Err(anyhow!(error))) + } + Err(error) => Some(Err(anyhow!(error))), + } + } + } + Err(error) => Some(Err(anyhow!(error))), + } + }) + .boxed()) + } else { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + + #[derive(Deserialize)] + struct VercelResponse { + error: VercelError, + } + + #[derive(Deserialize)] + struct VercelError { + message: String, + } + + match serde_json::from_str::(&body) { + Ok(response) if !response.error.message.is_empty() => Err(anyhow!( + "Failed to connect to Vercel API: {}", + response.error.message, + )), + + _ => anyhow::bail!( + "Failed to connect to Vercel API: {} {}", + response.status(), + body, + ), + } + } +} + +#[derive(Copy, Clone, Serialize, Deserialize)] +pub enum VercelEmbeddingModel { + #[serde(rename = "text-embedding-3-small")] + TextEmbedding3Small, + #[serde(rename = "text-embedding-3-large")] + TextEmbedding3Large, +} + +#[derive(Serialize)] +struct VercelEmbeddingRequest<'a> { + model: VercelEmbeddingModel, + input: Vec<&'a str>, +} + +#[derive(Deserialize)] +pub struct VercelEmbeddingResponse { + pub data: Vec, +} + +#[derive(Deserialize)] +pub struct VercelEmbedding { + pub embedding: Vec, +} + +pub fn embed<'a>( + client: &dyn HttpClient, + api_url: &str, + api_key: &str, + model: VercelEmbeddingModel, + texts: impl IntoIterator, +) -> impl 'static + Future> { + let uri = format!("{api_url}/embeddings"); + + let request = VercelEmbeddingRequest { + model, + input: texts.into_iter().collect(), + }; + let body = AsyncBody::from(serde_json::to_string(&request).unwrap()); + let request = HttpRequest::builder() + .method(Method::POST) + .uri(uri) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", api_key)) + .body(body) + 
.map(|request| client.send(request)); + + async move { + let mut response = request?.await?; + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + + anyhow::ensure!( + response.status().is_success(), + "error during embedding, status: {:?}, body: {:?}", + response.status(), + body + ); + let response: VercelEmbeddingResponse = + serde_json::from_str(&body).context("failed to parse Vercel embedding response")?; + Ok(response) + } +} From 13f134448de4b309ec8265b05b1190f7995db328 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Tue, 24 Jun 2025 09:06:00 -0500 Subject: [PATCH 03/56] collab: Require billing address in all Stripe checkouts (#32980) Summary I've successfully implemented the required billing address collection feature for Stripe Checkout sessions. Here's what was done: ### 1. **Added New Data Structures** (`stripe_client.rs`): - Added `StripeBillingAddressCollection` enum with `Auto` and `Required` variants - Added `billing_address_collection` field to `StripeCreateCheckoutSessionParams` ### 2. **Updated Stripe Client Implementation** (`real_stripe_client.rs`): - Added conversion from `StripeBillingAddressCollection` to Stripe's `CheckoutSessionBillingAddressCollection` - Updated the `TryFrom` implementation to map the billing address collection field when creating checkout sessions - Added the necessary import ### 3. **Updated Billing Service** (`stripe_billing.rs`): - Set `billing_address_collection` to `Required` in both `checkout_with_zed_pro()` and `checkout_with_zed_pro_trial()` methods - Added the necessary import ### 4. **Updated Test Infrastructure** (`fake_stripe_client.rs`): - Added `billing_address_collection` field to `StripeCreateCheckoutSessionCall` - Updated the `create_checkout_session` implementation to capture the new field - Added the necessary import ### 5. **Updated Tests** (`stripe_billing_tests.rs`): - Added assertions to verify that `billing_address_collection` is set to `Required` in all three test cases: - `test_checkout_with_zed_pro` - `test_checkout_with_zed_pro_trial` (regular trial) - `test_checkout_with_zed_pro_trial` (extended trial) - Added the necessary import The implementation follows the pattern established in the codebase and ensures that whenever a Stripe Checkout session is created for Zed Pro subscriptions (both regular and trial), the billing address will be required from customers. This aligns with the Stripe documentation you provided, which shows that setting `billing_address_collection=required` will ensure the billing address is always collected during checkout. 
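To make the shape of the change concrete, here is a minimal, self-contained sketch of the pattern: a provider-agnostic enum on the params struct, converted at the boundary into the SDK's type and captured verbatim by the fake client for test assertions. The types below are illustrative stand-ins (the `Sdk…` enum plays the role of `stripe::CheckoutSessionBillingAddressCollection`), not the real collab or async-stripe definitions.

```rust
// Stand-ins mirroring the abstraction layer touched by this change; not the
// real `collab::stripe_client` or async-stripe types.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum StripeBillingAddressCollection {
    Auto,
    Required,
}

#[derive(Default)]
struct StripeCreateCheckoutSessionParams {
    success_url: Option<String>,
    billing_address_collection: Option<StripeBillingAddressCollection>,
}

// Plays the role of stripe::CheckoutSessionBillingAddressCollection on the SDK side.
#[derive(Debug, PartialEq, Eq)]
enum SdkBillingAddressCollection {
    Auto,
    Required,
}

impl From<StripeBillingAddressCollection> for SdkBillingAddressCollection {
    fn from(value: StripeBillingAddressCollection) -> Self {
        match value {
            StripeBillingAddressCollection::Auto => SdkBillingAddressCollection::Auto,
            StripeBillingAddressCollection::Required => SdkBillingAddressCollection::Required,
        }
    }
}

fn main() {
    // The billing service now always requires a billing address at checkout.
    let params = StripeCreateCheckoutSessionParams {
        success_url: Some("https://example.com/success".into()), // placeholder URL
        billing_address_collection: Some(StripeBillingAddressCollection::Required),
    };

    // The real client forwards the value to the SDK via `map(Into::into)`,
    // and the fake client records it so tests can assert on it.
    let forwarded: Option<SdkBillingAddressCollection> =
        params.billing_address_collection.map(Into::into);
    assert_eq!(forwarded, Some(SdkBillingAddressCollection::Required));
    println!("success_url = {:?}", params.success_url);
}
```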
Release Notes: - N/A Co-authored-by: Marshall Bowers --- crates/collab/src/stripe_billing.rs | 7 ++++-- crates/collab/src/stripe_client.rs | 7 ++++++ .../src/stripe_client/fake_stripe_client.rs | 6 +++-- .../src/stripe_client/real_stripe_client.rs | 21 ++++++++++++++--- .../collab/src/tests/stripe_billing_tests.rs | 23 +++++++++++++++---- 5 files changed, 52 insertions(+), 12 deletions(-) diff --git a/crates/collab/src/stripe_billing.rs b/crates/collab/src/stripe_billing.rs index 68f8fa5042e8fb491509ac59d6377868c6b48c10..28eaf4de0885ca58c9aa81183a0cf5d5f0b2fd8b 100644 --- a/crates/collab/src/stripe_billing.rs +++ b/crates/collab/src/stripe_billing.rs @@ -11,8 +11,9 @@ use crate::Result; use crate::db::billing_subscription::SubscriptionKind; use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG; use crate::stripe_client::{ - RealStripeClient, StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, - StripeClient, StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, + RealStripeClient, StripeBillingAddressCollection, StripeCheckoutSessionMode, + StripeCheckoutSessionPaymentMethodCollection, StripeClient, + StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, StripeCreateMeterEventPayload, StripeCreateSubscriptionItems, StripeCreateSubscriptionParams, StripeCustomerId, StripeMeter, StripePrice, StripePriceId, StripeSubscription, @@ -245,6 +246,7 @@ impl StripeBilling { quantity: Some(1), }]); params.success_url = Some(success_url); + params.billing_address_collection = Some(StripeBillingAddressCollection::Required); let session = self.client.create_checkout_session(params).await?; Ok(session.url.context("no checkout session URL")?) @@ -298,6 +300,7 @@ impl StripeBilling { quantity: Some(1), }]); params.success_url = Some(success_url); + params.billing_address_collection = Some(StripeBillingAddressCollection::Required); let session = self.client.create_checkout_session(params).await?; Ok(session.url.context("no checkout session URL")?) 
diff --git a/crates/collab/src/stripe_client.rs b/crates/collab/src/stripe_client.rs index 3511fb447ed730e8a635af27d35a9e6a38b53136..48158e7cd95998a9dbed379d39a7bd66f42db498 100644 --- a/crates/collab/src/stripe_client.rs +++ b/crates/collab/src/stripe_client.rs @@ -148,6 +148,12 @@ pub struct StripeCreateMeterEventPayload<'a> { pub stripe_customer_id: &'a StripeCustomerId, } +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum StripeBillingAddressCollection { + Auto, + Required, +} + #[derive(Debug, Default)] pub struct StripeCreateCheckoutSessionParams<'a> { pub customer: Option<&'a StripeCustomerId>, @@ -157,6 +163,7 @@ pub struct StripeCreateCheckoutSessionParams<'a> { pub payment_method_collection: Option, pub subscription_data: Option, pub success_url: Option<&'a str>, + pub billing_address_collection: Option, } #[derive(Debug, PartialEq, Eq, Clone, Copy)] diff --git a/crates/collab/src/stripe_client/fake_stripe_client.rs b/crates/collab/src/stripe_client/fake_stripe_client.rs index f679987f8b0173b84eff7008393e7f351c01b7ad..96596aa4141b156f00d855c00bcde352c1a99f30 100644 --- a/crates/collab/src/stripe_client/fake_stripe_client.rs +++ b/crates/collab/src/stripe_client/fake_stripe_client.rs @@ -8,8 +8,8 @@ use parking_lot::Mutex; use uuid::Uuid; use crate::stripe_client::{ - CreateCustomerParams, StripeCheckoutSession, StripeCheckoutSessionMode, - StripeCheckoutSessionPaymentMethodCollection, StripeClient, + CreateCustomerParams, StripeBillingAddressCollection, StripeCheckoutSession, + StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, StripeClient, StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeMeter, StripeMeterId, @@ -35,6 +35,7 @@ pub struct StripeCreateCheckoutSessionCall { pub payment_method_collection: Option, pub subscription_data: Option, pub success_url: Option, + pub billing_address_collection: Option, } pub struct FakeStripeClient { @@ -231,6 +232,7 @@ impl StripeClient for FakeStripeClient { payment_method_collection: params.payment_method_collection, subscription_data: params.subscription_data, success_url: params.success_url.map(|url| url.to_string()), + billing_address_collection: params.billing_address_collection, }); Ok(StripeCheckoutSession { diff --git a/crates/collab/src/stripe_client/real_stripe_client.rs b/crates/collab/src/stripe_client/real_stripe_client.rs index 56ddc8d7ac76387b562af4a8bb6c94ccb062af1a..917e23cac360aad5d27ecfc852775a8b352eaea7 100644 --- a/crates/collab/src/stripe_client/real_stripe_client.rs +++ b/crates/collab/src/stripe_client/real_stripe_client.rs @@ -17,9 +17,10 @@ use stripe::{ }; use crate::stripe_client::{ - CreateCustomerParams, StripeCancellationDetails, StripeCancellationDetailsReason, - StripeCheckoutSession, StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, - StripeClient, StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, + CreateCustomerParams, StripeBillingAddressCollection, StripeCancellationDetails, + StripeCancellationDetailsReason, StripeCheckoutSession, StripeCheckoutSessionMode, + StripeCheckoutSessionPaymentMethodCollection, StripeClient, + StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeMeter, StripePrice, 
StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId, @@ -444,6 +445,7 @@ impl<'a> TryFrom> for CreateCheckoutSessio payment_method_collection: value.payment_method_collection.map(Into::into), subscription_data: value.subscription_data.map(Into::into), success_url: value.success_url, + billing_address_collection: value.billing_address_collection.map(Into::into), ..Default::default() }) } @@ -526,3 +528,16 @@ impl From for StripeCheckoutSession { Self { url: value.url } } } + +impl From for stripe::CheckoutSessionBillingAddressCollection { + fn from(value: StripeBillingAddressCollection) -> Self { + match value { + StripeBillingAddressCollection::Auto => { + stripe::CheckoutSessionBillingAddressCollection::Auto + } + StripeBillingAddressCollection::Required => { + stripe::CheckoutSessionBillingAddressCollection::Required + } + } + } +} diff --git a/crates/collab/src/tests/stripe_billing_tests.rs b/crates/collab/src/tests/stripe_billing_tests.rs index 9c0dbad54319e9d72a0c60e3e4bfffa347b2b3fe..941669362d6b7988c7165661834bece61ea00e73 100644 --- a/crates/collab/src/tests/stripe_billing_tests.rs +++ b/crates/collab/src/tests/stripe_billing_tests.rs @@ -6,11 +6,12 @@ use pretty_assertions::assert_eq; use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG; use crate::stripe_billing::StripeBilling; use crate::stripe_client::{ - FakeStripeClient, StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, - StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionSubscriptionData, - StripeCustomerId, StripeMeter, StripeMeterId, StripePrice, StripePriceId, StripePriceRecurring, - StripeSubscription, StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, - StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior, + FakeStripeClient, StripeBillingAddressCollection, StripeCheckoutSessionMode, + StripeCheckoutSessionPaymentMethodCollection, StripeCreateCheckoutSessionLineItems, + StripeCreateCheckoutSessionSubscriptionData, StripeCustomerId, StripeMeter, StripeMeterId, + StripePrice, StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId, + StripeSubscriptionItem, StripeSubscriptionItemId, StripeSubscriptionTrialSettings, + StripeSubscriptionTrialSettingsEndBehavior, StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionItems, }; @@ -426,6 +427,10 @@ async fn test_checkout_with_zed_pro() { assert_eq!(call.payment_method_collection, None); assert_eq!(call.subscription_data, None); assert_eq!(call.success_url.as_deref(), Some(success_url)); + assert_eq!( + call.billing_address_collection, + Some(StripeBillingAddressCollection::Required) + ); } } @@ -507,6 +512,10 @@ async fn test_checkout_with_zed_pro_trial() { }) ); assert_eq!(call.success_url.as_deref(), Some(success_url)); + assert_eq!( + call.billing_address_collection, + Some(StripeBillingAddressCollection::Required) + ); } // Successful checkout with extended trial. @@ -561,5 +570,9 @@ async fn test_checkout_with_zed_pro_trial() { }) ); assert_eq!(call.success_url.as_deref(), Some(success_url)); + assert_eq!( + call.billing_address_collection, + Some(StripeBillingAddressCollection::Required) + ); } } From deb2564b31880f2a02f5d9557962293d016e33d3 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Tue, 24 Jun 2025 11:07:45 -0500 Subject: [PATCH 04/56] gpui: Add keybind metadata API (#33316) Closes #ISSUE Adds a very simple API to track metadata about keybindings in GPUI, namely the source of the binding. 
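Roughly, a binding stores only an opaque `KeyBindingMetaIndex`, and the consumer owns the table that gives those indices meaning. A minimal, self-contained sketch of that usage pattern follows; the `Binding` and `SourceTable` types are illustrative stand-ins, not the real `gpui::KeyBinding` or Zed's `KeybindSource`.

```rust
// Illustrative sketch of the consumer-managed metadata mapping; the `Binding`
// struct stands in for gpui::KeyBinding, which only stores the index.

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct KeyBindingMetaIndex(u32);

struct Binding {
    keystrokes: &'static str,
    meta: Option<KeyBindingMetaIndex>,
}

// The consumer both generates the indices and gives them meaning.
struct SourceTable(Vec<&'static str>);

impl SourceTable {
    fn register(&mut self, name: &'static str) -> KeyBindingMetaIndex {
        self.0.push(name);
        KeyBindingMetaIndex(self.0.len() as u32 - 1)
    }

    fn name(&self, index: KeyBindingMetaIndex) -> Option<&'static str> {
        self.0.get(index.0 as usize).copied()
    }
}

fn main() {
    let mut sources = SourceTable(Vec::new());
    let _default = sources.register("Default");
    let user = sources.register("User");

    // The binding carries only the index; everything else lives in the table.
    let binding = Binding { keystrokes: "cmd-shift-p", meta: Some(user) };

    if let Some(source) = binding.meta.and_then(|meta| sources.name(meta)) {
        println!("`{}` comes from the {} keymap", binding.keystrokes, source);
    }
}
```

In this patch's Zed-side usage, those table entries are simply the `KeybindSource` constants (User, Default, Base, Vim) introduced in `keymap_file.rs`.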
The motivation for this is displaying the source of keybindings in the [keymap UI](https://github.com/zed-industries/zed/pull/32436). The API is designed to be as simple and flexible as possible, storing only a `Option` on the bindings themselves to keep the struct small. It is intended to be used as an index or key into a table/map created and managed by the consumer of the API to map from indices to arbitrary meta-data. I.e. the consumer is responsible for both generating these indices and giving them meaning. The current usage in Zed is stateless, just a mapping between constants and User, Default, Base, and Vim keymap sources, however, this can be extended in the future to also track _which_ base keymap is being used. Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/gpui/src/keymap/binding.rs | 25 +++++++++ crates/settings/src/keymap_file.rs | 73 ++++++++++++++++++++++++- crates/settings/src/settings.rs | 3 +- crates/storybook/src/storybook.rs | 2 +- crates/vim/src/test/vim_test_context.rs | 8 ++- crates/zed/src/zed.rs | 21 +++++-- 6 files changed, 119 insertions(+), 13 deletions(-) diff --git a/crates/gpui/src/keymap/binding.rs b/crates/gpui/src/keymap/binding.rs index cbe934212ffaf312a7679cc814669cc4baa78ed1..ffc4656ff7d30e43c553d7f208e0ee1bb668684d 100644 --- a/crates/gpui/src/keymap/binding.rs +++ b/crates/gpui/src/keymap/binding.rs @@ -10,6 +10,7 @@ pub struct KeyBinding { pub(crate) action: Box, pub(crate) keystrokes: SmallVec<[Keystroke; 2]>, pub(crate) context_predicate: Option>, + pub(crate) meta: Option, } impl Clone for KeyBinding { @@ -18,6 +19,7 @@ impl Clone for KeyBinding { action: self.action.boxed_clone(), keystrokes: self.keystrokes.clone(), context_predicate: self.context_predicate.clone(), + meta: self.meta, } } } @@ -59,9 +61,21 @@ impl KeyBinding { keystrokes, action, context_predicate, + meta: None, }) } + /// Set the metadata for this binding. + pub fn with_meta(mut self, meta: KeyBindingMetaIndex) -> Self { + self.meta = Some(meta); + self + } + + /// Set the metadata for this binding. + pub fn set_meta(&mut self, meta: KeyBindingMetaIndex) { + self.meta = Some(meta); + } + /// Check if the given keystrokes match this binding. pub fn match_keystrokes(&self, typed: &[Keystroke]) -> Option { if self.keystrokes.len() < typed.len() { @@ -91,6 +105,11 @@ impl KeyBinding { pub fn predicate(&self) -> Option> { self.context_predicate.as_ref().map(|rc| rc.clone()) } + + /// Get the metadata for this binding + pub fn meta(&self) -> Option { + self.meta + } } impl std::fmt::Debug for KeyBinding { @@ -102,3 +121,9 @@ impl std::fmt::Debug for KeyBinding { .finish() } } + +/// A unique identifier for retrieval of metadata associated with a key binding. +/// Intended to be used as an index or key into a user-defined store of metadata +/// associated with the binding, such as the source of the binding. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct KeyBindingMetaIndex(pub u32); diff --git a/crates/settings/src/keymap_file.rs b/crates/settings/src/keymap_file.rs index 96736f512a835772defb75be846559090764b4ca..551920c8a038d2b3c3ad2432bbfa0da0b857fcac 100644 --- a/crates/settings/src/keymap_file.rs +++ b/crates/settings/src/keymap_file.rs @@ -3,7 +3,7 @@ use collections::{BTreeMap, HashMap, IndexMap}; use fs::Fs; use gpui::{ Action, ActionBuildError, App, InvalidKeystrokeError, KEYSTROKE_PARSE_EXPECTED_MESSAGE, - KeyBinding, KeyBindingContextPredicate, NoAction, + KeyBinding, KeyBindingContextPredicate, KeyBindingMetaIndex, NoAction, }; use schemars::{ JsonSchema, @@ -151,9 +151,21 @@ impl KeymapFile { parse_json_with_comments::(content) } - pub fn load_asset(asset_path: &str, cx: &App) -> anyhow::Result> { + pub fn load_asset( + asset_path: &str, + source: Option, + cx: &App, + ) -> anyhow::Result> { match Self::load(asset_str::(asset_path).as_ref(), cx) { - KeymapFileLoadResult::Success { key_bindings } => Ok(key_bindings), + KeymapFileLoadResult::Success { mut key_bindings } => match source { + Some(source) => Ok({ + for key_binding in &mut key_bindings { + key_binding.set_meta(source.meta()); + } + key_bindings + }), + None => Ok(key_bindings), + }, KeymapFileLoadResult::SomeFailedToLoad { error_message, .. } => { anyhow::bail!("Error loading built-in keymap \"{asset_path}\": {error_message}",) } @@ -619,6 +631,61 @@ impl KeymapFile { } } +#[derive(Clone, Copy)] +pub enum KeybindSource { + User, + Default, + Base, + Vim, +} + +impl KeybindSource { + const BASE: KeyBindingMetaIndex = KeyBindingMetaIndex(0); + const DEFAULT: KeyBindingMetaIndex = KeyBindingMetaIndex(1); + const VIM: KeyBindingMetaIndex = KeyBindingMetaIndex(2); + const USER: KeyBindingMetaIndex = KeyBindingMetaIndex(3); + + pub fn name(&self) -> &'static str { + match self { + KeybindSource::User => "User", + KeybindSource::Default => "Default", + KeybindSource::Base => "Base", + KeybindSource::Vim => "Vim", + } + } + + pub fn meta(&self) -> KeyBindingMetaIndex { + match self { + KeybindSource::User => Self::USER, + KeybindSource::Default => Self::DEFAULT, + KeybindSource::Base => Self::BASE, + KeybindSource::Vim => Self::VIM, + } + } + + pub fn from_meta(index: KeyBindingMetaIndex) -> Self { + match index { + _ if index == Self::USER => KeybindSource::User, + _ if index == Self::USER => KeybindSource::Base, + _ if index == Self::DEFAULT => KeybindSource::Default, + _ if index == Self::VIM => KeybindSource::Vim, + _ => unreachable!(), + } + } +} + +impl From for KeybindSource { + fn from(index: KeyBindingMetaIndex) -> Self { + Self::from_meta(index) + } +} + +impl From for KeyBindingMetaIndex { + fn from(source: KeybindSource) -> Self { + return source.meta(); + } +} + #[cfg(test)] mod tests { use crate::KeymapFile; diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 2ecb38b5c6c98ccb6ff797a64203c6371f451302..a01414b0b29f95dbadac88d5f577e5b0809322ff 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -15,7 +15,8 @@ pub use editable_setting_control::*; pub use json_schema::*; pub use key_equivalents::*; pub use keymap_file::{ - KeyBindingValidator, KeyBindingValidatorRegistration, KeymapFile, KeymapFileLoadResult, + KeyBindingValidator, KeyBindingValidatorRegistration, KeybindSource, KeymapFile, + KeymapFileLoadResult, }; pub use settings_file::*; pub use settings_store::{ diff --git a/crates/storybook/src/storybook.rs 
b/crates/storybook/src/storybook.rs index 8e2bbad3bb6d3e6e00ff8de54b851bfee2dc1462..c8b055a67e60a07c87696515013b1a6fd5fefb1d 100644 --- a/crates/storybook/src/storybook.rs +++ b/crates/storybook/src/storybook.rs @@ -146,7 +146,7 @@ fn load_embedded_fonts(cx: &App) -> anyhow::Result<()> { } fn load_storybook_keymap(cx: &mut App) { - cx.bind_keys(KeymapFile::load_asset("keymaps/storybook.json", cx).unwrap()); + cx.bind_keys(KeymapFile::load_asset("keymaps/storybook.json", None, cx).unwrap()); } pub fn init(cx: &mut App) { diff --git a/crates/vim/src/test/vim_test_context.rs b/crates/vim/src/test/vim_test_context.rs index f8acecc9b103426f25b805bd16460275e9edd2f1..3abec1c2eb325f93fcbc771c8d21ab659fefd445 100644 --- a/crates/vim/src/test/vim_test_context.rs +++ b/crates/vim/src/test/vim_test_context.rs @@ -74,8 +74,12 @@ impl VimTestContext { .unwrap(); cx.bind_keys(default_key_bindings); if enabled { - let vim_key_bindings = - settings::KeymapFile::load_asset("keymaps/vim.json", cx).unwrap(); + let vim_key_bindings = settings::KeymapFile::load_asset( + "keymaps/vim.json", + Some(settings::KeybindSource::Vim), + cx, + ) + .unwrap(); cx.bind_keys(vim_key_bindings); } } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 4cab84678c573469d6383d8a3a79181fd4e7894c..62e29eb7e2ace3c8da78815c2a6c16e30bb7e0cc 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -47,8 +47,8 @@ use release_channel::{AppCommitSha, ReleaseChannel}; use rope::Rope; use search::project_search::ProjectSearchBar; use settings::{ - DEFAULT_KEYMAP_PATH, InvalidSettingsError, KeymapFile, KeymapFileLoadResult, Settings, - SettingsStore, VIM_KEYMAP_PATH, initial_local_debug_tasks_content, + DEFAULT_KEYMAP_PATH, InvalidSettingsError, KeybindSource, KeymapFile, KeymapFileLoadResult, + Settings, SettingsStore, VIM_KEYMAP_PATH, initial_local_debug_tasks_content, initial_project_settings_content, initial_tasks_content, update_settings_file, }; use std::path::PathBuf; @@ -1403,10 +1403,15 @@ fn show_markdown_app_notification( .detach(); } -fn reload_keymaps(cx: &mut App, user_key_bindings: Vec) { +fn reload_keymaps(cx: &mut App, mut user_key_bindings: Vec) { cx.clear_key_bindings(); load_default_keymap(cx); + + for key_binding in &mut user_key_bindings { + key_binding.set_meta(KeybindSource::User.meta()); + } cx.bind_keys(user_key_bindings); + cx.set_menus(app_menus()); // On Windows, this is set in the `update_jump_list` method of the `HistoryManager`. #[cfg(not(target_os = "windows"))] @@ -1422,14 +1427,18 @@ pub fn load_default_keymap(cx: &mut App) { return; } - cx.bind_keys(KeymapFile::load_asset(DEFAULT_KEYMAP_PATH, cx).unwrap()); + cx.bind_keys( + KeymapFile::load_asset(DEFAULT_KEYMAP_PATH, Some(KeybindSource::Default), cx).unwrap(), + ); if let Some(asset_path) = base_keymap.asset_path() { - cx.bind_keys(KeymapFile::load_asset(asset_path, cx).unwrap()); + cx.bind_keys(KeymapFile::load_asset(asset_path, Some(KeybindSource::Base), cx).unwrap()); } if VimModeSetting::get_global(cx).0 || vim_mode_setting::HelixModeSetting::get_global(cx).0 { - cx.bind_keys(KeymapFile::load_asset(VIM_KEYMAP_PATH, cx).unwrap()); + cx.bind_keys( + KeymapFile::load_asset(VIM_KEYMAP_PATH, Some(KeybindSource::Vim), cx).unwrap(), + ); } } From 39dc4b9040affb964b8405bbf1303c4ade60a2d7 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 24 Jun 2025 19:11:25 +0300 Subject: [PATCH 05/56] Fix being unable to input a whitespace character in collab channels filter (#33318) Before, `space` was always causing a channel join. 
Now it's less fluent, one has to press `ESC` to get the focus out of the filter editor and then `space` starts joining the channel. Release Notes: - Fixed being unable to input a whitespace character in collab channels filter --- crates/collab_ui/src/collab_panel.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index d45ce2f88dc5579fcb410180363b855d4b999fa3..6501d3a56649ec3cb2ef15099829d601fbbfadd4 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -1645,6 +1645,10 @@ impl CollabPanel { self.channel_name_editor.update(cx, |editor, cx| { editor.insert(" ", window, cx); }); + } else if self.filter_editor.focus_handle(cx).is_focused(window) { + self.filter_editor.update(cx, |editor, cx| { + editor.insert(" ", window, cx); + }); } } @@ -2045,7 +2049,9 @@ impl CollabPanel { dispatch_context.add("CollabPanel"); dispatch_context.add("menu"); - let identifier = if self.channel_name_editor.focus_handle(cx).is_focused(window) { + let identifier = if self.channel_name_editor.focus_handle(cx).is_focused(window) + || self.filter_editor.focus_handle(cx).is_focused(window) + { "editing" } else { "not_editing" @@ -3031,7 +3037,7 @@ impl Render for CollabPanel { .on_action(cx.listener(CollabPanel::start_move_selected_channel)) .on_action(cx.listener(CollabPanel::move_channel_up)) .on_action(cx.listener(CollabPanel::move_channel_down)) - .track_focus(&self.focus_handle(cx)) + .track_focus(&self.focus_handle) .size_full() .child(if self.user_store.read(cx).current_user().is_none() { self.render_signed_out(cx) From 7be57baef03dd8b8a4cb4ae2b643f2b553f2e3fa Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 24 Jun 2025 18:23:59 +0200 Subject: [PATCH 06/56] agent: Fix issue with Anthropic thinking models (#33317) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit cc @osyvokon We were seeing a bunch of errors in our backend when people were using Claude models with thinking enabled. In the logs we would see > an error occurred while interacting with the Anthropic API: invalid_request_error: messages.x.content.0.type: Expected `thinking` or `redacted_thinking`, but found `text`. When `thinking` is enabled, a final `assistant` message must start with a thinking block (preceeding the lastmost set of `tool_use` and `tool_result` blocks). We recommend you include thinking blocks from previous turns. To avoid this requirement, disable `thinking`. Please consult our documentation at https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking However, this issue did not occur frequently and was not easily reproducible. Turns out it was triggered by us not correctly handling [Redacted Thinking Blocks](https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#thinking-redaction). I could constantly reproduce this issue by including this magic string: `ANTHROPIC_MAGIC_STRING_TRIGGER_REDACTED_THINKING_46C9A13E193C177646C7398A98432ECCCE4C1253D5E2D82641AC0E52CC2876CB ` in the request, which forces `claude-3-7-sonnet` to emit redacted thinking blocks (confusingly the magic string does not seem to be working for `claude-sonnet-4`). As soon as we hit a tool call Anthropic would return an error. Thanks to @osyvokon for pointing me in the right direction 😄! 
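As a self-contained sketch of the contract this fix establishes (the `Event` and `Segment` enums below are illustrative stand-ins for `LanguageModelCompletionEvent` and `MessageSegment`): redacted thinking arrives as an opaque, encrypted string, is stored verbatim as its own segment, and is echoed back unchanged on the next request instead of being dropped.

```rust
#[derive(Debug)]
enum Event {
    Thinking { text: String },
    RedactedThinking { data: String },
}

#[derive(Debug)]
enum Segment {
    Thinking(String),
    // Opaque payload from Anthropic; never rendered or re-encoded.
    RedactedThinking(String),
}

fn append(segments: &mut Vec<Segment>, event: Event) {
    match event {
        Event::Thinking { text } => segments.push(Segment::Thinking(text)),
        // Previously this event was silently dropped, so the replayed history
        // started with a plain text block and Anthropic rejected the request.
        Event::RedactedThinking { data } => segments.push(Segment::RedactedThinking(data)),
    }
}

fn main() {
    let mut segments = Vec::new();
    append(&mut segments, Event::RedactedThinking { data: "<encrypted blob>".into() });
    println!("{segments:?}");
}
```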
Release Notes: - agent: Fixed an issue where Anthropic models would sometimes return an error when thinking was enabled --- crates/agent/src/thread.rs | 25 ++++++++++++++++++- crates/agent/src/thread_store.rs | 2 +- .../src/assistant_context.rs | 1 + crates/eval/src/instance.rs | 2 ++ crates/language_model/src/language_model.rs | 4 +++ crates/language_model/src/request.rs | 2 +- .../language_models/src/provider/anthropic.rs | 10 +++----- 7 files changed, 36 insertions(+), 10 deletions(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 7a08de7a0b54dccf792ce42b20666d1e19ca840a..a46aa9381ea45002495a8fc3d2ee408173d8b3d4 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -145,6 +145,10 @@ impl Message { } } + pub fn push_redacted_thinking(&mut self, data: String) { + self.segments.push(MessageSegment::RedactedThinking(data)); + } + pub fn push_text(&mut self, text: &str) { if let Some(MessageSegment::Text(segment)) = self.segments.last_mut() { segment.push_str(text); @@ -183,7 +187,7 @@ pub enum MessageSegment { text: String, signature: Option, }, - RedactedThinking(Vec), + RedactedThinking(String), } impl MessageSegment { @@ -1643,6 +1647,25 @@ impl Thread { }; } } + LanguageModelCompletionEvent::RedactedThinking { + data + } => { + thread.received_chunk(); + + if let Some(last_message) = thread.messages.last_mut() { + if last_message.role == Role::Assistant + && !thread.tool_use.has_tool_results(last_message.id) + { + last_message.push_redacted_thinking(data); + } else { + request_assistant_message_id = + Some(thread.insert_assistant_message( + vec![MessageSegment::RedactedThinking(data)], + cx, + )); + }; + } + } LanguageModelCompletionEvent::ToolUse(tool_use) => { let last_assistant_message_id = request_assistant_message_id .unwrap_or_else(|| { diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index 0582e67a5c4bb13c91a63877b9f17dccd3b18031..3c9150ff75f53241120b45c3418288e5033489e2 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -731,7 +731,7 @@ pub enum SerializedMessageSegment { signature: Option, }, RedactedThinking { - data: Vec, + data: String, }, } diff --git a/crates/assistant_context/src/assistant_context.rs b/crates/assistant_context/src/assistant_context.rs index 1444701aac98e048e67468f420d0fa6512013824..a692502a9c390ec168aad2a6448c020428c0f5b1 100644 --- a/crates/assistant_context/src/assistant_context.rs +++ b/crates/assistant_context/src/assistant_context.rs @@ -2117,6 +2117,7 @@ impl AssistantContext { ); } } + LanguageModelCompletionEvent::RedactedThinking { .. } => {}, LanguageModelCompletionEvent::Text(mut chunk) => { if let Some(start) = thought_process_stack.pop() { let end = buffer.anchor_before(message_old_end_offset); diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index b6802537c65974cd7284159cdb3a7a379a2e2ce0..bb66a04e1f07f1f070d9c4c6536f260a05a11bb6 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -1030,6 +1030,7 @@ pub fn response_events_to_markdown( Ok(LanguageModelCompletionEvent::Thinking { text, .. }) => { thinking_buffer.push_str(text); } + Ok(LanguageModelCompletionEvent::RedactedThinking { .. 
}) => {} Ok(LanguageModelCompletionEvent::Stop(reason)) => { flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer); response.push_str(&format!("**Stop**: {:?}\n\n", reason)); @@ -1126,6 +1127,7 @@ impl ThreadDialog { // Skip these Ok(LanguageModelCompletionEvent::UsageUpdate(_)) + | Ok(LanguageModelCompletionEvent::RedactedThinking { .. }) | Ok(LanguageModelCompletionEvent::StatusUpdate { .. }) | Ok(LanguageModelCompletionEvent::StartMessage { .. }) | Ok(LanguageModelCompletionEvent::Stop(_)) => {} diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index 9f165df301d2a378c678da2e3b8c6a5c3ffdb03e..f84357bd98936e478a826df9a4d0563f2c857e10 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -67,6 +67,9 @@ pub enum LanguageModelCompletionEvent { text: String, signature: Option, }, + RedactedThinking { + data: String, + }, ToolUse(LanguageModelToolUse), StartMessage { message_id: String, @@ -359,6 +362,7 @@ pub trait LanguageModel: Send + Sync { Ok(LanguageModelCompletionEvent::StartMessage { .. }) => None, Ok(LanguageModelCompletionEvent::Text(text)) => Some(Ok(text)), Ok(LanguageModelCompletionEvent::Thinking { .. }) => None, + Ok(LanguageModelCompletionEvent::RedactedThinking { .. }) => None, Ok(LanguageModelCompletionEvent::Stop(_)) => None, Ok(LanguageModelCompletionEvent::ToolUse(_)) => None, Ok(LanguageModelCompletionEvent::UsageUpdate(token_usage)) => { diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index 559d8e9111405cef4c1b039a7c8ffa945de1d950..451a62775e6331b139ef5c4da57e4d7d930af6f8 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -303,7 +303,7 @@ pub enum MessageContent { text: String, signature: Option, }, - RedactedThinking(Vec), + RedactedThinking(String), Image(LanguageModelImage), ToolUse(LanguageModelToolUse), ToolResult(LanguageModelToolResult), diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index 719975c1d5ef51976a8d592c89d0a887892b9849..d19348eed6dcf8c65c06c20bfe5cdab4a2b41ddd 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -554,9 +554,7 @@ pub fn into_anthropic( } MessageContent::RedactedThinking(data) => { if !data.is_empty() { - Some(anthropic::RequestContent::RedactedThinking { - data: String::from_utf8(data).ok()?, - }) + Some(anthropic::RequestContent::RedactedThinking { data }) } else { None } @@ -730,10 +728,8 @@ impl AnthropicEventMapper { signature: None, })] } - ResponseContent::RedactedThinking { .. } => { - // Redacted thinking is encrypted and not accessible to the user, see: - // https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#suggestions-for-handling-redacted-thinking-in-production - Vec::new() + ResponseContent::RedactedThinking { data } => { + vec![Ok(LanguageModelCompletionEvent::RedactedThinking { data })] } ResponseContent::ToolUse { id, name, .. } => { self.tool_uses_by_index.insert( From 95cf153ad75a697f89b0b3affaafb51e1346e69e Mon Sep 17 00:00:00 2001 From: fantacell Date: Tue, 24 Jun 2025 18:51:41 +0200 Subject: [PATCH 07/56] Simulate helix line wrapping (#32763) In helix the `f`, `F`, `t`, `T`, left and right motions wrap lines. I added that by default. 
Release Notes: - vim: The `use_multiline_find` setting is replaced by binding to the correct action in the keymap: ``` "f": ["vim::PushFindForward", { "before": false, "multiline": true }], "t": ["vim::PushFindForward", { "before": true, "multiline": true }], "shift-f": ["vim::PushFindBackward", { "after": false, "multiline": true }], "shift-t": ["vim::PushFindBackward", { "after": true, "multiline": true }], ``` - helix: `f`/`t`/`shift-f`/`shift-t`/`h`/`l`/`left`/`right` are now multiline by default (like helix) --- assets/keymaps/vim.json | 16 +++++-- assets/settings/default.json | 1 - crates/vim/src/helix.rs | 33 ++++++++++++++ crates/vim/src/normal.rs | 84 ------------------------------------ crates/vim/src/state.rs | 10 +++-- crates/vim/src/vim.rs | 17 ++++---- docs/src/vim.md | 3 +- 7 files changed, 60 insertions(+), 104 deletions(-) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index dde07708488ceb6df5c64fa26311949b58967129..6b95839e2aecf404b0fcbc7d5267e863b2a2bc29 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -85,10 +85,10 @@ "[ {": ["vim::UnmatchedBackward", { "char": "{" }], "] )": ["vim::UnmatchedForward", { "char": ")" }], "[ (": ["vim::UnmatchedBackward", { "char": "(" }], - "f": ["vim::PushFindForward", { "before": false }], - "t": ["vim::PushFindForward", { "before": true }], - "shift-f": ["vim::PushFindBackward", { "after": false }], - "shift-t": ["vim::PushFindBackward", { "after": true }], + "f": ["vim::PushFindForward", { "before": false, "multiline": false }], + "t": ["vim::PushFindForward", { "before": true, "multiline": false }], + "shift-f": ["vim::PushFindBackward", { "after": false, "multiline": false }], + "shift-t": ["vim::PushFindBackward", { "after": true, "multiline": false }], "m": "vim::PushMark", "'": ["vim::PushJump", { "line": true }], "`": ["vim::PushJump", { "line": false }], @@ -368,6 +368,10 @@ "escape": "editor::Cancel", "ctrl-[": "editor::Cancel", ":": "command_palette::Toggle", + "left": "vim::WrappingLeft", + "right": "vim::WrappingRight", + "h": "vim::WrappingLeft", + "l": "vim::WrappingRight", "shift-d": "vim::DeleteToEndOfLine", "shift-j": "vim::JoinLines", "y": "editor::Copy", @@ -385,6 +389,10 @@ "shift-p": ["vim::Paste", { "before": true }], "u": "vim::Undo", "ctrl-r": "vim::Redo", + "f": ["vim::PushFindForward", { "before": false, "multiline": true }], + "t": ["vim::PushFindForward", { "before": true, "multiline": true }], + "shift-f": ["vim::PushFindBackward", { "after": false, "multiline": true }], + "shift-t": ["vim::PushFindBackward", { "after": true, "multiline": true }], "r": "vim::PushReplace", "s": "vim::Substitute", "shift-s": "vim::SubstituteLine", diff --git a/assets/settings/default.json b/assets/settings/default.json index 3dd85198d937b8cdb91fece58ca5fe26bc233c16..858055fbe63d7926c6826158f8f7f7676d7fdc46 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1734,7 +1734,6 @@ "default_mode": "normal", "toggle_relative_line_numbers": false, "use_system_clipboard": "always", - "use_multiline_find": false, "use_smartcase_find": false, "highlight_on_yank_duration": 200, "custom_digraphs": {}, diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index 2e7c371d359d114717fda5c878b553f3c9b3be77..8c1ab3297e28a7f3b910ba673cdcfc240506d5c4 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -435,4 +435,37 @@ mod test { // Mode::HelixNormal, // ); // } + + #[gpui::test] + async fn test_f_and_t(cx: &mut gpui::TestAppContext) { + let mut cx = 
VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! {" + The quˇick brown + fox jumps over + the lazy dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("f z"); + + cx.assert_state( + indoc! {" + The qu«ick brown + fox jumps over + the lazˇ»y dog."}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("2 T r"); + + cx.assert_state( + indoc! {" + The quick br«ˇown + fox jumps over + the laz»y dog."}, + Mode::HelixNormal, + ); + } } diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 5d4dcacd6cbeb8aa093e738e5ec0273c5e865222..ff9b347e41c49148f954b13acbb371cc7e23f458 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -1532,90 +1532,6 @@ mod test { } } - #[gpui::test] - async fn test_f_and_t_multiline(cx: &mut gpui::TestAppContext) { - let mut cx = VimTestContext::new(cx, true).await; - cx.update_global(|store: &mut SettingsStore, cx| { - store.update_user_settings::(cx, |s| { - s.use_multiline_find = Some(true); - }); - }); - - cx.assert_binding( - "f l", - indoc! {" - ˇfunction print() { - console.log('ok') - } - "}, - Mode::Normal, - indoc! {" - function print() { - consoˇle.log('ok') - } - "}, - Mode::Normal, - ); - - cx.assert_binding( - "t l", - indoc! {" - ˇfunction print() { - console.log('ok') - } - "}, - Mode::Normal, - indoc! {" - function print() { - consˇole.log('ok') - } - "}, - Mode::Normal, - ); - } - - #[gpui::test] - async fn test_capital_f_and_capital_t_multiline(cx: &mut gpui::TestAppContext) { - let mut cx = VimTestContext::new(cx, true).await; - cx.update_global(|store: &mut SettingsStore, cx| { - store.update_user_settings::(cx, |s| { - s.use_multiline_find = Some(true); - }); - }); - - cx.assert_binding( - "shift-f p", - indoc! {" - function print() { - console.ˇlog('ok') - } - "}, - Mode::Normal, - indoc! {" - function ˇprint() { - console.log('ok') - } - "}, - Mode::Normal, - ); - - cx.assert_binding( - "shift-t p", - indoc! {" - function print() { - console.ˇlog('ok') - } - "}, - Mode::Normal, - indoc! {" - function pˇrint() { - console.log('ok') - } - "}, - Mode::Normal, - ); - } - #[gpui::test] async fn test_f_and_t_smartcase(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 46dafdd6c80d878539df37cb7fa6cca45b83a27e..c4be0348717a31eac5fc5adc1f2f8b75e3526406 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -86,9 +86,11 @@ pub enum Operator { }, FindForward { before: bool, + multiline: bool, }, FindBackward { after: bool, + multiline: bool, }, Sneak { first_char: Option, @@ -994,12 +996,12 @@ impl Operator { Operator::Replace => "r", Operator::Digraph { .. } => "^K", Operator::Literal { .. } => "^V", - Operator::FindForward { before: false } => "f", - Operator::FindForward { before: true } => "t", + Operator::FindForward { before: false, .. } => "f", + Operator::FindForward { before: true, .. } => "t", Operator::Sneak { .. } => "s", Operator::SneakBackward { .. } => "S", - Operator::FindBackward { after: false } => "F", - Operator::FindBackward { after: true } => "T", + Operator::FindBackward { after: false, .. } => "F", + Operator::FindBackward { after: true, .. } => "T", Operator::AddSurrounds { .. } => "ys", Operator::ChangeSurrounds { .. 
} => "cs", Operator::DeleteSurrounds => "ds", diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 6447300ed40c9dacb501344244f417de2931afed..6b5d41f12ebf732781f6cb3234924c6ea48e92b5 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -72,6 +72,7 @@ struct PushObject { #[serde(deny_unknown_fields)] struct PushFindForward { before: bool, + multiline: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)] @@ -79,6 +80,7 @@ struct PushFindForward { #[serde(deny_unknown_fields)] struct PushFindBackward { after: bool, + multiline: bool, } #[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)] @@ -500,6 +502,7 @@ impl Vim { vim.push_operator( Operator::FindForward { before: action.before, + multiline: action.multiline, }, window, cx, @@ -510,6 +513,7 @@ impl Vim { vim.push_operator( Operator::FindBackward { after: action.after, + multiline: action.multiline, }, window, cx, @@ -1513,11 +1517,11 @@ impl Vim { } match self.active_operator() { - Some(Operator::FindForward { before }) => { + Some(Operator::FindForward { before, multiline }) => { let find = Motion::FindForward { before, char: text.chars().next().unwrap(), - mode: if VimSettings::get_global(cx).use_multiline_find { + mode: if multiline { FindRange::MultiLine } else { FindRange::SingleLine @@ -1527,11 +1531,11 @@ impl Vim { Vim::globals(cx).last_find = Some(find.clone()); self.motion(find, window, cx) } - Some(Operator::FindBackward { after }) => { + Some(Operator::FindBackward { after, multiline }) => { let find = Motion::FindBackward { after, char: text.chars().next().unwrap(), - mode: if VimSettings::get_global(cx).use_multiline_find { + mode: if multiline { FindRange::MultiLine } else { FindRange::SingleLine @@ -1729,7 +1733,6 @@ struct VimSettings { pub default_mode: Mode, pub toggle_relative_line_numbers: bool, pub use_system_clipboard: UseSystemClipboard, - pub use_multiline_find: bool, pub use_smartcase_find: bool, pub custom_digraphs: HashMap>, pub highlight_on_yank_duration: u64, @@ -1741,7 +1744,6 @@ struct VimSettingsContent { pub default_mode: Option, pub toggle_relative_line_numbers: Option, pub use_system_clipboard: Option, - pub use_multiline_find: Option, pub use_smartcase_find: Option, pub custom_digraphs: Option>>, pub highlight_on_yank_duration: Option, @@ -1794,9 +1796,6 @@ impl Settings for VimSettings { use_system_clipboard: settings .use_system_clipboard .ok_or_else(Self::missing_default)?, - use_multiline_find: settings - .use_multiline_find - .ok_or_else(Self::missing_default)?, use_smartcase_find: settings .use_smartcase_find .ok_or_else(Self::missing_default)?, diff --git a/docs/src/vim.md b/docs/src/vim.md index 2055e6d68d8fe2dcfb2a4737329cb436627b9fa6..3d3a1bac013f6fb417d297bd9c6587af68699a60 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -561,7 +561,7 @@ You can change the following settings to modify vim mode's behavior: | ---------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- | | default_mode | The default mode to start in. One of "normal", "insert", "replace", "visual", "visual_line", "visual_block", "helix_normal". | "normal" | | use_system_clipboard | Determines how system clipboard is used:
  • "always": use for all operations
  • "never": only use when explicitly specified
  • "on_yank": use for yank operations
| "always" | -| use_multiline_find | If `true`, `f` and `t` motions extend across multiple lines. | false | +| use_multiline_find | deprecated | | use_smartcase_find | If `true`, `f` and `t` motions are case-insensitive when the target letter is lowercase. | false | | toggle_relative_line_numbers | If `true`, line numbers are relative in normal mode and absolute in insert mode, giving you the best of both options. | false | | custom_digraphs | An object that allows you to add custom digraphs. Read below for an example. | {} | @@ -586,7 +586,6 @@ Here's an example of these settings changed: "vim": { "default_mode": "insert", "use_system_clipboard": "never", - "use_multiline_find": true, "use_smartcase_find": true, "toggle_relative_line_numbers": true, "highlight_on_yank_duration": 50, From 800b925fd702b8f8ac526884f9b67cebf20124e2 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Tue, 24 Jun 2025 14:02:07 -0400 Subject: [PATCH 08/56] Improve Atom keymap (#33329) Closes: https://github.com/zed-industries/zed/issues/33256 Move some Editor keymap entries into `Editor && mode == full` Release Notes: - N/A --- assets/keymaps/linux/atom.json | 18 ++++++++++-------- assets/keymaps/macos/atom.json | 20 +++++++++++--------- 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/assets/keymaps/linux/atom.json b/assets/keymaps/linux/atom.json index d471a54ea59b31ff713e7220d090b40c94d150ba..86ee068b06ef38ccec8215e4296c718dd873c824 100644 --- a/assets/keymaps/linux/atom.json +++ b/assets/keymaps/linux/atom.json @@ -9,6 +9,13 @@ }, { "context": "Editor", + "bindings": { + "ctrl-k ctrl-u": "editor::ConvertToUpperCase", // editor:upper-case + "ctrl-k ctrl-l": "editor::ConvertToLowerCase" // editor:lower-case + } + }, + { + "context": "Editor && mode == full", "bindings": { "ctrl-shift-l": "language_selector::Toggle", // grammar-selector:show "ctrl-|": "pane::RevealInProjectPanel", // tree-view:reveal-active-file @@ -19,25 +26,20 @@ "shift-f3": ["editor::SelectPrevious", { "replace_newest": true }], //find-and-replace:find-previous "alt-shift-down": "editor::AddSelectionBelow", // editor:add-selection-below "alt-shift-up": "editor::AddSelectionAbove", // editor:add-selection-above - "ctrl-k ctrl-u": "editor::ConvertToUpperCase", // editor:upper-case - "ctrl-k ctrl-l": "editor::ConvertToLowerCase", // editor:lower-case "ctrl-j": "editor::JoinLines", // editor:join-lines "ctrl-shift-d": "editor::DuplicateLineDown", // editor:duplicate-lines "ctrl-up": "editor::MoveLineUp", // editor:move-line-up "ctrl-down": "editor::MoveLineDown", // editor:move-line-down "ctrl-\\": "workspace::ToggleLeftDock", // tree-view:toggle - "ctrl-shift-m": "markdown::OpenPreviewToTheSide" // markdown-preview:toggle - } - }, - { - "context": "Editor && mode == full", - "bindings": { + "ctrl-shift-m": "markdown::OpenPreviewToTheSide", // markdown-preview:toggle "ctrl-r": "outline::Toggle" // symbols-view:toggle-project-symbols } }, { "context": "BufferSearchBar", "bindings": { + "f3": ["editor::SelectNext", { "replace_newest": true }], // find-and-replace:find-next + "shift-f3": ["editor::SelectPrevious", { "replace_newest": true }], //find-and-replace:find-previous "ctrl-f3": "search::SelectNextMatch", // find-and-replace:find-next-selected "ctrl-shift-f3": "search::SelectPreviousMatch" // find-and-replace:find-previous-selected } diff --git a/assets/keymaps/macos/atom.json b/assets/keymaps/macos/atom.json index 9ddf3538103136d62a51621bfebe99b1c4271267..df48e51767e54524c6645630d1fcb6b1cdeba599 100644 --- 
a/assets/keymaps/macos/atom.json +++ b/assets/keymaps/macos/atom.json @@ -9,6 +9,14 @@ }, { "context": "Editor", + "bindings": { + "cmd-shift-backspace": "editor::DeleteToBeginningOfLine", + "cmd-k cmd-u": "editor::ConvertToUpperCase", + "cmd-k cmd-l": "editor::ConvertToLowerCase" + } + }, + { + "context": "Editor && mode == full", "bindings": { "ctrl-shift-l": "language_selector::Toggle", "cmd-|": "pane::RevealInProjectPanel", @@ -19,26 +27,20 @@ "cmd-shift-g": ["editor::SelectPrevious", { "replace_newest": true }], "ctrl-shift-down": "editor::AddSelectionBelow", "ctrl-shift-up": "editor::AddSelectionAbove", - "cmd-shift-backspace": "editor::DeleteToBeginningOfLine", - "cmd-k cmd-u": "editor::ConvertToUpperCase", - "cmd-k cmd-l": "editor::ConvertToLowerCase", "alt-enter": "editor::Newline", "cmd-shift-d": "editor::DuplicateLineDown", "ctrl-cmd-up": "editor::MoveLineUp", "ctrl-cmd-down": "editor::MoveLineDown", "cmd-\\": "workspace::ToggleLeftDock", - "ctrl-shift-m": "markdown::OpenPreviewToTheSide" - } - }, - { - "context": "Editor && mode == full", - "bindings": { + "ctrl-shift-m": "markdown::OpenPreviewToTheSide", "cmd-r": "outline::Toggle" } }, { "context": "BufferSearchBar", "bindings": { + "cmd-g": ["editor::SelectNext", { "replace_newest": true }], + "cmd-shift-g": ["editor::SelectPrevious", { "replace_newest": true }], "cmd-f3": "search::SelectNextMatch", "cmd-shift-f3": "search::SelectPreviousMatch" } From fc1fc264ec02b326560291132e2e782b081f3c62 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Tue, 24 Jun 2025 14:24:43 -0400 Subject: [PATCH 09/56] debugger: Generate inline values based on debugger.scm file (#33081) ## Context To support inline values a language will have to implement their own provider trait that walks through tree sitter nodes. This is overly complicated, hard to accurately implement for each language, and lacks proper extension support. This PR switches to a singular inline provider that uses a language's `debugger.scm` query field to capture variables and scopes. The inline provider is able to use this information to generate inlays that take scope into account and work with any language that defines a debugger query file. 
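On the language side the wiring is small. Here is a sketch mirroring the test helpers added later in this patch (the `include_str!` path is illustrative): a language simply attaches its `debugger.scm` source as the debug-variables query, and the shared provider derives inline values from its captures.

```rust
use language::{Language, LanguageConfig, LanguageMatcher};

fn rust_with_inline_values() -> Language {
    // The query's variable/scope captures drive the single shared provider.
    let debug_variables_query = include_str!("../languages/src/rust/debugger.scm");
    Language::new(
        LanguageConfig {
            name: "Rust".into(),
            matcher: LanguageMatcher {
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            ..Default::default()
        },
        Some(tree_sitter_rust::LANGUAGE.into()),
    )
    .with_debug_variables_query(debug_variables_query)
    .unwrap()
}
```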
### Todos - [x] Implement a utility test function to easily test inline values - [x] Generate inline values based on captures - [x] Reimplement Python, Rust, and Go support - [x] Take scope into account when iterating through variable captures - [x] Add tests for Go inline values - [x] Remove old inline provider code and trait implementations Release Notes: - debugger: Generate inline values based on a language debugger.scm file --- Cargo.lock | 1 + crates/dap/src/inline_value.rs | 640 ------------------ crates/dap/src/registry.rs | 26 +- crates/dap_adapters/src/dap_adapters.rs | 6 - crates/debugger_ui/Cargo.toml | 1 + crates/debugger_ui/src/tests/inline_values.rs | 468 +++++++++++-- crates/editor/src/editor.rs | 3 +- crates/language/Cargo.toml | 1 + crates/language/src/buffer.rs | 84 ++- crates/language/src/language.rs | 50 ++ crates/language/src/language_registry.rs | 4 +- crates/languages/src/go/debugger.scm | 26 + crates/languages/src/python/debugger.scm | 43 ++ crates/languages/src/rust/debugger.scm | 50 ++ crates/project/src/debugger/dap_store.rs | 27 +- crates/project/src/debugger/session.rs | 11 +- crates/project/src/project.rs | 96 ++- 17 files changed, 786 insertions(+), 751 deletions(-) create mode 100644 crates/languages/src/go/debugger.scm create mode 100644 crates/languages/src/python/debugger.scm create mode 100644 crates/languages/src/rust/debugger.scm diff --git a/Cargo.lock b/Cargo.lock index 70a05cf4aa2a47de3973dbf64d4b8c0430d06a2c..0c832b83aa59834ee6fac4e8b936826de1465256 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4348,6 +4348,7 @@ dependencies = [ "terminal_view", "theme", "tree-sitter", + "tree-sitter-go", "tree-sitter-json", "ui", "unindent", diff --git a/crates/dap/src/inline_value.rs b/crates/dap/src/inline_value.rs index 881797e20fb5e400ebbbfa6c88c9b5691f8928a9..47d783308518d4317ff8c7f100253bef431a1962 100644 --- a/crates/dap/src/inline_value.rs +++ b/crates/dap/src/inline_value.rs @@ -1,5 +1,3 @@ -use std::collections::{HashMap, HashSet}; - #[derive(Debug, Clone, PartialEq, Eq)] pub enum VariableLookupKind { Variable, @@ -20,641 +18,3 @@ pub struct InlineValueLocation { pub row: usize, pub column: usize, } - -/// A trait for providing inline values for debugging purposes. -/// -/// Implementors of this trait are responsible for analyzing a given node in the -/// source code and extracting variable information, including their names, -/// scopes, and positions. This information is used to display inline values -/// during debugging sessions. Implementors must also handle variable scoping -/// themselves by traversing the syntax tree upwards to determine whether a -/// variable is local or global. -pub trait InlineValueProvider: 'static + Send + Sync { - /// Provides a list of inline value locations based on the given node and source code. - /// - /// # Parameters - /// - `node`: The root node of the active debug line. Implementors should traverse - /// upwards from this node to gather variable information and determine their scope. - /// - `source`: The source code as a string slice, used to extract variable names. - /// - `max_row`: The maximum row to consider when collecting variables. Variables - /// declared beyond this row should be ignored. - /// - /// # Returns - /// A vector of `InlineValueLocation` instances, each representing a variable's - /// name, scope, and the position of the inline value should be shown. 
- fn provide( - &self, - node: language::Node, - source: &str, - max_row: usize, - ) -> Vec; -} - -pub struct RustInlineValueProvider; - -impl InlineValueProvider for RustInlineValueProvider { - fn provide( - &self, - mut node: language::Node, - source: &str, - max_row: usize, - ) -> Vec { - let mut variables = Vec::new(); - let mut variable_names = HashSet::new(); - let mut scope = VariableScope::Local; - - loop { - let mut variable_names_in_scope = HashMap::new(); - for child in node.named_children(&mut node.walk()) { - if child.start_position().row >= max_row { - break; - } - - if scope == VariableScope::Local && child.kind() == "let_declaration" { - if let Some(identifier) = child.child_by_field_name("pattern") { - let variable_name = source[identifier.byte_range()].to_string(); - - if variable_names.contains(&variable_name) { - continue; - } - - if let Some(index) = variable_names_in_scope.get(&variable_name) { - variables.remove(*index); - } - - variable_names_in_scope.insert(variable_name.clone(), variables.len()); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: VariableLookupKind::Variable, - row: identifier.end_position().row, - column: identifier.end_position().column, - }); - } - } else if child.kind() == "static_item" { - if let Some(name) = child.child_by_field_name("name") { - let variable_name = source[name.byte_range()].to_string(); - variables.push(InlineValueLocation { - variable_name, - scope: scope.clone(), - lookup: VariableLookupKind::Expression, - row: name.end_position().row, - column: name.end_position().column, - }); - } - } - } - - variable_names.extend(variable_names_in_scope.keys().cloned()); - - if matches!(node.kind(), "function_item" | "closure_expression") { - scope = VariableScope::Global; - } - - if let Some(parent) = node.parent() { - node = parent; - } else { - break; - } - } - - variables - } -} - -pub struct PythonInlineValueProvider; - -impl InlineValueProvider for PythonInlineValueProvider { - fn provide( - &self, - mut node: language::Node, - source: &str, - max_row: usize, - ) -> Vec { - let mut variables = Vec::new(); - let mut variable_names = HashSet::new(); - let mut scope = VariableScope::Local; - - loop { - let mut variable_names_in_scope = HashMap::new(); - for child in node.named_children(&mut node.walk()) { - if child.start_position().row >= max_row { - break; - } - - if scope == VariableScope::Local { - match child.kind() { - "expression_statement" => { - if let Some(expr) = child.child(0) { - if expr.kind() == "assignment" { - if let Some(param) = expr.child(0) { - let param_identifier = if param.kind() == "identifier" { - Some(param) - } else if param.kind() == "typed_parameter" { - param.child(0) - } else { - None - }; - - if let Some(identifier) = param_identifier { - if identifier.kind() == "identifier" { - let variable_name = - source[identifier.byte_range()].to_string(); - - if variable_names.contains(&variable_name) { - continue; - } - - if let Some(index) = - variable_names_in_scope.get(&variable_name) - { - variables.remove(*index); - } - - variable_names_in_scope - .insert(variable_name.clone(), variables.len()); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: VariableLookupKind::Variable, - row: identifier.end_position().row, - column: identifier.end_position().column, - }); - } - } - } - } - } - } - "function_definition" => { - if let Some(params) = child.child_by_field_name("parameters") { - for param in 
params.named_children(&mut params.walk()) { - let param_identifier = if param.kind() == "identifier" { - Some(param) - } else if param.kind() == "typed_parameter" { - param.child(0) - } else { - None - }; - - if let Some(identifier) = param_identifier { - if identifier.kind() == "identifier" { - let variable_name = - source[identifier.byte_range()].to_string(); - - if variable_names.contains(&variable_name) { - continue; - } - - if let Some(index) = - variable_names_in_scope.get(&variable_name) - { - variables.remove(*index); - } - - variable_names_in_scope - .insert(variable_name.clone(), variables.len()); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: VariableLookupKind::Variable, - row: identifier.end_position().row, - column: identifier.end_position().column, - }); - } - } - } - } - } - "for_statement" => { - if let Some(target) = child.child_by_field_name("left") { - if target.kind() == "identifier" { - let variable_name = source[target.byte_range()].to_string(); - - if variable_names.contains(&variable_name) { - continue; - } - - if let Some(index) = variable_names_in_scope.get(&variable_name) - { - variables.remove(*index); - } - - variable_names_in_scope - .insert(variable_name.clone(), variables.len()); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: VariableLookupKind::Variable, - row: target.end_position().row, - column: target.end_position().column, - }); - } - } - } - _ => {} - } - } - } - - variable_names.extend(variable_names_in_scope.keys().cloned()); - - if matches!(node.kind(), "function_definition" | "module") - && node.range().end_point.row < max_row - { - scope = VariableScope::Global; - } - - if let Some(parent) = node.parent() { - node = parent; - } else { - break; - } - } - - variables - } -} - -pub struct GoInlineValueProvider; - -impl InlineValueProvider for GoInlineValueProvider { - fn provide( - &self, - mut node: language::Node, - source: &str, - max_row: usize, - ) -> Vec { - let mut variables = Vec::new(); - let mut variable_names = HashSet::new(); - let mut scope = VariableScope::Local; - - loop { - let mut variable_names_in_scope = HashMap::new(); - for child in node.named_children(&mut node.walk()) { - if child.start_position().row >= max_row { - break; - } - - if scope == VariableScope::Local { - match child.kind() { - "var_declaration" => { - for var_spec in child.named_children(&mut child.walk()) { - if var_spec.kind() == "var_spec" { - if let Some(name_node) = var_spec.child_by_field_name("name") { - let variable_name = - source[name_node.byte_range()].to_string(); - - if variable_names.contains(&variable_name) { - continue; - } - - if let Some(index) = - variable_names_in_scope.get(&variable_name) - { - variables.remove(*index); - } - - variable_names_in_scope - .insert(variable_name.clone(), variables.len()); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: VariableLookupKind::Variable, - row: name_node.end_position().row, - column: name_node.end_position().column, - }); - } - } - } - } - "short_var_declaration" => { - if let Some(left_side) = child.child_by_field_name("left") { - for identifier in left_side.named_children(&mut left_side.walk()) { - if identifier.kind() == "identifier" { - let variable_name = - source[identifier.byte_range()].to_string(); - - if variable_names.contains(&variable_name) { - continue; - } - - if let Some(index) = - variable_names_in_scope.get(&variable_name) - { - 
variables.remove(*index); - } - - variable_names_in_scope - .insert(variable_name.clone(), variables.len()); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: VariableLookupKind::Variable, - row: identifier.end_position().row, - column: identifier.end_position().column, - }); - } - } - } - } - "assignment_statement" => { - if let Some(left_side) = child.child_by_field_name("left") { - for identifier in left_side.named_children(&mut left_side.walk()) { - if identifier.kind() == "identifier" { - let variable_name = - source[identifier.byte_range()].to_string(); - - if variable_names.contains(&variable_name) { - continue; - } - - if let Some(index) = - variable_names_in_scope.get(&variable_name) - { - variables.remove(*index); - } - - variable_names_in_scope - .insert(variable_name.clone(), variables.len()); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: VariableLookupKind::Variable, - row: identifier.end_position().row, - column: identifier.end_position().column, - }); - } - } - } - } - "function_declaration" | "method_declaration" => { - if let Some(params) = child.child_by_field_name("parameters") { - for param in params.named_children(&mut params.walk()) { - if param.kind() == "parameter_declaration" { - if let Some(name_node) = param.child_by_field_name("name") { - let variable_name = - source[name_node.byte_range()].to_string(); - - if variable_names.contains(&variable_name) { - continue; - } - - if let Some(index) = - variable_names_in_scope.get(&variable_name) - { - variables.remove(*index); - } - - variable_names_in_scope - .insert(variable_name.clone(), variables.len()); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: VariableLookupKind::Variable, - row: name_node.end_position().row, - column: name_node.end_position().column, - }); - } - } - } - } - } - "for_statement" => { - if let Some(clause) = child.named_child(0) { - if clause.kind() == "for_clause" { - if let Some(init) = clause.named_child(0) { - if init.kind() == "short_var_declaration" { - if let Some(left_side) = - init.child_by_field_name("left") - { - if left_side.kind() == "expression_list" { - for identifier in left_side - .named_children(&mut left_side.walk()) - { - if identifier.kind() == "identifier" { - let variable_name = source - [identifier.byte_range()] - .to_string(); - - if variable_names - .contains(&variable_name) - { - continue; - } - - if let Some(index) = - variable_names_in_scope - .get(&variable_name) - { - variables.remove(*index); - } - - variable_names_in_scope.insert( - variable_name.clone(), - variables.len(), - ); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: - VariableLookupKind::Variable, - row: identifier.end_position().row, - column: identifier - .end_position() - .column, - }); - } - } - } - } - } - } - } else if clause.kind() == "range_clause" { - if let Some(left) = clause.child_by_field_name("left") { - if left.kind() == "expression_list" { - for identifier in left.named_children(&mut left.walk()) - { - if identifier.kind() == "identifier" { - let variable_name = - source[identifier.byte_range()].to_string(); - - if variable_name == "_" { - continue; - } - - if variable_names.contains(&variable_name) { - continue; - } - - if let Some(index) = - variable_names_in_scope.get(&variable_name) - { - variables.remove(*index); - } - variable_names_in_scope.insert( - variable_name.clone(), - 
variables.len(), - ); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Local, - lookup: VariableLookupKind::Variable, - row: identifier.end_position().row, - column: identifier.end_position().column, - }); - } - } - } - } - } - } - } - _ => {} - } - } else if child.kind() == "var_declaration" { - for var_spec in child.named_children(&mut child.walk()) { - if var_spec.kind() == "var_spec" { - if let Some(name_node) = var_spec.child_by_field_name("name") { - let variable_name = source[name_node.byte_range()].to_string(); - variables.push(InlineValueLocation { - variable_name, - scope: VariableScope::Global, - lookup: VariableLookupKind::Expression, - row: name_node.end_position().row, - column: name_node.end_position().column, - }); - } - } - } - } - } - - variable_names.extend(variable_names_in_scope.keys().cloned()); - - if matches!(node.kind(), "function_declaration" | "method_declaration") { - scope = VariableScope::Global; - } - - if let Some(parent) = node.parent() { - node = parent; - } else { - break; - } - } - - variables - } -} -#[cfg(test)] -mod tests { - use super::*; - use tree_sitter::Parser; - - #[test] - fn test_go_inline_value_provider() { - let provider = GoInlineValueProvider; - let source = r#" -package main - -func main() { - items := []int{1, 2, 3, 4, 5} - for i, v := range items { - println(i, v) - } - for j := 0; j < 10; j++ { - println(j) - } -} -"#; - - let mut parser = Parser::new(); - if parser - .set_language(&tree_sitter_go::LANGUAGE.into()) - .is_err() - { - return; - } - let Some(tree) = parser.parse(source, None) else { - return; - }; - let root_node = tree.root_node(); - - let mut main_body = None; - for child in root_node.named_children(&mut root_node.walk()) { - if child.kind() == "function_declaration" { - if let Some(name) = child.child_by_field_name("name") { - if &source[name.byte_range()] == "main" { - if let Some(body) = child.child_by_field_name("body") { - main_body = Some(body); - break; - } - } - } - } - } - - let Some(main_body) = main_body else { - return; - }; - - let variables = provider.provide(main_body, source, 100); - assert!(variables.len() >= 2); - - let variable_names: Vec<&str> = - variables.iter().map(|v| v.variable_name.as_str()).collect(); - assert!(variable_names.contains(&"items")); - assert!(variable_names.contains(&"j")); - } - - #[test] - fn test_go_inline_value_provider_counter_pattern() { - let provider = GoInlineValueProvider; - let source = r#" -package main - -func main() { - N := 10 - for i := range N { - println(i) - } -} -"#; - - let mut parser = Parser::new(); - if parser - .set_language(&tree_sitter_go::LANGUAGE.into()) - .is_err() - { - return; - } - let Some(tree) = parser.parse(source, None) else { - return; - }; - let root_node = tree.root_node(); - - let mut main_body = None; - for child in root_node.named_children(&mut root_node.walk()) { - if child.kind() == "function_declaration" { - if let Some(name) = child.child_by_field_name("name") { - if &source[name.byte_range()] == "main" { - if let Some(body) = child.child_by_field_name("body") { - main_body = Some(body); - break; - } - } - } - } - } - - let Some(main_body) = main_body else { - return; - }; - let variables = provider.provide(main_body, source, 100); - - let variable_names: Vec<&str> = - variables.iter().map(|v| v.variable_name.as_str()).collect(); - assert!(variable_names.contains(&"N")); - assert!(variable_names.contains(&"i")); - } -} diff --git a/crates/dap/src/registry.rs b/crates/dap/src/registry.rs index 
2786de227e95ffa9d0b253f1309224d6f21ed877..9435b16b924e43406d5ed99c864df78c179f27b1 100644 --- a/crates/dap/src/registry.rs +++ b/crates/dap/src/registry.rs @@ -8,10 +8,7 @@ use task::{ AdapterSchema, AdapterSchemas, DebugRequest, DebugScenario, SpawnInTerminal, TaskTemplate, }; -use crate::{ - adapters::{DebugAdapter, DebugAdapterName}, - inline_value::InlineValueProvider, -}; +use crate::adapters::{DebugAdapter, DebugAdapterName}; use std::{collections::BTreeMap, sync::Arc}; /// Given a user build configuration, locator creates a fill-in debug target ([DebugScenario]) on behalf of the user. @@ -33,7 +30,6 @@ pub trait DapLocator: Send + Sync { struct DapRegistryState { adapters: BTreeMap>, locators: FxHashMap>, - inline_value_providers: FxHashMap>, } #[derive(Clone, Default)] @@ -82,22 +78,6 @@ impl DapRegistry { schemas } - pub fn add_inline_value_provider( - &self, - language: String, - provider: Arc, - ) { - let _previous_value = self - .0 - .write() - .inline_value_providers - .insert(language, provider); - debug_assert!( - _previous_value.is_none(), - "Attempted to insert a new inline value provider when one is already registered" - ); - } - pub fn locators(&self) -> FxHashMap> { self.0.read().locators.clone() } @@ -106,10 +86,6 @@ impl DapRegistry { self.0.read().adapters.get(name).cloned() } - pub fn inline_value_provider(&self, language: &str) -> Option> { - self.0.read().inline_value_providers.get(language).cloned() - } - pub fn enumerate_adapters(&self) -> Vec { self.0.read().adapters.keys().cloned().collect() } diff --git a/crates/dap_adapters/src/dap_adapters.rs b/crates/dap_adapters/src/dap_adapters.rs index 414d0a91a3de0d5a75ea4dc981d277c84962246f..79c56fdf25583e6cbe3a182b3abf464ac449eb27 100644 --- a/crates/dap_adapters/src/dap_adapters.rs +++ b/crates/dap_adapters/src/dap_adapters.rs @@ -18,7 +18,6 @@ use dap::{ GithubRepo, }, configure_tcp_connection, - inline_value::{GoInlineValueProvider, PythonInlineValueProvider, RustInlineValueProvider}, }; use gdb::GdbDebugAdapter; use go::GoDebugAdapter; @@ -44,10 +43,5 @@ pub fn init(cx: &mut App) { { registry.add_adapter(Arc::from(dap::FakeAdapter {})); } - - registry.add_inline_value_provider("Rust".to_string(), Arc::from(RustInlineValueProvider)); - registry - .add_inline_value_provider("Python".to_string(), Arc::from(PythonInlineValueProvider)); - registry.add_inline_value_provider("Go".to_string(), Arc::from(GoInlineValueProvider)); }) } diff --git a/crates/debugger_ui/Cargo.toml b/crates/debugger_ui/Cargo.toml index e259b8a4b38fef1d702ff5268c29f64fe45498c8..91f9acad3c73334980036880143df9c7b410b3b6 100644 --- a/crates/debugger_ui/Cargo.toml +++ b/crates/debugger_ui/Cargo.toml @@ -81,3 +81,4 @@ unindent.workspace = true util = { workspace = true, features = ["test-support"] } workspace = { workspace = true, features = ["test-support"] } zlog.workspace = true +tree-sitter-go.workspace = true diff --git a/crates/debugger_ui/src/tests/inline_values.rs b/crates/debugger_ui/src/tests/inline_values.rs index 6fed57ecacc9ad6062a27f3fa33a95bd52cc1a10..45cab2a3063a8741d01efb54059667026a646879 100644 --- a/crates/debugger_ui/src/tests/inline_values.rs +++ b/crates/debugger_ui/src/tests/inline_values.rs @@ -246,10 +246,10 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { - let x = 10; + let x: 10 = 10; let value = 42; let y = 4; let tester = { @@ -303,11 +303,11 @@ fn main() { editor.update_in(cx, |editor, window, cx| 
{ pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; - let value = 42; + let value: 42 = 42; let y = 4; let tester = { let y = 10; @@ -360,12 +360,12 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; let value: 42 = 42; - let y = 4; + let y: 4 = 4; let tester = { let y = 10; let y = 5; @@ -417,7 +417,7 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; @@ -474,14 +474,14 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; let value: 42 = 42; let y: 4 = 4; let tester = { - let y = 10; + let y: 4 = 10; let y = 5; let b = 3; vec![y, 20, 30] @@ -581,15 +581,15 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; let value: 42 = 42; - let y = 4; + let y: 10 = 4; let tester = { let y: 10 = 10; - let y = 5; + let y: 10 = 5; let b = 3; vec![y, 20, 30] }; @@ -688,14 +688,14 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; let value: 42 = 42; - let y = 4; + let y: 5 = 4; let tester = { - let y = 10; + let y: 5 = 10; let y: 5 = 5; let b = 3; vec![y, 20, 30] @@ -807,17 +807,17 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; let value: 42 = 42; - let y = 4; + let y: 5 = 4; let tester = { - let y = 10; + let y: 5 = 10; let y: 5 = 5; let b: 3 = 3; - vec![y, 20, 30] + vec![y: 5, 20, 30] }; let caller = || { @@ -926,7 +926,7 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; @@ -1058,7 +1058,7 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; @@ -1115,21 +1115,21 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { - let x = 10; - let value = 42; - let y = 4; - let tester = { + let x: 10 = 10; + let value: 42 = 42; + let y: 4 = 4; + let tester: size=3 = { let y = 10; let y = 5; let b = 3; vec![y, 20, 30] }; - let caller = || { - let x = 3; + let caller: = || { + let x: 10 = 3; println!("x={}", x); }; @@ -1193,10 +1193,10 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 1: usize = 1; + static mut GLOBAL: usize = 1; fn main() { - let x = 10; + let x: 3 = 10; let value = 42; let y = 4; let tester = { @@ -1208,7 +1208,7 @@ fn main() { let caller = || { let x: 3 = 3; - println!("x={}", x); + println!("x={}", x: 3); }; caller(); @@ -1338,7 +1338,7 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 2: 
usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; @@ -1362,7 +1362,7 @@ fn main() { GLOBAL = 2; } - let result = value * 2 * x; + let result = value: 42 * 2 * x: 10; println!("Simple test executed: value={}, result={}", value, result); assert!(true); } @@ -1483,7 +1483,7 @@ fn main() { editor.update_in(cx, |editor, window, cx| { pretty_assertions::assert_eq!( r#" - static mut GLOBAL: 2: usize = 1; + static mut GLOBAL: usize = 1; fn main() { let x: 10 = 10; @@ -1507,8 +1507,8 @@ fn main() { GLOBAL = 2; } - let result: 840 = value * 2 * x; - println!("Simple test executed: value={}, result={}", value, result); + let result: 840 = value: 42 * 2 * x: 10; + println!("Simple test executed: value={}, result={}", value: 42, result: 840); assert!(true); } "# @@ -1519,6 +1519,7 @@ fn main() { } fn rust_lang() -> Language { + let debug_variables_query = include_str!("../../../languages/src/rust/debugger.scm"); Language::new( LanguageConfig { name: "Rust".into(), @@ -1530,6 +1531,8 @@ fn rust_lang() -> Language { }, Some(tree_sitter_rust::LANGUAGE.into()), ) + .with_debug_variables_query(debug_variables_query) + .unwrap() } #[gpui::test] @@ -1818,8 +1821,8 @@ def process_data(untyped_param, typed_param: int, another_typed: str): def process_data(untyped_param: test_value, typed_param: 42: int, another_typed: world: str): # Local variables x: 10 = 10 - result: 84 = typed_param * 2 - text: Hello, world = "Hello, " + another_typed + result: 84 = typed_param: 42 * 2 + text: Hello, world = "Hello, " + another_typed: world # For loop with range sum_value: 10 = 0 @@ -1837,6 +1840,7 @@ def process_data(untyped_param, typed_param: int, another_typed: str): } fn python_lang() -> Language { + let debug_variables_query = include_str!("../../../languages/src/python/debugger.scm"); Language::new( LanguageConfig { name: "Python".into(), @@ -1848,4 +1852,392 @@ fn python_lang() -> Language { }, Some(tree_sitter_python::LANGUAGE.into()), ) + .with_debug_variables_query(debug_variables_query) + .unwrap() +} + +fn go_lang() -> Language { + let debug_variables_query = include_str!("../../../languages/src/go/debugger.scm"); + Language::new( + LanguageConfig { + name: "Go".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["go".to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_go::LANGUAGE.into()), + ) + .with_debug_variables_query(debug_variables_query) + .unwrap() +} + +/// Test utility function for inline values testing +/// +/// # Arguments +/// * `variables` - List of tuples containing (variable_name, variable_value) +/// * `before` - Source code before inline values are applied +/// * `after` - Expected source code after inline values are applied +/// * `language` - Language configuration to use for parsing +/// * `executor` - Background executor for async operations +/// * `cx` - Test app context +async fn test_inline_values_util( + local_variables: &[(&str, &str)], + global_variables: &[(&str, &str)], + before: &str, + after: &str, + active_debug_line: Option, + language: Language, + executor: BackgroundExecutor, + cx: &mut TestAppContext, +) { + init_test(cx); + + let lines_count = before.lines().count(); + let stop_line = + active_debug_line.unwrap_or_else(|| if lines_count > 6 { 6 } else { lines_count - 1 }); + + let fs = FakeFs::new(executor.clone()); + fs.insert_tree(path!("/project"), json!({ "main.rs": before.to_string() })) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let workspace = 
init_test_workspace(&project, cx).await; + workspace + .update(cx, |workspace, window, cx| { + workspace.focus_panel::(window, cx); + }) + .unwrap(); + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + let session = start_debug_session(&workspace, cx, |_| {}).unwrap(); + let client = session.update(cx, |session, _| session.adapter_client().unwrap()); + + client.on_request::(|_, _| { + Ok(dap::ThreadsResponse { + threads: vec![dap::Thread { + id: 1, + name: "main".into(), + }], + }) + }); + + client.on_request::(move |_, _| { + Ok(dap::StackTraceResponse { + stack_frames: vec![dap::StackFrame { + id: 1, + name: "main".into(), + source: Some(dap::Source { + name: Some("main.rs".into()), + path: Some(path!("/project/main.rs").into()), + source_reference: None, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }), + line: stop_line as u64, + column: 1, + end_line: None, + end_column: None, + can_restart: None, + instruction_pointer_reference: None, + module_id: None, + presentation_hint: None, + }], + total_frames: None, + }) + }); + + let local_vars: Vec = local_variables + .iter() + .map(|(name, value)| Variable { + name: (*name).into(), + value: (*value).into(), + type_: None, + presentation_hint: None, + evaluate_name: None, + variables_reference: 0, + named_variables: None, + indexed_variables: None, + memory_reference: None, + declaration_location_reference: None, + value_location_reference: None, + }) + .collect(); + + let global_vars: Vec = global_variables + .iter() + .map(|(name, value)| Variable { + name: (*name).into(), + value: (*value).into(), + type_: None, + presentation_hint: None, + evaluate_name: None, + variables_reference: 0, + named_variables: None, + indexed_variables: None, + memory_reference: None, + declaration_location_reference: None, + value_location_reference: None, + }) + .collect(); + + client.on_request::({ + let local_vars = Arc::new(local_vars.clone()); + let global_vars = Arc::new(global_vars.clone()); + move |_, args| { + let variables = match args.variables_reference { + 2 => (*local_vars).clone(), + 3 => (*global_vars).clone(), + _ => vec![], + }; + Ok(dap::VariablesResponse { variables }) + } + }); + + client.on_request::(move |_, _| { + Ok(dap::ScopesResponse { + scopes: vec![ + Scope { + name: "Local".into(), + presentation_hint: None, + variables_reference: 2, + named_variables: None, + indexed_variables: None, + expensive: false, + source: None, + line: None, + column: None, + end_line: None, + end_column: None, + }, + Scope { + name: "Global".into(), + presentation_hint: None, + variables_reference: 3, + named_variables: None, + indexed_variables: None, + expensive: false, + source: None, + line: None, + column: None, + end_line: None, + end_column: None, + }, + ], + }) + }); + + if !global_variables.is_empty() { + let global_evaluate_map: std::collections::HashMap = global_variables + .iter() + .map(|(name, value)| (name.to_string(), value.to_string())) + .collect(); + + client.on_request::(move |_, args| { + let value = global_evaluate_map + .get(&args.expression) + .unwrap_or(&"undefined".to_string()) + .clone(); + + Ok(dap::EvaluateResponse { + result: value, + type_: None, + presentation_hint: None, + variables_reference: 0, + named_variables: None, + indexed_variables: None, + memory_reference: None, + value_location_reference: None, + }) + }); + } + + client + .fake_event(dap::messages::Events::Stopped(dap::StoppedEvent { + reason: dap::StoppedEventReason::Pause, + 
description: None, + thread_id: Some(1), + preserve_focus_hint: None, + text: None, + all_threads_stopped: None, + hit_breakpoint_ids: None, + })) + .await; + + cx.run_until_parked(); + + let project_path = Path::new(path!("/project")); + let worktree = project + .update(cx, |project, cx| project.find_worktree(project_path, cx)) + .expect("This worktree should exist in project") + .0; + + let worktree_id = workspace + .update(cx, |_, _, cx| worktree.read(cx).id()) + .unwrap(); + + let buffer = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, "main.rs"), cx) + }) + .await + .unwrap(); + + buffer.update(cx, |buffer, cx| { + buffer.set_language(Some(Arc::new(language)), cx); + }); + + let (editor, cx) = cx.add_window_view(|window, cx| { + Editor::new( + EditorMode::full(), + MultiBuffer::build_from_buffer(buffer, cx), + Some(project), + window, + cx, + ) + }); + + active_debug_session_panel(workspace, cx).update_in(cx, |_, window, cx| { + cx.focus_self(window); + }); + cx.run_until_parked(); + + editor.update(cx, |editor, cx| editor.refresh_inline_values(cx)); + + cx.run_until_parked(); + + editor.update_in(cx, |editor, window, cx| { + pretty_assertions::assert_eq!(after, editor.snapshot(window, cx).text()); + }); +} + +#[gpui::test] +async fn test_inline_values_example(executor: BackgroundExecutor, cx: &mut TestAppContext) { + let variables = [("x", "10"), ("y", "20"), ("result", "30")]; + + let before = r#" +fn main() { + let x = 10; + let y = 20; + let result = x + y; + println!("Result: {}", result); +} +"# + .unindent(); + + let after = r#" +fn main() { + let x: 10 = 10; + let y: 20 = 20; + let result: 30 = x: 10 + y: 20; + println!("Result: {}", result: 30); +} +"# + .unindent(); + + test_inline_values_util( + &variables, + &[], + &before, + &after, + None, + rust_lang(), + executor, + cx, + ) + .await; +} + +#[gpui::test] +async fn test_inline_values_with_globals(executor: BackgroundExecutor, cx: &mut TestAppContext) { + let variables = [("x", "5"), ("y", "10")]; + + let before = r#" +static mut GLOBAL_COUNTER: usize = 42; + +fn main() { + let x = 5; + let y = 10; + unsafe { + GLOBAL_COUNTER += 1; + } + println!("x={}, y={}, global={}", x, y, unsafe { GLOBAL_COUNTER }); +} +"# + .unindent(); + + let after = r#" +static mut GLOBAL_COUNTER: 42: usize = 42; + +fn main() { + let x: 5 = 5; + let y: 10 = 10; + unsafe { + GLOBAL_COUNTER += 1; + } + println!("x={}, y={}, global={}", x, y, unsafe { GLOBAL_COUNTER }); +} +"# + .unindent(); + + test_inline_values_util( + &variables, + &[("GLOBAL_COUNTER", "42")], + &before, + &after, + None, + rust_lang(), + executor, + cx, + ) + .await; +} + +#[gpui::test] +async fn test_go_inline_values(executor: BackgroundExecutor, cx: &mut TestAppContext) { + let variables = [("x", "42"), ("y", "hello")]; + + let before = r#" +package main + +var globalCounter int = 100 + +func main() { + x := 42 + y := "hello" + z := x + 10 + println(x, y, z) +} +"# + .unindent(); + + let after = r#" +package main + +var globalCounter: 100 int = 100 + +func main() { + x: 42 := 42 + y := "hello" + z := x + 10 + println(x, y, z) +} +"# + .unindent(); + + test_inline_values_util( + &variables, + &[("globalCounter", "100")], + &before, + &after, + None, + go_lang(), + executor, + cx, + ) + .await; } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 568e9062c86bb5b2b584bfeaa4f430c88d251a76..6e9a9be0fe3267e5b13bfdb1b9d880848fa968bf 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -19167,7 +19167,7 @@ 
impl Editor { let current_execution_position = self .highlighted_rows .get(&TypeId::of::()) - .and_then(|lines| lines.last().map(|line| line.range.start)); + .and_then(|lines| lines.last().map(|line| line.range.end)); self.inline_value_cache.refresh_task = cx.spawn(async move |editor, cx| { let inline_values = editor @@ -21553,7 +21553,6 @@ impl SemanticsProvider for Entity { fn inline_values( &self, buffer_handle: Entity, - range: Range, cx: &mut App, ) -> Option>>> { diff --git a/crates/language/Cargo.toml b/crates/language/Cargo.toml index 278976d3cdfaf304b6d28bd3c88e9a81cbfdb69f..b0e06c3d65a7bc05df0cb41104a1139353372539 100644 --- a/crates/language/Cargo.toml +++ b/crates/language/Cargo.toml @@ -20,6 +20,7 @@ test-support = [ "text/test-support", "tree-sitter-rust", "tree-sitter-python", + "tree-sitter-rust", "tree-sitter-typescript", "settings/test-support", "util/test-support", diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 523efa49dc71694084529a13d45b67fdd7c09afd..90a899f79d42f33f91044f025bc22383c2f3881d 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1,12 +1,6 @@ -pub use crate::{ - Grammar, Language, LanguageRegistry, - diagnostic_set::DiagnosticSet, - highlight_map::{HighlightId, HighlightMap}, - proto, -}; use crate::{ - LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject, - TreeSitterOptions, + DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, + TextObject, TreeSitterOptions, diagnostic_set::{DiagnosticEntry, DiagnosticGroup}, language_settings::{LanguageSettings, language_settings}, outline::OutlineItem, @@ -17,6 +11,12 @@ use crate::{ task_context::RunnableRange, text_diff::text_diff, }; +pub use crate::{ + Grammar, Language, LanguageRegistry, + diagnostic_set::DiagnosticSet, + highlight_map::{HighlightId, HighlightMap}, + proto, +}; use anyhow::{Context as _, Result}; pub use clock::ReplicaId; use clock::{AGENT_REPLICA_ID, Lamport}; @@ -3848,6 +3848,74 @@ impl BufferSnapshot { .filter(|pair| !pair.newline_only) } + pub fn debug_variables_query( + &self, + range: Range, + ) -> impl Iterator, DebuggerTextObject)> + '_ { + let range = range.start.to_offset(self).saturating_sub(1) + ..self.len().min(range.end.to_offset(self) + 1); + + let mut matches = self.syntax.matches_with_options( + range.clone(), + &self.text, + TreeSitterOptions::default(), + |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query), + ); + + let configs = matches + .grammars() + .iter() + .map(|grammar| grammar.debug_variables_config.as_ref()) + .collect::>(); + + let mut captures = Vec::<(Range, DebuggerTextObject)>::new(); + + iter::from_fn(move || { + loop { + while let Some(capture) = captures.pop() { + if capture.0.overlaps(&range) { + return Some(capture); + } + } + + let mat = matches.peek()?; + + let Some(config) = configs[mat.grammar_index].as_ref() else { + matches.advance(); + continue; + }; + + for capture in mat.captures { + let Some(ix) = config + .objects_by_capture_ix + .binary_search_by_key(&capture.index, |e| e.0) + .ok() + else { + continue; + }; + let text_object = config.objects_by_capture_ix[ix].1; + let byte_range = capture.node.byte_range(); + + let mut found = false; + for (range, existing) in captures.iter_mut() { + if existing == &text_object { + range.start = range.start.min(byte_range.start); + range.end = range.end.max(byte_range.end); + found = true; + break; + } + } + + if !found { + captures.push((byte_range, text_object)); + } 
+ } + + matches.advance(); + } + }) + } + pub fn text_object_ranges( &self, range: Range, diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 8b8c411366f02793bdeb86bf5154fc77aa6d338b..f564b54ed52e028a8a19f13616bc42364ff4d4a4 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -1082,6 +1082,7 @@ pub struct Grammar { pub embedding_config: Option, pub(crate) injection_config: Option, pub(crate) override_config: Option, + pub(crate) debug_variables_config: Option, pub(crate) highlight_map: Mutex, } @@ -1104,6 +1105,22 @@ pub struct OutlineConfig { pub annotation_capture_ix: Option, } +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum DebuggerTextObject { + Variable, + Scope, +} + +impl DebuggerTextObject { + pub fn from_capture_name(name: &str) -> Option { + match name { + "debug-variable" => Some(DebuggerTextObject::Variable), + "debug-scope" => Some(DebuggerTextObject::Scope), + _ => None, + } + } +} + #[derive(Debug, Clone, Copy, PartialEq)] pub enum TextObject { InsideFunction, @@ -1206,6 +1223,11 @@ struct BracketsPatternConfig { newline_only: bool, } +pub struct DebugVariablesConfig { + pub query: Query, + pub objects_by_capture_ix: Vec<(u32, DebuggerTextObject)>, +} + impl Language { pub fn new(config: LanguageConfig, ts_language: Option) -> Self { Self::new_with_id(LanguageId::new(), config, ts_language) @@ -1237,6 +1259,7 @@ impl Language { redactions_config: None, runnable_config: None, error_query: Query::new(&ts_language, "(ERROR) @error").ok(), + debug_variables_config: None, ts_language, highlight_map: Default::default(), }) @@ -1307,6 +1330,11 @@ impl Language { .with_text_object_query(query.as_ref()) .context("Error loading textobject query")?; } + if let Some(query) = queries.debugger { + self = self + .with_debug_variables_query(query.as_ref()) + .context("Error loading debug variables query")?; + } Ok(self) } @@ -1425,6 +1453,24 @@ impl Language { Ok(self) } + pub fn with_debug_variables_query(mut self, source: &str) -> Result { + let grammar = self.grammar_mut().context("cannot mutate grammar")?; + let query = Query::new(&grammar.ts_language, source)?; + + let mut objects_by_capture_ix = Vec::new(); + for (ix, name) in query.capture_names().iter().enumerate() { + if let Some(text_object) = DebuggerTextObject::from_capture_name(name) { + objects_by_capture_ix.push((ix as u32, text_object)); + } + } + + grammar.debug_variables_config = Some(DebugVariablesConfig { + query, + objects_by_capture_ix, + }); + Ok(self) + } + pub fn with_brackets_query(mut self, source: &str) -> Result { let grammar = self.grammar_mut().context("cannot mutate grammar")?; let query = Query::new(&grammar.ts_language, source)?; @@ -1930,6 +1976,10 @@ impl Grammar { .capture_index_for_name(name)?; Some(self.highlight_map.lock().get(capture_id)) } + + pub fn debug_variables_config(&self) -> Option<&DebugVariablesConfig> { + self.debug_variables_config.as_ref() + } } impl CodeLabel { diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index 4d0837d8e30fc1fd9be961e2cc05487d276e3792..c157cd9e73a0bb2f208672d391e98e2445317e5c 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -226,7 +226,7 @@ pub const QUERY_FILENAME_PREFIXES: &[( ("overrides", |q| &mut q.overrides), ("redactions", |q| &mut q.redactions), ("runnables", |q| &mut q.runnables), - ("debug_variables", |q| &mut q.debug_variables), + ("debugger", |q| &mut q.debugger), ("textobjects", 
|q| &mut q.text_objects), ]; @@ -243,7 +243,7 @@ pub struct LanguageQueries { pub redactions: Option>, pub runnables: Option>, pub text_objects: Option>, - pub debug_variables: Option>, + pub debugger: Option>, } #[derive(Clone, Default)] diff --git a/crates/languages/src/go/debugger.scm b/crates/languages/src/go/debugger.scm new file mode 100644 index 0000000000000000000000000000000000000000..f22b91f938e1159fa9bfec99f5000976766faf06 --- /dev/null +++ b/crates/languages/src/go/debugger.scm @@ -0,0 +1,26 @@ +(parameter_declaration (identifier) @debug-variable) + +(short_var_declaration (expression_list (identifier) @debug-variable)) + +(var_declaration (var_spec (identifier) @debug-variable)) + +(const_declaration (const_spec (identifier) @debug-variable)) + +(assignment_statement (expression_list (identifier) @debug-variable)) + +(binary_expression (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(call_expression (argument_list (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]"))) + +(return_statement (expression_list (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]"))) + +(range_clause (expression_list (identifier) @debug-variable)) + +(parenthesized_expression (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(block) @debug-scope +(function_declaration) @debug-scope diff --git a/crates/languages/src/python/debugger.scm b/crates/languages/src/python/debugger.scm new file mode 100644 index 0000000000000000000000000000000000000000..807d6e865d2f60637f60b397ccc1a61fe3360fa1 --- /dev/null +++ b/crates/languages/src/python/debugger.scm @@ -0,0 +1,43 @@ +(identifier) @debug-variable +(#eq? @debug-variable "self") + +(assignment left: (identifier) @debug-variable) +(assignment left: (pattern_list (identifier) @debug-variable)) +(assignment left: (tuple_pattern (identifier) @debug-variable)) + +(augmented_assignment left: (identifier) @debug-variable) + +(for_statement left: (identifier) @debug-variable) +(for_statement left: (pattern_list (identifier) @debug-variable)) +(for_statement left: (tuple_pattern (identifier) @debug-variable)) + +(for_in_clause left: (identifier) @debug-variable) +(for_in_clause left: (pattern_list (identifier) @debug-variable)) +(for_in_clause left: (tuple_pattern (identifier) @debug-variable)) + +(as_pattern (identifier) @debug-variable) + +(binary_operator left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_operator right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(comparison_operator (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) + +(list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(tuple (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(set (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) + +(subscript value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) + +(attribute object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) + +(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) + +(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) + +(argument_list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) + +(if_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) + +(while_statement condition: (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) + +(block) @debug-scope +(module) @debug-scope diff --git a/crates/languages/src/rust/debugger.scm b/crates/languages/src/rust/debugger.scm new file mode 100644 index 0000000000000000000000000000000000000000..5347413f698083287b9bedd25f4732d24fbbf76e --- /dev/null +++ b/crates/languages/src/rust/debugger.scm @@ -0,0 +1,50 @@ +(metavariable) @debug-variable + +(parameter (identifier) @debug-variable) + +(self) @debug-variable + +(static_item (identifier) @debug-variable) +(const_item (identifier) @debug-variable) + +(let_declaration pattern: (identifier) @debug-variable) + +(let_condition (identifier) @debug-variable) + +(match_arm (identifier) @debug-variable) + +(for_expression (identifier) @debug-variable) + +(closure_parameters (identifier) @debug-variable) + +(assignment_expression (identifier) @debug-variable) + +(field_expression (identifier) @debug-variable) + +(binary_expression (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(reference_expression (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array_expression (identifier) @debug-variable) +(tuple_expression (identifier) @debug-variable) +(return_expression (identifier) @debug-variable) +(await_expression (identifier) @debug-variable) +(try_expression (identifier) @debug-variable) +(index_expression (identifier) @debug-variable) +(range_expression (identifier) @debug-variable) +(unary_expression (identifier) @debug-variable) + +(if_expression (identifier) @debug-variable) +(while_expression (identifier) @debug-variable) + +(parenthesized_expression (identifier) @debug-variable) + +(arguments (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(macro_invocation (token_tree (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]"))) + +(block) @debug-scope diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 28cfbe4e4d69ae67d99192cf0b99cfbca3f7ee31..be4964bbee2688c0025900c552eec3fbbc9af492 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -588,7 +588,14 @@ impl DapStore { cx: &mut Context, ) -> Task>> { let snapshot = buffer_handle.read(cx).snapshot(); - let all_variables = session.read(cx).variables_by_stack_frame_id(stack_frame_id); + let local_variables = + session + .read(cx) + .variables_by_stack_frame_id(stack_frame_id, false, true); + let global_variables = + session + .read(cx) + .variables_by_stack_frame_id(stack_frame_id, true, false); fn format_value(mut value: String) -> String { const LIMIT: usize = 100; @@ -617,10 +624,20 @@ impl DapStore { match inline_value_location.lookup { VariableLookupKind::Variable => { - let Some(variable) = all_variables - .iter() - .find(|variable| variable.name == inline_value_location.variable_name) - else { + let variable_search = + if inline_value_location.scope + == dap::inline_value::VariableScope::Local + { + local_variables.iter().chain(global_variables.iter()).find( + |variable| variable.name == inline_value_location.variable_name, + ) + } else { + global_variables.iter().find(|variable| { + variable.name == inline_value_location.variable_name + }) + }; + + let Some(variable) = variable_search else { continue; }; diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index 917506e523e7c7d64b58812baef78ec69e516ce8..300c598bfb9e1daa198baecda0ce5ef5c08aa3e7 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -2171,7 +2171,12 @@ impl Session { .unwrap_or_default() } - pub fn variables_by_stack_frame_id(&self, stack_frame_id: StackFrameId) -> Vec { + pub fn variables_by_stack_frame_id( + &self, + stack_frame_id: StackFrameId, + globals: bool, + locals: bool, + ) -> Vec { let Some(stack_frame) = self.stack_frames.get(&stack_frame_id) else { return Vec::new(); }; @@ -2179,6 +2184,10 @@ impl Session { stack_frame .scopes .iter() + .filter(|scope| { + (scope.name.to_lowercase().contains("local") && locals) + || (scope.name.to_lowercase().contains("global") && globals) + }) .filter_map(|scope| self.variables.get(&scope.variables_reference)) .flatten() .cloned() diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 2fc7fbbe7600889cb0c4d3c25a8453095aa878d4..e8b38148502fe161e0abb5b35dc5dd93ee331373 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -31,6 +31,8 @@ use git_store::{Repository, RepositoryId}; pub mod search_history; mod yarn; +use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope}; + use crate::git_store::GitStore; pub use git_store::{ ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate, @@ -45,7 +47,7 @@ use client::{ }; use clock::ReplicaId; -use dap::{DapRegistry, client::DebugAdapterClient}; +use dap::client::DebugAdapterClient; use collections::{BTreeSet, HashMap, HashSet}; use debounced_delay::DebouncedDelay; @@ -111,7 +113,7 @@ use std::{ use task_store::TaskStore; use terminals::Terminals; -use text::{Anchor, BufferId}; +use text::{Anchor, BufferId, Point}; use toolchain_store::EmptyToolchainStore; use util::{ ResultExt as _, @@ -3667,35 +3669,15 @@ impl Project { range: Range, cx: &mut Context, ) -> Task>> { - let language_name = buffer_handle - 
.read(cx) - .language() - .map(|language| language.name().to_string()); - - let Some(inline_value_provider) = language_name - .and_then(|language| DapRegistry::global(cx).inline_value_provider(&language)) - else { - return Task::ready(Err(anyhow::anyhow!("Inline value provider not found"))); - }; - let snapshot = buffer_handle.read(cx).snapshot(); - let Some(root_node) = snapshot.syntax_root_ancestor(range.end) else { - return Task::ready(Ok(vec![])); - }; + let captures = snapshot.debug_variables_query(Anchor::MIN..range.end); let row = snapshot .summary_for_anchor::(&range.end) .row as usize; - let inline_value_locations = inline_value_provider.provide( - root_node, - snapshot - .text_for_range(Anchor::MIN..range.end) - .collect::() - .as_str(), - row, - ); + let inline_value_locations = provide_inline_values(captures, &snapshot, row); let stack_frame_id = active_stack_frame.stack_frame_id; cx.spawn(async move |this, cx| { @@ -5377,3 +5359,69 @@ fn proto_to_prompt(level: proto::language_server_prompt_request::Level) -> gpui: proto::language_server_prompt_request::Level::Critical(_) => gpui::PromptLevel::Critical, } } + +fn provide_inline_values( + captures: impl Iterator, language::DebuggerTextObject)>, + snapshot: &language::BufferSnapshot, + max_row: usize, +) -> Vec { + let mut variables = Vec::new(); + let mut variable_position = HashSet::default(); + let mut scopes = Vec::new(); + + let active_debug_line_offset = snapshot.point_to_offset(Point::new(max_row as u32, 0)); + + for (capture_range, capture_kind) in captures { + match capture_kind { + language::DebuggerTextObject::Variable => { + let variable_name = snapshot + .text_for_range(capture_range.clone()) + .collect::(); + let point = snapshot.offset_to_point(capture_range.end); + + while scopes.last().map_or(false, |scope: &Range<_>| { + !scope.contains(&capture_range.start) + }) { + scopes.pop(); + } + + if point.row as usize > max_row { + break; + } + + let scope = if scopes + .last() + .map_or(true, |scope| !scope.contains(&active_debug_line_offset)) + { + VariableScope::Global + } else { + VariableScope::Local + }; + + if variable_position.insert(capture_range.end) { + variables.push(InlineValueLocation { + variable_name, + scope, + lookup: VariableLookupKind::Variable, + row: point.row as usize, + column: point.column as usize, + }); + } + } + language::DebuggerTextObject::Scope => { + while scopes.last().map_or_else( + || false, + |scope: &Range| { + !(scope.contains(&capture_range.start) + && scope.contains(&capture_range.end)) + }, + ) { + scopes.pop(); + } + scopes.push(capture_range); + } + } + } + + variables +} From 3c0475d182b2aecbb128d958ad76d91e52ff9c73 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Tue, 24 Jun 2025 15:15:34 -0400 Subject: [PATCH 10/56] debugger: Reorder step icons to be consistent with other editors (#33330) Closes #33303 Release Notes: - debugger: Swap step in/out icon positions in debug panel to be consistent with other editors --- crates/debugger_ui/src/debugger_panel.rs | 48 ++++++++++++------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 2bea91b2dc1b2c928a23ddf101000f2c5a333ffe..b7f3be0426e9c189eb0edf203859c7d2489c75d9 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -695,30 +695,6 @@ impl DebugPanel { } }), ) - .child( - IconButton::new("debug-step-out", 
IconName::ArrowUpRight) - .icon_size(IconSize::XSmall) - .shape(ui::IconButtonShape::Square) - .on_click(window.listener_for( - &running_state, - |this, _, _window, cx| { - this.step_out(cx); - }, - )) - .disabled(thread_status != ThreadStatus::Stopped) - .tooltip({ - let focus_handle = focus_handle.clone(); - move |window, cx| { - Tooltip::for_action_in( - "Step out", - &StepOut, - &focus_handle, - window, - cx, - ) - } - }), - ) .child( IconButton::new( "debug-step-into", @@ -746,6 +722,30 @@ impl DebugPanel { } }), ) + .child( + IconButton::new("debug-step-out", IconName::ArrowUpRight) + .icon_size(IconSize::XSmall) + .shape(ui::IconButtonShape::Square) + .on_click(window.listener_for( + &running_state, + |this, _, _window, cx| { + this.step_out(cx); + }, + )) + .disabled(thread_status != ThreadStatus::Stopped) + .tooltip({ + let focus_handle = focus_handle.clone(); + move |window, cx| { + Tooltip::for_action_in( + "Step out", + &StepOut, + &focus_handle, + window, + cx, + ) + } + }), + ) .child(Divider::vertical()) .child( IconButton::new("debug-restart", IconName::DebugRestart) From eec26c9a41817209c3331d1da4ab1c33b3e1c14f Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 24 Jun 2025 22:21:27 +0300 Subject: [PATCH 11/56] Add initial docs for editor diagnostics (#33325) Release Notes: - N/A --- docs/src/SUMMARY.md | 1 + docs/src/diagnostics.md | 70 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+) create mode 100644 docs/src/diagnostics.md diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index e5fd5744f1a40ff290e5222b6252f89bbf0966d1..1d43872547a366e03136876475004918d9b827b9 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -34,6 +34,7 @@ - [Collaboration](./collaboration.md) - [Git](./git.md) - [Debugger](./debugger.md) +- [Diagnostics](./diagnostics.md) - [Tasks](./tasks.md) - [Remote Development](./remote-development.md) - [Environment Variables](./environment.md) diff --git a/docs/src/diagnostics.md b/docs/src/diagnostics.md new file mode 100644 index 0000000000000000000000000000000000000000..a015fbebf88b64ebb75941133d3ab21279182685 --- /dev/null +++ b/docs/src/diagnostics.md @@ -0,0 +1,70 @@ +# Diagnostics + +Zed gets its diagnostics from the language servers and supports both push and pull variants of the LSP which makes it compatible with all existing language servers. + +# Regular diagnostics + +By default, Zed displays all diagnostics as underlined text in the editor and the scrollbar. + +Editor diagnostics could be filtered with the + +```json5 +"diagnostics_max_severity": null +``` + +editor setting (possible values: `"off"`, `"error"`, `"warning"`, `"info"`, `"hint"`, `null` (default, all diagnostics)). + +The scrollbar ones are configured with the + +```json5 +"scrollbar": { + "diagnostics": "all", +} +``` + +configuration (possible values: `"none"`, `"error"`, `"warning"`, `"information"`, `"all"` (default)) + +The diagnostics could be hovered to display a tooltip with full, rendered diagnostic message. +Or, `editor::GoToDiagnostic` and `editor::GoToPreviousDiagnostic` could be used to navigate between diagnostics in the editor, showing a popover for the currently active diagnostic. + +# Inline diagnostics (Error lens) + +Zed supports showing diagnostic as lens to the right of the code. 
+This is disabled by default, but can either be temporarily turned on (or off) using the editor menu, or permanently, using the + +```json5 +"diagnostics": { + "inline": { + "enabled": true, + "max_severity": null, // same values as the `diagnostics_max_severity` from the editor settings + } +} +``` + +# Other UI places + +## Project Panel + +Project panel can have its entries coloured based on the severity of the diagnostics in the file. + +To configure, use + +```json5 +"project_panel": { + "diagnostics": "all", +} +``` + +configuration (possible values: `"off"`, `"errors"`, `"all"` (default)) + +## Editor tabs + +Similar to the project panel, editor tabs can be colorized with the + +```json5 +"tabs": { + "show_diagnostics": "off", +} +``` + +configuration (possible values: `"off"` (default), `"errors"`, `"all"`) From 9427526a4191430f828129b7255353050d7899fc Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Tue, 24 Jun 2025 13:43:33 -0600 Subject: [PATCH 12/56] gpui: Clear the element arena after presenting the frame (#33338) This is an easy way to shave some microseconds off the critical path for frame rendering. On my machine this reduces typical frame rendering latency by ~100 microseconds, probably quite a bit more on slower machines. Here is how long it typically takes to drop elements from the arena, from a fairly brief run: ![image](https://github.com/user-attachments/assets/65cfd911-eccf-4393-887d-8cad2cd27148) Release Notes: - N/A --- crates/gpui/src/app.rs | 2 +- crates/gpui/src/window.rs | 34 ++++++++++++++++++++++--------- crates/workspace/src/workspace.rs | 2 +- 3 files changed, 26 insertions(+), 12 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 109d5e7454c4a2b0bcb276243f7f5a6cc072efce..1853e6e93488e0cba9db2380594eb3f28b4a0132 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -909,7 +909,7 @@ impl App { }) .collect::>() { - self.update_window(window, |_, window, cx| window.draw(cx)) + self.update_window(window, |_, window, cx| window.draw(cx).clear()) .unwrap(); } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index f0f4579b2909a805a3297595997965d32ca37ebb..0e3f5763dad3a92a7910b424a7f2f04d2074e3fb 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -210,6 +210,23 @@ thread_local! { pub(crate) static ELEMENT_ARENA: RefCell = RefCell::new(Arena::new(32 * 1024 * 1024)); } +/// Returned when the element arena has been used and so must be cleared before the next draw. +#[must_use] +pub struct ArenaClearNeeded; + +impl ArenaClearNeeded { + /// Clear the element arena. + pub fn clear(self) { + ELEMENT_ARENA.with_borrow_mut(|element_arena| { + let percentage = (element_arena.len() as f32 / element_arena.capacity() as f32) * 100.; + if percentage >= 80. { + log::warn!("elevated element arena occupation: {}.", percentage); + } + element_arena.clear(); + }) + } +} + pub(crate) type FocusMap = RwLock>; impl FocusId { @@ -968,8 +985,10 @@ impl Window { measure("frame duration", || { handle .update(&mut cx, |_, window, cx| { - window.draw(cx); + let arena_clear_needed = window.draw(cx); window.present(); + // drop the arena elements after present to reduce latency + arena_clear_needed.clear(); }) .log_err(); }) @@ -1730,7 +1749,7 @@ impl Window { /// Produces a new frame and assigns it to `rendered_frame`. To actually show /// the contents of the new [Scene], use [present]. 
#[profiling::function] - pub fn draw(&mut self, cx: &mut App) { + pub fn draw(&mut self, cx: &mut App) -> ArenaClearNeeded { self.invalidate_entities(); cx.entities.clear_accessed(); debug_assert!(self.rendered_entity_stack.is_empty()); @@ -1754,13 +1773,6 @@ impl Window { self.layout_engine.as_mut().unwrap().clear(); self.text_system().finish_frame(); self.next_frame.finish(&mut self.rendered_frame); - ELEMENT_ARENA.with_borrow_mut(|element_arena| { - let percentage = (element_arena.len() as f32 / element_arena.capacity() as f32) * 100.; - if percentage >= 80. { - log::warn!("elevated element arena occupation: {}.", percentage); - } - element_arena.clear(); - }); self.invalidator.set_phase(DrawPhase::Focus); let previous_focus_path = self.rendered_frame.focus_path(); @@ -1802,6 +1814,8 @@ impl Window { self.refreshing = false; self.invalidator.set_phase(DrawPhase::None); self.needs_present.set(true); + + ArenaClearNeeded } fn record_entities_accessed(&mut self, cx: &mut App) { @@ -3467,7 +3481,7 @@ impl Window { fn dispatch_key_event(&mut self, event: &dyn Any, cx: &mut App) { if self.invalidator.is_dirty() { - self.draw(cx); + self.draw(cx).clear(); } let node_id = self.focus_node_id_in_rendered_frame(self.focus); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index f9a25b2018243c520934a8e666b9c1b177e8149d..1e3d648d4245c175c026f4587902f7b3eb099bf2 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -2199,7 +2199,7 @@ impl Workspace { // (Note that the tests always do this implicitly, so you must manually test with something like: // "bindings": { "g z": ["workspace::SendKeystrokes", ": j u"]} // ) - window.draw(cx); + window.draw(cx).clear(); } })?; } From 0c78a115debd35f43d748b42bfeaa5486252a835 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Tue, 24 Jun 2025 16:24:06 -0400 Subject: [PATCH 13/56] Patch panic around pinned tab count (#33335) After much investigation, I have not been able to track down what is causing [this panic](https://github.com/zed-industries/zed/issues/33342). I'm clamping the value for now, because a bug is better than a crash. Hopefully someone finds reproduction steps, and I will implement a proper fix. Release Notes: - N/A --- crates/workspace/src/pane.rs | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 5fd04a556cfc996b5616f3bde1989ef36f0e236d..9644ef9e7967529098129a73d30442f800c391ad 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2784,7 +2784,19 @@ impl Pane { }) .collect::>(); let tab_count = tab_items.len(); - let unpinned_tabs = tab_items.split_off(self.pinned_tab_count); + let safe_pinned_count = if self.pinned_tab_count > tab_count { + log::warn!( + "Pinned tab count ({}) exceeds actual tab count ({}). \ + This should not happen. 
If possible, add reproduction steps, \ + in a comment, to https://github.com/zed-industries/zed/issues/33342", + self.pinned_tab_count, + tab_count + ); + tab_count + } else { + self.pinned_tab_count + }; + let unpinned_tabs = tab_items.split_off(safe_pinned_count); let pinned_tabs = tab_items; TabBar::new("tab_bar") .when( From f738fbd4f8754ae83c720a5b3cb81e8dc3315ee5 Mon Sep 17 00:00:00 2001 From: waffle Date: Tue, 24 Jun 2025 22:28:57 +0200 Subject: [PATCH 14/56] gpui: Disable rounding in the layout engine (#31836) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Rounding broke (among other things, probably) pixel-perfect image rendering with non-power-of-two scaling factor. An example which reproduces the problem can be found [here](https://github.com/WaffleLapkin/gpui_taffy_rounding_whyyyyy). How it looks with `gpui` from `main`: ![2025-05-31 11:34:25+CEST](https://github.com/user-attachments/assets/2cb19312-6ba6-4e80-8072-f89ddedff77b) How it looks with this patch: ![2025-05-31 11:35:28+CEST](https://github.com/user-attachments/assets/114b52a9-58c0-4600-871c-a20eceb7179e) Both screenshots are made on kde+wayland with magnification using kde's built-in magnification (`Meta`+`+`, `Meta`+`-`). Note that screenshot apps have a high chance of lying 🙃 The image itself is 400 by 300 pixels of red/green checkerboard pattern made specifically to exaggerate scaling issues. Release Notes: - N/A --- crates/editor/src/editor_tests.rs | 2 +- crates/gpui/src/taffy.rs | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index a6460a50483a2ff249bee7135d3488146caf6d76..7f4e19e7d4fab02a55ffabdf8362c586ab3503c8 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -15440,7 +15440,7 @@ async fn test_completions_default_resolve_data_handling(cx: &mut TestAppContext) // Completions that have already been resolved are skipped. assert_eq!( *resolved_items.lock(), - items[items.len() - 16..items.len() - 4] + items[items.len() - 17..items.len() - 4] .iter() .cloned() .map(|mut item| { diff --git a/crates/gpui/src/taffy.rs b/crates/gpui/src/taffy.rs index 597bff13e2acf875f264356e606237c71eb604c4..f12c62d504395a2afbf698685a4eb3cc5f0e4e1f 100644 --- a/crates/gpui/src/taffy.rs +++ b/crates/gpui/src/taffy.rs @@ -28,8 +28,10 @@ const EXPECT_MESSAGE: &str = "we should avoid taffy layout errors by constructio impl TaffyLayoutEngine { pub fn new() -> Self { + let mut taffy = TaffyTree::new(); + taffy.disable_rounding(); TaffyLayoutEngine { - taffy: TaffyTree::new(), + taffy, absolute_layout_bounds: FxHashMap::default(), computed_layouts: FxHashSet::default(), } From be95716e94948d3d01df523d83c29aedff77009e Mon Sep 17 00:00:00 2001 From: vipex <101529155+vipexv@users.noreply.github.com> Date: Tue, 24 Jun 2025 23:20:14 +0200 Subject: [PATCH 15/56] helix: Prevent cursor move on entering insert mode (#33201) Closes #33061 https://github.com/user-attachments/assets/3b3e146e-7c12-412e-b4dd-c70411891b9e Release Notes: - Fixed cursor unexpectedly moving when entering/exiting insert mode in Helix mode, making the behavior consistent with the Helix editor. 
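The fix below boils down to gating the Vim-style one-column cursor adjustment behind the Helix mode setting, so the cursor stays put when Helix mode is active. A minimal, self-contained sketch of that gating follows; the `Cursor` struct and `adjust_cursor_on_mode_switch` function are made up for illustration, while Zed's real code works on `Editor` selections and `HelixModeSetting` as the diff shows.

```rust
// Illustrative sketch only, not Zed's actual types: when the Helix flag is set,
// the Vim-style "step the cursor back one column" adjustment is skipped, so the
// cursor position is preserved across the mode switch.
struct Cursor {
    column: u32,
}

fn adjust_cursor_on_mode_switch(cursor: &mut Cursor, helix_mode: bool) {
    if !helix_mode {
        // Vim behavior: nudge the cursor one column to the left (clamped at 0).
        cursor.column = cursor.column.saturating_sub(1);
    }
    // Helix behavior: leave the cursor exactly where it was.
}

fn main() {
    let mut vim_cursor = Cursor { column: 5 };
    adjust_cursor_on_mode_switch(&mut vim_cursor, false);
    assert_eq!(vim_cursor.column, 4);

    let mut helix_cursor = Cursor { column: 5 };
    adjust_cursor_on_mode_switch(&mut helix_cursor, true);
    assert_eq!(helix_cursor.column, 5);
    println!("ok");
}
```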
--- crates/vim/src/insert.rs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/crates/vim/src/insert.rs b/crates/vim/src/insert.rs index 585f324683dcb2f16c652e1a7abbeed95d5f5c37..a30af8769fac99ac1d1b8c131b32e8c440e0b180 100644 --- a/crates/vim/src/insert.rs +++ b/crates/vim/src/insert.rs @@ -29,15 +29,20 @@ impl Vim { self.stop_recording_immediately(action.boxed_clone(), cx); if count <= 1 || Vim::globals(cx).dot_replaying { self.create_mark("^".into(), window, cx); + self.update_editor(window, cx, |_, editor, window, cx| { editor.dismiss_menus_and_popups(false, window, cx); - editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| { - s.move_cursors_with(|map, mut cursor, _| { - *cursor.column_mut() = cursor.column().saturating_sub(1); - (map.clip_point(cursor, Bias::Left), SelectionGoal::None) + + if !HelixModeSetting::get_global(cx).0 { + editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| { + s.move_cursors_with(|map, mut cursor, _| { + *cursor.column_mut() = cursor.column().saturating_sub(1); + (map.clip_point(cursor, Bias::Left), SelectionGoal::None) + }); }); - }); + } }); + if HelixModeSetting::get_global(cx).0 { self.switch_mode(Mode::HelixNormal, false, window, cx); } else { From aa330fcf2c4c1153d5c4f0408a4f6bfc145d94d5 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Tue, 24 Jun 2025 17:54:03 -0600 Subject: [PATCH 16/56] Use background task for settings migrations + notify about errors (#30009) Release Notes: - N/A --- crates/zed/src/zed/migrate.rs | 41 +++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/crates/zed/src/zed/migrate.rs b/crates/zed/src/zed/migrate.rs index 32c8c17a6f13df7e95cda4c02c8e3ca2dee178c9..48bffb4114011d119e86ff28180bb2e4b898b3d1 100644 --- a/crates/zed/src/zed/migrate.rs +++ b/crates/zed/src/zed/migrate.rs @@ -4,6 +4,7 @@ use fs::Fs; use migrator::{migrate_keymap, migrate_settings}; use settings::{KeymapFile, Settings, SettingsStore}; use util::ResultExt; +use workspace::notifications::NotifyTaskExt; use std::sync::Arc; @@ -153,7 +154,7 @@ impl ToolbarItemView for MigrationBanner { if &target == paths::keymap_file() { self.migration_type = Some(MigrationType::Keymap); let fs = ::global(cx); - let should_migrate = should_migrate_keymap(fs); + let should_migrate = cx.background_spawn(should_migrate_keymap(fs)); self.should_migrate_task = Some(cx.spawn_in(window, async move |this, cx| { if let Ok(true) = should_migrate.await { this.update(cx, |this, cx| { @@ -165,7 +166,7 @@ impl ToolbarItemView for MigrationBanner { } else if &target == paths::settings_file() { self.migration_type = Some(MigrationType::Settings); let fs = ::global(cx); - let should_migrate = should_migrate_settings(fs); + let should_migrate = cx.background_spawn(should_migrate_settings(fs)); self.should_migrate_task = Some(cx.spawn_in(window, async move |this, cx| { if let Ok(true) = should_migrate.await { this.update(cx, |this, cx| { @@ -234,20 +235,22 @@ impl Render for MigrationBanner { ), ) .child( - Button::new("backup-and-migrate", "Backup and Update").on_click(move |_, _, cx| { - let fs = ::global(cx); - match migration_type { - Some(MigrationType::Keymap) => { - cx.spawn(async move |_| write_keymap_migration(&fs).await.ok()) - .detach(); + Button::new("backup-and-migrate", "Backup and Update").on_click( + move |_, window, cx| { + let fs = ::global(cx); + match migration_type { + Some(MigrationType::Keymap) => { + cx.background_spawn(write_keymap_migration(fs.clone())) + 
.detach_and_notify_err(window, cx); + } + Some(MigrationType::Settings) => { + cx.background_spawn(write_settings_migration(fs.clone())) + .detach_and_notify_err(window, cx); + } + None => unreachable!(), } - Some(MigrationType::Settings) => { - cx.spawn(async move |_| write_settings_migration(&fs).await.ok()) - .detach(); - } - None => unreachable!(), - } - }), + }, + ), ) .into_any_element() } @@ -269,8 +272,8 @@ async fn should_migrate_settings(fs: Arc) -> Result { Ok(false) } -async fn write_keymap_migration(fs: &Arc) -> Result<()> { - let old_text = KeymapFile::load_keymap_file(fs).await?; +async fn write_keymap_migration(fs: Arc) -> Result<()> { + let old_text = KeymapFile::load_keymap_file(&fs).await?; let Ok(Some(new_text)) = migrate_keymap(&old_text) else { return Ok(()); }; @@ -294,8 +297,8 @@ async fn write_keymap_migration(fs: &Arc) -> Result<()> { Ok(()) } -async fn write_settings_migration(fs: &Arc) -> Result<()> { - let old_text = SettingsStore::load_settings(fs).await?; +async fn write_settings_migration(fs: Arc) -> Result<()> { + let old_text = SettingsStore::load_settings(&fs).await?; let Ok(Some(new_text)) = migrate_settings(&old_text) else { return Ok(()); }; From cf086544e39b5e703bbde347ca0fecd3561b4060 Mon Sep 17 00:00:00 2001 From: Remco Smits Date: Wed, 25 Jun 2025 04:19:00 +0200 Subject: [PATCH 17/56] debugger: Add support for completion triggers in debug console (#33211) Release Notes: - Debugger: Add support for completion triggers in debug console --- .../src/session/running/console.rs | 29 +++++++++++++++---- 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index e84e0d74e6c9302d7edf61f794809168c54c279e..0b4bc8865e0afacabb4ccec7f5a3f36016aed7c4 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -582,14 +582,31 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider { fn is_completion_trigger( &self, - _buffer: &Entity, - _position: language::Anchor, - _text: &str, + buffer: &Entity, + position: language::Anchor, + text: &str, _trigger_in_words: bool, - _menu_is_open: bool, - _cx: &mut Context, + menu_is_open: bool, + cx: &mut Context, ) -> bool { - true + let snapshot = buffer.read(cx).snapshot(); + if !menu_is_open && !snapshot.settings_at(position, cx).show_completions_on_input { + return false; + } + + self.0 + .read_with(cx, |console, cx| { + console + .session + .read(cx) + .capabilities() + .completion_trigger_characters + .as_ref() + .map(|triggers| triggers.contains(&text.to_string())) + }) + .ok() + .flatten() + .unwrap_or(true) } } From 17774b17fb5fc923e3354654a3b9711d9cead5b8 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Tue, 24 Jun 2025 22:20:31 -0400 Subject: [PATCH 18/56] debugger: Add a tooltip to the session picker with the session ID (#33331) This helps correlate sessions in the picker with entries in the debug adapter logs view. 
Release Notes: - N/A --- crates/debugger_ui/src/session.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/debugger_ui/src/session.rs b/crates/debugger_ui/src/session.rs index e0e6126867462e7440657b2dce3f40ead9a23e82..ce6730bee77495fa94ad2f079fdf6bda9d219be0 100644 --- a/crates/debugger_ui/src/session.rs +++ b/crates/debugger_ui/src/session.rs @@ -11,7 +11,7 @@ use project::worktree_store::WorktreeStore; use rpc::proto; use running::RunningState; use std::{cell::OnceCell, sync::OnceLock}; -use ui::{Indicator, prelude::*}; +use ui::{Indicator, Tooltip, prelude::*}; use workspace::{ CollaboratorId, FollowableItem, ViewId, Workspace, item::{self, Item}, @@ -153,6 +153,8 @@ impl DebugSession { }; h_flex() + .id("session-label") + .tooltip(Tooltip::text(format!("Session {}", self.session_id(cx).0,))) .ml(depth * px(16.0)) .gap_2() .when_some(icon, |this, indicator| this.child(indicator)) From 014f93008a3df942b2cd2c598a73d1c800f51f53 Mon Sep 17 00:00:00 2001 From: marton csutora Date: Wed, 25 Jun 2025 05:21:59 +0200 Subject: [PATCH 19/56] Make remote mkdir shell-independent for compatibility (#32997) - Closes: #30962 Nushell does not support mkdir -p So invoke sh -c "mkdir -p" instead which will also work under nushell. Release Notes: - Fixed ssh remotes running Nushell (and possibly other non posix-compliant shells) --------- Co-authored-by: Conrad Irwin --- crates/remote/src/ssh_session.rs | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index 660e5627807c2c18d4d7c3b6a0cbab1cf2cea07e..ffcf3b378340d145bcf253932aecc3bc2d35c557 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -1805,7 +1805,16 @@ impl SshRemoteConnection { ) -> Result<()> { if let Some(parent) = tmp_path_gz.parent() { self.socket - .run_command("mkdir", &["-p", &parent.to_string_lossy()]) + .run_command( + "sh", + &[ + "-c", + &shell_script!( + "mkdir -p {parent}", + parent = parent.to_string_lossy().as_ref() + ), + ], + ) .await?; } @@ -1877,7 +1886,16 @@ impl SshRemoteConnection { ) -> Result<()> { if let Some(parent) = tmp_path_gz.parent() { self.socket - .run_command("mkdir", &["-p", &parent.to_string_lossy()]) + .run_command( + "sh", + &[ + "-c", + &shell_script!( + "mkdir -p {parent}", + parent = parent.to_string_lossy().as_ref() + ), + ], + ) .await?; } From 96409965e428ec54f3c994e5a6b7d6d7a561ed40 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Tue, 24 Jun 2025 23:18:35 -0600 Subject: [PATCH 20/56] Cleanup handling of surrounding word logic, fixing crash in editor::SelectAllMatches (#33353) This reduces code complexity and avoids unnecessary roundtripping through `DisplayPoint`. Hopefully this doesn't cause behavior changes, but has one known behavior improvement: `clip_at_line_ends` logic caused `is_inside_word` to return false when on a word at the end of the line. In vim mode, this caused `select_all_matches` to not select words at the end of lines, and in some cases crashes due to not finding any selections. 
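The `is_inside_word` check that this patch moves onto `MultiBufferSnapshot` answers the question by classifying the characters immediately before and after the offset. A standalone sketch of that idea, under simplified assumptions: the real code consults the buffer's char classifier, whereas the `is_word_char` helper and plain-string version here are made up for illustration.

```rust
// Simplified sketch of the prev/next character-kind check; Zed's real
// implementation uses the buffer's CharClassifier instead of this helper.
fn is_word_char(c: char) -> bool {
    c.is_alphanumeric() || c == '_'
}

fn is_inside_word(text: &str, offset: usize) -> bool {
    let prev = text[..offset].chars().next_back();
    let next = text[offset..].chars().next();
    matches!((prev, next), (Some(p), Some(n)) if is_word_char(p) && is_word_char(n))
}

fn main() {
    let text = "let foo = 1;";
    assert!(is_inside_word(text, 5)); // between 'f' and 'o': inside "foo"
    assert!(!is_inside_word(text, 4)); // start of "foo": previous char is a space
    assert!(!is_inside_word(text, 7)); // end of "foo": next char is a space
    println!("ok");
}
```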
Closes #29823 Release Notes: - N/A --- crates/editor/src/editor.rs | 118 ++++++++++-------------- crates/editor/src/editor_tests.rs | 9 ++ crates/editor/src/movement.rs | 58 +----------- crates/multi_buffer/src/multi_buffer.rs | 13 +++ 4 files changed, 71 insertions(+), 127 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 6e9a9be0fe3267e5b13bfdb1b9d880848fa968bf..770ad7fa706027aa8146192b3f1d2155d06a4e31 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3388,9 +3388,12 @@ impl Editor { auto_scroll = true; } 2 => { - let range = movement::surrounding_word(&display_map, position); - start = buffer.anchor_before(range.start.to_point(&display_map)); - end = buffer.anchor_before(range.end.to_point(&display_map)); + let position = display_map + .clip_point(position, Bias::Left) + .to_offset(&display_map, Bias::Left); + let (range, _) = buffer.surrounding_word(position, false); + start = buffer.anchor_before(range.start); + end = buffer.anchor_before(range.end); mode = SelectMode::Word(start..end); auto_scroll = true; } @@ -3523,37 +3526,39 @@ impl Editor { if self.columnar_selection_state.is_some() { self.select_columns(position, goal_column, &display_map, window, cx); } else if let Some(mut pending) = self.selections.pending_anchor() { - let buffer = self.buffer.read(cx).snapshot(cx); + let buffer = &display_map.buffer_snapshot; let head; let tail; let mode = self.selections.pending_mode().unwrap(); match &mode { SelectMode::Character => { head = position.to_point(&display_map); - tail = pending.tail().to_point(&buffer); + tail = pending.tail().to_point(buffer); } SelectMode::Word(original_range) => { - let original_display_range = original_range.start.to_display_point(&display_map) - ..original_range.end.to_display_point(&display_map); - let original_buffer_range = original_display_range.start.to_point(&display_map) - ..original_display_range.end.to_point(&display_map); - if movement::is_inside_word(&display_map, position) - || original_display_range.contains(&position) + let offset = display_map + .clip_point(position, Bias::Left) + .to_offset(&display_map, Bias::Left); + let original_range = original_range.to_offset(buffer); + + let head_offset = if buffer.is_inside_word(offset, false) + || original_range.contains(&offset) { - let word_range = movement::surrounding_word(&display_map, position); - if word_range.start < original_display_range.start { - head = word_range.start.to_point(&display_map); + let (word_range, _) = buffer.surrounding_word(offset, false); + if word_range.start < original_range.start { + word_range.start } else { - head = word_range.end.to_point(&display_map); + word_range.end } } else { - head = position.to_point(&display_map); - } + offset + }; - if head <= original_buffer_range.start { - tail = original_buffer_range.end; + head = head_offset.to_point(buffer); + if head_offset <= original_range.start { + tail = original_range.end.to_point(buffer); } else { - tail = original_buffer_range.start; + tail = original_range.start.to_point(buffer); } } SelectMode::Line(original_range) => { @@ -10794,7 +10799,6 @@ impl Editor { where Fn: FnMut(&str) -> String, { - let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let buffer = self.buffer.read(cx).snapshot(cx); let mut new_selections = Vec::new(); @@ -10805,13 +10809,8 @@ impl Editor { let selection_is_empty = selection.is_empty(); let (start, end) = if selection_is_empty { - let word_range = movement::surrounding_word( - &display_map, 
- selection.start.to_display_point(&display_map), - ); - let start = word_range.start.to_offset(&display_map, Bias::Left); - let end = word_range.end.to_offset(&display_map, Bias::Left); - (start, end) + let (word_range, _) = buffer.surrounding_word(selection.start, false); + (word_range.start, word_range.end) } else { (selection.start, selection.end) }; @@ -13255,12 +13254,10 @@ impl Editor { let query_match = query_match.unwrap(); // can only fail due to I/O let offset_range = start_offset + query_match.start()..start_offset + query_match.end(); - let display_range = offset_range.start.to_display_point(display_map) - ..offset_range.end.to_display_point(display_map); if !select_next_state.wordwise - || (!movement::is_inside_word(display_map, display_range.start) - && !movement::is_inside_word(display_map, display_range.end)) + || (!buffer.is_inside_word(offset_range.start, false) + && !buffer.is_inside_word(offset_range.end, false)) { // TODO: This is n^2, because we might check all the selections if !selections @@ -13324,12 +13321,9 @@ impl Editor { if only_carets { for selection in &mut selections { - let word_range = movement::surrounding_word( - display_map, - selection.start.to_display_point(display_map), - ); - selection.start = word_range.start.to_offset(display_map, Bias::Left); - selection.end = word_range.end.to_offset(display_map, Bias::Left); + let (word_range, _) = buffer.surrounding_word(selection.start, false); + selection.start = word_range.start; + selection.end = word_range.end; selection.goal = SelectionGoal::None; selection.reversed = false; self.select_match_ranges( @@ -13410,18 +13404,22 @@ impl Editor { } else { query_match.start()..query_match.end() }; - let display_range = offset_range.start.to_display_point(&display_map) - ..offset_range.end.to_display_point(&display_map); if !select_next_state.wordwise - || (!movement::is_inside_word(&display_map, display_range.start) - && !movement::is_inside_word(&display_map, display_range.end)) + || (!buffer.is_inside_word(offset_range.start, false) + && !buffer.is_inside_word(offset_range.end, false)) { new_selections.push(offset_range.start..offset_range.end); } } select_next_state.done = true; + + if new_selections.is_empty() { + log::error!("bug: new_selections is empty in select_all_matches"); + return Ok(()); + } + self.unfold_ranges(&new_selections.clone(), false, false, cx); self.change_selections(None, window, cx, |selections| { selections.select_ranges(new_selections) @@ -13481,12 +13479,10 @@ impl Editor { let query_match = query_match.unwrap(); // can only fail due to I/O let offset_range = end_offset - query_match.end()..end_offset - query_match.start(); - let display_range = offset_range.start.to_display_point(&display_map) - ..offset_range.end.to_display_point(&display_map); if !select_prev_state.wordwise - || (!movement::is_inside_word(&display_map, display_range.start) - && !movement::is_inside_word(&display_map, display_range.end)) + || (!buffer.is_inside_word(offset_range.start, false) + && !buffer.is_inside_word(offset_range.end, false)) { next_selected_range = Some(offset_range); break; @@ -13544,12 +13540,9 @@ impl Editor { if only_carets { for selection in &mut selections { - let word_range = movement::surrounding_word( - &display_map, - selection.start.to_display_point(&display_map), - ); - selection.start = word_range.start.to_offset(&display_map, Bias::Left); - selection.end = word_range.end.to_offset(&display_map, Bias::Left); + let (word_range, _) = buffer.surrounding_word(selection.start, false); 
+ selection.start = word_range.start; + selection.end = word_range.end; selection.goal = SelectionGoal::None; selection.reversed = false; self.select_match_ranges( @@ -14024,26 +14017,11 @@ impl Editor { if let Some((node, _)) = buffer.syntax_ancestor(old_range.clone()) { // manually select word at selection if ["string_content", "inline"].contains(&node.kind()) { - let word_range = { - let display_point = buffer - .offset_to_point(old_range.start) - .to_display_point(&display_map); - let Range { start, end } = - movement::surrounding_word(&display_map, display_point); - start.to_point(&display_map).to_offset(&buffer) - ..end.to_point(&display_map).to_offset(&buffer) - }; + let (word_range, _) = buffer.surrounding_word(old_range.start, false); // ignore if word is already selected if !word_range.is_empty() && old_range != word_range { - let last_word_range = { - let display_point = buffer - .offset_to_point(old_range.end) - .to_display_point(&display_map); - let Range { start, end } = - movement::surrounding_word(&display_map, display_point); - start.to_point(&display_map).to_offset(&buffer) - ..end.to_point(&display_map).to_offset(&buffer) - }; + let (last_word_range, _) = + buffer.surrounding_word(old_range.end, false); // only select word if start and end point belongs to same word if word_range == last_word_range { selected_larger_node = true; diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 7f4e19e7d4fab02a55ffabdf8362c586ab3503c8..6a579cb1cd310431a972329b97ce29c5ffefa864 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6667,6 +6667,15 @@ async fn test_select_all_matches(cx: &mut TestAppContext) { cx.update_editor(|e, window, cx| e.select_all_matches(&SelectAllMatches, window, cx)) .unwrap(); cx.assert_editor_state("abc\n« ˇ»abc\nabc"); + + // Test with a single word and clip_at_line_ends=true (#29823) + cx.set_state("aˇbc"); + cx.update_editor(|e, window, cx| { + e.set_clip_at_line_ends(true, cx); + e.select_all_matches(&SelectAllMatches, window, cx).unwrap(); + e.set_clip_at_line_ends(false, cx); + }); + cx.assert_editor_state("«abcˇ»"); } #[gpui::test] diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index e4167ee68ebf7e069d26609385b4063e21c3f09c..b9b7cb2e58c56cb3b1e14e1c52aa7b8b38f510b6 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -2,7 +2,7 @@ //! in editor given a given motion (e.g. it handles converting a "move left" command into coordinates in editor). It is exposed mostly for use by vim crate. 
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint}; -use crate::{CharKind, DisplayRow, EditorStyle, ToOffset, ToPoint, scroll::ScrollAnchor}; +use crate::{DisplayRow, EditorStyle, ToOffset, ToPoint, scroll::ScrollAnchor}; use gpui::{Pixels, WindowTextSystem}; use language::Point; use multi_buffer::{MultiBufferRow, MultiBufferSnapshot}; @@ -721,38 +721,6 @@ pub fn chars_before( }) } -pub(crate) fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool { - let raw_point = point.to_point(map); - let classifier = map.buffer_snapshot.char_classifier_at(raw_point); - let ix = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left); - let text = &map.buffer_snapshot; - let next_char_kind = text.chars_at(ix).next().map(|c| classifier.kind(c)); - let prev_char_kind = text - .reversed_chars_at(ix) - .next() - .map(|c| classifier.kind(c)); - prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word)) -} - -pub(crate) fn surrounding_word( - map: &DisplaySnapshot, - position: DisplayPoint, -) -> Range { - let position = map - .clip_point(position, Bias::Left) - .to_offset(map, Bias::Left); - let (range, _) = map.buffer_snapshot.surrounding_word(position, false); - let start = range - .start - .to_point(&map.buffer_snapshot) - .to_display_point(map); - let end = range - .end - .to_point(&map.buffer_snapshot) - .to_display_point(map); - start..end -} - /// Returns a list of lines (represented as a [`DisplayPoint`] range) contained /// within a passed range. /// @@ -1091,30 +1059,6 @@ mod tests { }); } - #[gpui::test] - fn test_surrounding_word(cx: &mut gpui::App) { - init_test(cx); - - fn assert(marked_text: &str, cx: &mut gpui::App) { - let (snapshot, display_points) = marked_display_snapshot(marked_text, cx); - assert_eq!( - surrounding_word(&snapshot, display_points[1]), - display_points[0]..display_points[2], - "{}", - marked_text - ); - } - - assert("ˇˇloremˇ ipsum", cx); - assert("ˇloˇremˇ ipsum", cx); - assert("ˇloremˇˇ ipsum", cx); - assert("loremˇ ˇ ˇipsum", cx); - assert("lorem\nˇˇˇ\nipsum", cx); - assert("lorem\nˇˇipsumˇ", cx); - assert("loremˇ,ˇˇ ipsum", cx); - assert("ˇloremˇˇ, ipsum", cx); - } - #[gpui::test] async fn test_move_up_and_down_with_excerpts(cx: &mut gpui::TestAppContext) { cx.update(|cx| { diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 955b17d523914ec03aa16eb6422aaa281159dbc0..e22fdb1ed5a978211d4dc6fd071107600ccf789f 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -4214,6 +4214,19 @@ impl MultiBufferSnapshot { self.diffs.values().any(|diff| !diff.is_empty()) } + pub fn is_inside_word(&self, position: T, for_completion: bool) -> bool { + let position = position.to_offset(self); + let classifier = self + .char_classifier_at(position) + .for_completion(for_completion); + let next_char_kind = self.chars_at(position).next().map(|c| classifier.kind(c)); + let prev_char_kind = self + .reversed_chars_at(position) + .next() + .map(|c| classifier.kind(c)); + prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word)) + } + pub fn surrounding_word( &self, start: T, From 098896146e00bdf7fc25b1d7c74ab87b0778619d Mon Sep 17 00:00:00 2001 From: Vladimir Kuznichenkov <5330267+kuzaxak@users.noreply.github.com> Date: Wed, 25 Jun 2025 10:37:07 +0300 Subject: [PATCH 21/56] bedrock: Fix subsequent bedrock tool calls fail (#33174) Closes #30714 Bedrock converse api expect to see tool options if at least one tool 
was used in conversation in the past messages. Right now if `LanguageModelToolChoice::None` isn't supported edit agent [remove][1] tools from request. That point breaks Converse API of Bedrock. As was proposed in [the issue][2] we won't drop tool choose but instead will deny any of them if model will respond with a tool choose. [1]: https://github.com/x-qdo/zed/blob/fceba6c79540677c2504d2c22191963b6170591a/crates/assistant_tools/src/edit_agent.rs#L703 [2]: https://github.com/zed-industries/zed/issues/30714#issuecomment-2886422716 Release Notes: - Fixed bedrock tool calls in edit mode --- .../language_models/src/provider/bedrock.rs | 37 ++++++++++++++++--- 1 file changed, 31 insertions(+), 6 deletions(-) diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index ed5e3726165ff9b67c7da1e8deb25a7f6fde2cc6..f0e644721e7f058525129e0fc216a3d21aea4729 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -503,7 +503,8 @@ impl LanguageModel for BedrockModel { LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => { self.model.supports_tool_use() } - LanguageModelToolChoice::None => false, + // Add support for None - we'll filter tool calls at response + LanguageModelToolChoice::None => self.model.supports_tool_use(), } } @@ -549,6 +550,8 @@ impl LanguageModel for BedrockModel { } }; + let deny_tool_calls = request.tool_choice == Some(LanguageModelToolChoice::None); + let request = match into_bedrock( request, model_id, @@ -565,11 +568,15 @@ impl LanguageModel for BedrockModel { let request = self.stream_completion(request, cx); let future = self.request_limiter.stream(async move { let response = request.map_err(|err| anyhow!(err))?.await; - Ok(map_to_language_model_completion_events( - response, - owned_handle, - )) + let events = map_to_language_model_completion_events(response, owned_handle); + + if deny_tool_calls { + Ok(deny_tool_use_events(events).boxed()) + } else { + Ok(events.boxed()) + } }); + async move { Ok(future.await?.boxed()) }.boxed() } @@ -578,6 +585,23 @@ impl LanguageModel for BedrockModel { } } +fn deny_tool_use_events( + events: impl Stream>, +) -> impl Stream> { + events.map(|event| { + match event { + Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => { + // Convert tool use to an error message if model decided to call it + Ok(LanguageModelCompletionEvent::Text(format!( + "\n\n[Error: Tool calls are disabled in this context. 
Attempted to call '{}']", + tool_use.name + ))) + } + other => other, + } + }) +} + pub fn into_bedrock( request: LanguageModelRequest, model: String, @@ -714,7 +738,8 @@ pub fn into_bedrock( BedrockToolChoice::Any(BedrockAnyToolChoice::builder().build()) } Some(LanguageModelToolChoice::None) => { - anyhow::bail!("LanguageModelToolChoice::None is not supported"); + // For None, we still use Auto but will filter out tool calls in the response + BedrockToolChoice::Auto(BedrockAutoToolChoice::builder().build()) } }; let tool_config: BedrockToolConfig = BedrockToolConfig::builder() From 108162423da6d5d37bd78bffed74e7ebc2e8a83b Mon Sep 17 00:00:00 2001 From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com> Date: Wed, 25 Jun 2025 13:12:30 +0530 Subject: [PATCH 22/56] language_models: Emit UsageUpdate events for token usage in DeepSeek and OpenAI (#33242) Closes #ISSUE Release Notes: - N/A --- crates/deepseek/src/deepseek.rs | 11 ++++++----- crates/language_models/src/provider/deepseek.rs | 11 ++++++++++- crates/language_models/src/provider/open_ai.rs | 15 ++++++++++++--- crates/open_ai/src/open_ai.rs | 6 +++--- 4 files changed, 31 insertions(+), 12 deletions(-) diff --git a/crates/deepseek/src/deepseek.rs b/crates/deepseek/src/deepseek.rs index 22bde8e5943f1a82c7441354a916f980405582c2..c49270febe3b2b3702b808e2219f6e45d7252267 100644 --- a/crates/deepseek/src/deepseek.rs +++ b/crates/deepseek/src/deepseek.rs @@ -201,13 +201,13 @@ pub struct Response { #[derive(Serialize, Deserialize, Debug)] pub struct Usage { - pub prompt_tokens: u32, - pub completion_tokens: u32, - pub total_tokens: u32, + pub prompt_tokens: u64, + pub completion_tokens: u64, + pub total_tokens: u64, #[serde(default)] - pub prompt_cache_hit_tokens: u32, + pub prompt_cache_hit_tokens: u64, #[serde(default)] - pub prompt_cache_miss_tokens: u32, + pub prompt_cache_miss_tokens: u64, } #[derive(Serialize, Deserialize, Debug)] @@ -224,6 +224,7 @@ pub struct StreamResponse { pub created: u64, pub model: String, pub choices: Vec, + pub usage: Option, } #[derive(Serialize, Deserialize, Debug)] diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index 10030c909109e03c3aeac4e2472c5879740290a4..99a1ca70c6e9ced064c76d4ede427e3b2f5ace0f 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -14,7 +14,7 @@ use language_model::{ LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent, - RateLimiter, Role, StopReason, + RateLimiter, Role, StopReason, TokenUsage, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; @@ -513,6 +513,15 @@ impl DeepSeekEventMapper { } } + if let Some(usage) = event.usage { + events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { + input_tokens: usage.prompt_tokens, + output_tokens: usage.completion_tokens, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }))); + } + match choice.finish_reason.as_deref() { Some("stop") => { events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn))); diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index f6e1ea559a3efc73de0b104dbc874e0452393b14..3fa5334eb055196e620fc4370d06e4956c6e576b 100644 --- a/crates/language_models/src/provider/open_ai.rs 
+++ b/crates/language_models/src/provider/open_ai.rs @@ -12,7 +12,7 @@ use language_model::{ LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent, - RateLimiter, Role, StopReason, + RateLimiter, Role, StopReason, TokenUsage, }; use menu; use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion}; @@ -528,11 +528,20 @@ impl OpenAiEventMapper { &mut self, event: ResponseStreamEvent, ) -> Vec> { + let mut events = Vec::new(); + if let Some(usage) = event.usage { + events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { + input_tokens: usage.prompt_tokens, + output_tokens: usage.completion_tokens, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }))); + } + let Some(choice) = event.choices.first() else { - return Vec::new(); + return events; }; - let mut events = Vec::new(); if let Some(content) = choice.delta.content.clone() { events.push(Ok(LanguageModelCompletionEvent::Text(content))); } diff --git a/crates/open_ai/src/open_ai.rs b/crates/open_ai/src/open_ai.rs index 034b4b358a0bb8f89b0c33b65266eefe4a6cca69..5b09aa5cbc17a0c48e4a1fadcbdd0b44cba98e1c 100644 --- a/crates/open_ai/src/open_ai.rs +++ b/crates/open_ai/src/open_ai.rs @@ -364,9 +364,9 @@ pub struct FunctionChunk { #[derive(Serialize, Deserialize, Debug)] pub struct Usage { - pub prompt_tokens: u32, - pub completion_tokens: u32, - pub total_tokens: u32, + pub prompt_tokens: u64, + pub completion_tokens: u64, + pub total_tokens: u64, } #[derive(Serialize, Deserialize, Debug)] From 1c6b4712a3f53c924c2f4d51fe288e629a309e48 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 25 Jun 2025 11:48:38 +0200 Subject: [PATCH 23/56] agent: Fix issue where unconfigured MCP extensions would not start server (#33365) Release Notes: - agent: Fix an issue where MCP servers that were provided by extensions would sometimes not start up --- .../agent_configuration/configure_context_server_modal.rs | 5 +---- crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs | 5 ++++- crates/project/src/context_server_store.rs | 5 +---- crates/project/src/project_settings.rs | 7 +++++++ 4 files changed, 13 insertions(+), 9 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs index 6a0bd765c7969b910b321826c0ca44dc92fd82a9..30fad51cfcbc100bdf469278c0210a220c7e2833 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs @@ -295,10 +295,7 @@ impl ConfigureContextServerModal { ContextServerDescriptorRegistry::default_global(cx) .read(cx) .context_server_descriptor(&server_id.0) - .map(|_| ContextServerSettings::Extension { - enabled: true, - settings: serde_json::json!({}), - }) + .map(|_| ContextServerSettings::default_extension()) }) else { return Task::ready(Err(anyhow::anyhow!("Context server not found"))); diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs index a85e48226b7aacd9c29df89691c4bf620c86e7cf..f8f9ae1977687296790a562711c286e2fce026e4 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_6_0.rs @@ -945,7 +945,10 @@ impl ExtensionImports 
for WasmState { .get(key.as_str()) }) .cloned() - .context("Failed to get context server configuration")?; + .unwrap_or_else(|| { + project::project_settings::ContextServerSettings::default_extension( + ) + }); match settings { project::project_settings::ContextServerSettings::Custom { diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index 36213f96c4aefe946aafa92024c55d0092eeda4c..3bde9d6b36b42fe30aaf0f0fce903c3c0e373f3f 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -505,10 +505,7 @@ impl ContextServerStore { { configured_servers .entry(id) - .or_insert(ContextServerSettings::Extension { - enabled: true, - settings: serde_json::json!({}), - }); + .or_insert(ContextServerSettings::default_extension()); } let (enabled_servers, disabled_servers): (HashMap<_, _>, HashMap<_, _>) = diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 3f584f969783ca5ac107f592a02c824de5147539..19029cdb1d1c6b567a1d651a9aadfb8c7f8808c7 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -111,6 +111,13 @@ pub enum ContextServerSettings { } impl ContextServerSettings { + pub fn default_extension() -> Self { + Self::Extension { + enabled: true, + settings: serde_json::json!({}), + } + } + pub fn enabled(&self) -> bool { match self { ContextServerSettings::Custom { enabled, .. } => *enabled, From c6ff58675f79d7846c7a2f4cd7518c872032bf1f Mon Sep 17 00:00:00 2001 From: Vladimir Kuznichenkov <5330267+kuzaxak@users.noreply.github.com> Date: Wed, 25 Jun 2025 14:28:36 +0300 Subject: [PATCH 24/56] bedrock: Fix empty tool input on project diagnostic in bedrock (#33369) Bedrock [do not accept][1] `null` as a JSON value input for the tool call when called back. 
Instead of passing null, we will pass back an empty object, which is accepted by API Closes #33204 Release Notes: - Fixed project diagnostic tool call for bedrock [1]: https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ToolUseBlock.html --- .../language_models/src/provider/bedrock.rs | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index f0e644721e7f058525129e0fc216a3d21aea4729..e305569ce27b31285aa04077cc7cb35d96836477 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -648,14 +648,22 @@ pub fn into_bedrock( Some(BedrockInnerContent::ReasoningContent(redacted)) } - MessageContent::ToolUse(tool_use) => BedrockToolUseBlock::builder() - .name(tool_use.name.to_string()) - .tool_use_id(tool_use.id.to_string()) - .input(value_to_aws_document(&tool_use.input)) - .build() - .context("failed to build Bedrock tool use block") - .log_err() - .map(BedrockInnerContent::ToolUse), + MessageContent::ToolUse(tool_use) => { + let input = if tool_use.input.is_null() { + // Bedrock API requires valid JsonValue, not null, for tool use input + value_to_aws_document(&serde_json::json!({})) + } else { + value_to_aws_document(&tool_use.input) + }; + BedrockToolUseBlock::builder() + .name(tool_use.name.to_string()) + .tool_use_id(tool_use.id.to_string()) + .input(input) + .build() + .context("failed to build Bedrock tool use block") + .log_err() + .map(BedrockInnerContent::ToolUse) + }, MessageContent::ToolResult(tool_result) => { BedrockToolResultBlock::builder() .tool_use_id(tool_result.tool_use_id.to_string()) From 4396ac9dd6307bb7d7870a8415f590cfecae16b7 Mon Sep 17 00:00:00 2001 From: Shardul Vaidya <31039336+5herlocked@users.noreply.github.com> Date: Wed, 25 Jun 2025 07:51:25 -0400 Subject: [PATCH 25/56] bedrock: DeepSeek does not support receiving Reasoning Blocks (#33326) Closes #32341 Release Notes: - Fixed DeepSeek R1 errors for reasoning blocks being sent back to the model. 
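As a rough sketch of the rule this change enforces (simplified stand-in types rather than Zed's actual `MessageContent` definition, and an assumed model-id substring check), thinking and redacted-thinking blocks are dropped whenever the target model is DeepSeek R1:

```rust
#[derive(Debug, Clone, PartialEq)]
enum MessageContent {
    Text(String),
    Thinking { text: String, signature: Option<String> },
    RedactedThinking(Vec<u8>),
}

// Drop thinking blocks when targeting DeepSeek R1, since Bedrock's Converse API
// rejects them for that model. The substring check and model id are illustrative only.
fn strip_thinking_for_deepseek_r1(
    model_id: &str,
    blocks: Vec<MessageContent>,
) -> Vec<MessageContent> {
    let is_deepseek_r1 = model_id.contains("deepseek.r1");
    blocks
        .into_iter()
        .filter(|block| {
            !(is_deepseek_r1
                && matches!(
                    block,
                    MessageContent::Thinking { .. } | MessageContent::RedactedThinking(_)
                ))
        })
        .collect()
}

fn main() {
    let blocks = vec![
        MessageContent::Thinking { text: "scratchpad".into(), signature: None },
        MessageContent::Text("final answer".into()),
    ];
    let kept = strip_thinking_for_deepseek_r1("us.deepseek.r1-v1:0", blocks);
    assert_eq!(kept, vec![MessageContent::Text("final answer".into())]);
}
```

The diff below implements this by returning `None` from the content-mapping closure when the model id matches DeepSeek R1.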
--- crates/language_models/src/provider/bedrock.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index e305569ce27b31285aa04077cc7cb35d96836477..2b2527f1accd3a1f72c51ffdcc96e3c3b4358ef8 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -631,6 +631,11 @@ pub fn into_bedrock( } } MessageContent::Thinking { text, signature } => { + if model.contains(Model::DeepSeekR1.request_id()) { + // DeepSeekR1 doesn't support thinking blocks + // And the AWS API demands that you strip them + return None; + } let thinking = BedrockThinkingTextBlock::builder() .text(text) .set_signature(signature) @@ -643,6 +648,11 @@ pub fn into_bedrock( )) } MessageContent::RedactedThinking(blob) => { + if model.contains(Model::DeepSeekR1.request_id()) { + // DeepSeekR1 doesn't support thinking blocks + // And the AWS API demands that you strip them + return None; + } let redacted = BedrockThinkingBlock::RedactedContent(BedrockBlob::new(blob)); From c979452c2d2b55d455f2bdc85f0ab7e7c097bb62 Mon Sep 17 00:00:00 2001 From: Rodrigo Freire <109775603+rodrigoFfreire@users.noreply.github.com> Date: Wed, 25 Jun 2025 13:02:42 +0100 Subject: [PATCH 26/56] Implement indent conversion editor commands (#32340) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Description of Feature or Change Zed currently lacks a built-in way to convert a file’s indentation style on the fly. While it's possible to change indentation behavior via global or language-specific settings, these changes are persistent and broad in scope as they apply to all files or all files of a given language. We believe this could be improved for quick one-off adjustments to specific files. This PR introduces two new editor commands: `Editor::convert_indentation_to_spaces` and `Editor::convert_indentation_to_tabs`. These commands allow users to convert the indentation of either the entire buffer or a selection of lines, to spaces or tabs. Indentation levels are preserved, and any mixed whitespace lines are properly normalized. This feature is inspired by VS Code’s "Convert Indentation to Tabs/Spaces" commands, but offers faster execution and supports selection-based conversion, making it more flexible for quick formatting changes. ## Implementation Details To enable selection-based indentation conversion, we initially considered reusing the existing `Editor::manipulate_lines` function, which handles selections for line-based manipulations. However, this method was designed specifically for operations like sorting or reversing lines, and does not allow modifications to the line contents themselves. To address this limitation, we refactored the method into a more flexible version: `Editor::manipulate_generic_lines`. This new method passes a reference to the selected text directly into a callback, giving the callback full control over how to process and construct the resulting lines. The callback returns a `String` containing the modified text, as well as the number of lines before and after the transformation. These counts are computed using `.len()` on the line vectors during manipulation, which is more efficient than calculating them after the fact. ```rust fn manipulate_generic_lines( &mut self, window: &mut Window, cx: &mut Context, mut manipulate: M, ) where M: FnMut(&str) -> (String, usize, usize), { // ... Get text from buffer.text_for_range() ... 
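    // The callback receives the selected text and hands back the rewritten text
    // plus the line counts before and after the change, so the caller can adjust
    // selections without re-splitting the result.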
let (new_text, lines_before, lines_after) = manipulate(&text); // ... ``` We now introduce two specialized methods: `Editor::manipulate_mutable_lines` and `Editor::manipulate_immutable_lines`. Each editor command selects the appropriate method based on whether it needs to modify line contents or simply reorder them. This distinction is important for performance: when line contents remain unchanged, working with an immutable reference as `&mut Vec<&str>` is both faster and more memory-efficient than using an owned `&mut Vec`. ## Demonstration https://github.com/user-attachments/assets/e50b37ea-a128-4c2a-b252-46c3c4530d97 Release Notes: - Added `editor::ConvertIndentationToSpaces` and `editor::ConvertIndentationToTabs` actions to change editor indents --------- Co-authored-by: Pedro Silveira --- .../disable_cursor_blinking/before.rs | 208 ++++++++++++++-- crates/editor/src/actions.rs | 2 + crates/editor/src/editor.rs | 211 ++++++++++++++-- crates/editor/src/editor_tests.rs | 234 +++++++++++++++++- crates/editor/src/element.rs | 2 + 5 files changed, 608 insertions(+), 49 deletions(-) diff --git a/crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs b/crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs index 607daa8ce3a129e0f4bc53a00d1a62f479da3932..a070738b600f041cbd6b3cc8ad1e8a6462b1d85a 100644 --- a/crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs +++ b/crates/assistant_tools/src/edit_agent/evals/fixtures/disable_cursor_blinking/before.rs @@ -9132,7 +9132,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_lines(window, cx, |lines| lines.sort()) + self.manipulate_immutable_lines(window, cx, |lines| lines.sort()) } pub fn sort_lines_case_insensitive( @@ -9141,7 +9141,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_lines(window, cx, |lines| { + self.manipulate_immutable_lines(window, cx, |lines| { lines.sort_by_key(|line| line.to_lowercase()) }) } @@ -9152,7 +9152,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_lines(window, cx, |lines| { + self.manipulate_immutable_lines(window, cx, |lines| { let mut seen = HashSet::default(); lines.retain(|line| seen.insert(line.to_lowercase())); }) @@ -9164,7 +9164,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_lines(window, cx, |lines| { + self.manipulate_immutable_lines(window, cx, |lines| { let mut seen = HashSet::default(); lines.retain(|line| seen.insert(*line)); }) @@ -9606,20 +9606,20 @@ impl Editor { } pub fn reverse_lines(&mut self, _: &ReverseLines, window: &mut Window, cx: &mut Context) { - self.manipulate_lines(window, cx, |lines| lines.reverse()) + self.manipulate_immutable_lines(window, cx, |lines| lines.reverse()) } pub fn shuffle_lines(&mut self, _: &ShuffleLines, window: &mut Window, cx: &mut Context) { - self.manipulate_lines(window, cx, |lines| lines.shuffle(&mut thread_rng())) + self.manipulate_immutable_lines(window, cx, |lines| lines.shuffle(&mut thread_rng())) } - fn manipulate_lines( + fn manipulate_lines( &mut self, window: &mut Window, cx: &mut Context, - mut callback: Fn, + mut manipulate: M, ) where - Fn: FnMut(&mut Vec<&str>), + M: FnMut(&str) -> LineManipulationResult, { self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction); @@ -9652,18 +9652,14 @@ impl Editor { .text_for_range(start_point..end_point) .collect::(); - let mut lines = text.split('\n').collect_vec(); + let LineManipulationResult 
{ new_text, line_count_before, line_count_after} = manipulate(&text); - let lines_before = lines.len(); - callback(&mut lines); - let lines_after = lines.len(); - - edits.push((start_point..end_point, lines.join("\n"))); + edits.push((start_point..end_point, new_text)); // Selections must change based on added and removed line count let start_row = MultiBufferRow(start_point.row + added_lines as u32 - removed_lines as u32); - let end_row = MultiBufferRow(start_row.0 + lines_after.saturating_sub(1) as u32); + let end_row = MultiBufferRow(start_row.0 + line_count_after.saturating_sub(1) as u32); new_selections.push(Selection { id: selection.id, start: start_row, @@ -9672,10 +9668,10 @@ impl Editor { reversed: selection.reversed, }); - if lines_after > lines_before { - added_lines += lines_after - lines_before; - } else if lines_before > lines_after { - removed_lines += lines_before - lines_after; + if line_count_after > line_count_before { + added_lines += line_count_after - line_count_before; + } else if line_count_before > line_count_after { + removed_lines += line_count_before - line_count_after; } } @@ -9720,6 +9716,171 @@ impl Editor { }) } + fn manipulate_immutable_lines( + &mut self, + window: &mut Window, + cx: &mut Context, + mut callback: Fn, + ) where + Fn: FnMut(&mut Vec<&str>), + { + self.manipulate_lines(window, cx, |text| { + let mut lines: Vec<&str> = text.split('\n').collect(); + let line_count_before = lines.len(); + + callback(&mut lines); + + LineManipulationResult { + new_text: lines.join("\n"), + line_count_before, + line_count_after: lines.len(), + } + }); + } + + fn manipulate_mutable_lines( + &mut self, + window: &mut Window, + cx: &mut Context, + mut callback: Fn, + ) where + Fn: FnMut(&mut Vec>), + { + self.manipulate_lines(window, cx, |text| { + let mut lines: Vec> = text.split('\n').map(Cow::from).collect(); + let line_count_before = lines.len(); + + callback(&mut lines); + + LineManipulationResult { + new_text: lines.join("\n"), + line_count_before, + line_count_after: lines.len(), + } + }); + } + + pub fn convert_indentation_to_spaces( + &mut self, + _: &ConvertIndentationToSpaces, + window: &mut Window, + cx: &mut Context, + ) { + let settings = self.buffer.read(cx).language_settings(cx); + let tab_size = settings.tab_size.get() as usize; + + self.manipulate_mutable_lines(window, cx, |lines| { + // Allocates a reasonably sized scratch buffer once for the whole loop + let mut reindented_line = String::with_capacity(MAX_LINE_LEN); + // Avoids recomputing spaces that could be inserted many times + let space_cache: Vec> = (1..=tab_size) + .map(|n| IndentSize::spaces(n as u32).chars().collect()) + .collect(); + + for line in lines.iter_mut().filter(|line| !line.is_empty()) { + let mut chars = line.as_ref().chars(); + let mut col = 0; + let mut changed = false; + + while let Some(ch) = chars.next() { + match ch { + ' ' => { + reindented_line.push(' '); + col += 1; + } + '\t' => { + // \t are converted to spaces depending on the current column + let spaces_len = tab_size - (col % tab_size); + reindented_line.extend(&space_cache[spaces_len - 1]); + col += spaces_len; + changed = true; + } + _ => { + // If we dont append before break, the character is consumed + reindented_line.push(ch); + break; + } + } + } + + if !changed { + reindented_line.clear(); + continue; + } + // Append the rest of the line and replace old reference with new one + reindented_line.extend(chars); + *line = Cow::Owned(reindented_line.clone()); + reindented_line.clear(); + } + }); + } + + pub fn 
convert_indentation_to_tabs( + &mut self, + _: &ConvertIndentationToTabs, + window: &mut Window, + cx: &mut Context, + ) { + let settings = self.buffer.read(cx).language_settings(cx); + let tab_size = settings.tab_size.get() as usize; + + self.manipulate_mutable_lines(window, cx, |lines| { + // Allocates a reasonably sized buffer once for the whole loop + let mut reindented_line = String::with_capacity(MAX_LINE_LEN); + // Avoids recomputing spaces that could be inserted many times + let space_cache: Vec> = (1..=tab_size) + .map(|n| IndentSize::spaces(n as u32).chars().collect()) + .collect(); + + for line in lines.iter_mut().filter(|line| !line.is_empty()) { + let mut chars = line.chars(); + let mut spaces_count = 0; + let mut first_non_indent_char = None; + let mut changed = false; + + while let Some(ch) = chars.next() { + match ch { + ' ' => { + // Keep track of spaces. Append \t when we reach tab_size + spaces_count += 1; + changed = true; + if spaces_count == tab_size { + reindented_line.push('\t'); + spaces_count = 0; + } + } + '\t' => { + reindented_line.push('\t'); + spaces_count = 0; + } + _ => { + // Dont append it yet, we might have remaining spaces + first_non_indent_char = Some(ch); + break; + } + } + } + + if !changed { + reindented_line.clear(); + continue; + } + // Remaining spaces that didn't make a full tab stop + if spaces_count > 0 { + reindented_line.extend(&space_cache[spaces_count - 1]); + } + // If we consume an extra character that was not indentation, add it back + if let Some(extra_char) = first_non_indent_char { + reindented_line.push(extra_char); + } + // Append the rest of the line and replace old reference with new one + reindented_line.extend(chars); + *line = Cow::Owned(reindented_line.clone()); + reindented_line.clear(); + } + }); + } + pub fn convert_to_upper_case( &mut self, _: &ConvertToUpperCase, @@ -21157,6 +21318,13 @@ pub struct LineHighlight { pub type_id: Option, } +struct LineManipulationResult { + pub new_text: String, + pub line_count_before: usize, + pub line_count_after: usize, +} + + fn render_diff_hunk_controls( row: u32, status: &DiffHunkStatus, diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index b8a3e5efa778579b61b969e8c224de1bd237bbd2..ff6263dfa71184ded4e7697dd6132aa12138063d 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -270,6 +270,8 @@ actions!( ContextMenuLast, ContextMenuNext, ContextMenuPrevious, + ConvertIndentationToSpaces, + ConvertIndentationToTabs, ConvertToKebabCase, ConvertToLowerCamelCase, ConvertToLowerCase, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 770ad7fa706027aa8146192b3f1d2155d06a4e31..ddecdcabcff11b411a01b66be31271b04057d945 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -10080,7 +10080,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_lines(window, cx, |lines| lines.sort()) + self.manipulate_immutable_lines(window, cx, |lines| lines.sort()) } pub fn sort_lines_case_insensitive( @@ -10089,7 +10089,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_lines(window, cx, |lines| { + self.manipulate_immutable_lines(window, cx, |lines| { lines.sort_by_key(|line| line.to_lowercase()) }) } @@ -10100,7 +10100,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_lines(window, cx, |lines| { + self.manipulate_immutable_lines(window, cx, |lines| { let mut seen = HashSet::default(); lines.retain(|line| 
seen.insert(line.to_lowercase())); }) @@ -10112,7 +10112,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_lines(window, cx, |lines| { + self.manipulate_immutable_lines(window, cx, |lines| { let mut seen = HashSet::default(); lines.retain(|line| seen.insert(*line)); }) @@ -10555,20 +10555,20 @@ impl Editor { } pub fn reverse_lines(&mut self, _: &ReverseLines, window: &mut Window, cx: &mut Context) { - self.manipulate_lines(window, cx, |lines| lines.reverse()) + self.manipulate_immutable_lines(window, cx, |lines| lines.reverse()) } pub fn shuffle_lines(&mut self, _: &ShuffleLines, window: &mut Window, cx: &mut Context) { - self.manipulate_lines(window, cx, |lines| lines.shuffle(&mut thread_rng())) + self.manipulate_immutable_lines(window, cx, |lines| lines.shuffle(&mut thread_rng())) } - fn manipulate_lines( + fn manipulate_lines( &mut self, window: &mut Window, cx: &mut Context, - mut callback: Fn, + mut manipulate: M, ) where - Fn: FnMut(&mut Vec<&str>), + M: FnMut(&str) -> LineManipulationResult, { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); @@ -10601,18 +10601,18 @@ impl Editor { .text_for_range(start_point..end_point) .collect::(); - let mut lines = text.split('\n').collect_vec(); + let LineManipulationResult { + new_text, + line_count_before, + line_count_after, + } = manipulate(&text); - let lines_before = lines.len(); - callback(&mut lines); - let lines_after = lines.len(); - - edits.push((start_point..end_point, lines.join("\n"))); + edits.push((start_point..end_point, new_text)); // Selections must change based on added and removed line count let start_row = MultiBufferRow(start_point.row + added_lines as u32 - removed_lines as u32); - let end_row = MultiBufferRow(start_row.0 + lines_after.saturating_sub(1) as u32); + let end_row = MultiBufferRow(start_row.0 + line_count_after.saturating_sub(1) as u32); new_selections.push(Selection { id: selection.id, start: start_row, @@ -10621,10 +10621,10 @@ impl Editor { reversed: selection.reversed, }); - if lines_after > lines_before { - added_lines += lines_after - lines_before; - } else if lines_before > lines_after { - removed_lines += lines_before - lines_after; + if line_count_after > line_count_before { + added_lines += line_count_after - line_count_before; + } else if line_count_before > line_count_after { + removed_lines += line_count_before - line_count_after; } } @@ -10669,6 +10669,171 @@ impl Editor { }) } + fn manipulate_immutable_lines( + &mut self, + window: &mut Window, + cx: &mut Context, + mut callback: Fn, + ) where + Fn: FnMut(&mut Vec<&str>), + { + self.manipulate_lines(window, cx, |text| { + let mut lines: Vec<&str> = text.split('\n').collect(); + let line_count_before = lines.len(); + + callback(&mut lines); + + LineManipulationResult { + new_text: lines.join("\n"), + line_count_before, + line_count_after: lines.len(), + } + }); + } + + fn manipulate_mutable_lines( + &mut self, + window: &mut Window, + cx: &mut Context, + mut callback: Fn, + ) where + Fn: FnMut(&mut Vec>), + { + self.manipulate_lines(window, cx, |text| { + let mut lines: Vec> = text.split('\n').map(Cow::from).collect(); + let line_count_before = lines.len(); + + callback(&mut lines); + + LineManipulationResult { + new_text: lines.join("\n"), + line_count_before, + line_count_after: lines.len(), + } + }); + } + + pub fn convert_indentation_to_spaces( + &mut self, + _: &ConvertIndentationToSpaces, + window: &mut Window, + cx: &mut Context, + ) { + let settings = 
self.buffer.read(cx).language_settings(cx); + let tab_size = settings.tab_size.get() as usize; + + self.manipulate_mutable_lines(window, cx, |lines| { + // Allocates a reasonably sized scratch buffer once for the whole loop + let mut reindented_line = String::with_capacity(MAX_LINE_LEN); + // Avoids recomputing spaces that could be inserted many times + let space_cache: Vec> = (1..=tab_size) + .map(|n| IndentSize::spaces(n as u32).chars().collect()) + .collect(); + + for line in lines.iter_mut().filter(|line| !line.is_empty()) { + let mut chars = line.as_ref().chars(); + let mut col = 0; + let mut changed = false; + + while let Some(ch) = chars.next() { + match ch { + ' ' => { + reindented_line.push(' '); + col += 1; + } + '\t' => { + // \t are converted to spaces depending on the current column + let spaces_len = tab_size - (col % tab_size); + reindented_line.extend(&space_cache[spaces_len - 1]); + col += spaces_len; + changed = true; + } + _ => { + // If we dont append before break, the character is consumed + reindented_line.push(ch); + break; + } + } + } + + if !changed { + reindented_line.clear(); + continue; + } + // Append the rest of the line and replace old reference with new one + reindented_line.extend(chars); + *line = Cow::Owned(reindented_line.clone()); + reindented_line.clear(); + } + }); + } + + pub fn convert_indentation_to_tabs( + &mut self, + _: &ConvertIndentationToTabs, + window: &mut Window, + cx: &mut Context, + ) { + let settings = self.buffer.read(cx).language_settings(cx); + let tab_size = settings.tab_size.get() as usize; + + self.manipulate_mutable_lines(window, cx, |lines| { + // Allocates a reasonably sized buffer once for the whole loop + let mut reindented_line = String::with_capacity(MAX_LINE_LEN); + // Avoids recomputing spaces that could be inserted many times + let space_cache: Vec> = (1..=tab_size) + .map(|n| IndentSize::spaces(n as u32).chars().collect()) + .collect(); + + for line in lines.iter_mut().filter(|line| !line.is_empty()) { + let mut chars = line.chars(); + let mut spaces_count = 0; + let mut first_non_indent_char = None; + let mut changed = false; + + while let Some(ch) = chars.next() { + match ch { + ' ' => { + // Keep track of spaces. 
Append \t when we reach tab_size + spaces_count += 1; + changed = true; + if spaces_count == tab_size { + reindented_line.push('\t'); + spaces_count = 0; + } + } + '\t' => { + reindented_line.push('\t'); + spaces_count = 0; + } + _ => { + // Dont append it yet, we might have remaining spaces + first_non_indent_char = Some(ch); + break; + } + } + } + + if !changed { + reindented_line.clear(); + continue; + } + // Remaining spaces that didn't make a full tab stop + if spaces_count > 0 { + reindented_line.extend(&space_cache[spaces_count - 1]); + } + // If we consume an extra character that was not indentation, add it back + if let Some(extra_char) = first_non_indent_char { + reindented_line.push(extra_char); + } + // Append the rest of the line and replace old reference with new one + reindented_line.extend(chars); + *line = Cow::Owned(reindented_line.clone()); + reindented_line.clear(); + } + }); + } + pub fn convert_to_upper_case( &mut self, _: &ConvertToUpperCase, @@ -22941,6 +23106,12 @@ pub struct LineHighlight { pub type_id: Option, } +struct LineManipulationResult { + pub new_text: String, + pub line_count_before: usize, + pub line_count_after: usize, +} + fn render_diff_hunk_controls( row: u32, status: &DiffHunkStatus, diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 6a579cb1cd310431a972329b97ce29c5ffefa864..3671653e16b0c6452e4c57b9108768b6376b87bf 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -3976,7 +3976,7 @@ async fn test_custom_newlines_cause_no_false_positive_diffs( } #[gpui::test] -async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) { +async fn test_manipulate_immutable_lines_with_single_selection(cx: &mut TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; @@ -4021,8 +4021,8 @@ async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) { // Skip testing shuffle_line() - // From here on out, test more complex cases of manipulate_lines() with a single driver method: sort_lines_case_sensitive() - // Since all methods calling manipulate_lines() are doing the exact same general thing (reordering lines) + // From here on out, test more complex cases of manipulate_immutable_lines() with a single driver method: sort_lines_case_sensitive() + // Since all methods calling manipulate_immutable_lines() are doing the exact same general thing (reordering lines) // Don't manipulate when cursor is on single line, but expand the selection cx.set_state(indoc! {" @@ -4089,7 +4089,7 @@ async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) { bbˇ»b "}); cx.update_editor(|e, window, cx| { - e.manipulate_lines(window, cx, |lines| lines.push("added_line")) + e.manipulate_immutable_lines(window, cx, |lines| lines.push("added_line")) }); cx.assert_editor_state(indoc! 
{" «aaa @@ -4103,7 +4103,7 @@ async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) { bbbˇ» "}); cx.update_editor(|e, window, cx| { - e.manipulate_lines(window, cx, |lines| { + e.manipulate_immutable_lines(window, cx, |lines| { lines.pop(); }) }); @@ -4117,7 +4117,7 @@ async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) { bbbˇ» "}); cx.update_editor(|e, window, cx| { - e.manipulate_lines(window, cx, |lines| { + e.manipulate_immutable_lines(window, cx, |lines| { lines.drain(..); }) }); @@ -4217,7 +4217,7 @@ async fn test_unique_lines_single_selection(cx: &mut TestAppContext) { } #[gpui::test] -async fn test_manipulate_lines_with_multi_selection(cx: &mut TestAppContext) { +async fn test_manipulate_immutable_lines_with_multi_selection(cx: &mut TestAppContext) { init_test(cx, |_| {}); let mut cx = EditorTestContext::new(cx).await; @@ -4277,7 +4277,7 @@ async fn test_manipulate_lines_with_multi_selection(cx: &mut TestAppContext) { aaaˇ»aa "}); cx.update_editor(|e, window, cx| { - e.manipulate_lines(window, cx, |lines| lines.push("added line")) + e.manipulate_immutable_lines(window, cx, |lines| lines.push("added line")) }); cx.assert_editor_state(indoc! {" «2 @@ -4298,7 +4298,7 @@ async fn test_manipulate_lines_with_multi_selection(cx: &mut TestAppContext) { aaaˇ»aa "}); cx.update_editor(|e, window, cx| { - e.manipulate_lines(window, cx, |lines| { + e.manipulate_immutable_lines(window, cx, |lines| { lines.pop(); }) }); @@ -4309,6 +4309,222 @@ async fn test_manipulate_lines_with_multi_selection(cx: &mut TestAppContext) { "}); } +#[gpui::test] +async fn test_convert_indentation_to_spaces(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = NonZeroU32::new(3) + }); + + let mut cx = EditorTestContext::new(cx).await; + + // MULTI SELECTION + // Ln.1 "«" tests empty lines + // Ln.9 tests just leading whitespace + cx.set_state(indoc! {" + « + abc // No indentationˇ» + «\tabc // 1 tabˇ» + \t\tabc « ˇ» // 2 tabs + \t ab«c // Tab followed by space + \tabc // Space followed by tab (3 spaces should be the result) + \t \t \t \tabc // Mixed indentation (tab conversion depends on the column) + abˇ»ˇc ˇ ˇ // Already space indented« + \t + \tabc\tdef // Only the leading tab is manipulatedˇ» + "}); + cx.update_editor(|e, window, cx| { + e.convert_indentation_to_spaces(&ConvertIndentationToSpaces, window, cx); + }); + cx.assert_editor_state(indoc! {" + « + abc // No indentation + abc // 1 tab + abc // 2 tabs + abc // Tab followed by space + abc // Space followed by tab (3 spaces should be the result) + abc // Mixed indentation (tab conversion depends on the column) + abc // Already space indented + + abc\tdef // Only the leading tab is manipulatedˇ» + "}); + + // Test on just a few lines, the others should remain unchanged + // Only lines (3, 5, 10, 11) should change + cx.set_state(indoc! {" + + abc // No indentation + \tabcˇ // 1 tab + \t\tabc // 2 tabs + \t abcˇ // Tab followed by space + \tabc // Space followed by tab (3 spaces should be the result) + \t \t \t \tabc // Mixed indentation (tab conversion depends on the column) + abc // Already space indented + «\t + \tabc\tdef // Only the leading tab is manipulatedˇ» + "}); + cx.update_editor(|e, window, cx| { + e.convert_indentation_to_spaces(&ConvertIndentationToSpaces, window, cx); + }); + cx.assert_editor_state(indoc! 
{" + + abc // No indentation + « abc // 1 tabˇ» + \t\tabc // 2 tabs + « abc // Tab followed by spaceˇ» + \tabc // Space followed by tab (3 spaces should be the result) + \t \t \t \tabc // Mixed indentation (tab conversion depends on the column) + abc // Already space indented + « + abc\tdef // Only the leading tab is manipulatedˇ» + "}); + + // SINGLE SELECTION + // Ln.1 "«" tests empty lines + // Ln.9 tests just leading whitespace + cx.set_state(indoc! {" + « + abc // No indentation + \tabc // 1 tab + \t\tabc // 2 tabs + \t abc // Tab followed by space + \tabc // Space followed by tab (3 spaces should be the result) + \t \t \t \tabc // Mixed indentation (tab conversion depends on the column) + abc // Already space indented + \t + \tabc\tdef // Only the leading tab is manipulatedˇ» + "}); + cx.update_editor(|e, window, cx| { + e.convert_indentation_to_spaces(&ConvertIndentationToSpaces, window, cx); + }); + cx.assert_editor_state(indoc! {" + « + abc // No indentation + abc // 1 tab + abc // 2 tabs + abc // Tab followed by space + abc // Space followed by tab (3 spaces should be the result) + abc // Mixed indentation (tab conversion depends on the column) + abc // Already space indented + + abc\tdef // Only the leading tab is manipulatedˇ» + "}); +} + +#[gpui::test] +async fn test_convert_indentation_to_tabs(cx: &mut TestAppContext) { + init_test(cx, |settings| { + settings.defaults.tab_size = NonZeroU32::new(3) + }); + + let mut cx = EditorTestContext::new(cx).await; + + // MULTI SELECTION + // Ln.1 "«" tests empty lines + // Ln.11 tests just leading whitespace + cx.set_state(indoc! {" + « + abˇ»ˇc // No indentation + abc ˇ ˇ // 1 space (< 3 so dont convert) + abc « // 2 spaces (< 3 so dont convert) + abc // 3 spaces (convert) + abc ˇ» // 5 spaces (1 tab + 2 spaces) + «\tˇ»\t«\tˇ»abc // Already tab indented + «\t abc // Tab followed by space + \tabc // Space followed by tab (should be consumed due to tab) + \t \t \t \tabc // Mixed indentation (first 3 spaces are consumed, the others are converted) + \tˇ» «\t + abcˇ» \t ˇˇˇ // Only the leading spaces should be converted + "}); + cx.update_editor(|e, window, cx| { + e.convert_indentation_to_tabs(&ConvertIndentationToTabs, window, cx); + }); + cx.assert_editor_state(indoc! {" + « + abc // No indentation + abc // 1 space (< 3 so dont convert) + abc // 2 spaces (< 3 so dont convert) + \tabc // 3 spaces (convert) + \t abc // 5 spaces (1 tab + 2 spaces) + \t\t\tabc // Already tab indented + \t abc // Tab followed by space + \tabc // Space followed by tab (should be consumed due to tab) + \t\t\t\t\tabc // Mixed indentation (first 3 spaces are consumed, the others are converted) + \t\t\t + \tabc \t // Only the leading spaces should be convertedˇ» + "}); + + // Test on just a few lines, the other should remain unchanged + // Only lines (4, 8, 11, 12) should change + cx.set_state(indoc! {" + + abc // No indentation + abc // 1 space (< 3 so dont convert) + abc // 2 spaces (< 3 so dont convert) + « abc // 3 spaces (convert)ˇ» + abc // 5 spaces (1 tab + 2 spaces) + \t\t\tabc // Already tab indented + \t abc // Tab followed by space + \tabc ˇ // Space followed by tab (should be consumed due to tab) + \t\t \tabc // Mixed indentation + \t \t \t \tabc // Mixed indentation + \t \tˇ + « abc \t // Only the leading spaces should be convertedˇ» + "}); + cx.update_editor(|e, window, cx| { + e.convert_indentation_to_tabs(&ConvertIndentationToTabs, window, cx); + }); + cx.assert_editor_state(indoc! 
{" + + abc // No indentation + abc // 1 space (< 3 so dont convert) + abc // 2 spaces (< 3 so dont convert) + «\tabc // 3 spaces (convert)ˇ» + abc // 5 spaces (1 tab + 2 spaces) + \t\t\tabc // Already tab indented + \t abc // Tab followed by space + «\tabc // Space followed by tab (should be consumed due to tab)ˇ» + \t\t \tabc // Mixed indentation + \t \t \t \tabc // Mixed indentation + «\t\t\t + \tabc \t // Only the leading spaces should be convertedˇ» + "}); + + // SINGLE SELECTION + // Ln.1 "«" tests empty lines + // Ln.11 tests just leading whitespace + cx.set_state(indoc! {" + « + abc // No indentation + abc // 1 space (< 3 so dont convert) + abc // 2 spaces (< 3 so dont convert) + abc // 3 spaces (convert) + abc // 5 spaces (1 tab + 2 spaces) + \t\t\tabc // Already tab indented + \t abc // Tab followed by space + \tabc // Space followed by tab (should be consumed due to tab) + \t \t \t \tabc // Mixed indentation (first 3 spaces are consumed, the others are converted) + \t \t + abc \t // Only the leading spaces should be convertedˇ» + "}); + cx.update_editor(|e, window, cx| { + e.convert_indentation_to_tabs(&ConvertIndentationToTabs, window, cx); + }); + cx.assert_editor_state(indoc! {" + « + abc // No indentation + abc // 1 space (< 3 so dont convert) + abc // 2 spaces (< 3 so dont convert) + \tabc // 3 spaces (convert) + \t abc // 5 spaces (1 tab + 2 spaces) + \t\t\tabc // Already tab indented + \t abc // Tab followed by space + \tabc // Space followed by tab (should be consumed due to tab) + \t\t\t\t\tabc // Mixed indentation (first 3 spaces are consumed, the others are converted) + \t\t\t + \tabc \t // Only the leading spaces should be convertedˇ» + "}); +} + #[gpui::test] async fn test_toggle_case(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index b002a96de8d0e1f615e865b7908c19a5f4bcbbb4..602a0579b3a23b4449d08a732580ac261bd841c2 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -230,6 +230,8 @@ impl EditorElement { register_action(editor, window, Editor::reverse_lines); register_action(editor, window, Editor::shuffle_lines); register_action(editor, window, Editor::toggle_case); + register_action(editor, window, Editor::convert_indentation_to_spaces); + register_action(editor, window, Editor::convert_indentation_to_tabs); register_action(editor, window, Editor::convert_to_upper_case); register_action(editor, window, Editor::convert_to_lower_case); register_action(editor, window, Editor::convert_to_title_case); From 18f1221a446786d1cacf7fa51c65d7946a9dc0bd Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 25 Jun 2025 15:04:43 +0200 Subject: [PATCH 27/56] vercel: Reuse existing OpenAI code (#33362) Follow up to #33292 Since Vercel's API is OpenAI compatible, we can reuse a bunch of code. 
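A minimal, self-contained sketch of why the reuse works (simplified stand-in types, not Zed's actual signatures): the shared request builder only needs a model id and a capability flag, so an OpenAI-compatible provider such as Vercel v0 can call it directly.

```rust
#[derive(Debug, PartialEq)]
struct OpenAiStyleRequest {
    model: String,
    parallel_tool_calls: Option<bool>,
}

// Nothing here is OpenAI-specific beyond the wire format, so a Vercel model can
// pass its own id and capability flag. Heavily simplified relative to the real
// `into_open_ai`.
fn build_request(
    model_id: &str,
    supports_parallel_tool_calls: bool,
    has_tools: bool,
) -> OpenAiStyleRequest {
    OpenAiStyleRequest {
        model: model_id.to_string(),
        // The agent expects at most one tool call per turn, so parallel calls
        // are disabled whenever tools are present.
        parallel_tool_calls: if supports_parallel_tool_calls && has_tools {
            Some(false)
        } else {
            None
        },
    }
}

fn main() {
    // "v0-1.0-md" is used here as an example Vercel v0 model id.
    let request = build_request("v0-1.0-md", true, true);
    assert_eq!(request.model, "v0-1.0-md");
    assert_eq!(request.parallel_tool_calls, Some(false));
}
```

The actual change just threads `model.id()` and `model.supports_parallel_tool_calls()` through the existing OpenAI code path, as the diff below shows.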
Release Notes: - N/A --- Cargo.lock | 3 - crates/language_models/src/provider/cloud.rs | 7 +- .../language_models/src/provider/open_ai.rs | 16 +- crates/language_models/src/provider/vercel.rs | 313 +-------------- crates/vercel/Cargo.toml | 3 - crates/vercel/src/vercel.rs | 362 +----------------- 6 files changed, 30 insertions(+), 674 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c832b83aa59834ee6fac4e8b936826de1465256..224aa421a63b9700a448d17b2a0dd02bd538a7ee 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17431,11 +17431,8 @@ name = "vercel" version = "0.1.0" dependencies = [ "anyhow", - "futures 0.3.31", - "http_client", "schemars", "serde", - "serde_json", "strum 0.27.1", "workspace-hack", ] diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 1062d732a42d0d7fdd15e99d15a50b72826ed03c..58902850ea1d66d843306e9612a0ed2538a29ac9 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -888,7 +888,12 @@ impl LanguageModel for CloudLanguageModel { Ok(model) => model, Err(err) => return async move { Err(anyhow!(err).into()) }.boxed(), }; - let request = into_open_ai(request, &model, None); + let request = into_open_ai( + request, + model.id(), + model.supports_parallel_tool_calls(), + None, + ); let llm_api_token = self.llm_api_token.clone(); let future = self.request_limiter.stream(async move { let PerformLlmCompletionResponse { diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 3fa5334eb055196e620fc4370d06e4956c6e576b..56a81d36e955ee8fadece0fea59a240215759965 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -344,7 +344,12 @@ impl LanguageModel for OpenAiLanguageModel { LanguageModelCompletionError, >, > { - let request = into_open_ai(request, &self.model, self.max_output_tokens()); + let request = into_open_ai( + request, + self.model.id(), + self.model.supports_parallel_tool_calls(), + self.max_output_tokens(), + ); let completions = self.stream_completion(request, cx); async move { let mapper = OpenAiEventMapper::new(); @@ -356,10 +361,11 @@ impl LanguageModel for OpenAiLanguageModel { pub fn into_open_ai( request: LanguageModelRequest, - model: &Model, + model_id: &str, + supports_parallel_tool_calls: bool, max_output_tokens: Option, ) -> open_ai::Request { - let stream = !model.id().starts_with("o1-"); + let stream = !model_id.starts_with("o1-"); let mut messages = Vec::new(); for message in request.messages { @@ -435,13 +441,13 @@ pub fn into_open_ai( } open_ai::Request { - model: model.id().into(), + model: model_id.into(), messages, stream, stop: request.stop, temperature: request.temperature.unwrap_or(1.0), max_completion_tokens: max_output_tokens, - parallel_tool_calls: if model.supports_parallel_tool_calls() && !request.tools.is_empty() { + parallel_tool_calls: if supports_parallel_tool_calls && !request.tools.is_empty() { // Disable parallel tool calls, as the Agent currently expects a maximum of one per turn. 
Some(false) } else { diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index 46063aceff17f9e779435e3b3d26c6507ca2c019..65058cbb74ad78d5151ff7a3d4fd4b06f4fc6c7c 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -1,8 +1,6 @@ use anyhow::{Context as _, Result, anyhow}; -use collections::{BTreeMap, HashMap}; +use collections::BTreeMap; use credentials_provider::CredentialsProvider; - -use futures::Stream; use futures::{FutureExt, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window}; use http_client::HttpClient; @@ -10,16 +8,13 @@ use language_model::{ AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest, - LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent, - RateLimiter, Role, StopReason, + LanguageModelToolChoice, RateLimiter, Role, }; use menu; -use open_ai::{ImageUrl, ResponseStreamEvent, stream_completion}; +use open_ai::ResponseStreamEvent; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore}; -use std::pin::Pin; -use std::str::FromStr as _; use std::sync::Arc; use strum::IntoEnumIterator; use vercel::Model; @@ -200,14 +195,12 @@ impl LanguageModelProvider for VercelLanguageModelProvider { fn provided_models(&self, cx: &App) -> Vec> { let mut models = BTreeMap::default(); - // Add base models from vercel::Model::iter() for model in vercel::Model::iter() { if !matches!(model, vercel::Model::Custom { .. }) { models.insert(model.id().to_string(), model); } } - // Override with available models from settings for model in &AllLanguageModelSettings::get_global(cx) .vercel .available_models @@ -278,7 +271,8 @@ impl VercelLanguageModel { let future = self.request_limiter.stream(async move { let api_key = api_key.context("Missing Vercel API Key")?; - let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request); + let request = + open_ai::stream_completion(http_client.as_ref(), &api_url, &api_key, request); let response = request.await?; Ok(response) }); @@ -354,264 +348,21 @@ impl LanguageModel for VercelLanguageModel { LanguageModelCompletionError, >, > { - let request = into_vercel(request, &self.model, self.max_output_tokens()); + let request = crate::provider::open_ai::into_open_ai( + request, + self.model.id(), + self.model.supports_parallel_tool_calls(), + self.max_output_tokens(), + ); let completions = self.stream_completion(request, cx); async move { - let mapper = VercelEventMapper::new(); + let mapper = crate::provider::open_ai::OpenAiEventMapper::new(); Ok(mapper.map_stream(completions.await?).boxed()) } .boxed() } } -pub fn into_vercel( - request: LanguageModelRequest, - model: &vercel::Model, - max_output_tokens: Option, -) -> open_ai::Request { - let stream = !model.id().starts_with("o1-"); - - let mut messages = Vec::new(); - for message in request.messages { - for content in message.content { - match content { - MessageContent::Text(text) | MessageContent::Thinking { text, .. 
} => { - add_message_content_part( - open_ai::MessagePart::Text { text: text }, - message.role, - &mut messages, - ) - } - MessageContent::RedactedThinking(_) => {} - MessageContent::Image(image) => { - add_message_content_part( - open_ai::MessagePart::Image { - image_url: ImageUrl { - url: image.to_base64_url(), - detail: None, - }, - }, - message.role, - &mut messages, - ); - } - MessageContent::ToolUse(tool_use) => { - let tool_call = open_ai::ToolCall { - id: tool_use.id.to_string(), - content: open_ai::ToolCallContent::Function { - function: open_ai::FunctionContent { - name: tool_use.name.to_string(), - arguments: serde_json::to_string(&tool_use.input) - .unwrap_or_default(), - }, - }, - }; - - if let Some(open_ai::RequestMessage::Assistant { tool_calls, .. }) = - messages.last_mut() - { - tool_calls.push(tool_call); - } else { - messages.push(open_ai::RequestMessage::Assistant { - content: None, - tool_calls: vec![tool_call], - }); - } - } - MessageContent::ToolResult(tool_result) => { - let content = match &tool_result.content { - LanguageModelToolResultContent::Text(text) => { - vec![open_ai::MessagePart::Text { - text: text.to_string(), - }] - } - LanguageModelToolResultContent::Image(image) => { - vec![open_ai::MessagePart::Image { - image_url: ImageUrl { - url: image.to_base64_url(), - detail: None, - }, - }] - } - }; - - messages.push(open_ai::RequestMessage::Tool { - content: content.into(), - tool_call_id: tool_result.tool_use_id.to_string(), - }); - } - } - } - } - - open_ai::Request { - model: model.id().into(), - messages, - stream, - stop: request.stop, - temperature: request.temperature.unwrap_or(1.0), - max_completion_tokens: max_output_tokens, - parallel_tool_calls: if model.supports_parallel_tool_calls() && !request.tools.is_empty() { - // Disable parallel tool calls, as the Agent currently expects a maximum of one per turn. - Some(false) - } else { - None - }, - tools: request - .tools - .into_iter() - .map(|tool| open_ai::ToolDefinition::Function { - function: open_ai::FunctionDefinition { - name: tool.name, - description: Some(tool.description), - parameters: Some(tool.input_schema), - }, - }) - .collect(), - tool_choice: request.tool_choice.map(|choice| match choice { - LanguageModelToolChoice::Auto => open_ai::ToolChoice::Auto, - LanguageModelToolChoice::Any => open_ai::ToolChoice::Required, - LanguageModelToolChoice::None => open_ai::ToolChoice::None, - }), - } -} - -fn add_message_content_part( - new_part: open_ai::MessagePart, - role: Role, - messages: &mut Vec, -) { - match (role, messages.last_mut()) { - (Role::User, Some(open_ai::RequestMessage::User { content })) - | ( - Role::Assistant, - Some(open_ai::RequestMessage::Assistant { - content: Some(content), - .. - }), - ) - | (Role::System, Some(open_ai::RequestMessage::System { content, .. 
})) => { - content.push_part(new_part); - } - _ => { - messages.push(match role { - Role::User => open_ai::RequestMessage::User { - content: open_ai::MessageContent::from(vec![new_part]), - }, - Role::Assistant => open_ai::RequestMessage::Assistant { - content: Some(open_ai::MessageContent::from(vec![new_part])), - tool_calls: Vec::new(), - }, - Role::System => open_ai::RequestMessage::System { - content: open_ai::MessageContent::from(vec![new_part]), - }, - }); - } - } -} - -pub struct VercelEventMapper { - tool_calls_by_index: HashMap, -} - -impl VercelEventMapper { - pub fn new() -> Self { - Self { - tool_calls_by_index: HashMap::default(), - } - } - - pub fn map_stream( - mut self, - events: Pin>>>, - ) -> impl Stream> - { - events.flat_map(move |event| { - futures::stream::iter(match event { - Ok(event) => self.map_event(event), - Err(error) => vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))], - }) - }) - } - - pub fn map_event( - &mut self, - event: ResponseStreamEvent, - ) -> Vec> { - let Some(choice) = event.choices.first() else { - return Vec::new(); - }; - - let mut events = Vec::new(); - if let Some(content) = choice.delta.content.clone() { - events.push(Ok(LanguageModelCompletionEvent::Text(content))); - } - - if let Some(tool_calls) = choice.delta.tool_calls.as_ref() { - for tool_call in tool_calls { - let entry = self.tool_calls_by_index.entry(tool_call.index).or_default(); - - if let Some(tool_id) = tool_call.id.clone() { - entry.id = tool_id; - } - - if let Some(function) = tool_call.function.as_ref() { - if let Some(name) = function.name.clone() { - entry.name = name; - } - - if let Some(arguments) = function.arguments.clone() { - entry.arguments.push_str(&arguments); - } - } - } - } - - match choice.finish_reason.as_deref() { - Some("stop") => { - events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn))); - } - Some("tool_calls") => { - events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| { - match serde_json::Value::from_str(&tool_call.arguments) { - Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse( - LanguageModelToolUse { - id: tool_call.id.clone().into(), - name: tool_call.name.as_str().into(), - is_input_complete: true, - input, - raw_input: tool_call.arguments.clone(), - }, - )), - Err(error) => Err(LanguageModelCompletionError::BadInputJson { - id: tool_call.id.into(), - tool_name: tool_call.name.as_str().into(), - raw_input: tool_call.arguments.into(), - json_parse_error: error.to_string(), - }), - } - })); - - events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse))); - } - Some(stop_reason) => { - log::error!("Unexpected Vercel stop_reason: {stop_reason:?}",); - events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn))); - } - None => {} - } - - events - } -} - -#[derive(Default)] -struct RawToolCall { - id: String, - name: String, - arguments: String, -} - pub fn count_vercel_tokens( request: LanguageModelRequest, model: Model, @@ -825,43 +576,3 @@ impl Render for ConfigurationView { } } } - -#[cfg(test)] -mod tests { - use gpui::TestAppContext; - use language_model::LanguageModelRequestMessage; - - use super::*; - - #[gpui::test] - fn tiktoken_rs_support(cx: &TestAppContext) { - let request = LanguageModelRequest { - thread_id: None, - prompt_id: None, - intent: None, - mode: None, - messages: vec![LanguageModelRequestMessage { - role: Role::User, - content: vec![MessageContent::Text("message".into())], - cache: false, - }], - tools: vec![], - tool_choice: None, - stop: vec![], - 
temperature: None, - }; - - // Validate that all models are supported by tiktoken-rs - for model in Model::iter() { - let count = cx - .executor() - .block(count_vercel_tokens( - request.clone(), - model, - &cx.app.borrow(), - )) - .unwrap(); - assert!(count > 0); - } - } -} diff --git a/crates/vercel/Cargo.toml b/crates/vercel/Cargo.toml index c4e1e4f99d56830272944ddef0b00427754e0fdc..60fa1a2390b2ea4e1169765e55f62a36d3d281bf 100644 --- a/crates/vercel/Cargo.toml +++ b/crates/vercel/Cargo.toml @@ -17,10 +17,7 @@ schemars = ["dep:schemars"] [dependencies] anyhow.workspace = true -futures.workspace = true -http_client.workspace = true schemars = { workspace = true, optional = true } serde.workspace = true -serde_json.workspace = true strum.workspace = true workspace-hack.workspace = true diff --git a/crates/vercel/src/vercel.rs b/crates/vercel/src/vercel.rs index 3195355bbc0a64dba6f51ebd0e4b0087df8680a0..cce219eca41a0e79e1dd0a61fbe1021f474fa11a 100644 --- a/crates/vercel/src/vercel.rs +++ b/crates/vercel/src/vercel.rs @@ -1,51 +1,9 @@ -use anyhow::{Context as _, Result, anyhow}; -use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream}; -use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest}; +use anyhow::Result; use serde::{Deserialize, Serialize}; -use serde_json::Value; -use std::{convert::TryFrom, future::Future}; use strum::EnumIter; pub const VERCEL_API_URL: &str = "https://api.v0.dev/v1"; -fn is_none_or_empty, U>(opt: &Option) -> bool { - opt.as_ref().map_or(true, |v| v.as_ref().is_empty()) -} - -#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)] -#[serde(rename_all = "lowercase")] -pub enum Role { - User, - Assistant, - System, - Tool, -} - -impl TryFrom for Role { - type Error = anyhow::Error; - - fn try_from(value: String) -> Result { - match value.as_str() { - "user" => Ok(Self::User), - "assistant" => Ok(Self::Assistant), - "system" => Ok(Self::System), - "tool" => Ok(Self::Tool), - _ => anyhow::bail!("invalid role '{value}'"), - } - } -} - -impl From for String { - fn from(val: Role) -> Self { - match val { - Role::User => "user".to_owned(), - Role::Assistant => "assistant".to_owned(), - Role::System => "system".to_owned(), - Role::Tool => "tool".to_owned(), - } - } -} - #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] pub enum Model { @@ -118,321 +76,3 @@ impl Model { } } } - -#[derive(Debug, Serialize, Deserialize)] -pub struct Request { - pub model: String, - pub messages: Vec, - pub stream: bool, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub max_completion_tokens: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub stop: Vec, - pub temperature: f32, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub tool_choice: Option, - /// Whether to enable parallel function calling during tool use. 
- #[serde(default, skip_serializing_if = "Option::is_none")] - pub parallel_tool_calls: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub tools: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(untagged)] -pub enum ToolChoice { - Auto, - Required, - None, - Other(ToolDefinition), -} - -#[derive(Clone, Deserialize, Serialize, Debug)] -#[serde(tag = "type", rename_all = "snake_case")] -pub enum ToolDefinition { - #[allow(dead_code)] - Function { function: FunctionDefinition }, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct FunctionDefinition { - pub name: String, - pub description: Option, - pub parameters: Option, -} - -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] -#[serde(tag = "role", rename_all = "lowercase")] -pub enum RequestMessage { - Assistant { - content: Option, - #[serde(default, skip_serializing_if = "Vec::is_empty")] - tool_calls: Vec, - }, - User { - content: MessageContent, - }, - System { - content: MessageContent, - }, - Tool { - content: MessageContent, - tool_call_id: String, - }, -} - -#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] -#[serde(untagged)] -pub enum MessageContent { - Plain(String), - Multipart(Vec), -} - -impl MessageContent { - pub fn empty() -> Self { - MessageContent::Multipart(vec![]) - } - - pub fn push_part(&mut self, part: MessagePart) { - match self { - MessageContent::Plain(text) => { - *self = - MessageContent::Multipart(vec![MessagePart::Text { text: text.clone() }, part]); - } - MessageContent::Multipart(parts) if parts.is_empty() => match part { - MessagePart::Text { text } => *self = MessageContent::Plain(text), - MessagePart::Image { .. } => *self = MessageContent::Multipart(vec![part]), - }, - MessageContent::Multipart(parts) => parts.push(part), - } - } -} - -impl From> for MessageContent { - fn from(mut parts: Vec) -> Self { - if let [MessagePart::Text { text }] = parts.as_mut_slice() { - MessageContent::Plain(std::mem::take(text)) - } else { - MessageContent::Multipart(parts) - } - } -} - -#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] -#[serde(tag = "type")] -pub enum MessagePart { - #[serde(rename = "text")] - Text { text: String }, - #[serde(rename = "image_url")] - Image { image_url: ImageUrl }, -} - -#[derive(Serialize, Deserialize, Clone, Debug, Eq, PartialEq)] -pub struct ImageUrl { - pub url: String, - #[serde(skip_serializing_if = "Option::is_none")] - pub detail: Option, -} - -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] -pub struct ToolCall { - pub id: String, - #[serde(flatten)] - pub content: ToolCallContent, -} - -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] -#[serde(tag = "type", rename_all = "lowercase")] -pub enum ToolCallContent { - Function { function: FunctionContent }, -} - -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] -pub struct FunctionContent { - pub name: String, - pub arguments: String, -} - -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] -pub struct ResponseMessageDelta { - pub role: Option, - pub content: Option, - #[serde(default, skip_serializing_if = "is_none_or_empty")] - pub tool_calls: Option>, -} - -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] -pub struct ToolCallChunk { - pub index: usize, - pub id: Option, - - // There is also an optional `type` field that would determine if a - // function is there. 
Sometimes this streams in with the `function` before - // it streams in the `type` - pub function: Option, -} - -#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] -pub struct FunctionChunk { - pub name: Option, - pub arguments: Option, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct Usage { - pub prompt_tokens: u32, - pub completion_tokens: u32, - pub total_tokens: u32, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct ChoiceDelta { - pub index: u32, - pub delta: ResponseMessageDelta, - pub finish_reason: Option, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(untagged)] -pub enum ResponseStreamResult { - Ok(ResponseStreamEvent), - Err { error: String }, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct ResponseStreamEvent { - pub model: String, - pub choices: Vec, - pub usage: Option, -} - -pub async fn stream_completion( - client: &dyn HttpClient, - api_url: &str, - api_key: &str, - request: Request, -) -> Result>> { - let uri = format!("{api_url}/chat/completions"); - let request_builder = HttpRequest::builder() - .method(Method::POST) - .uri(uri) - .header("Content-Type", "application/json") - .header("Authorization", format!("Bearer {}", api_key)); - - let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?; - let mut response = client.send(request).await?; - if response.status().is_success() { - let reader = BufReader::new(response.into_body()); - Ok(reader - .lines() - .filter_map(|line| async move { - match line { - Ok(line) => { - let line = line.strip_prefix("data: ")?; - if line == "[DONE]" { - None - } else { - match serde_json::from_str(line) { - Ok(ResponseStreamResult::Ok(response)) => Some(Ok(response)), - Ok(ResponseStreamResult::Err { error }) => { - Some(Err(anyhow!(error))) - } - Err(error) => Some(Err(anyhow!(error))), - } - } - } - Err(error) => Some(Err(anyhow!(error))), - } - }) - .boxed()) - } else { - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - - #[derive(Deserialize)] - struct VercelResponse { - error: VercelError, - } - - #[derive(Deserialize)] - struct VercelError { - message: String, - } - - match serde_json::from_str::(&body) { - Ok(response) if !response.error.message.is_empty() => Err(anyhow!( - "Failed to connect to Vercel API: {}", - response.error.message, - )), - - _ => anyhow::bail!( - "Failed to connect to Vercel API: {} {}", - response.status(), - body, - ), - } - } -} - -#[derive(Copy, Clone, Serialize, Deserialize)] -pub enum VercelEmbeddingModel { - #[serde(rename = "text-embedding-3-small")] - TextEmbedding3Small, - #[serde(rename = "text-embedding-3-large")] - TextEmbedding3Large, -} - -#[derive(Serialize)] -struct VercelEmbeddingRequest<'a> { - model: VercelEmbeddingModel, - input: Vec<&'a str>, -} - -#[derive(Deserialize)] -pub struct VercelEmbeddingResponse { - pub data: Vec, -} - -#[derive(Deserialize)] -pub struct VercelEmbedding { - pub embedding: Vec, -} - -pub fn embed<'a>( - client: &dyn HttpClient, - api_url: &str, - api_key: &str, - model: VercelEmbeddingModel, - texts: impl IntoIterator, -) -> impl 'static + Future> { - let uri = format!("{api_url}/embeddings"); - - let request = VercelEmbeddingRequest { - model, - input: texts.into_iter().collect(), - }; - let body = AsyncBody::from(serde_json::to_string(&request).unwrap()); - let request = HttpRequest::builder() - .method(Method::POST) - .uri(uri) - .header("Content-Type", "application/json") - .header("Authorization", format!("Bearer {}", api_key)) - .body(body) - 
.map(|request| client.send(request)); - - async move { - let mut response = request?.await?; - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - - anyhow::ensure!( - response.status().is_success(), - "error during embedding, status: {:?}, body: {:?}", - response.status(), - body - ); - let response: VercelEmbeddingResponse = - serde_json::from_str(&body).context("failed to parse Vercel embedding response")?; - Ok(response) - } -} From 59aeede50d71cbedca243cab607b4966339db027 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 25 Jun 2025 15:26:41 +0200 Subject: [PATCH 28/56] vercel: Use proper model identifiers and add image support (#33377) Follow up to previous PRs: - Return `true` in `supports_images` - v0 supports images already - Rename model id to match the exact version of the model `v0-1.5-md` (For now we do not expose `sm`/`lg` variants since they seem not to be available via the API) - Provide autocompletion in settings for using `vercel` as a `provider` Release Notes: - N/A --- crates/agent_settings/src/agent_settings.rs | 1 + crates/language_models/src/provider/vercel.rs | 10 ++++----- crates/vercel/src/vercel.rs | 22 ++++++++----------- 3 files changed, 15 insertions(+), 18 deletions(-) diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index 1386555582ecf0d47a6e6fcdb2474a655edc3a5e..a1162b8066c03d9ca3ee10eddedeba91d45fab54 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -734,6 +734,7 @@ impl JsonSchema for LanguageModelProviderSetting { "deepseek".into(), "openrouter".into(), "mistral".into(), + "vercel".into(), ]), ..Default::default() } diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index 65058cbb74ad78d5151ff7a3d4fd4b06f4fc6c7c..c86902fe76538fdb9ad857657a880d6dc6faf834 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -303,14 +303,14 @@ impl LanguageModel for VercelLanguageModel { } fn supports_images(&self) -> bool { - false + true } fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { match choice { - LanguageModelToolChoice::Auto => true, - LanguageModelToolChoice::Any => true, - LanguageModelToolChoice::None => true, + LanguageModelToolChoice::Auto + | LanguageModelToolChoice::Any + | LanguageModelToolChoice::None => true, } } @@ -398,7 +398,7 @@ pub fn count_vercel_tokens( } // Map Vercel models to appropriate OpenAI models for token counting // since Vercel uses OpenAI-compatible API - Model::VZero => { + Model::VZeroOnePointFiveMedium => { // Vercel v0 is similar to GPT-4o, so use gpt-4o for token counting tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages) } diff --git a/crates/vercel/src/vercel.rs b/crates/vercel/src/vercel.rs index cce219eca41a0e79e1dd0a61fbe1021f474fa11a..1ae22c5fefa742979eb01f57703e75f5d4546a5c 100644 --- a/crates/vercel/src/vercel.rs +++ b/crates/vercel/src/vercel.rs @@ -7,10 +7,9 @@ pub const VERCEL_API_URL: &str = "https://api.v0.dev/v1"; #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] pub enum Model { - #[serde(rename = "v-0")] #[default] - VZero, - + #[serde(rename = "v0-1.5-md")] + VZeroOnePointFiveMedium, #[serde(rename = "custom")] Custom { name: String, @@ -24,26 +23,26 @@ pub enum Model { impl Model { pub fn default_fast() -> Self { - 
Self::VZero + Self::VZeroOnePointFiveMedium } pub fn from_id(id: &str) -> Result { match id { - "v-0" => Ok(Self::VZero), + "v0-1.5-md" => Ok(Self::VZeroOnePointFiveMedium), invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"), } } pub fn id(&self) -> &str { match self { - Self::VZero => "v-0", + Self::VZeroOnePointFiveMedium => "v0-1.5-md", Self::Custom { name, .. } => name, } } pub fn display_name(&self) -> &str { match self { - Self::VZero => "Vercel v0", + Self::VZeroOnePointFiveMedium => "v0-1.5-md", Self::Custom { name, display_name, .. } => display_name.as_ref().unwrap_or(name), @@ -52,26 +51,23 @@ impl Model { pub fn max_token_count(&self) -> u64 { match self { - Self::VZero => 128_000, + Self::VZeroOnePointFiveMedium => 128_000, Self::Custom { max_tokens, .. } => *max_tokens, } } pub fn max_output_tokens(&self) -> Option { match self { + Self::VZeroOnePointFiveMedium => Some(32_000), Self::Custom { max_output_tokens, .. } => *max_output_tokens, - Self::VZero => Some(32_768), } } - /// Returns whether the given model supports the `parallel_tool_calls` parameter. - /// - /// If the model does not support the parameter, do not pass it up, or the API will return an error. pub fn supports_parallel_tool_calls(&self) -> bool { match self { - Self::VZero => true, + Self::VZeroOnePointFiveMedium => true, Model::Custom { .. } => false, } } From 0905255fd14df57b4dd250ea31bdda368e62d6ce Mon Sep 17 00:00:00 2001 From: Vladimir Kuznichenkov <5330267+kuzaxak@users.noreply.github.com> Date: Wed, 25 Jun 2025 17:15:13 +0300 Subject: [PATCH 29/56] bedrock: Add prompt caching support (#33194) Closes https://github.com/zed-industries/zed/issues/33221 Bedrock has similar to anthropic caching api, if we want to cache messages up to a certain point, we should add a special block into that message. Additionally, we can cache tools definition by adding cache point block after tools spec. 
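For illustration, here is a minimal sketch of the message-side half of that: appending a default cache point after a message's existing content blocks. It reuses the `CachePointBlock` builder and `BedrockInnerContent` paths from this patch and leaves out the surrounding `into_bedrock` plumbing, so treat it as a sketch rather than the exact implementation.

```rust
use anyhow::{Context as _, Result};
use bedrock::BedrockInnerContent;
use bedrock::bedrock_client::types::{CachePointBlock, CachePointType};

// Append a default cache point after a message's content blocks, telling
// Bedrock to cache the conversation up to this point.
fn push_cache_point(content: &mut Vec<BedrockInnerContent>) -> Result<()> {
    content.push(BedrockInnerContent::CachePoint(
        CachePointBlock::builder()
            .r#type(CachePointType::Default)
            .build()
            .context("failed to build cache point block")?,
    ));
    Ok(())
}
```

The tools side works the same way: when the tool list is non-empty, a `BedrockTool::CachePoint` built from the same block is pushed after the tool definitions.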
See: [Bedrock User Guide: Prompt Caching](https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-caching.html#prompt-caching-models) Release Notes: - bedrock: Added prompt caching support --------- Co-authored-by: Oleksiy Syvokon --- crates/bedrock/src/models.rs | 59 +++++++++++++++++++ .../language_models/src/provider/bedrock.rs | 52 ++++++++++++---- 2 files changed, 101 insertions(+), 10 deletions(-) diff --git a/crates/bedrock/src/models.rs b/crates/bedrock/src/models.rs index 272ac0e52c4123fe864a5c12b80111657c9078a3..b6eeafa2d6b273a8cc3f0c6cc7a18ea0589c4ba2 100644 --- a/crates/bedrock/src/models.rs +++ b/crates/bedrock/src/models.rs @@ -11,6 +11,13 @@ pub enum BedrockModelMode { }, } +#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] +pub struct BedrockModelCacheConfiguration { + pub max_cache_anchors: usize, + pub min_total_token: u64, +} + #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)] pub enum Model { @@ -104,6 +111,7 @@ pub enum Model { display_name: Option, max_output_tokens: Option, default_temperature: Option, + cache_configuration: Option, }, } @@ -401,6 +409,56 @@ impl Model { } } + pub fn supports_caching(&self) -> bool { + match self { + // Only Claude models on Bedrock support caching + // Nova models support only text caching + // https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-caching.html#prompt-caching-models + Self::Claude3_5Haiku + | Self::Claude3_7Sonnet + | Self::Claude3_7SonnetThinking + | Self::ClaudeSonnet4 + | Self::ClaudeSonnet4Thinking + | Self::ClaudeOpus4 + | Self::ClaudeOpus4Thinking => true, + + // Custom models - check if they have cache configuration + Self::Custom { + cache_configuration, + .. + } => cache_configuration.is_some(), + + // All other models don't support caching + _ => false, + } + } + + pub fn cache_configuration(&self) -> Option { + match self { + Self::Claude3_7Sonnet + | Self::Claude3_7SonnetThinking + | Self::ClaudeSonnet4 + | Self::ClaudeSonnet4Thinking + | Self::ClaudeOpus4 + | Self::ClaudeOpus4Thinking => Some(BedrockModelCacheConfiguration { + max_cache_anchors: 4, + min_total_token: 1024, + }), + + Self::Claude3_5Haiku => Some(BedrockModelCacheConfiguration { + max_cache_anchors: 4, + min_total_token: 2048, + }), + + Self::Custom { + cache_configuration, + .. 
+ } => cache_configuration.clone(), + + _ => None, + } + } + pub fn mode(&self) -> BedrockModelMode { match self { Model::Claude3_7SonnetThinking => BedrockModelMode::Thinking { @@ -660,6 +718,7 @@ mod tests { display_name: Some("My Custom Model".to_string()), max_output_tokens: Some(8192), default_temperature: Some(0.7), + cache_configuration: None, }; // Custom model should return its name unchanged diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index 2b2527f1accd3a1f72c51ffdcc96e3c3b4358ef8..a55fc5bc1142dcacf1d6cf5193345f6904c76b37 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -11,8 +11,8 @@ use aws_http_client::AwsHttpClient; use bedrock::bedrock_client::Client as BedrockClient; use bedrock::bedrock_client::config::timeout::TimeoutConfig; use bedrock::bedrock_client::types::{ - ContentBlockDelta, ContentBlockStart, ConverseStreamOutput, ReasoningContentBlockDelta, - StopReason, + CachePointBlock, CachePointType, ContentBlockDelta, ContentBlockStart, ConverseStreamOutput, + ReasoningContentBlockDelta, StopReason, }; use bedrock::{ BedrockAnyToolChoice, BedrockAutoToolChoice, BedrockBlob, BedrockError, BedrockInnerContent, @@ -48,7 +48,7 @@ use strum::{EnumIter, IntoEnumIterator, IntoStaticStr}; use theme::ThemeSettings; use tokio::runtime::Handle; use ui::{Icon, IconName, List, Tooltip, prelude::*}; -use util::{ResultExt, default}; +use util::ResultExt; use crate::AllLanguageModelSettings; @@ -329,6 +329,12 @@ impl LanguageModelProvider for BedrockLanguageModelProvider { max_tokens: model.max_tokens, max_output_tokens: model.max_output_tokens, default_temperature: model.default_temperature, + cache_configuration: model.cache_configuration.as_ref().map(|config| { + bedrock::BedrockModelCacheConfiguration { + max_cache_anchors: config.max_cache_anchors, + min_total_token: config.min_total_token, + } + }), }, ); } @@ -558,6 +564,7 @@ impl LanguageModel for BedrockModel { self.model.default_temperature(), self.model.max_output_tokens(), self.model.mode(), + self.model.supports_caching(), ) { Ok(request) => request, Err(err) => return futures::future::ready(Err(err.into())).boxed(), @@ -581,7 +588,13 @@ impl LanguageModel for BedrockModel { } fn cache_configuration(&self) -> Option { - None + self.model + .cache_configuration() + .map(|config| LanguageModelCacheConfiguration { + max_cache_anchors: config.max_cache_anchors, + should_speculate: false, + min_total_token: config.min_total_token, + }) } } @@ -608,6 +621,7 @@ pub fn into_bedrock( default_temperature: f32, max_output_tokens: u64, mode: BedrockModelMode, + supports_caching: bool, ) -> Result { let mut new_messages: Vec = Vec::new(); let mut system_message = String::new(); @@ -619,7 +633,7 @@ pub fn into_bedrock( match message.role { Role::User | Role::Assistant => { - let bedrock_message_content: Vec = message + let mut bedrock_message_content: Vec = message .content .into_iter() .filter_map(|content| match content { @@ -703,6 +717,14 @@ pub fn into_bedrock( _ => None, }) .collect(); + if message.cache && supports_caching { + bedrock_message_content.push(BedrockInnerContent::CachePoint( + CachePointBlock::builder() + .r#type(CachePointType::Default) + .build() + .context("failed to build cache point block")?, + )); + } let bedrock_role = match message.role { Role::User => bedrock::BedrockRole::User, Role::Assistant => bedrock::BedrockRole::Assistant, @@ -731,7 +753,7 @@ pub fn into_bedrock( } } - let 
tool_spec: Vec = request + let mut tool_spec: Vec = request .tools .iter() .filter_map(|tool| { @@ -748,6 +770,15 @@ pub fn into_bedrock( }) .collect(); + if !tool_spec.is_empty() && supports_caching { + tool_spec.push(BedrockTool::CachePoint( + CachePointBlock::builder() + .r#type(CachePointType::Default) + .build() + .context("failed to build cache point block")?, + )); + } + let tool_choice = match request.tool_choice { Some(LanguageModelToolChoice::Auto) | None => { BedrockToolChoice::Auto(BedrockAutoToolChoice::builder().build()) @@ -990,10 +1021,11 @@ pub fn map_to_language_model_completion_events( LanguageModelCompletionEvent::UsageUpdate( TokenUsage { input_tokens: metadata.input_tokens as u64, - output_tokens: metadata.output_tokens - as u64, - cache_creation_input_tokens: default(), - cache_read_input_tokens: default(), + output_tokens: metadata.output_tokens as u64, + cache_creation_input_tokens: + metadata.cache_write_input_tokens.unwrap_or_default() as u64, + cache_read_input_tokens: + metadata.cache_read_input_tokens.unwrap_or_default() as u64, }, ); return Some((Some(Ok(completion_event)), state)); From 308debe47f25553b2ce6f17c43df7e483304ec6f Mon Sep 17 00:00:00 2001 From: Sarmad Gulzar Date: Wed, 25 Jun 2025 19:21:33 +0500 Subject: [PATCH 30/56] terminal: Fix trailing single quote included when opening link from terminal (#33376) Closes #33210 Release Notes: - Fixed an issue where a trailing single quote was included when opening a link from the terminal. --- crates/terminal/src/terminal_hyperlinks.rs | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/crates/terminal/src/terminal_hyperlinks.rs b/crates/terminal/src/terminal_hyperlinks.rs index 8e9950388d9536a4946b6b8517807b7c32cba918..18675bbe02f94fc20995e90ba98799fbaf0fc92a 100644 --- a/crates/terminal/src/terminal_hyperlinks.rs +++ b/crates/terminal/src/terminal_hyperlinks.rs @@ -8,7 +8,7 @@ use alacritty_terminal::{ use regex::Regex; use std::{ops::Index, sync::LazyLock}; -const URL_REGEX: &str = r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`]+"#; +const URL_REGEX: &str = r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`']+"#; // Optional suffix matches MSBuild diagnostic suffixes for path parsing in PathLikeWithPosition // https://learn.microsoft.com/en-us/visualstudio/msbuild/msbuild-diagnostic-format-for-tasks const WORD_REGEX: &str = @@ -224,8 +224,12 @@ mod tests { fn test_url_regex() { re_test( URL_REGEX, - "test http://example.com test mailto:bob@example.com train", - vec!["http://example.com", "mailto:bob@example.com"], + "test http://example.com test 'https://website1.com' test mailto:bob@example.com train", + vec![ + "http://example.com", + "https://website1.com", + "mailto:bob@example.com", + ], ); } From eb51041154c593178566651cdb1be7721a9f0111 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Wed, 25 Jun 2025 19:54:08 +0530 Subject: [PATCH 31/56] debugger_ui: Fix variable completion accept in console appends the whole word (#33378) Closes #32959 Release Notes: - Fixed the issue where accepting variable completion in the Debugger would append the entire variable name instead of the remaining part. 
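The fix below derives the completion's replacement range by walking backwards from the cursor over identifier characters to find where the partially typed variable starts. A self-contained sketch of that scan, reduced to plain string offsets (the patch performs the equivalent walk over the buffer snapshot and converts the result into anchors):

```rust
// Return the byte offset where the identifier ending at `cursor` begins, so an
// accepted completion replaces that prefix instead of being appended after it.
fn word_start(text: &str, cursor: usize) -> usize {
    let mut word_len = 0;
    for ch in text[..cursor].chars().rev() {
        if ch.is_alphanumeric() || ch == '_' {
            word_len += ch.len_utf8();
        } else {
            break;
        }
    }
    cursor - word_len
}

fn main() {
    let query = "print my_var";
    // Completing `my_variable` should replace `my_var`, not yield `my_varmy_variable`.
    assert_eq!(word_start(query, query.len()), "print ".len());
}
```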
--- .../src/session/running/console.rs | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/crates/debugger_ui/src/session/running/console.rs b/crates/debugger_ui/src/session/running/console.rs index 0b4bc8865e0afacabb4ccec7f5a3f36016aed7c4..83d2d46547ada9da328cc44443813a87a6f681f1 100644 --- a/crates/debugger_ui/src/session/running/console.rs +++ b/crates/debugger_ui/src/session/running/console.rs @@ -646,8 +646,23 @@ impl ConsoleQueryBarCompletionProvider { (variables, string_matches) }); - let query = buffer.read(cx).text(); - + let snapshot = buffer.read(cx).text_snapshot(); + let query = snapshot.text(); + let replace_range = { + let buffer_offset = buffer_position.to_offset(&snapshot); + let reversed_chars = snapshot.reversed_chars_for_range(0..buffer_offset); + let mut word_len = 0; + for ch in reversed_chars { + if ch.is_alphanumeric() || ch == '_' { + word_len += 1; + } else { + break; + } + } + let word_start_offset = buffer_offset - word_len; + let start_anchor = snapshot.anchor_at(word_start_offset, Bias::Left); + start_anchor..buffer_position + }; cx.spawn(async move |_, cx| { const LIMIT: usize = 10; let matches = fuzzy::match_strings( @@ -667,7 +682,7 @@ impl ConsoleQueryBarCompletionProvider { let variable_value = variables.get(&string_match.string)?; Some(project::Completion { - replace_range: buffer_position..buffer_position, + replace_range: replace_range.clone(), new_text: string_match.string.clone(), label: CodeLabel { filter_range: 0..string_match.string.len(), From 6848073c382efcb73622d0453c92f8d0ccf6bd40 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 25 Jun 2025 12:10:11 -0400 Subject: [PATCH 32/56] Bump Zed to v0.194 (#33390) Release Notes: -N/A --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 224aa421a63b9700a448d17b2a0dd02bd538a7ee..f0afdb02fea529096262d6d96928ef0b7ada318f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19919,7 +19919,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.193.0" +version = "0.194.0" dependencies = [ "activity_indicator", "agent", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 58db67a06efb7c8a70f24a3ebb3094c29d07f3ed..534d79c6ac4fb5ab792482f248021ee71197d082 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition.workspace = true name = "zed" -version = "0.193.0" +version = "0.194.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] From 630a326a078b31ab67036bb160a652a4bf630e3f Mon Sep 17 00:00:00 2001 From: CharlesChen0823 Date: Thu, 26 Jun 2025 00:17:41 +0800 Subject: [PATCH 33/56] file_finder: Fix create wrong file in multiple worktree (#33139) When open multiple worktree, using `file_finder` to create a new file shoud respect current focused worktree. test case: ``` project: worktree A file1 worktree B file2 <- focused ``` when focused `file2`, `ctrl-p` toggle `file_finder` to create `file3` should exists in worktreeB. I try add test case for `CreateNew` in file_finder, but found not worked, if you help me, I can try add this test case. 
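For reference, the worktree selection added here boils down to: when more than one worktree is open and the query path starts with a worktree's root directory name, strip that prefix and create the file in that worktree; the worktree of the currently focused file, when there is one, still takes precedence. A stand-alone sketch of just the prefix-matching part, with illustrative names rather than Zed's real types:

```rust
use std::path::Path;

// Pick the worktree whose root directory name prefixes the query, stripping
// that prefix from the path that will be created.
fn resolve<'a>(query: &'a Path, worktree_roots: &[&str]) -> (usize, &'a Path) {
    if worktree_roots.len() > 1 {
        for (ix, root) in worktree_roots.iter().enumerate() {
            if let Ok(rest) = query.strip_prefix(root) {
                return (ix, rest);
            }
        }
    }
    (0, query)
}

fn main() {
    let (worktree, path) = resolve(Path::new("rootb/filec"), &["roota", "rootb"]);
    assert_eq!(worktree, 1);
    assert_eq!(path, Path::new("filec"));
}
```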
Release Notes: - Fixed file finder selecting wrong worktree when creating a file --- crates/file_finder/src/file_finder.rs | 41 +++++- crates/file_finder/src/file_finder_tests.rs | 142 ++++++++++++++++++++ 2 files changed, 176 insertions(+), 7 deletions(-) diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs index bfdb8fc4f482f4d6f7965d4d0940eaedfe8cca7d..5096be673342f2cfa365e8806be330bfc3bd26cf 100644 --- a/crates/file_finder/src/file_finder.rs +++ b/crates/file_finder/src/file_finder.rs @@ -939,20 +939,47 @@ impl FileFinderDelegate { matches.into_iter(), extend_old_matches, ); - let worktree = self.project.read(cx).visible_worktrees(cx).next(); - let filename = query.raw_query.to_string(); - let path = Path::new(&filename); + let filename = &query.raw_query; + let mut query_path = Path::new(filename); // add option of creating new file only if path is relative - if let Some(worktree) = worktree { + let available_worktree = self + .project + .read(cx) + .visible_worktrees(cx) + .filter(|worktree| !worktree.read(cx).is_single_file()) + .collect::>(); + let worktree_count = available_worktree.len(); + let mut expect_worktree = available_worktree.first().cloned(); + for worktree in available_worktree { + let worktree_root = worktree + .read(cx) + .abs_path() + .file_name() + .map_or(String::new(), |f| f.to_string_lossy().to_string()); + if worktree_count > 1 && query_path.starts_with(&worktree_root) { + query_path = query_path + .strip_prefix(&worktree_root) + .unwrap_or(query_path); + expect_worktree = Some(worktree); + break; + } + } + + if let Some(FoundPath { ref project, .. }) = self.currently_opened_path { + let worktree_id = project.worktree_id; + expect_worktree = self.project.read(cx).worktree_for_id(worktree_id, cx); + } + + if let Some(worktree) = expect_worktree { let worktree = worktree.read(cx); - if path.is_relative() - && worktree.entry_for_path(&path).is_none() + if query_path.is_relative() + && worktree.entry_for_path(&query_path).is_none() && !filename.ends_with("/") { self.matches.matches.push(Match::CreateNew(ProjectPath { worktree_id: worktree.id(), - path: Arc::from(path), + path: Arc::from(query_path), })); } } diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index dbb6d45f916c7251e181c414d315d197aed7bd0a..db259ccef854b1d3c5c4fae3bc9ebad08e398891 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -881,6 +881,148 @@ async fn test_single_file_worktrees(cx: &mut TestAppContext) { picker.update(cx, |f, _| assert_eq!(f.delegate.matches.len(), 0)); } +#[gpui::test] +async fn test_create_file_for_multiple_worktrees(cx: &mut TestAppContext) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree( + path!("/roota"), + json!({ "the-parent-dira": { "filea": "" } }), + ) + .await; + + app_state + .fs + .as_fake() + .insert_tree( + path!("/rootb"), + json!({ "the-parent-dirb": { "fileb": "" } }), + ) + .await; + + let project = Project::test( + app_state.fs.clone(), + [path!("/roota").as_ref(), path!("/rootb").as_ref()], + cx, + ) + .await; + + let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let (_worktree_id1, worktree_id2) = cx.read(|cx| { + let worktrees = workspace.read(cx).worktrees(cx).collect::>(); + ( + WorktreeId::from_usize(worktrees[0].entity_id().as_u64() as usize), + WorktreeId::from_usize(worktrees[1].entity_id().as_u64() as usize), + ) + }); + 
+ let b_path = ProjectPath { + worktree_id: worktree_id2, + path: Arc::from(Path::new(path!("the-parent-dirb/fileb"))), + }; + workspace + .update_in(cx, |workspace, window, cx| { + workspace.open_path(b_path, None, true, window, cx) + }) + .await + .unwrap(); + + let finder = open_file_picker(&workspace, cx); + + finder + .update_in(cx, |f, window, cx| { + f.delegate.spawn_search( + test_path_position(path!("the-parent-dirb/filec")), + window, + cx, + ) + }) + .await; + cx.run_until_parked(); + finder.update_in(cx, |picker, window, cx| { + assert_eq!(picker.delegate.matches.len(), 1); + picker.delegate.confirm(false, window, cx) + }); + cx.run_until_parked(); + cx.read(|cx| { + let active_editor = workspace.read(cx).active_item_as::(cx).unwrap(); + let project_path = active_editor.read(cx).project_path(cx); + assert_eq!( + project_path, + Some(ProjectPath { + worktree_id: worktree_id2, + path: Arc::from(Path::new(path!("the-parent-dirb/filec"))) + }) + ); + }); +} + +#[gpui::test] +async fn test_create_file_no_focused_with_multiple_worktrees(cx: &mut TestAppContext) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree( + path!("/roota"), + json!({ "the-parent-dira": { "filea": "" } }), + ) + .await; + + app_state + .fs + .as_fake() + .insert_tree( + path!("/rootb"), + json!({ "the-parent-dirb": { "fileb": "" } }), + ) + .await; + + let project = Project::test( + app_state.fs.clone(), + [path!("/roota").as_ref(), path!("/rootb").as_ref()], + cx, + ) + .await; + + let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let (_worktree_id1, worktree_id2) = cx.read(|cx| { + let worktrees = workspace.read(cx).worktrees(cx).collect::>(); + ( + WorktreeId::from_usize(worktrees[0].entity_id().as_u64() as usize), + WorktreeId::from_usize(worktrees[1].entity_id().as_u64() as usize), + ) + }); + + let finder = open_file_picker(&workspace, cx); + + finder + .update_in(cx, |f, window, cx| { + f.delegate + .spawn_search(test_path_position(path!("rootb/filec")), window, cx) + }) + .await; + cx.run_until_parked(); + finder.update_in(cx, |picker, window, cx| { + assert_eq!(picker.delegate.matches.len(), 1); + picker.delegate.confirm(false, window, cx) + }); + cx.run_until_parked(); + cx.read(|cx| { + let active_editor = workspace.read(cx).active_item_as::(cx).unwrap(); + let project_path = active_editor.read(cx).project_path(cx); + assert_eq!( + project_path, + Some(ProjectPath { + worktree_id: worktree_id2, + path: Arc::from(Path::new("filec")) + }) + ); + }); +} + #[gpui::test] async fn test_path_distance_ordering(cx: &mut TestAppContext) { let app_state = init_test(cx); From b0bab0bf9a4b4307dfd2e20a6a920228917d981c Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Wed, 25 Jun 2025 19:30:22 +0300 Subject: [PATCH 34/56] agent: Prevent use of disabled tools (#33392) The agent now checks if a tool is enabled in the current profile before calling it. Previously, the agent could still call disabled tools, which commonly happened after switching profiles in the middle of a thread. 
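Conceptually the new check is just a lookup into the active profile's tool map, with anything not explicitly enabled treated as disabled; the patch then routes a disabled tool through the same path as a hallucinated one. A minimal stand-alone sketch (the types and tool names here are illustrative, not Zed's real ones):

```rust
use std::collections::HashMap;

// A profile enables a tool only if it is explicitly set to `true`; anything
// missing from the map counts as disabled.
struct Profile {
    tools: HashMap<String, bool>,
}

impl Profile {
    fn is_tool_enabled(&self, name: &str) -> bool {
        *self.tools.get(name).unwrap_or(&false)
    }
}

fn main() {
    let profile = Profile {
        tools: HashMap::from([("read_file".to_string(), true)]),
    };
    assert!(profile.is_tool_enabled("read_file"));
    // After switching to this profile mid-thread, a tool it doesn't list may no longer run.
    assert!(!profile.is_tool_enabled("terminal"));
}
```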
Release Notes: - Fixed a bug where the agent could use disabled tools sometimes --- crates/agent/src/agent_profile.rs | 8 ++++ crates/agent/src/thread.rs | 78 +++++++++++++++++-------------- 2 files changed, 52 insertions(+), 34 deletions(-) diff --git a/crates/agent/src/agent_profile.rs b/crates/agent/src/agent_profile.rs index c27a534a56e65dac7da9ae9a69304276205f97a4..07030c744fc085914ed5d085afd3699482fc6739 100644 --- a/crates/agent/src/agent_profile.rs +++ b/crates/agent/src/agent_profile.rs @@ -85,6 +85,14 @@ impl AgentProfile { .collect() } + pub fn is_tool_enabled(&self, source: ToolSource, tool_name: String, cx: &App) -> bool { + let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else { + return false; + }; + + return Self::is_enabled(settings, source, tool_name); + } + fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: String) -> bool { match source { ToolSource::Native => *settings.tools.get(name.as_str()).unwrap_or(&false), diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index a46aa9381ea45002495a8fc3d2ee408173d8b3d4..4494446a6dcbcdeb1f4aec510cecc7f2c527ba56 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1770,7 +1770,7 @@ impl Thread { match result.as_ref() { Ok(stop_reason) => match stop_reason { StopReason::ToolUse => { - let tool_uses = thread.use_pending_tools(window, cx, model.clone()); + let tool_uses = thread.use_pending_tools(window, model.clone(), cx); cx.emit(ThreadEvent::UsePendingTools { tool_uses }); } StopReason::EndTurn | StopReason::MaxTokens => { @@ -2120,8 +2120,8 @@ impl Thread { pub fn use_pending_tools( &mut self, window: Option, - cx: &mut Context, model: Arc, + cx: &mut Context, ) -> Vec { self.auto_capture_telemetry(cx); let request = @@ -2135,43 +2135,53 @@ impl Thread { .collect::>(); for tool_use in pending_tool_uses.iter() { - if let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) { - if tool.needs_confirmation(&tool_use.input, cx) - && !AgentSettings::get_global(cx).always_allow_tool_actions - { - self.tool_use.confirm_tool_use( - tool_use.id.clone(), - tool_use.ui_text.clone(), - tool_use.input.clone(), - request.clone(), - tool, - ); - cx.emit(ThreadEvent::ToolConfirmationNeeded); - } else { - self.run_tool( - tool_use.id.clone(), - tool_use.ui_text.clone(), - tool_use.input.clone(), - request.clone(), - tool, - model.clone(), - window, - cx, - ); - } - } else { - self.handle_hallucinated_tool_use( - tool_use.id.clone(), - tool_use.name.clone(), - window, - cx, - ); - } + self.use_pending_tool(tool_use.clone(), request.clone(), model.clone(), window, cx); } pending_tool_uses } + fn use_pending_tool( + &mut self, + tool_use: PendingToolUse, + request: Arc, + model: Arc, + window: Option, + cx: &mut Context, + ) { + let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) else { + return self.handle_hallucinated_tool_use(tool_use.id, tool_use.name, window, cx); + }; + + if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) { + return self.handle_hallucinated_tool_use(tool_use.id, tool_use.name, window, cx); + } + + if tool.needs_confirmation(&tool_use.input, cx) + && !AgentSettings::get_global(cx).always_allow_tool_actions + { + self.tool_use.confirm_tool_use( + tool_use.id, + tool_use.ui_text, + tool_use.input, + request, + tool, + ); + cx.emit(ThreadEvent::ToolConfirmationNeeded); + } else { + self.run_tool( + tool_use.id, + tool_use.ui_text, + tool_use.input, + request, + tool, + model, + window, + cx, + ); + } + } + pub fn 
handle_hallucinated_tool_use( &mut self, tool_use_id: LanguageModelToolUseId, From 19c9fb3118ff82bca356b9fca26e69f7299ab628 Mon Sep 17 00:00:00 2001 From: ddoemonn <109994179+ddoemonn@users.noreply.github.com> Date: Wed, 25 Jun 2025 19:43:00 +0300 Subject: [PATCH 35/56] Allow multiple Markdown preview tabs (#32859) Closes #32791 https://github.com/user-attachments/assets/8cb90e3d-ef7b-407f-b78b-7ba4ff6d8df2 Release Notes: - Allowed multiple Markdown preview tabs --- .../markdown_preview/src/markdown_preview.rs | 5 +- .../src/markdown_preview_view.rs | 76 +++++++++++++++++-- 2 files changed, 74 insertions(+), 7 deletions(-) diff --git a/crates/markdown_preview/src/markdown_preview.rs b/crates/markdown_preview/src/markdown_preview.rs index de3554286bfb68957656660fe834837c5a576ee6..fad6355d8adf489017c122ae9390ebafc6b3ac78 100644 --- a/crates/markdown_preview/src/markdown_preview.rs +++ b/crates/markdown_preview/src/markdown_preview.rs @@ -6,7 +6,10 @@ pub mod markdown_parser; pub mod markdown_preview_view; pub mod markdown_renderer; -actions!(markdown, [OpenPreview, OpenPreviewToTheSide]); +actions!( + markdown, + [OpenPreview, OpenPreviewToTheSide, OpenFollowingPreview] +); pub fn init(cx: &mut App) { cx.observe_new(|workspace: &mut Workspace, window, cx| { diff --git a/crates/markdown_preview/src/markdown_preview_view.rs b/crates/markdown_preview/src/markdown_preview_view.rs index c9c32e216aa158776c8a318b82f6810ffed02dbc..40c1783482f8b2a91126962d58b84b495e96a039 100644 --- a/crates/markdown_preview/src/markdown_preview_view.rs +++ b/crates/markdown_preview/src/markdown_preview_view.rs @@ -20,7 +20,7 @@ use workspace::{Pane, Workspace}; use crate::OpenPreviewToTheSide; use crate::markdown_elements::ParsedMarkdownElement; use crate::{ - OpenPreview, + OpenFollowingPreview, OpenPreview, markdown_elements::ParsedMarkdown, markdown_parser::parse_markdown, markdown_renderer::{RenderContext, render_markdown_block}, @@ -39,6 +39,7 @@ pub struct MarkdownPreviewView { tab_content_text: Option, language_registry: Arc, parsing_markdown_task: Option>>, + mode: MarkdownPreviewMode, } #[derive(Clone, Copy, Debug, PartialEq)] @@ -58,9 +59,11 @@ impl MarkdownPreviewView { pub fn register(workspace: &mut Workspace, _window: &mut Window, _cx: &mut Context) { workspace.register_action(move |workspace, _: &OpenPreview, window, cx| { if let Some(editor) = Self::resolve_active_item_as_markdown_editor(workspace, cx) { - let view = Self::create_markdown_view(workspace, editor, window, cx); + let view = Self::create_markdown_view(workspace, editor.clone(), window, cx); workspace.active_pane().update(cx, |pane, cx| { - if let Some(existing_view_idx) = Self::find_existing_preview_item_idx(pane) { + if let Some(existing_view_idx) = + Self::find_existing_independent_preview_item_idx(pane, &editor, cx) + { pane.activate_item(existing_view_idx, true, true, window, cx); } else { pane.add_item(Box::new(view.clone()), true, true, None, window, cx) @@ -84,7 +87,9 @@ impl MarkdownPreviewView { ) }); pane.update(cx, |pane, cx| { - if let Some(existing_view_idx) = Self::find_existing_preview_item_idx(pane) { + if let Some(existing_view_idx) = + Self::find_existing_independent_preview_item_idx(pane, &editor, cx) + { pane.activate_item(existing_view_idx, true, true, window, cx); } else { pane.add_item(Box::new(view.clone()), false, false, None, window, cx) @@ -94,11 +99,49 @@ impl MarkdownPreviewView { cx.notify(); } }); + + workspace.register_action(move |workspace, _: &OpenFollowingPreview, window, cx| { + if let Some(editor) = 
Self::resolve_active_item_as_markdown_editor(workspace, cx) { + // Check if there's already a following preview + let existing_follow_view_idx = { + let active_pane = workspace.active_pane().read(cx); + active_pane + .items_of_type::() + .find(|view| view.read(cx).mode == MarkdownPreviewMode::Follow) + .and_then(|view| active_pane.index_for_item(&view)) + }; + + if let Some(existing_follow_view_idx) = existing_follow_view_idx { + workspace.active_pane().update(cx, |pane, cx| { + pane.activate_item(existing_follow_view_idx, true, true, window, cx); + }); + } else { + let view = + Self::create_following_markdown_view(workspace, editor.clone(), window, cx); + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item(Box::new(view.clone()), true, true, None, window, cx) + }); + } + cx.notify(); + } + }); } - fn find_existing_preview_item_idx(pane: &Pane) -> Option { + fn find_existing_independent_preview_item_idx( + pane: &Pane, + editor: &Entity, + cx: &App, + ) -> Option { pane.items_of_type::() - .nth(0) + .find(|view| { + let view_read = view.read(cx); + // Only look for independent (Default mode) previews, not Follow previews + view_read.mode == MarkdownPreviewMode::Default + && view_read + .active_editor + .as_ref() + .is_some_and(|active_editor| active_editor.editor == *editor) + }) .and_then(|view| pane.index_for_item(&view)) } @@ -122,6 +165,25 @@ impl MarkdownPreviewView { editor: Entity, window: &mut Window, cx: &mut Context, + ) -> Entity { + let language_registry = workspace.project().read(cx).languages().clone(); + let workspace_handle = workspace.weak_handle(); + MarkdownPreviewView::new( + MarkdownPreviewMode::Default, + editor, + workspace_handle, + language_registry, + None, + window, + cx, + ) + } + + fn create_following_markdown_view( + workspace: &mut Workspace, + editor: Entity, + window: &mut Window, + cx: &mut Context, ) -> Entity { let language_registry = workspace.project().read(cx).languages().clone(); let workspace_handle = workspace.weak_handle(); @@ -266,6 +328,7 @@ impl MarkdownPreviewView { language_registry, parsing_markdown_task: None, image_cache: RetainAllImageCache::new(cx), + mode, }; this.set_editor(active_editor, window, cx); @@ -343,6 +406,7 @@ impl MarkdownPreviewView { ); let tab_content = editor.read(cx).tab_content_text(0, cx); + if self.tab_content_text.is_none() { self.tab_content_text = Some(format!("Preview {}", tab_content).into()); } From 7d087ea5d2ed3d07a01ff17669bafac1e19b0030 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Jun 2025 12:48:03 -0400 Subject: [PATCH 36/56] docs: Improve visual-customization.md docs for Zed prompts (#33254) Release Notes: - N/A --- docs/src/visual-customization.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 4b48c8430afbe0b05aebb6f69b926e4d112da7fc..e68e7ffabf52a6fc1ae88dedfd8abf89342fc453 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -120,6 +120,13 @@ To disable this behavior use: ```json { + // Force usage of Zed build in path prompts (file and directory pickers) + // instead of OS native pickers (false). + "use_system_path_prompts": true, + // Force usage of Zed built in confirmation prompts ("Do you want to save?") + // instead of OS native prompts (false). On linux this is ignored (always false). 
+ "use_system_prompts": true, + // Whether to use the system provided dialogs for Open and Save As (true) or // Zed's built-in keyboard-first pickers (false) "use_system_path_prompts": true, From 93d670af132879d23964e96e1f808733dcaa0da0 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Jun 2025 12:48:15 -0400 Subject: [PATCH 37/56] Fix empty code actions menu trapping cursor (#33386) Closes: https://github.com/zed-industries/zed/issues/33382 Follow-up to: https://github.com/zed-industries/zed/pull/32579 CC: @ConradIrwin @Anthony-Eid Release Notes: - Fixed an issue with empty code actions menu locking the cursor (Preview Only) --- crates/editor/src/code_context_menus.rs | 2 +- crates/editor/src/editor.rs | 20 ++++++++++++++------ 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index e9642657f8b7cba328bce0413e09d29ebbc9cfd4..291c03422def426054457c04ab8c9e4e710112a7 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -1205,7 +1205,7 @@ impl CodeActionContents { tasks_len + code_actions_len + self.debug_scenarios.len() } - fn is_empty(&self) -> bool { + pub fn is_empty(&self) -> bool { self.len() == 0 } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ddecdcabcff11b411a01b66be31271b04057d945..ffb08e4290e6eb359e011e4a1e817abb247be33a 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -5976,15 +5976,23 @@ impl Editor { editor.update_in(cx, |editor, window, cx| { crate::hover_popover::hide_hover(editor, cx); + let actions = CodeActionContents::new( + resolved_tasks, + code_actions, + debug_scenarios, + task_context.unwrap_or_default(), + ); + + // Don't show the menu if there are no actions available + if actions.is_empty() { + cx.notify(); + return Task::ready(Ok(())); + } + *editor.context_menu.borrow_mut() = Some(CodeContextMenu::CodeActions(CodeActionsMenu { buffer, - actions: CodeActionContents::new( - resolved_tasks, - code_actions, - debug_scenarios, - task_context.unwrap_or_default(), - ), + actions, selected_item: Default::default(), scroll_handle: UniformListScrollHandle::default(), deployed_from, From 84494ab26baba148a6104df5df3c2df541c03c21 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Jun 2025 12:48:46 -0400 Subject: [PATCH 38/56] Make ctrl-alt-b / cmd-alt-b toggle right dock (#33190) Closes: https://github.com/zed-industries/zed/issues/33147 In VSCode ctrl-alt-b / cmd-alt-b toggles the right dock. Zed should follow this behavior. 
See also: - https://github.com/zed-industries/zed/pull/31630 Release Notes: - N/A --- assets/keymaps/default-linux.json | 3 +-- assets/keymaps/default-macos.json | 4 +--- assets/keymaps/linux/cursor.json | 2 -- assets/keymaps/macos/cursor.json | 2 -- 4 files changed, 2 insertions(+), 9 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 1a9108f1084a929bec2261f8f94f9ea9ab6b5b04..23a1aead688a414bd509c192191a6957bbbfcfc7 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -243,8 +243,7 @@ "ctrl-alt-e": "agent::RemoveAllContext", "ctrl-shift-e": "project_panel::ToggleFocus", "ctrl-shift-enter": "agent::ContinueThread", - "alt-enter": "agent::ContinueWithBurnMode", - "ctrl-alt-b": "agent::ToggleBurnMode" + "alt-enter": "agent::ContinueWithBurnMode" } }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 42bba24d6d84da8bb98cbe5b7f80ac711b624ad4..785103aa92797436a578c7c1b16282619b37df6d 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -283,8 +283,7 @@ "cmd-alt-e": "agent::RemoveAllContext", "cmd-shift-e": "project_panel::ToggleFocus", "cmd-shift-enter": "agent::ContinueThread", - "alt-enter": "agent::ContinueWithBurnMode", - "cmd-alt-b": "agent::ToggleBurnMode" + "alt-enter": "agent::ContinueWithBurnMode" } }, { @@ -587,7 +586,6 @@ "alt-cmd-o": ["projects::OpenRecent", { "create_new_window": false }], "ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }], "ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }], - "alt-cmd-b": "branches::OpenRecent", "ctrl-~": "workspace::NewTerminal", "cmd-s": "workspace::Save", "cmd-k s": "workspace::SaveWithoutFormat", diff --git a/assets/keymaps/linux/cursor.json b/assets/keymaps/linux/cursor.json index 14cfcc43eca76239202dc386df23e67d0ca75bd0..347b7885fcc6b013f62e0c6f2ca1504ecc24fb51 100644 --- a/assets/keymaps/linux/cursor.json +++ b/assets/keymaps/linux/cursor.json @@ -8,7 +8,6 @@ "ctrl-shift-i": "agent::ToggleFocus", "ctrl-l": "agent::ToggleFocus", "ctrl-shift-l": "agent::ToggleFocus", - "ctrl-alt-b": "agent::ToggleFocus", "ctrl-shift-j": "agent::OpenConfiguration" } }, @@ -42,7 +41,6 @@ "ctrl-shift-i": "workspace::ToggleRightDock", "ctrl-l": "workspace::ToggleRightDock", "ctrl-shift-l": "workspace::ToggleRightDock", - "ctrl-alt-b": "workspace::ToggleRightDock", "ctrl-w": "workspace::ToggleRightDock", // technically should close chat "ctrl-.": "agent::ToggleProfileSelector", "ctrl-/": "agent::ToggleModelSelector", diff --git a/assets/keymaps/macos/cursor.json b/assets/keymaps/macos/cursor.json index 5d26974f056a2d3f918319342fb82d1f8828e767..b1d39bef9eb1397ceaeb0fb82956f14a0391b068 100644 --- a/assets/keymaps/macos/cursor.json +++ b/assets/keymaps/macos/cursor.json @@ -8,7 +8,6 @@ "cmd-shift-i": "agent::ToggleFocus", "cmd-l": "agent::ToggleFocus", "cmd-shift-l": "agent::ToggleFocus", - "cmd-alt-b": "agent::ToggleFocus", "cmd-shift-j": "agent::OpenConfiguration" } }, @@ -43,7 +42,6 @@ "cmd-shift-i": "workspace::ToggleRightDock", "cmd-l": "workspace::ToggleRightDock", "cmd-shift-l": "workspace::ToggleRightDock", - "cmd-alt-b": "workspace::ToggleRightDock", "cmd-w": "workspace::ToggleRightDock", // technically should close chat "cmd-.": "agent::ToggleProfileSelector", "cmd-/": "agent::ToggleModelSelector", From 91c9281cea88e22fed57bffee0effe068daa8cea Mon Sep 17 00:00:00 2001 From: Cole 
Miller Date: Wed, 25 Jun 2025 12:49:37 -0400 Subject: [PATCH 39/56] Default to cargo-zigbuild for ZED_BUILD_REMOTE_SERVER (#33391) Follow-up to #31467. `cargo-zigbuild` will be installed if it's not there already, but you have to install Zig yourself. Pass `ZED_BUILD_REMOTE_SERVER=cross` to use the old way. Release Notes: - N/A --- Cargo.lock | 1 + crates/remote/Cargo.toml | 1 + crates/remote/src/ssh_session.rs | 64 ++++++++++++++++++++------------ 3 files changed, 43 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f0afdb02fea529096262d6d96928ef0b7ada318f..979fc9441c593bb2bdd945a2cbca92779aaafa65 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -13168,6 +13168,7 @@ dependencies = [ "thiserror 2.0.12", "urlencoding", "util", + "which 6.0.3", "workspace-hack", ] diff --git a/crates/remote/Cargo.toml b/crates/remote/Cargo.toml index 6042e63fd98b4354b719f30d1a3f3e2fc33cdeb1..5985bcae827c42f4ae535b1dd859e436167e3fe5 100644 --- a/crates/remote/Cargo.toml +++ b/crates/remote/Cargo.toml @@ -41,6 +41,7 @@ tempfile.workspace = true thiserror.workspace = true urlencoding.workspace = true util.workspace = true +which.workspace = true workspace-hack.workspace = true [dev-dependencies] diff --git a/crates/remote/src/ssh_session.rs b/crates/remote/src/ssh_session.rs index ffcf3b378340d145bcf253932aecc3bc2d35c557..e01f4cfb0462baef01656755ebdd1abdcdd56d2c 100644 --- a/crates/remote/src/ssh_session.rs +++ b/crates/remote/src/ssh_session.rs @@ -2030,27 +2030,7 @@ impl SshRemoteConnection { }; smol::fs::create_dir_all("target/remote_server").await?; - if build_remote_server.contains("zigbuild") { - delegate.set_status( - Some(&format!( - "Building remote binary from source for {triple} with Zig" - )), - cx, - ); - log::info!("building remote binary from source for {triple} with Zig"); - run_cmd(Command::new("cargo").args([ - "zigbuild", - "--package", - "remote_server", - "--features", - "debug-embed", - "--target-dir", - "target/remote_server", - "--target", - &triple, - ])) - .await?; - } else { + if build_remote_server.contains("cross") { delegate.set_status(Some("Installing cross.rs for cross-compilation"), cx); log::info!("installing cross"); run_cmd(Command::new("cargo").args([ @@ -2088,12 +2068,50 @@ impl SshRemoteConnection { ), ) .await?; - } + } else { + let which = cx + .background_spawn(async move { which::which("zig") }) + .await; + + if which.is_err() { + anyhow::bail!( + "zig not found on $PATH, install zig (see https://ziglang.org/learn/getting-started or use zigup) or pass ZED_BUILD_REMOTE_SERVER=cross to use cross" + ) + } + + delegate.set_status(Some("Adding rustup target for cross-compilation"), cx); + log::info!("adding rustup target"); + run_cmd(Command::new("rustup").args(["target", "add"]).arg(&triple)).await?; - delegate.set_status(Some("Compressing binary"), cx); + delegate.set_status(Some("Installing cargo-zigbuild for cross-compilation"), cx); + log::info!("installing cargo-zigbuild"); + run_cmd(Command::new("cargo").args(["install", "--locked", "cargo-zigbuild"])).await?; + + delegate.set_status( + Some(&format!( + "Building remote binary from source for {triple} with Zig" + )), + cx, + ); + log::info!("building remote binary from source for {triple} with Zig"); + run_cmd(Command::new("cargo").args([ + "zigbuild", + "--package", + "remote_server", + "--features", + "debug-embed", + "--target-dir", + "target/remote_server", + "--target", + &triple, + ])) + .await?; + }; let mut path = format!("target/remote_server/{triple}/debug/remote_server").into(); if 
!build_remote_server.contains("nocompress") { + delegate.set_status(Some("Compressing binary"), cx); + run_cmd(Command::new("gzip").args([ "-9", "-f", From c0acd8e8b165418195ecc072005e97fe6f963038 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 25 Jun 2025 19:57:28 +0300 Subject: [PATCH 40/56] Add language server control tool into the status bar (#32490) Release Notes: - Added the language server control tool into the status bar --------- Co-authored-by: Nate Butler --- Cargo.lock | 2 + assets/keymaps/default-linux.json | 3 +- assets/keymaps/default-macos.json | 3 +- assets/settings/default.json | 5 + crates/activity_indicator/Cargo.toml | 1 + .../src/activity_indicator.rs | 103 +- crates/collab/src/rpc.rs | 1 + crates/editor/src/editor.rs | 6 +- .../src/extension_store_test.rs | 18 +- crates/git_ui/src/branch_picker.rs | 4 - crates/git_ui/src/repository_selector.rs | 13 +- crates/language/src/language_registry.rs | 19 +- .../src/extension_lsp_adapter.rs | 10 +- crates/language_tools/Cargo.toml | 4 +- crates/language_tools/src/language_tools.rs | 39 +- crates/language_tools/src/lsp_log.rs | 153 ++- crates/language_tools/src/lsp_tool.rs | 917 ++++++++++++++++++ crates/lsp/src/lsp.rs | 6 + crates/picker/src/picker.rs | 2 + crates/project/src/buffer_store.rs | 4 +- .../project/src/debugger/breakpoint_store.rs | 2 +- crates/project/src/git_store.rs | 6 +- crates/project/src/lsp_store.rs | 792 +++++++++++---- .../src/lsp_store/rust_analyzer_ext.rs | 57 +- .../project/src/manifest_tree/server_tree.rs | 6 + crates/project/src/project.rs | 38 +- crates/project/src/project_settings.rs | 22 + crates/project/src/project_tests.rs | 11 +- crates/proto/proto/lsp.proto | 43 + crates/remote_server/src/headless_project.rs | 2 + crates/workspace/src/workspace.rs | 1 - crates/zed/src/zed.rs | 11 +- 32 files changed, 1992 insertions(+), 312 deletions(-) create mode 100644 crates/language_tools/src/lsp_tool.rs diff --git a/Cargo.lock b/Cargo.lock index 979fc9441c593bb2bdd945a2cbca92779aaafa65..4684bec47e32b478dd6208c2c974852c2d308fce 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14,6 +14,7 @@ dependencies = [ "gpui", "language", "project", + "proto", "release_channel", "smallvec", "ui", @@ -9025,6 +9026,7 @@ dependencies = [ "itertools 0.14.0", "language", "lsp", + "picker", "project", "release_channel", "serde_json", diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 23a1aead688a414bd509c192191a6957bbbfcfc7..0c4de0e0532f09f01aaf420a4e2803067c9e25b1 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -41,7 +41,8 @@ "shift-f11": "debugger::StepOut", "f11": "zed::ToggleFullScreen", "ctrl-alt-z": "edit_prediction::RateCompletions", - "ctrl-shift-i": "edit_prediction::ToggleMenu" + "ctrl-shift-i": "edit_prediction::ToggleMenu", + "ctrl-alt-l": "lsp_tool::ToggleMenu" } }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 785103aa92797436a578c7c1b16282619b37df6d..5bd99963bdb7f22ea1a63d0daa60a55b8d8baccd 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -47,7 +47,8 @@ "fn-f": "zed::ToggleFullScreen", "ctrl-cmd-f": "zed::ToggleFullScreen", "ctrl-cmd-z": "edit_prediction::RateCompletions", - "ctrl-cmd-i": "edit_prediction::ToggleMenu" + "ctrl-cmd-i": "edit_prediction::ToggleMenu", + "ctrl-cmd-l": "lsp_tool::ToggleMenu" } }, { diff --git a/assets/settings/default.json b/assets/settings/default.json index 
858055fbe63d7926c6826158f8f7f7676d7fdc46..1b9a19615d4d705de2f8662863f9222cfdd51cf3 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1720,6 +1720,11 @@ // } // } }, + // Common language server settings. + "global_lsp_settings": { + // Whether to show the LSP servers button in the status bar. + "button": true + }, // Jupyter settings "jupyter": { "enabled": true diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml index 778cf472df3f7c4234065232ee4c4a023e3ab31f..3a80f012f9fb0e5b056a7b2f8763a2019dfcdf2b 100644 --- a/crates/activity_indicator/Cargo.toml +++ b/crates/activity_indicator/Cargo.toml @@ -21,6 +21,7 @@ futures.workspace = true gpui.workspace = true language.workspace = true project.workspace = true +proto.workspace = true smallvec.workspace = true ui.workspace = true util.workspace = true diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 24762cb7270de14bf4b2e5f42e6209fdd52bc5e2..b3287e8222ccdd1f4f4ca92ff4fd4559b9fcc3f6 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -80,10 +80,13 @@ impl ActivityIndicator { let this = cx.new(|cx| { let mut status_events = languages.language_server_binary_statuses(); cx.spawn(async move |this, cx| { - while let Some((name, status)) = status_events.next().await { + while let Some((name, binary_status)) = status_events.next().await { this.update(cx, |this: &mut ActivityIndicator, cx| { this.statuses.retain(|s| s.name != name); - this.statuses.push(ServerStatus { name, status }); + this.statuses.push(ServerStatus { + name, + status: LanguageServerStatusUpdate::Binary(binary_status), + }); cx.notify(); })?; } @@ -112,8 +115,76 @@ impl ActivityIndicator { cx.subscribe( &project.read(cx).lsp_store(), - |_, _, event, cx| match event { - LspStoreEvent::LanguageServerUpdate { .. } => cx.notify(), + |activity_indicator, _, event, cx| match event { + LspStoreEvent::LanguageServerUpdate { name, message, .. 
} => { + if let proto::update_language_server::Variant::StatusUpdate(status_update) = + message + { + let Some(name) = name.clone() else { + return; + }; + let status = match &status_update.status { + Some(proto::status_update::Status::Binary(binary_status)) => { + if let Some(binary_status) = + proto::ServerBinaryStatus::from_i32(*binary_status) + { + let binary_status = match binary_status { + proto::ServerBinaryStatus::None => BinaryStatus::None, + proto::ServerBinaryStatus::CheckingForUpdate => { + BinaryStatus::CheckingForUpdate + } + proto::ServerBinaryStatus::Downloading => { + BinaryStatus::Downloading + } + proto::ServerBinaryStatus::Starting => { + BinaryStatus::Starting + } + proto::ServerBinaryStatus::Stopping => { + BinaryStatus::Stopping + } + proto::ServerBinaryStatus::Stopped => { + BinaryStatus::Stopped + } + proto::ServerBinaryStatus::Failed => { + let Some(error) = status_update.message.clone() + else { + return; + }; + BinaryStatus::Failed { error } + } + }; + LanguageServerStatusUpdate::Binary(binary_status) + } else { + return; + } + } + Some(proto::status_update::Status::Health(health_status)) => { + if let Some(health) = + proto::ServerHealth::from_i32(*health_status) + { + let health = match health { + proto::ServerHealth::Ok => ServerHealth::Ok, + proto::ServerHealth::Warning => ServerHealth::Warning, + proto::ServerHealth::Error => ServerHealth::Error, + }; + LanguageServerStatusUpdate::Health( + health, + status_update.message.clone().map(SharedString::from), + ) + } else { + return; + } + } + None => return, + }; + + activity_indicator.statuses.retain(|s| s.name != name); + activity_indicator + .statuses + .push(ServerStatus { name, status }); + } + cx.notify() + } _ => {} }, ) @@ -228,9 +299,23 @@ impl ActivityIndicator { _: &mut Window, cx: &mut Context, ) { - if let Some(updater) = &self.auto_updater { - updater.update(cx, |updater, cx| updater.dismiss_error(cx)); + let error_dismissed = if let Some(updater) = &self.auto_updater { + updater.update(cx, |updater, cx| updater.dismiss_error(cx)) + } else { + false + }; + if error_dismissed { + return; } + + self.project.update(cx, |project, cx| { + if project.last_formatting_failure(cx).is_some() { + project.reset_last_formatting_failure(cx); + true + } else { + false + } + }); } fn pending_language_server_work<'a>( @@ -399,6 +484,12 @@ impl ActivityIndicator { let mut servers_to_clear_statuses = HashSet::::default(); for status in &self.statuses { match &status.status { + LanguageServerStatusUpdate::Binary( + BinaryStatus::Starting | BinaryStatus::Stopping, + ) => {} + LanguageServerStatusUpdate::Binary(BinaryStatus::Stopped) => { + servers_to_clear_statuses.insert(status.name.clone()); + } LanguageServerStatusUpdate::Binary(BinaryStatus::CheckingForUpdate) => { checking_for_update.push(status.name.clone()); } diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 6b84ca998ec4b8225a3304b267385e41c88f2def..22daab491c499bf568f155cd6e049868c58192ce 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -2008,6 +2008,7 @@ async fn join_project( session.connection_id, proto::UpdateLanguageServer { project_id: project_id.to_proto(), + server_name: Some(language_server.name.clone()), language_server_id: language_server.id, variant: Some( proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated( diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ffb08e4290e6eb359e011e4a1e817abb247be33a..ea30cc6fab94d7a80e8855efd3832b21a945b6c1 100644 --- 
a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -16164,7 +16164,7 @@ impl Editor { }) } - fn restart_language_server( + pub fn restart_language_server( &mut self, _: &RestartLanguageServer, _: &mut Window, @@ -16175,6 +16175,7 @@ impl Editor { project.update(cx, |project, cx| { project.restart_language_servers_for_buffers( multi_buffer.all_buffers().into_iter().collect(), + HashSet::default(), cx, ); }); @@ -16182,7 +16183,7 @@ impl Editor { } } - fn stop_language_server( + pub fn stop_language_server( &mut self, _: &StopLanguageServer, _: &mut Window, @@ -16193,6 +16194,7 @@ impl Editor { project.update(cx, |project, cx| { project.stop_language_servers_for_buffers( multi_buffer.all_buffers().into_iter().collect(), + HashSet::default(), cx, ); cx.emit(project::Event::RefreshInlayHints); diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index cea3f0dbc3262211d2f28941e3daefa69da15d73..cfe97f167553dfb2dba880bfdfd9eb82399b5492 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -4,13 +4,13 @@ use crate::{ GrammarManifestEntry, RELOAD_DEBOUNCE_DURATION, SchemaVersion, }; use async_compression::futures::bufread::GzipEncoder; -use collections::BTreeMap; +use collections::{BTreeMap, HashSet}; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs, RealFs}; use futures::{AsyncReadExt, StreamExt, io::BufReader}; use gpui::{AppContext as _, SemanticVersion, TestAppContext}; use http_client::{FakeHttpClient, Response}; -use language::{BinaryStatus, LanguageMatcher, LanguageRegistry, LanguageServerStatusUpdate}; +use language::{BinaryStatus, LanguageMatcher, LanguageRegistry}; use lsp::LanguageServerName; use node_runtime::NodeRuntime; use parking_lot::Mutex; @@ -720,20 +720,22 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { status_updates.next().await.unwrap(), status_updates.next().await.unwrap(), status_updates.next().await.unwrap(), + status_updates.next().await.unwrap(), ], [ ( LanguageServerName::new_static("gleam"), - LanguageServerStatusUpdate::Binary(BinaryStatus::CheckingForUpdate) + BinaryStatus::Starting ), ( LanguageServerName::new_static("gleam"), - LanguageServerStatusUpdate::Binary(BinaryStatus::Downloading) + BinaryStatus::CheckingForUpdate ), ( LanguageServerName::new_static("gleam"), - LanguageServerStatusUpdate::Binary(BinaryStatus::None) - ) + BinaryStatus::Downloading + ), + (LanguageServerName::new_static("gleam"), BinaryStatus::None) ] ); @@ -794,7 +796,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { // Start a new instance of the language server. project.update(cx, |project, cx| { - project.restart_language_servers_for_buffers(vec![buffer.clone()], cx) + project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx) }); cx.executor().run_until_parked(); @@ -816,7 +818,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { cx.executor().run_until_parked(); project.update(cx, |project, cx| { - project.restart_language_servers_for_buffers(vec![buffer.clone()], cx) + project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx) }); // The extension re-fetches the latest version of the language server. 
diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index a4b77eff7487e62262d6f4e71c1bb7cc792610eb..635876dace889bde4f461a9feee9c8df4d1c24cc 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -413,10 +413,6 @@ impl PickerDelegate for BranchListDelegate { cx.emit(DismissEvent); } - fn render_header(&self, _: &mut Window, _cx: &mut Context>) -> Option { - None - } - fn render_match( &self, ix: usize, diff --git a/crates/git_ui/src/repository_selector.rs b/crates/git_ui/src/repository_selector.rs index 322e623e60ecbce91c86eab95fb740e7621eb1b0..b5865e9a8578e24dffb129eb373b718219344e1c 100644 --- a/crates/git_ui/src/repository_selector.rs +++ b/crates/git_ui/src/repository_selector.rs @@ -1,6 +1,4 @@ -use gpui::{ - AnyElement, App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity, -}; +use gpui::{App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity}; use itertools::Itertools; use picker::{Picker, PickerDelegate}; use project::{Project, git_store::Repository}; @@ -207,15 +205,6 @@ impl PickerDelegate for RepositorySelectorDelegate { .ok(); } - fn render_header( - &self, - _window: &mut Window, - _cx: &mut Context>, - ) -> Option { - // TODO: Implement header rendering if needed - None - } - fn render_match( &self, ix: usize, diff --git a/crates/language/src/language_registry.rs b/crates/language/src/language_registry.rs index c157cd9e73a0bb2f208672d391e98e2445317e5c..b2bb684e1bb10d6edc72a41d3006d114a4b5f371 100644 --- a/crates/language/src/language_registry.rs +++ b/crates/language/src/language_registry.rs @@ -157,6 +157,9 @@ pub enum BinaryStatus { None, CheckingForUpdate, Downloading, + Starting, + Stopping, + Stopped, Failed { error: String }, } @@ -248,7 +251,7 @@ pub struct LanguageQueries { #[derive(Clone, Default)] struct ServerStatusSender { - txs: Arc>>>, + txs: Arc>>>, } pub struct LoadedLanguage { @@ -1085,11 +1088,7 @@ impl LanguageRegistry { self.state.read().all_lsp_adapters.get(name).cloned() } - pub fn update_lsp_status( - &self, - server_name: LanguageServerName, - status: LanguageServerStatusUpdate, - ) { + pub fn update_lsp_binary_status(&self, server_name: LanguageServerName, status: BinaryStatus) { self.lsp_binary_status_tx.send(server_name, status); } @@ -1145,7 +1144,7 @@ impl LanguageRegistry { pub fn language_server_binary_statuses( &self, - ) -> mpsc::UnboundedReceiver<(LanguageServerName, LanguageServerStatusUpdate)> { + ) -> mpsc::UnboundedReceiver<(LanguageServerName, BinaryStatus)> { self.lsp_binary_status_tx.subscribe() } @@ -1260,15 +1259,13 @@ impl LanguageRegistryState { } impl ServerStatusSender { - fn subscribe( - &self, - ) -> mpsc::UnboundedReceiver<(LanguageServerName, LanguageServerStatusUpdate)> { + fn subscribe(&self) -> mpsc::UnboundedReceiver<(LanguageServerName, BinaryStatus)> { let (tx, rx) = mpsc::unbounded(); self.txs.lock().push(tx); rx } - fn send(&self, name: LanguageServerName, status: LanguageServerStatusUpdate) { + fn send(&self, name: LanguageServerName, status: BinaryStatus) { let mut txs = self.txs.lock(); txs.retain(|tx| tx.unbounded_send((name.clone(), status.clone())).is_ok()); } diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index a32292daa3feac352754ee1507bda403c438aba8..d2eabf0a3e36669f1dfb34af0c5da8077c6be87e 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ 
b/crates/language_extension/src/extension_lsp_adapter.rs @@ -12,8 +12,8 @@ use fs::Fs; use futures::{Future, FutureExt}; use gpui::AsyncApp; use language::{ - BinaryStatus, CodeLabel, HighlightId, Language, LanguageName, LanguageServerStatusUpdate, - LanguageToolchainStore, LspAdapter, LspAdapterDelegate, + BinaryStatus, CodeLabel, HighlightId, Language, LanguageName, LanguageToolchainStore, + LspAdapter, LspAdapterDelegate, }; use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName}; use serde::Serialize; @@ -82,10 +82,8 @@ impl ExtensionLanguageServerProxy for LanguageServerRegistryProxy { language_server_id: LanguageServerName, status: BinaryStatus, ) { - self.language_registry.update_lsp_status( - language_server_id, - LanguageServerStatusUpdate::Binary(status), - ); + self.language_registry + .update_lsp_binary_status(language_server_id, status); } } diff --git a/crates/language_tools/Cargo.toml b/crates/language_tools/Cargo.toml index cb07b46215d1bd207c91fd505f5042dbcb4d0463..3a0f487f7a17ddc3a43550a998590c5aa937a19a 100644 --- a/crates/language_tools/Cargo.toml +++ b/crates/language_tools/Cargo.toml @@ -14,6 +14,7 @@ doctest = false [dependencies] anyhow.workspace = true +client.workspace = true collections.workspace = true copilot.workspace = true editor.workspace = true @@ -22,18 +23,19 @@ gpui.workspace = true itertools.workspace = true language.workspace = true lsp.workspace = true +picker.workspace = true project.workspace = true serde_json.workspace = true settings.workspace = true theme.workspace = true tree-sitter.workspace = true ui.workspace = true +util.workspace = true workspace.workspace = true zed_actions.workspace = true workspace-hack.workspace = true [dev-dependencies] -client = { workspace = true, features = ["test-support"] } editor = { workspace = true, features = ["test-support"] } release_channel.workspace = true gpui = { workspace = true, features = ["test-support"] } diff --git a/crates/language_tools/src/language_tools.rs b/crates/language_tools/src/language_tools.rs index 6b18103c245f4bb70fbe9e85f91ecaae3c30f96b..cbf5756875f723b52fabbfe877c32265dd6f0aef 100644 --- a/crates/language_tools/src/language_tools.rs +++ b/crates/language_tools/src/language_tools.rs @@ -1,17 +1,54 @@ mod key_context_view; mod lsp_log; +pub mod lsp_tool; mod syntax_tree_view; #[cfg(test)] mod lsp_log_tests; -use gpui::App; +use gpui::{App, AppContext, Entity}; pub use lsp_log::{LogStore, LspLogToolbarItemView, LspLogView}; pub use syntax_tree_view::{SyntaxTreeToolbarItemView, SyntaxTreeView}; +use ui::{Context, Window}; +use workspace::{Item, ItemHandle, SplitDirection, Workspace}; pub fn init(cx: &mut App) { lsp_log::init(cx); syntax_tree_view::init(cx); key_context_view::init(cx); } + +fn get_or_create_tool( + workspace: &mut Workspace, + destination: SplitDirection, + window: &mut Window, + cx: &mut Context, + new_tool: impl FnOnce(&mut Window, &mut Context) -> T, +) -> Entity +where + T: Item, +{ + if let Some(item) = workspace.item_of_type::(cx) { + return item; + } + + let new_tool = cx.new(|cx| new_tool(window, cx)); + match workspace.find_pane_in_direction(destination, cx) { + Some(right_pane) => { + workspace.add_item( + right_pane, + new_tool.boxed_clone(), + None, + true, + true, + window, + cx, + ); + } + None => { + workspace.split_item(destination, new_tool.boxed_clone(), window, cx); + } + } + new_tool +} diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index 
bddfbc5c71643cdb4bd9c65de74e38a59b4f24f9..de474c1d9f3a272407c72be52b6b2e2dd4dbb0db 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -3,14 +3,14 @@ use copilot::Copilot; use editor::{Editor, EditorEvent, actions::MoveToEnd, scroll::Autoscroll}; use futures::{StreamExt, channel::mpsc}; use gpui::{ - AnyView, App, Context, Corner, Entity, EventEmitter, FocusHandle, Focusable, IntoElement, - ParentElement, Render, Styled, Subscription, WeakEntity, Window, actions, div, + AnyView, App, Context, Corner, Entity, EventEmitter, FocusHandle, Focusable, Global, + IntoElement, ParentElement, Render, Styled, Subscription, WeakEntity, Window, actions, div, }; use itertools::Itertools; use language::{LanguageServerId, language_settings::SoftWrap}; use lsp::{ - IoKind, LanguageServer, LanguageServerName, MessageType, SetTraceParams, TraceValue, - notification::SetTrace, + IoKind, LanguageServer, LanguageServerName, LanguageServerSelector, MessageType, + SetTraceParams, TraceValue, notification::SetTrace, }; use project::{Project, WorktreeId, search::SearchQuery}; use std::{any::TypeId, borrow::Cow, sync::Arc}; @@ -21,6 +21,8 @@ use workspace::{ searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle}, }; +use crate::get_or_create_tool; + const SEND_LINE: &str = "\n// Send:"; const RECEIVE_LINE: &str = "\n// Receive:"; const MAX_STORED_LOG_ENTRIES: usize = 2000; @@ -44,7 +46,7 @@ trait Message: AsRef { } } -struct LogMessage { +pub(super) struct LogMessage { message: String, typ: MessageType, } @@ -71,7 +73,7 @@ impl Message for LogMessage { } } -struct TraceMessage { +pub(super) struct TraceMessage { message: String, } @@ -99,7 +101,7 @@ impl Message for RpcMessage { type Level = (); } -struct LanguageServerState { +pub(super) struct LanguageServerState { name: Option, worktree_id: Option, kind: LanguageServerKind, @@ -204,8 +206,13 @@ pub(crate) struct LogMenuItem { actions!(dev, [OpenLanguageServerLogs]); +pub(super) struct GlobalLogStore(pub WeakEntity); + +impl Global for GlobalLogStore {} + pub fn init(cx: &mut App) { let log_store = cx.new(LogStore::new); + cx.set_global(GlobalLogStore(log_store.downgrade())); cx.observe_new(move |workspace: &mut Workspace, _, cx| { let project = workspace.project(); @@ -219,13 +226,14 @@ pub fn init(cx: &mut App) { workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, window, cx| { let project = workspace.project().read(cx); if project.is_local() || project.is_via_ssh() { - workspace.split_item( + let project = workspace.project().clone(); + let log_store = log_store.clone(); + get_or_create_tool( + workspace, SplitDirection::Right, - Box::new(cx.new(|cx| { - LspLogView::new(workspace.project().clone(), log_store.clone(), window, cx) - })), window, cx, + move |window, cx| LspLogView::new(project, log_store, window, cx), ); } }); @@ -354,7 +362,7 @@ impl LogStore { ); } - fn get_language_server_state( + pub(super) fn get_language_server_state( &mut self, id: LanguageServerId, ) -> Option<&mut LanguageServerState> { @@ -480,11 +488,14 @@ impl LogStore { cx.notify(); } - fn server_logs(&self, server_id: LanguageServerId) -> Option<&VecDeque> { + pub(super) fn server_logs(&self, server_id: LanguageServerId) -> Option<&VecDeque> { Some(&self.language_servers.get(&server_id)?.log_messages) } - fn server_trace(&self, server_id: LanguageServerId) -> Option<&VecDeque> { + pub(super) fn server_trace( + &self, + server_id: LanguageServerId, + ) -> Option<&VecDeque> { 
Some(&self.language_servers.get(&server_id)?.trace_messages) } @@ -529,6 +540,110 @@ impl LogStore { Some(()) } + pub fn has_server_logs(&self, server: &LanguageServerSelector) -> bool { + match server { + LanguageServerSelector::Id(id) => self.language_servers.contains_key(id), + LanguageServerSelector::Name(name) => self + .language_servers + .iter() + .any(|(_, state)| state.name.as_ref() == Some(name)), + } + } + + pub fn open_server_log( + &mut self, + workspace: WeakEntity, + server: LanguageServerSelector, + window: &mut Window, + cx: &mut Context, + ) { + cx.spawn_in(window, async move |log_store, cx| { + let Some(log_store) = log_store.upgrade() else { + return; + }; + workspace + .update_in(cx, |workspace, window, cx| { + let project = workspace.project().clone(); + let tool_log_store = log_store.clone(); + let log_view = get_or_create_tool( + workspace, + SplitDirection::Right, + window, + cx, + move |window, cx| LspLogView::new(project, tool_log_store, window, cx), + ); + log_view.update(cx, |log_view, cx| { + let server_id = match server { + LanguageServerSelector::Id(id) => Some(id), + LanguageServerSelector::Name(name) => { + log_store.read(cx).language_servers.iter().find_map( + |(id, state)| { + if state.name.as_ref() == Some(&name) { + Some(*id) + } else { + None + } + }, + ) + } + }; + if let Some(server_id) = server_id { + log_view.show_logs_for_server(server_id, window, cx); + } + }); + }) + .ok(); + }) + .detach(); + } + + pub fn open_server_trace( + &mut self, + workspace: WeakEntity, + server: LanguageServerSelector, + window: &mut Window, + cx: &mut Context, + ) { + cx.spawn_in(window, async move |log_store, cx| { + let Some(log_store) = log_store.upgrade() else { + return; + }; + workspace + .update_in(cx, |workspace, window, cx| { + let project = workspace.project().clone(); + let tool_log_store = log_store.clone(); + let log_view = get_or_create_tool( + workspace, + SplitDirection::Right, + window, + cx, + move |window, cx| LspLogView::new(project, tool_log_store, window, cx), + ); + log_view.update(cx, |log_view, cx| { + let server_id = match server { + LanguageServerSelector::Id(id) => Some(id), + LanguageServerSelector::Name(name) => { + log_store.read(cx).language_servers.iter().find_map( + |(id, state)| { + if state.name.as_ref() == Some(&name) { + Some(*id) + } else { + None + } + }, + ) + } + }; + if let Some(server_id) = server_id { + log_view.show_rpc_trace_for_server(server_id, window, cx); + } + }); + }) + .ok(); + }) + .detach(); + } + fn on_io( &mut self, language_server_id: LanguageServerId, @@ -856,7 +971,7 @@ impl LspLogView { self.editor_subscriptions = editor_subscriptions; cx.notify(); } - window.focus(&self.focus_handle); + self.editor.read(cx).focus_handle(cx).focus(window); } fn update_log_level( @@ -882,7 +997,7 @@ impl LspLogView { cx.notify(); } - window.focus(&self.focus_handle); + self.editor.read(cx).focus_handle(cx).focus(window); } fn show_trace_for_server( @@ -904,7 +1019,7 @@ impl LspLogView { self.editor_subscriptions = editor_subscriptions; cx.notify(); } - window.focus(&self.focus_handle); + self.editor.read(cx).focus_handle(cx).focus(window); } fn show_rpc_trace_for_server( @@ -947,7 +1062,7 @@ impl LspLogView { cx.notify(); } - window.focus(&self.focus_handle); + self.editor.read(cx).focus_handle(cx).focus(window); } fn toggle_rpc_trace_for_server( @@ -1011,7 +1126,7 @@ impl LspLogView { self.editor = editor; self.editor_subscriptions = editor_subscriptions; cx.notify(); - window.focus(&self.focus_handle); + 
self.editor.read(cx).focus_handle(cx).focus(window); } } diff --git a/crates/language_tools/src/lsp_tool.rs b/crates/language_tools/src/lsp_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..fc1efc7794eb33986cb26ecbd0941075111da700 --- /dev/null +++ b/crates/language_tools/src/lsp_tool.rs @@ -0,0 +1,917 @@ +use std::{collections::hash_map, path::PathBuf, sync::Arc, time::Duration}; + +use client::proto; +use collections::{HashMap, HashSet}; +use editor::{Editor, EditorEvent}; +use gpui::{Corner, DismissEvent, Entity, Focusable as _, Subscription, Task, WeakEntity, actions}; +use language::{BinaryStatus, BufferId, LocalFile, ServerHealth}; +use lsp::{LanguageServerId, LanguageServerName, LanguageServerSelector}; +use picker::{Picker, PickerDelegate, popover_menu::PickerPopoverMenu}; +use project::{LspStore, LspStoreEvent, project_settings::ProjectSettings}; +use settings::{Settings as _, SettingsStore}; +use ui::{Context, IconButtonShape, Indicator, Tooltip, Window, prelude::*}; + +use workspace::{StatusItemView, Workspace}; + +use crate::lsp_log::GlobalLogStore; + +actions!(lsp_tool, [ToggleMenu]); + +pub struct LspTool { + state: Entity, + lsp_picker: Option>>, + _subscriptions: Vec, +} + +struct PickerState { + workspace: WeakEntity, + lsp_store: WeakEntity, + active_editor: Option, + language_servers: LanguageServers, +} + +#[derive(Debug)] +struct LspPickerDelegate { + state: Entity, + selected_index: usize, + items: Vec, + other_servers_start_index: Option, +} + +struct ActiveEditor { + editor: WeakEntity, + _editor_subscription: Subscription, + editor_buffers: HashSet, +} + +#[derive(Debug, Default, Clone)] +struct LanguageServers { + health_statuses: HashMap, + binary_statuses: HashMap, + servers_per_buffer_abs_path: + HashMap>>, +} + +#[derive(Debug, Clone)] +struct LanguageServerHealthStatus { + name: LanguageServerName, + health: Option<(Option, ServerHealth)>, +} + +#[derive(Debug, Clone)] +struct LanguageServerBinaryStatus { + status: BinaryStatus, + message: Option, +} + +impl LanguageServerHealthStatus { + fn health(&self) -> Option { + self.health.as_ref().map(|(_, health)| *health) + } + + fn message(&self) -> Option { + self.health + .as_ref() + .and_then(|(message, _)| message.clone()) + } +} + +impl LspPickerDelegate { + fn regenerate_items(&mut self, cx: &mut Context>) { + self.state.update(cx, |state, cx| { + let editor_buffers = state + .active_editor + .as_ref() + .map(|active_editor| active_editor.editor_buffers.clone()) + .unwrap_or_default(); + let editor_buffer_paths = editor_buffers + .iter() + .filter_map(|buffer_id| { + let buffer_path = state + .lsp_store + .update(cx, |lsp_store, cx| { + Some( + project::File::from_dyn( + lsp_store + .buffer_store() + .read(cx) + .get(*buffer_id)? + .read(cx) + .file(), + )? 
+ .abs_path(cx), + ) + }) + .ok()??; + Some(buffer_path) + }) + .collect::>(); + + let mut servers_with_health_checks = HashSet::default(); + let mut server_ids_with_health_checks = HashSet::default(); + let mut buffer_servers = + Vec::with_capacity(state.language_servers.health_statuses.len()); + let mut other_servers = + Vec::with_capacity(state.language_servers.health_statuses.len()); + let buffer_server_ids = editor_buffer_paths + .iter() + .filter_map(|buffer_path| { + state + .language_servers + .servers_per_buffer_abs_path + .get(buffer_path) + }) + .flatten() + .fold(HashMap::default(), |mut acc, (server_id, name)| { + match acc.entry(*server_id) { + hash_map::Entry::Occupied(mut o) => { + let old_name: &mut Option<&LanguageServerName> = o.get_mut(); + if old_name.is_none() { + *old_name = name.as_ref(); + } + } + hash_map::Entry::Vacant(v) => { + v.insert(name.as_ref()); + } + } + acc + }); + for (server_id, server_state) in &state.language_servers.health_statuses { + let binary_status = state + .language_servers + .binary_statuses + .get(&server_state.name); + servers_with_health_checks.insert(&server_state.name); + server_ids_with_health_checks.insert(*server_id); + if buffer_server_ids.contains_key(server_id) { + buffer_servers.push(ServerData::WithHealthCheck( + *server_id, + server_state, + binary_status, + )); + } else { + other_servers.push(ServerData::WithHealthCheck( + *server_id, + server_state, + binary_status, + )); + } + } + + for (server_name, status) in state + .language_servers + .binary_statuses + .iter() + .filter(|(name, _)| !servers_with_health_checks.contains(name)) + { + let has_matching_server = state + .language_servers + .servers_per_buffer_abs_path + .iter() + .filter(|(path, _)| editor_buffer_paths.contains(path)) + .flat_map(|(_, server_associations)| server_associations.iter()) + .any(|(_, name)| name.as_ref() == Some(server_name)); + if has_matching_server { + buffer_servers.push(ServerData::WithBinaryStatus(server_name, status)); + } else { + other_servers.push(ServerData::WithBinaryStatus(server_name, status)); + } + } + + buffer_servers.sort_by_key(|data| data.name().clone()); + other_servers.sort_by_key(|data| data.name().clone()); + let mut other_servers_start_index = None; + let mut new_lsp_items = + Vec::with_capacity(buffer_servers.len() + other_servers.len() + 2); + if !buffer_servers.is_empty() { + new_lsp_items.push(LspItem::Header(SharedString::new("Current Buffer"))); + new_lsp_items.extend(buffer_servers.into_iter().map(ServerData::into_lsp_item)); + } + if !other_servers.is_empty() { + other_servers_start_index = Some(new_lsp_items.len()); + new_lsp_items.push(LspItem::Header(SharedString::new("Other Active Servers"))); + new_lsp_items.extend(other_servers.into_iter().map(ServerData::into_lsp_item)); + } + + self.items = new_lsp_items; + self.other_servers_start_index = other_servers_start_index; + }); + } +} + +impl LanguageServers { + fn update_binary_status( + &mut self, + binary_status: BinaryStatus, + message: Option<&str>, + name: LanguageServerName, + ) { + let binary_status_message = message.map(SharedString::new); + if matches!( + binary_status, + BinaryStatus::Stopped | BinaryStatus::Failed { .. 
} + ) { + self.health_statuses.retain(|_, server| server.name != name); + } + self.binary_statuses.insert( + name, + LanguageServerBinaryStatus { + status: binary_status, + message: binary_status_message, + }, + ); + } + + fn update_server_health( + &mut self, + id: LanguageServerId, + health: ServerHealth, + message: Option<&str>, + name: Option, + ) { + if let Some(state) = self.health_statuses.get_mut(&id) { + state.health = Some((message.map(SharedString::new), health)); + if let Some(name) = name { + state.name = name; + } + } else if let Some(name) = name { + self.health_statuses.insert( + id, + LanguageServerHealthStatus { + health: Some((message.map(SharedString::new), health)), + name, + }, + ); + } + } +} + +#[derive(Debug)] +enum ServerData<'a> { + WithHealthCheck( + LanguageServerId, + &'a LanguageServerHealthStatus, + Option<&'a LanguageServerBinaryStatus>, + ), + WithBinaryStatus(&'a LanguageServerName, &'a LanguageServerBinaryStatus), +} + +#[derive(Debug)] +enum LspItem { + WithHealthCheck( + LanguageServerId, + LanguageServerHealthStatus, + Option, + ), + WithBinaryStatus(LanguageServerName, LanguageServerBinaryStatus), + Header(SharedString), +} + +impl ServerData<'_> { + fn name(&self) -> &LanguageServerName { + match self { + Self::WithHealthCheck(_, state, _) => &state.name, + Self::WithBinaryStatus(name, ..) => name, + } + } + + fn into_lsp_item(self) -> LspItem { + match self { + Self::WithHealthCheck(id, name, status) => { + LspItem::WithHealthCheck(id, name.clone(), status.cloned()) + } + Self::WithBinaryStatus(name, status) => { + LspItem::WithBinaryStatus(name.clone(), status.clone()) + } + } + } +} + +impl PickerDelegate for LspPickerDelegate { + type ListItem = AnyElement; + + fn match_count(&self) -> usize { + self.items.len() + } + + fn selected_index(&self) -> usize { + self.selected_index + } + + fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context>) { + self.selected_index = ix; + cx.notify(); + } + + fn update_matches( + &mut self, + _: String, + _: &mut Window, + cx: &mut Context>, + ) -> Task<()> { + cx.spawn(async move |lsp_picker, cx| { + cx.background_executor() + .timer(Duration::from_millis(30)) + .await; + lsp_picker + .update(cx, |lsp_picker, cx| { + lsp_picker.delegate.regenerate_items(cx); + }) + .ok(); + }) + } + + fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc { + Arc::default() + } + + fn confirm(&mut self, _: bool, _: &mut Window, _: &mut Context>) {} + + fn dismissed(&mut self, _: &mut Window, cx: &mut Context>) { + cx.emit(DismissEvent); + } + + fn render_match( + &self, + ix: usize, + _: bool, + _: &mut Window, + cx: &mut Context>, + ) -> Option { + let is_other_server = self + .other_servers_start_index + .map_or(false, |start| ix >= start); + let server_binary_status; + let server_health; + let server_message; + let server_id; + let server_name; + match self.items.get(ix)? 
{ + LspItem::WithHealthCheck( + language_server_id, + language_server_health_status, + language_server_binary_status, + ) => { + server_binary_status = language_server_binary_status.as_ref(); + server_health = language_server_health_status.health(); + server_message = language_server_health_status.message(); + server_id = Some(*language_server_id); + server_name = language_server_health_status.name.clone(); + } + LspItem::WithBinaryStatus(language_server_name, language_server_binary_status) => { + server_binary_status = Some(language_server_binary_status); + server_health = None; + server_message = language_server_binary_status.message.clone(); + server_id = None; + server_name = language_server_name.clone(); + } + LspItem::Header(header) => { + return Some( + h_flex() + .justify_center() + .child(Label::new(header.clone())) + .into_any_element(), + ); + } + }; + + let workspace = self.state.read(cx).workspace.clone(); + let lsp_logs = cx.global::().0.upgrade()?; + let lsp_store = self.state.read(cx).lsp_store.upgrade()?; + let server_selector = server_id + .map(LanguageServerSelector::Id) + .unwrap_or_else(|| LanguageServerSelector::Name(server_name.clone())); + let can_stop = server_binary_status.is_none_or(|status| { + matches!(status.status, BinaryStatus::None | BinaryStatus::Starting) + }); + // TODO currently, Zed remote does not work well with the LSP logs + // https://github.com/zed-industries/zed/issues/28557 + let has_logs = lsp_store.read(cx).as_local().is_some() + && lsp_logs.read(cx).has_server_logs(&server_selector); + let status_color = server_binary_status + .and_then(|binary_status| match binary_status.status { + BinaryStatus::None => None, + BinaryStatus::CheckingForUpdate + | BinaryStatus::Downloading + | BinaryStatus::Starting => Some(Color::Modified), + BinaryStatus::Stopping => Some(Color::Disabled), + BinaryStatus::Stopped => Some(Color::Disabled), + BinaryStatus::Failed { .. } => Some(Color::Error), + }) + .or_else(|| { + Some(match server_health? 
{ + ServerHealth::Ok => Color::Success, + ServerHealth::Warning => Color::Warning, + ServerHealth::Error => Color::Error, + }) + }) + .unwrap_or(Color::Success); + + Some( + h_flex() + .w_full() + .justify_between() + .gap_2() + .child( + h_flex() + .id("server-status-indicator") + .gap_2() + .child(Indicator::dot().color(status_color)) + .child(Label::new(server_name.0.clone())) + .when_some(server_message.clone(), |div, server_message| { + div.tooltip(move |_, cx| Tooltip::simple(server_message.clone(), cx)) + }), + ) + .child( + h_flex() + .gap_1() + .when(has_logs, |div| { + div.child( + IconButton::new("debug-language-server", IconName::MessageBubbles) + .icon_size(IconSize::XSmall) + .tooltip(|_, cx| Tooltip::simple("Debug Language Server", cx)) + .on_click({ + let workspace = workspace.clone(); + let lsp_logs = lsp_logs.downgrade(); + let server_selector = server_selector.clone(); + move |_, window, cx| { + lsp_logs + .update(cx, |lsp_logs, cx| { + lsp_logs.open_server_trace( + workspace.clone(), + server_selector.clone(), + window, + cx, + ); + }) + .ok(); + } + }), + ) + }) + .when(can_stop, |div| { + div.child( + IconButton::new("stop-server", IconName::Stop) + .icon_size(IconSize::Small) + .tooltip(|_, cx| Tooltip::simple("Stop server", cx)) + .on_click({ + let lsp_store = lsp_store.downgrade(); + let server_selector = server_selector.clone(); + move |_, _, cx| { + lsp_store + .update(cx, |lsp_store, cx| { + lsp_store.stop_language_servers_for_buffers( + Vec::new(), + HashSet::from_iter([ + server_selector.clone() + ]), + cx, + ); + }) + .ok(); + } + }), + ) + }) + .child( + IconButton::new("restart-server", IconName::Rerun) + .icon_size(IconSize::XSmall) + .tooltip(|_, cx| Tooltip::simple("Restart server", cx)) + .on_click({ + let state = self.state.clone(); + let workspace = workspace.clone(); + let lsp_store = lsp_store.downgrade(); + let editor_buffers = state + .read(cx) + .active_editor + .as_ref() + .map(|active_editor| active_editor.editor_buffers.clone()) + .unwrap_or_default(); + let server_selector = server_selector.clone(); + move |_, _, cx| { + if let Some(workspace) = workspace.upgrade() { + let project = workspace.read(cx).project().clone(); + let buffer_store = + project.read(cx).buffer_store().clone(); + let buffers = if is_other_server { + let worktree_store = + project.read(cx).worktree_store(); + state + .read(cx) + .language_servers + .servers_per_buffer_abs_path + .iter() + .filter_map(|(abs_path, servers)| { + if servers.values().any(|server| { + server.as_ref() == Some(&server_name) + }) { + worktree_store + .read(cx) + .find_worktree(abs_path, cx) + } else { + None + } + }) + .filter_map(|(worktree, relative_path)| { + let entry = worktree + .read(cx) + .entry_for_path(&relative_path)?; + project + .read(cx) + .path_for_entry(entry.id, cx) + }) + .filter_map(|project_path| { + buffer_store + .read(cx) + .get_by_path(&project_path) + }) + .collect::>() + } else { + editor_buffers + .iter() + .flat_map(|buffer_id| { + buffer_store.read(cx).get(*buffer_id) + }) + .collect::>() + }; + if !buffers.is_empty() { + lsp_store + .update(cx, |lsp_store, cx| { + lsp_store + .restart_language_servers_for_buffers( + buffers, + HashSet::from_iter([ + server_selector.clone(), + ]), + cx, + ); + }) + .ok(); + } + } + } + }), + ), + ) + .cursor_default() + .into_any_element(), + ) + } + + fn render_editor( + &self, + editor: &Entity, + _: &mut Window, + cx: &mut Context>, + ) -> Div { + div().child(div().track_focus(&editor.focus_handle(cx))) + } + + fn 
render_footer(&self, _: &mut Window, cx: &mut Context>) -> Option { + if self.items.is_empty() { + Some( + h_flex() + .w_full() + .border_color(cx.theme().colors().border_variant) + .child( + Button::new("stop-all-servers", "Stop all servers") + .disabled(true) + .on_click(move |_, _, _| {}) + .full_width(), + ) + .into_any_element(), + ) + } else { + let lsp_store = self.state.read(cx).lsp_store.clone(); + Some( + h_flex() + .w_full() + .border_color(cx.theme().colors().border_variant) + .child( + Button::new("stop-all-servers", "Stop all servers") + .on_click({ + move |_, _, cx| { + lsp_store + .update(cx, |lsp_store, cx| { + lsp_store.stop_all_language_servers(cx); + }) + .ok(); + } + }) + .full_width(), + ) + .into_any_element(), + ) + } + } + + fn separators_after_indices(&self) -> Vec { + if self.items.is_empty() { + Vec::new() + } else { + vec![self.items.len() - 1] + } + } +} + +// TODO kb keyboard story +impl LspTool { + pub fn new(workspace: &Workspace, window: &mut Window, cx: &mut Context) -> Self { + let settings_subscription = + cx.observe_global_in::(window, move |lsp_tool, window, cx| { + if ProjectSettings::get_global(cx).global_lsp_settings.button { + if lsp_tool.lsp_picker.is_none() { + lsp_tool.lsp_picker = + Some(Self::new_lsp_picker(lsp_tool.state.clone(), window, cx)); + cx.notify(); + return; + } + } else if lsp_tool.lsp_picker.take().is_some() { + cx.notify(); + } + }); + + let lsp_store = workspace.project().read(cx).lsp_store(); + let lsp_store_subscription = + cx.subscribe_in(&lsp_store, window, |lsp_tool, _, e, window, cx| { + lsp_tool.on_lsp_store_event(e, window, cx) + }); + + let state = cx.new(|_| PickerState { + workspace: workspace.weak_handle(), + lsp_store: lsp_store.downgrade(), + active_editor: None, + language_servers: LanguageServers::default(), + }); + + Self { + state, + lsp_picker: None, + _subscriptions: vec![settings_subscription, lsp_store_subscription], + } + } + + fn on_lsp_store_event( + &mut self, + e: &LspStoreEvent, + window: &mut Window, + cx: &mut Context, + ) { + let Some(lsp_picker) = self.lsp_picker.clone() else { + return; + }; + let mut updated = false; + + match e { + LspStoreEvent::LanguageServerUpdate { + language_server_id, + name, + message: proto::update_language_server::Variant::StatusUpdate(status_update), + } => match &status_update.status { + Some(proto::status_update::Status::Binary(binary_status)) => { + let Some(name) = name.as_ref() else { + return; + }; + if let Some(binary_status) = proto::ServerBinaryStatus::from_i32(*binary_status) + { + let binary_status = match binary_status { + proto::ServerBinaryStatus::None => BinaryStatus::None, + proto::ServerBinaryStatus::CheckingForUpdate => { + BinaryStatus::CheckingForUpdate + } + proto::ServerBinaryStatus::Downloading => BinaryStatus::Downloading, + proto::ServerBinaryStatus::Starting => BinaryStatus::Starting, + proto::ServerBinaryStatus::Stopping => BinaryStatus::Stopping, + proto::ServerBinaryStatus::Stopped => BinaryStatus::Stopped, + proto::ServerBinaryStatus::Failed => { + let Some(error) = status_update.message.clone() else { + return; + }; + BinaryStatus::Failed { error } + } + }; + self.state.update(cx, |state, _| { + state.language_servers.update_binary_status( + binary_status, + status_update.message.as_deref(), + name.clone(), + ); + }); + updated = true; + }; + } + Some(proto::status_update::Status::Health(health_status)) => { + if let Some(health) = proto::ServerHealth::from_i32(*health_status) { + let health = match health { + proto::ServerHealth::Ok => 
ServerHealth::Ok, + proto::ServerHealth::Warning => ServerHealth::Warning, + proto::ServerHealth::Error => ServerHealth::Error, + }; + self.state.update(cx, |state, _| { + state.language_servers.update_server_health( + *language_server_id, + health, + status_update.message.as_deref(), + name.clone(), + ); + }); + updated = true; + } + } + None => {} + }, + LspStoreEvent::LanguageServerUpdate { + language_server_id, + name, + message: proto::update_language_server::Variant::RegisteredForBuffer(update), + .. + } => { + self.state.update(cx, |state, _| { + state + .language_servers + .servers_per_buffer_abs_path + .entry(PathBuf::from(&update.buffer_abs_path)) + .or_default() + .insert(*language_server_id, name.clone()); + }); + updated = true; + } + _ => {} + }; + + if updated { + lsp_picker.update(cx, |lsp_picker, cx| { + lsp_picker.refresh(window, cx); + }); + } + } + + fn new_lsp_picker( + state: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Entity> { + cx.new(|cx| { + let mut delegate = LspPickerDelegate { + selected_index: 0, + other_servers_start_index: None, + items: Vec::new(), + state, + }; + delegate.regenerate_items(cx); + Picker::list(delegate, window, cx) + }) + } +} + +impl StatusItemView for LspTool { + fn set_active_pane_item( + &mut self, + active_pane_item: Option<&dyn workspace::ItemHandle>, + window: &mut Window, + cx: &mut Context, + ) { + if ProjectSettings::get_global(cx).global_lsp_settings.button { + if let Some(editor) = active_pane_item.and_then(|item| item.downcast::()) { + if Some(&editor) + != self + .state + .read(cx) + .active_editor + .as_ref() + .and_then(|active_editor| active_editor.editor.upgrade()) + .as_ref() + { + let editor_buffers = + HashSet::from_iter(editor.read(cx).buffer().read(cx).excerpt_buffer_ids()); + let _editor_subscription = cx.subscribe_in( + &editor, + window, + |lsp_tool, _, e: &EditorEvent, window, cx| match e { + EditorEvent::ExcerptsAdded { buffer, .. } => { + lsp_tool.state.update(cx, |state, cx| { + if let Some(active_editor) = state.active_editor.as_mut() { + let buffer_id = buffer.read(cx).remote_id(); + if active_editor.editor_buffers.insert(buffer_id) { + if let Some(picker) = &lsp_tool.lsp_picker { + picker.update(cx, |picker, cx| { + picker.refresh(window, cx) + }); + } + } + } + }); + } + EditorEvent::ExcerptsRemoved { + removed_buffer_ids, .. 
+ } => { + lsp_tool.state.update(cx, |state, cx| { + if let Some(active_editor) = state.active_editor.as_mut() { + let mut removed = false; + for id in removed_buffer_ids { + active_editor.editor_buffers.retain(|buffer_id| { + let retain = buffer_id != id; + removed |= !retain; + retain + }); + } + if removed { + if let Some(picker) = &lsp_tool.lsp_picker { + picker.update(cx, |picker, cx| { + picker.refresh(window, cx) + }); + } + } + } + }); + } + _ => {} + }, + ); + self.state.update(cx, |state, _| { + state.active_editor = Some(ActiveEditor { + editor: editor.downgrade(), + _editor_subscription, + editor_buffers, + }); + }); + + let lsp_picker = Self::new_lsp_picker(self.state.clone(), window, cx); + self.lsp_picker = Some(lsp_picker.clone()); + lsp_picker.update(cx, |lsp_picker, cx| lsp_picker.refresh(window, cx)); + } + } else if self.state.read(cx).active_editor.is_some() { + self.state.update(cx, |state, _| { + state.active_editor = None; + }); + if let Some(lsp_picker) = self.lsp_picker.as_ref() { + lsp_picker.update(cx, |lsp_picker, cx| { + lsp_picker.refresh(window, cx); + }); + }; + } + } else if self.state.read(cx).active_editor.is_some() { + self.state.update(cx, |state, _| { + state.active_editor = None; + }); + if let Some(lsp_picker) = self.lsp_picker.as_ref() { + lsp_picker.update(cx, |lsp_picker, cx| { + lsp_picker.refresh(window, cx); + }); + } + } + } +} + +impl Render for LspTool { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl ui::IntoElement { + let Some(lsp_picker) = self.lsp_picker.clone() else { + return div(); + }; + + let mut has_errors = false; + let mut has_warnings = false; + let mut has_other_notifications = false; + let state = self.state.read(cx); + for server in state.language_servers.health_statuses.values() { + if let Some(binary_status) = &state.language_servers.binary_statuses.get(&server.name) { + has_errors |= matches!(binary_status.status, BinaryStatus::Failed { .. }); + has_other_notifications |= binary_status.message.is_some(); + } + + if let Some((message, health)) = &server.health { + has_other_notifications |= message.is_some(); + match health { + ServerHealth::Ok => {} + ServerHealth::Warning => has_warnings = true, + ServerHealth::Error => has_errors = true, + } + } + } + + let indicator = if has_errors { + Some(Indicator::dot().color(Color::Error)) + } else if has_warnings { + Some(Indicator::dot().color(Color::Warning)) + } else if has_other_notifications { + Some(Indicator::dot().color(Color::Modified)) + } else { + None + }; + + div().child( + PickerPopoverMenu::new( + lsp_picker.clone(), + IconButton::new("zed-lsp-tool-button", IconName::Bolt) + .when_some(indicator, IconButton::indicator) + .shape(IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .indicator_border_color(Some(cx.theme().colors().status_bar_background)), + move |_, cx| Tooltip::simple("Language servers", cx), + Corner::BottomRight, + cx, + ) + .render(window, cx), + ) + } +} diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 625a459e20ab1e50033292b83a8562f45976dbe5..28ad606132fcc61fc5e801c8442dcc62fad45357 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -108,6 +108,12 @@ pub struct LanguageServer { root_uri: Url, } +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum LanguageServerSelector { + Id(LanguageServerId), + Name(LanguageServerName), +} + /// Identifies a running language server. 
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[repr(transparent)] diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index eda4ae641fc804d2b32a1980ce47824712c0a1a8..c1ebe25538c4db1f02539f5138c065661be47085 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -205,6 +205,7 @@ pub trait PickerDelegate: Sized + 'static { window: &mut Window, cx: &mut Context>, ) -> Option; + fn render_header( &self, _window: &mut Window, @@ -212,6 +213,7 @@ pub trait PickerDelegate: Sized + 'static { ) -> Option { None } + fn render_footer( &self, _window: &mut Window, diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index b2c21abcdbc58bd2af041dddeb10b7d0afebe49c..b8101e14f39b4faf54b76eaab955864e4ef82ae5 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -783,7 +783,7 @@ impl BufferStore { project_path: ProjectPath, cx: &mut Context, ) -> Task>> { - if let Some(buffer) = self.get_by_path(&project_path, cx) { + if let Some(buffer) = self.get_by_path(&project_path) { cx.emit(BufferStoreEvent::BufferOpened { buffer: buffer.clone(), project_path, @@ -946,7 +946,7 @@ impl BufferStore { self.path_to_buffer_id.get(project_path) } - pub fn get_by_path(&self, path: &ProjectPath, _cx: &App) -> Option> { + pub fn get_by_path(&self, path: &ProjectPath) -> Option> { self.path_to_buffer_id.get(path).and_then(|buffer_id| { let buffer = self.get(*buffer_id); buffer diff --git a/crates/project/src/debugger/breakpoint_store.rs b/crates/project/src/debugger/breakpoint_store.rs index 5f3e49f7dd3715752c747d4f39386bddf0103a48..025dca410069db0350d8d32509244a4889c62415 100644 --- a/crates/project/src/debugger/breakpoint_store.rs +++ b/crates/project/src/debugger/breakpoint_store.rs @@ -275,7 +275,7 @@ impl BreakpointStore { .context("Could not resolve provided abs path")?; let buffer = this .update(&mut cx, |this, cx| { - this.buffer_store().read(cx).get_by_path(&path, cx) + this.buffer_store().read(cx).get_by_path(&path) })? .context("Could not find buffer for a given path")?; let breakpoint = message diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index f7d0de48e2f3295ab67a3b0299fea79c85e33113..7002f83ab35bc9f9aa500fd1d96aded03df072c9 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -3322,7 +3322,7 @@ impl Repository { let Some(project_path) = self.repo_path_to_project_path(path, cx) else { continue; }; - if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) { + if let Some(buffer) = buffer_store.get_by_path(&project_path) { if buffer .read(cx) .file() @@ -3389,7 +3389,7 @@ impl Repository { let Some(project_path) = self.repo_path_to_project_path(path, cx) else { continue; }; - if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) { + if let Some(buffer) = buffer_store.get_by_path(&project_path) { if buffer .read(cx) .file() @@ -3749,7 +3749,7 @@ impl Repository { let buffer_id = git_store .buffer_store .read(cx) - .get_by_path(&project_path?, cx)? + .get_by_path(&project_path?)? 
.read(cx) .remote_id(); let diff_state = git_store.diffs.get(&buffer_id)?; diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index a9c257f3ea5aa97d7141c353d465d93e77be542e..d6f5d7a3cc98a872a1ce6822c88b6fee8599540e 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -42,9 +42,8 @@ use itertools::Itertools as _; use language::{ Bias, BinaryStatus, Buffer, BufferSnapshot, CachedLspAdapter, CodeLabel, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, Diff, File as _, Language, LanguageName, - LanguageRegistry, LanguageServerStatusUpdate, LanguageToolchainStore, LocalFile, LspAdapter, - LspAdapterDelegate, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, - Unclipped, + LanguageRegistry, LanguageToolchainStore, LocalFile, LspAdapter, LspAdapterDelegate, Patch, + PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped, language_settings::{ FormatOnSave, Formatter, LanguageSettings, SelectedFormatter, language_settings, }, @@ -60,9 +59,9 @@ use lsp::{ DidChangeWatchedFilesRegistrationOptions, Edit, FileOperationFilter, FileOperationPatternKind, FileOperationRegistrationOptions, FileRename, FileSystemWatcher, LanguageServer, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerId, LanguageServerName, - LspRequestFuture, MessageActionItem, MessageType, OneOf, RenameFilesParams, SymbolKind, - TextEdit, WillRenameFiles, WorkDoneProgressCancelParams, WorkspaceFolder, - notification::DidRenameFiles, + LanguageServerSelector, LspRequestFuture, MessageActionItem, MessageType, OneOf, + RenameFilesParams, SymbolKind, TextEdit, WillRenameFiles, WorkDoneProgressCancelParams, + WorkspaceFolder, notification::DidRenameFiles, }; use node_runtime::read_package_installed_version; use parking_lot::Mutex; @@ -256,7 +255,7 @@ impl LocalLspStore { let delegate = delegate as Arc; let key = key.clone(); let adapter = adapter.clone(); - let this = self.weak.clone(); + let lsp_store = self.weak.clone(); let pending_workspace_folders = pending_workspace_folders.clone(); let fs = self.fs.clone(); let pull_diagnostics = ProjectSettings::get_global(cx) @@ -265,7 +264,8 @@ impl LocalLspStore { .enabled; cx.spawn(async move |cx| { let result = async { - let toolchains = this.update(cx, |this, cx| this.toolchain_store(cx))?; + let toolchains = + lsp_store.update(cx, |lsp_store, cx| lsp_store.toolchain_store(cx))?; let language_server = pending_server.await?; let workspace_config = Self::workspace_configuration_for_adapter( @@ -300,7 +300,7 @@ impl LocalLspStore { })??; Self::setup_lsp_messages( - this.clone(), + lsp_store.clone(), fs, &language_server, delegate.clone(), @@ -321,7 +321,7 @@ impl LocalLspStore { })? 
.await .inspect_err(|_| { - if let Some(lsp_store) = this.upgrade() { + if let Some(lsp_store) = lsp_store.upgrade() { lsp_store .update(cx, |lsp_store, cx| { lsp_store.cleanup_lsp_data(server_id); @@ -343,17 +343,18 @@ impl LocalLspStore { match result { Ok(server) => { - this.update(cx, |this, mut cx| { - this.insert_newly_running_language_server( - adapter, - server.clone(), - server_id, - key, - pending_workspace_folders, - &mut cx, - ); - }) - .ok(); + lsp_store + .update(cx, |lsp_store, mut cx| { + lsp_store.insert_newly_running_language_server( + adapter, + server.clone(), + server_id, + key, + pending_workspace_folders, + &mut cx, + ); + }) + .ok(); stderr_capture.lock().take(); Some(server) } @@ -366,7 +367,9 @@ impl LocalLspStore { error: format!("{err}\n-- stderr--\n{log}"), }, ); - log::error!("Failed to start language server {server_name:?}: {err:#?}"); + let message = + format!("Failed to start language server {server_name:?}: {err:#?}"); + log::error!("{message}"); log::error!("server stderr: {log}"); None } @@ -378,6 +381,9 @@ impl LocalLspStore { pending_workspace_folders, }; + self.languages + .update_lsp_binary_status(adapter.name(), BinaryStatus::Starting); + self.language_servers.insert(server_id, state); self.language_server_ids .entry(key) @@ -1028,20 +1034,14 @@ impl LocalLspStore { clangd_ext::register_notifications(this, language_server, adapter); } - fn shutdown_language_servers( + fn shutdown_language_servers_on_quit( &mut self, - _cx: &mut Context, + _: &mut Context, ) -> impl Future + use<> { let shutdown_futures = self .language_servers .drain() - .map(|(_, server_state)| async { - use LanguageServerState::*; - match server_state { - Running { server, .. } => server.shutdown()?.await, - Starting { startup, .. } => startup.await?.shutdown()?.await, - } - }) + .map(|(_, server_state)| Self::shutdown_server(server_state)) .collect::>(); async move { @@ -1049,6 +1049,24 @@ impl LocalLspStore { } } + async fn shutdown_server(server_state: LanguageServerState) -> anyhow::Result<()> { + match server_state { + LanguageServerState::Running { server, .. } => { + if let Some(shutdown) = server.shutdown() { + shutdown.await; + } + } + LanguageServerState::Starting { startup, .. } => { + if let Some(server) = startup.await { + if let Some(shutdown) = server.shutdown() { + shutdown.await; + } + } + } + } + Ok(()) + } + fn language_servers_for_worktree( &self, worktree_id: WorktreeId, @@ -2318,6 +2336,7 @@ impl LocalLspStore { fn register_buffer_with_language_servers( &mut self, buffer_handle: &Entity, + only_register_servers: HashSet, cx: &mut Context, ) { let buffer = buffer_handle.read(cx); @@ -2383,6 +2402,18 @@ impl LocalLspStore { if reused && server_node.server_id().is_none() { return None; } + if !only_register_servers.is_empty() { + if let Some(server_id) = server_node.server_id() { + if !only_register_servers.contains(&LanguageServerSelector::Id(server_id)) { + return None; + } + } + if let Some(name) = server_node.name() { + if !only_register_servers.contains(&LanguageServerSelector::Name(name)) { + return None; + } + } + } let server_id = server_node.server_id_or_init( |LaunchDisposition { @@ -2390,66 +2421,82 @@ impl LocalLspStore { attach, path, settings, - }| match attach { - language::Attach::InstancePerRoot => { - // todo: handle instance per root proper. 
- if let Some(server_ids) = self - .language_server_ids - .get(&(worktree_id, server_name.clone())) - { - server_ids.iter().cloned().next().unwrap() - } else { - let language_name = language.name(); - - self.start_language_server( - &worktree, - delegate.clone(), - self.languages - .lsp_adapters(&language_name) - .into_iter() - .find(|adapter| &adapter.name() == server_name) - .expect("To find LSP adapter"), - settings, - cx, - ) - } - } - language::Attach::Shared => { - let uri = Url::from_file_path( - worktree.read(cx).abs_path().join(&path.path), - ); - let key = (worktree_id, server_name.clone()); - if !self.language_server_ids.contains_key(&key) { - let language_name = language.name(); - self.start_language_server( - &worktree, - delegate.clone(), - self.languages - .lsp_adapters(&language_name) - .into_iter() - .find(|adapter| &adapter.name() == server_name) - .expect("To find LSP adapter"), - settings, - cx, - ); - } - if let Some(server_ids) = self - .language_server_ids - .get(&key) - { - debug_assert_eq!(server_ids.len(), 1); - let server_id = server_ids.iter().cloned().next().unwrap(); - - if let Some(state) = self.language_servers.get(&server_id) { - if let Ok(uri) = uri { - state.add_workspace_folder(uri); - }; - } - server_id - } else { - unreachable!("Language server ID should be available, as it's registered on demand") - } - } + }| { + let server_id = match attach { + language::Attach::InstancePerRoot => { + // todo: handle instance per root proper. + if let Some(server_ids) = self + .language_server_ids + .get(&(worktree_id, server_name.clone())) + { + server_ids.iter().cloned().next().unwrap() + } else { + let language_name = language.name(); + let adapter = self.languages + .lsp_adapters(&language_name) + .into_iter() + .find(|adapter| &adapter.name() == server_name) + .expect("To find LSP adapter"); + let server_id = self.start_language_server( + &worktree, + delegate.clone(), + adapter, + settings, + cx, + ); + server_id + } + } + language::Attach::Shared => { + let uri = Url::from_file_path( + worktree.read(cx).abs_path().join(&path.path), + ); + let key = (worktree_id, server_name.clone()); + if !self.language_server_ids.contains_key(&key) { + let language_name = language.name(); + let adapter = self.languages + .lsp_adapters(&language_name) + .into_iter() + .find(|adapter| &adapter.name() == server_name) + .expect("To find LSP adapter"); + self.start_language_server( + &worktree, + delegate.clone(), + adapter, + settings, + cx, + ); + } + if let Some(server_ids) = self + .language_server_ids + .get(&key) + { + debug_assert_eq!(server_ids.len(), 1); + let server_id = server_ids.iter().cloned().next().unwrap(); + if let Some(state) = self.language_servers.get(&server_id) { + if let Ok(uri) = uri { + state.add_workspace_folder(uri); + }; + } + server_id + } else { + unreachable!("Language server ID should be available, as it's registered on demand") + } + } + }; + let lsp_tool = self.weak.clone(); + let server_name = server_node.name(); + let buffer_abs_path = abs_path.to_string_lossy().to_string(); + cx.defer(move |cx| { + lsp_tool.update(cx, |_, cx| cx.emit(LspStoreEvent::LanguageServerUpdate { + language_server_id: server_id, + name: server_name, + message: proto::update_language_server::Variant::RegisteredForBuffer(proto::RegisteredForBuffer { + buffer_abs_path, + }) + })).ok(); + }); + server_id }, )?; let server_state = self.language_servers.get(&server_id)?; @@ -2498,6 +2545,16 @@ impl LocalLspStore { vec![snapshot] }); + + 
cx.emit(LspStoreEvent::LanguageServerUpdate { + language_server_id: server.server_id(), + name: None, + message: proto::update_language_server::Variant::RegisteredForBuffer( + proto::RegisteredForBuffer { + buffer_abs_path: abs_path.to_string_lossy().to_string(), + }, + ), + }); } } @@ -3479,7 +3536,7 @@ pub struct LspStore { worktree_store: Entity, toolchain_store: Option>, pub languages: Arc, - pub language_server_statuses: BTreeMap, + language_server_statuses: BTreeMap, active_entry: Option, _maintain_workspace_config: (Task>, watch::Sender<()>), _maintain_buffer_languages: Task<()>, @@ -3503,11 +3560,13 @@ struct BufferLspData { colors: Option>, } +#[derive(Debug)] pub enum LspStoreEvent { LanguageServerAdded(LanguageServerId, LanguageServerName, Option), LanguageServerRemoved(LanguageServerId), LanguageServerUpdate { language_server_id: LanguageServerId, + name: Option, message: proto::update_language_server::Variant, }, LanguageServerLog(LanguageServerId, LanguageServerLogType, String), @@ -3682,6 +3741,7 @@ impl LspStore { } cx.observe_global::(Self::on_settings_changed) .detach(); + subscribe_to_binary_statuses(&languages, cx).detach(); let _maintain_workspace_config = { let (sender, receiver) = watch::channel(); @@ -3714,7 +3774,9 @@ impl LspStore { next_diagnostic_group_id: Default::default(), diagnostics: Default::default(), _subscription: cx.on_app_quit(|this, cx| { - this.as_local_mut().unwrap().shutdown_language_servers(cx) + this.as_local_mut() + .unwrap() + .shutdown_language_servers_on_quit(cx) }), lsp_tree: LanguageServerTree::new(manifest_tree, languages.clone(), cx), registered_buffers: HashMap::default(), @@ -3768,6 +3830,7 @@ impl LspStore { .detach(); cx.subscribe(&worktree_store, Self::on_worktree_store_event) .detach(); + subscribe_to_binary_statuses(&languages, cx).detach(); let _maintain_workspace_config = { let (sender, receiver) = watch::channel(); (Self::maintain_workspace_config(fs, receiver, cx), sender) @@ -3819,7 +3882,7 @@ impl LspStore { if let Some(local) = self.as_local_mut() { local.initialize_buffer(buffer, cx); if local.registered_buffers.contains_key(&buffer_id) { - local.register_buffer_with_language_servers(buffer, cx); + local.register_buffer_with_language_servers(buffer, HashSet::default(), cx); } } } @@ -4047,6 +4110,7 @@ impl LspStore { pub(crate) fn register_buffer_with_language_servers( &mut self, buffer: &Entity, + only_register_servers: HashSet, ignore_refcounts: bool, cx: &mut Context, ) -> OpenLspBufferHandle { @@ -4070,7 +4134,7 @@ impl LspStore { } if ignore_refcounts || *refcount == 1 { - local.register_buffer_with_language_servers(buffer, cx); + local.register_buffer_with_language_servers(buffer, only_register_servers, cx); } if !ignore_refcounts { cx.observe_release(&handle, move |this, buffer, cx| { @@ -4097,6 +4161,26 @@ impl LspStore { .request(proto::RegisterBufferWithLanguageServers { project_id: upstream_project_id, buffer_id, + only_servers: only_register_servers + .into_iter() + .map(|selector| { + let selector = match selector { + LanguageServerSelector::Id(language_server_id) => { + proto::language_server_selector::Selector::ServerId( + language_server_id.to_proto(), + ) + } + LanguageServerSelector::Name(language_server_name) => { + proto::language_server_selector::Selector::Name( + language_server_name.to_string(), + ) + } + }; + proto::LanguageServerSelector { + selector: Some(selector), + } + }) + .collect(), }) .await }) @@ -4182,7 +4266,11 @@ impl LspStore { .registered_buffers 
.contains_key(&buffer.read(cx).remote_id()) { - local.register_buffer_with_language_servers(&buffer, cx); + local.register_buffer_with_language_servers( + &buffer, + HashSet::default(), + cx, + ); } } } @@ -4267,7 +4355,11 @@ impl LspStore { if let Some(local) = self.as_local_mut() { if local.registered_buffers.contains_key(&buffer_id) { - local.register_buffer_with_language_servers(buffer_entity, cx); + local.register_buffer_with_language_servers( + buffer_entity, + HashSet::default(), + cx, + ); } } Some(worktree.read(cx).id()) @@ -4488,28 +4580,29 @@ impl LspStore { let buffer_store = self.buffer_store.clone(); if let Some(local) = self.as_local_mut() { let mut adapters = BTreeMap::default(); - let to_stop = local.lsp_tree.clone().update(cx, |lsp_tree, cx| { - let get_adapter = { - let languages = local.languages.clone(); - let environment = local.environment.clone(); - let weak = local.weak.clone(); - let worktree_store = local.worktree_store.clone(); - let http_client = local.http_client.clone(); - let fs = local.fs.clone(); - move |worktree_id, cx: &mut App| { - let worktree = worktree_store.read(cx).worktree_for_id(worktree_id, cx)?; - Some(LocalLspAdapterDelegate::new( - languages.clone(), - &environment, - weak.clone(), - &worktree, - http_client.clone(), - fs.clone(), - cx, - )) - } - }; + let get_adapter = { + let languages = local.languages.clone(); + let environment = local.environment.clone(); + let weak = local.weak.clone(); + let worktree_store = local.worktree_store.clone(); + let http_client = local.http_client.clone(); + let fs = local.fs.clone(); + move |worktree_id, cx: &mut App| { + let worktree = worktree_store.read(cx).worktree_for_id(worktree_id, cx)?; + Some(LocalLspAdapterDelegate::new( + languages.clone(), + &environment, + weak.clone(), + &worktree, + http_client.clone(), + fs.clone(), + cx, + )) + } + }; + let mut messages_to_report = Vec::new(); + let to_stop = local.lsp_tree.clone().update(cx, |lsp_tree, cx| { let mut rebase = lsp_tree.rebase(); for buffer_handle in buffer_store.read(cx).buffers().sorted_by_key(|buffer| { Reverse( @@ -4570,9 +4663,10 @@ impl LspStore { continue; }; + let abs_path = file.abs_path(cx); for node in nodes { if !reused { - node.server_id_or_init( + let server_id = node.server_id_or_init( |LaunchDisposition { server_name, attach, @@ -4587,20 +4681,20 @@ impl LspStore { { server_ids.iter().cloned().next().unwrap() } else { - local.start_language_server( + let adapter = local + .languages + .lsp_adapters(&language) + .into_iter() + .find(|adapter| &adapter.name() == server_name) + .expect("To find LSP adapter"); + let server_id = local.start_language_server( &worktree, delegate.clone(), - local - .languages - .lsp_adapters(&language) - .into_iter() - .find(|adapter| { - &adapter.name() == server_name - }) - .expect("To find LSP adapter"), + adapter, settings, cx, - ) + ); + server_id } } language::Attach::Shared => { @@ -4610,15 +4704,16 @@ impl LspStore { let key = (worktree_id, server_name.clone()); local.language_server_ids.remove(&key); + let adapter = local + .languages + .lsp_adapters(&language) + .into_iter() + .find(|adapter| &adapter.name() == server_name) + .expect("To find LSP adapter"); let server_id = local.start_language_server( &worktree, delegate.clone(), - local - .languages - .lsp_adapters(&language) - .into_iter() - .find(|adapter| &adapter.name() == server_name) - .expect("To find LSP adapter"), + adapter, settings, cx, ); @@ -4633,14 +4728,30 @@ impl LspStore { } }, ); + + if let Some(language_server_id) = 
server_id { + messages_to_report.push(LspStoreEvent::LanguageServerUpdate { + language_server_id, + name: node.name(), + message: + proto::update_language_server::Variant::RegisteredForBuffer( + proto::RegisteredForBuffer { + buffer_abs_path: abs_path.to_string_lossy().to_string(), + }, + ), + }); + } } } } } rebase.finish() }); - for (id, name) in to_stop { - self.stop_local_language_server(id, name, cx).detach(); + for message in messages_to_report { + cx.emit(message); + } + for (id, _) in to_stop { + self.stop_local_language_server(id, cx).detach(); } } } @@ -7122,7 +7233,7 @@ impl LspStore { path: relative_path.into(), }; - if let Some(buffer_handle) = self.buffer_store.read(cx).get_by_path(&project_path, cx) { + if let Some(buffer_handle) = self.buffer_store.read(cx).get_by_path(&project_path) { let snapshot = buffer_handle.read(cx).snapshot(); let buffer = buffer_handle.read(cx); let reused_diagnostics = buffer @@ -7801,6 +7912,7 @@ impl LspStore { return upstream_client.send(proto::RegisterBufferWithLanguageServers { project_id: upstream_project_id, buffer_id: buffer_id.to_proto(), + only_servers: envelope.payload.only_servers, }); } @@ -7808,7 +7920,28 @@ impl LspStore { anyhow::bail!("buffer is not open"); }; - let handle = this.register_buffer_with_language_servers(&buffer, false, cx); + let handle = this.register_buffer_with_language_servers( + &buffer, + envelope + .payload + .only_servers + .into_iter() + .filter_map(|selector| { + Some(match selector.selector? { + proto::language_server_selector::Selector::ServerId(server_id) => { + LanguageServerSelector::Id(LanguageServerId::from_proto(server_id)) + } + proto::language_server_selector::Selector::Name(name) => { + LanguageServerSelector::Name(LanguageServerName( + SharedString::from(name), + )) + } + }) + }) + .collect(), + false, + cx, + ); this.buffer_store().update(cx, |buffer_store, _| { buffer_store.register_shared_lsp_handle(peer_id, buffer_id, handle); }); @@ -7980,16 +8113,16 @@ impl LspStore { } async fn handle_update_language_server( - this: Entity, + lsp_store: Entity, envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result<()> { - this.update(&mut cx, |this, cx| { + lsp_store.update(&mut cx, |lsp_store, cx| { let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize); match envelope.payload.variant.context("invalid variant")? 
{ proto::update_language_server::Variant::WorkStart(payload) => { - this.on_lsp_work_start( + lsp_store.on_lsp_work_start( language_server_id, payload.token, LanguageServerProgress { @@ -8003,9 +8136,8 @@ impl LspStore { cx, ); } - proto::update_language_server::Variant::WorkProgress(payload) => { - this.on_lsp_work_progress( + lsp_store.on_lsp_work_progress( language_server_id, payload.token, LanguageServerProgress { @@ -8021,15 +8153,28 @@ impl LspStore { } proto::update_language_server::Variant::WorkEnd(payload) => { - this.on_lsp_work_end(language_server_id, payload.token, cx); + lsp_store.on_lsp_work_end(language_server_id, payload.token, cx); } proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => { - this.disk_based_diagnostics_started(language_server_id, cx); + lsp_store.disk_based_diagnostics_started(language_server_id, cx); } proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => { - this.disk_based_diagnostics_finished(language_server_id, cx) + lsp_store.disk_based_diagnostics_finished(language_server_id, cx) + } + + non_lsp @ proto::update_language_server::Variant::StatusUpdate(_) + | non_lsp @ proto::update_language_server::Variant::RegisteredForBuffer(_) => { + cx.emit(LspStoreEvent::LanguageServerUpdate { + language_server_id, + name: envelope + .payload + .server_name + .map(SharedString::new) + .map(LanguageServerName), + message: non_lsp, + }); } } @@ -8145,6 +8290,9 @@ impl LspStore { cx.emit(LspStoreEvent::DiskBasedDiagnosticsStarted { language_server_id }); cx.emit(LspStoreEvent::LanguageServerUpdate { language_server_id, + name: self + .language_server_adapter_for_id(language_server_id) + .map(|adapter| adapter.name()), message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating( Default::default(), ), @@ -8165,6 +8313,9 @@ impl LspStore { cx.emit(LspStoreEvent::DiskBasedDiagnosticsFinished { language_server_id }); cx.emit(LspStoreEvent::LanguageServerUpdate { language_server_id, + name: self + .language_server_adapter_for_id(language_server_id) + .map(|adapter| adapter.name()), message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated( Default::default(), ), @@ -8473,6 +8624,9 @@ impl LspStore { } cx.emit(LspStoreEvent::LanguageServerUpdate { language_server_id, + name: self + .language_server_adapter_for_id(language_server_id) + .map(|adapter| adapter.name()), message: proto::update_language_server::Variant::WorkStart(proto::LspWorkStart { token, title: progress.title, @@ -8521,6 +8675,9 @@ impl LspStore { if did_update { cx.emit(LspStoreEvent::LanguageServerUpdate { language_server_id, + name: self + .language_server_adapter_for_id(language_server_id) + .map(|adapter| adapter.name()), message: proto::update_language_server::Variant::WorkProgress( proto::LspWorkProgress { token, @@ -8550,6 +8707,9 @@ impl LspStore { cx.emit(LspStoreEvent::LanguageServerUpdate { language_server_id, + name: self + .language_server_adapter_for_id(language_server_id) + .map(|adapter| adapter.name()), message: proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd { token }), }) } @@ -8930,22 +9090,73 @@ impl LspStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - this.update(&mut cx, |this, cx| { - let buffers = this.buffer_ids_to_buffers(envelope.payload.buffer_ids.into_iter(), cx); - this.restart_language_servers_for_buffers(buffers, cx); + this.update(&mut cx, |lsp_store, cx| { + let buffers = + lsp_store.buffer_ids_to_buffers(envelope.payload.buffer_ids.into_iter(), cx); + 
lsp_store.restart_language_servers_for_buffers( + buffers, + envelope + .payload + .only_servers + .into_iter() + .filter_map(|selector| { + Some(match selector.selector? { + proto::language_server_selector::Selector::ServerId(server_id) => { + LanguageServerSelector::Id(LanguageServerId::from_proto(server_id)) + } + proto::language_server_selector::Selector::Name(name) => { + LanguageServerSelector::Name(LanguageServerName( + SharedString::from(name), + )) + } + }) + }) + .collect(), + cx, + ); })?; Ok(proto::Ack {}) } pub async fn handle_stop_language_servers( - this: Entity, + lsp_store: Entity, envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - this.update(&mut cx, |this, cx| { - let buffers = this.buffer_ids_to_buffers(envelope.payload.buffer_ids.into_iter(), cx); - this.stop_language_servers_for_buffers(buffers, cx); + lsp_store.update(&mut cx, |lsp_store, cx| { + if envelope.payload.all + && envelope.payload.also_servers.is_empty() + && envelope.payload.buffer_ids.is_empty() + { + lsp_store.stop_all_language_servers(cx); + } else { + let buffers = + lsp_store.buffer_ids_to_buffers(envelope.payload.buffer_ids.into_iter(), cx); + lsp_store.stop_language_servers_for_buffers( + buffers, + envelope + .payload + .also_servers + .into_iter() + .filter_map(|selector| { + Some(match selector.selector? { + proto::language_server_selector::Selector::ServerId(server_id) => { + LanguageServerSelector::Id(LanguageServerId::from_proto( + server_id, + )) + } + proto::language_server_selector::Selector::Name(name) => { + LanguageServerSelector::Name(LanguageServerName( + SharedString::from(name), + )) + } + }) + }) + .collect(), + cx, + ); + } })?; Ok(proto::Ack {}) @@ -9269,11 +9480,8 @@ impl LspStore { select! { server = startup.fuse() => server, - _ = timer => { - log::info!( - "timeout waiting for language server {} to finish launching before stopping", - name - ); + () = timer => { + log::info!("timeout waiting for language server {name} to finish launching before stopping"); None }, } @@ -9296,7 +9504,6 @@ impl LspStore { fn stop_local_language_server( &mut self, server_id: LanguageServerId, - name: LanguageServerName, cx: &mut Context, ) -> Task> { let local = match &mut self.mode { @@ -9306,7 +9513,7 @@ impl LspStore { } }; - let mut orphaned_worktrees = vec![]; + let mut orphaned_worktrees = Vec::new(); // Remove this server ID from all entries in the given worktree. local.language_server_ids.retain(|(worktree, _), ids| { if !ids.remove(&server_id) { @@ -9320,8 +9527,6 @@ impl LspStore { true } }); - let _ = self.language_server_statuses.remove(&server_id); - log::info!("stopping language server {name}"); self.buffer_store.update(cx, |buffer_store, cx| { for buffer in buffer_store.buffers() { buffer.update(cx, |buffer, cx| { @@ -9367,19 +9572,85 @@ impl LspStore { }); } local.language_server_watched_paths.remove(&server_id); + let server_state = local.language_servers.remove(&server_id); - cx.notify(); self.cleanup_lsp_data(server_id); - cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); - cx.spawn(async move |_, cx| { - Self::shutdown_language_server(server_state, name, cx).await; - orphaned_worktrees - }) + let name = self + .language_server_statuses + .remove(&server_id) + .map(|status| LanguageServerName::from(status.name.as_str())) + .or_else(|| { + if let Some(LanguageServerState::Running { adapter, .. 
}) = server_state.as_ref() { + Some(adapter.name()) + } else { + None + } + }); + + if let Some(name) = name { + log::info!("stopping language server {name}"); + self.languages + .update_lsp_binary_status(name.clone(), BinaryStatus::Stopping); + cx.notify(); + + return cx.spawn(async move |lsp_store, cx| { + Self::shutdown_language_server(server_state, name.clone(), cx).await; + lsp_store + .update(cx, |lsp_store, cx| { + lsp_store + .languages + .update_lsp_binary_status(name, BinaryStatus::Stopped); + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); + cx.notify(); + }) + .ok(); + orphaned_worktrees + }); + } + + if server_state.is_some() { + cx.emit(LspStoreEvent::LanguageServerRemoved(server_id)); + } + Task::ready(orphaned_worktrees) + } + + pub fn stop_all_language_servers(&mut self, cx: &mut Context) { + if let Some((client, project_id)) = self.upstream_client() { + let request = client.request(proto::StopLanguageServers { + project_id, + buffer_ids: Vec::new(), + also_servers: Vec::new(), + all: true, + }); + cx.background_spawn(request).detach_and_log_err(cx); + } else { + let Some(local) = self.as_local_mut() else { + return; + }; + let language_servers_to_stop = local + .language_server_ids + .values() + .flatten() + .copied() + .collect(); + local.lsp_tree.update(cx, |this, _| { + this.remove_nodes(&language_servers_to_stop); + }); + let tasks = language_servers_to_stop + .into_iter() + .map(|server| self.stop_local_language_server(server, cx)) + .collect::>(); + cx.background_spawn(async move { + futures::future::join_all(tasks).await; + }) + .detach(); + } } pub fn restart_language_servers_for_buffers( &mut self, buffers: Vec>, + only_restart_servers: HashSet, cx: &mut Context, ) { if let Some((client, project_id)) = self.upstream_client() { @@ -9389,18 +9660,49 @@ impl LspStore { .into_iter() .map(|b| b.read(cx).remote_id().to_proto()) .collect(), + only_servers: only_restart_servers + .into_iter() + .map(|selector| { + let selector = match selector { + LanguageServerSelector::Id(language_server_id) => { + proto::language_server_selector::Selector::ServerId( + language_server_id.to_proto(), + ) + } + LanguageServerSelector::Name(language_server_name) => { + proto::language_server_selector::Selector::Name( + language_server_name.to_string(), + ) + } + }; + proto::LanguageServerSelector { + selector: Some(selector), + } + }) + .collect(), + all: false, }); cx.background_spawn(request).detach_and_log_err(cx); } else { - let stop_task = self.stop_local_language_servers_for_buffers(&buffers, cx); - cx.spawn(async move |this, cx| { + let stop_task = if only_restart_servers.is_empty() { + self.stop_local_language_servers_for_buffers(&buffers, HashSet::default(), cx) + } else { + self.stop_local_language_servers_for_buffers(&[], only_restart_servers.clone(), cx) + }; + cx.spawn(async move |lsp_store, cx| { stop_task.await; - this.update(cx, |this, cx| { - for buffer in buffers { - this.register_buffer_with_language_servers(&buffer, true, cx); - } - }) - .ok() + lsp_store + .update(cx, |lsp_store, cx| { + for buffer in buffers { + lsp_store.register_buffer_with_language_servers( + &buffer, + only_restart_servers.clone(), + true, + cx, + ); + } + }) + .ok() }) .detach(); } @@ -9409,6 +9711,7 @@ impl LspStore { pub fn stop_language_servers_for_buffers( &mut self, buffers: Vec>, + also_restart_servers: HashSet, cx: &mut Context, ) { if let Some((client, project_id)) = self.upstream_client() { @@ -9418,10 +9721,31 @@ impl LspStore { .into_iter() .map(|b| 
b.read(cx).remote_id().to_proto()) .collect(), + also_servers: also_restart_servers + .into_iter() + .map(|selector| { + let selector = match selector { + LanguageServerSelector::Id(language_server_id) => { + proto::language_server_selector::Selector::ServerId( + language_server_id.to_proto(), + ) + } + LanguageServerSelector::Name(language_server_name) => { + proto::language_server_selector::Selector::Name( + language_server_name.to_string(), + ) + } + }; + proto::LanguageServerSelector { + selector: Some(selector), + } + }) + .collect(), + all: false, }); cx.background_spawn(request).detach_and_log_err(cx); } else { - self.stop_local_language_servers_for_buffers(&buffers, cx) + self.stop_local_language_servers_for_buffers(&buffers, also_restart_servers, cx) .detach(); } } @@ -9429,32 +9753,62 @@ impl LspStore { fn stop_local_language_servers_for_buffers( &mut self, buffers: &[Entity], + also_restart_servers: HashSet, cx: &mut Context, ) -> Task<()> { let Some(local) = self.as_local_mut() else { return Task::ready(()); }; - let language_servers_to_stop = buffers - .iter() - .flat_map(|buffer| { - buffer.update(cx, |buffer, cx| { - local.language_server_ids_for_buffer(buffer, cx) - }) + let mut language_server_names_to_stop = BTreeSet::default(); + let mut language_servers_to_stop = also_restart_servers + .into_iter() + .flat_map(|selector| match selector { + LanguageServerSelector::Id(id) => Some(id), + LanguageServerSelector::Name(name) => { + language_server_names_to_stop.insert(name); + None + } }) .collect::>(); + + let mut covered_worktrees = HashSet::default(); + for buffer in buffers { + buffer.update(cx, |buffer, cx| { + language_servers_to_stop.extend(local.language_server_ids_for_buffer(buffer, cx)); + if let Some(worktree_id) = buffer.file().map(|f| f.worktree_id(cx)) { + if covered_worktrees.insert(worktree_id) { + language_server_names_to_stop.retain(|name| { + match local.language_server_ids.get(&(worktree_id, name.clone())) { + Some(server_ids) => { + language_servers_to_stop + .extend(server_ids.into_iter().copied()); + false + } + None => true, + } + }); + } + } + }); + } + for name in language_server_names_to_stop { + if let Some(server_ids) = local + .language_server_ids + .iter() + .filter(|((_, server_name), _)| server_name == &name) + .map(|((_, _), server_ids)| server_ids) + .max_by_key(|server_ids| server_ids.len()) + { + language_servers_to_stop.extend(server_ids.into_iter().copied()); + } + } + local.lsp_tree.update(cx, |this, _| { this.remove_nodes(&language_servers_to_stop); }); let tasks = language_servers_to_stop .into_iter() - .map(|server| { - let name = self - .language_server_statuses - .get(&server) - .map(|state| state.name.as_str().into()) - .unwrap_or_else(|| LanguageServerName::from("Unknown")); - self.stop_local_language_server(server, name, cx) - }) + .map(|server| self.stop_local_language_server(server, cx)) .collect::>(); cx.background_spawn(futures::future::join_all(tasks).map(|_| ())) @@ -9472,7 +9826,7 @@ impl LspStore { Some( self.buffer_store() .read(cx) - .get_by_path(&project_path, cx)? + .get_by_path(&project_path)? 
.read(cx), ) } @@ -9686,6 +10040,9 @@ impl LspStore { simulate_disk_based_diagnostics_completion: None, }, ); + local + .languages + .update_lsp_binary_status(adapter.name(), BinaryStatus::None); if let Some(file_ops_caps) = language_server .capabilities() .workspace @@ -10331,6 +10688,53 @@ impl LspStore { } } +fn subscribe_to_binary_statuses( + languages: &Arc, + cx: &mut Context<'_, LspStore>, +) -> Task<()> { + let mut server_statuses = languages.language_server_binary_statuses(); + cx.spawn(async move |lsp_store, cx| { + while let Some((server_name, binary_status)) = server_statuses.next().await { + if lsp_store + .update(cx, |_, cx| { + let mut message = None; + let binary_status = match binary_status { + BinaryStatus::None => proto::ServerBinaryStatus::None, + BinaryStatus::CheckingForUpdate => { + proto::ServerBinaryStatus::CheckingForUpdate + } + BinaryStatus::Downloading => proto::ServerBinaryStatus::Downloading, + BinaryStatus::Starting => proto::ServerBinaryStatus::Starting, + BinaryStatus::Stopping => proto::ServerBinaryStatus::Stopping, + BinaryStatus::Stopped => proto::ServerBinaryStatus::Stopped, + BinaryStatus::Failed { error } => { + message = Some(error); + proto::ServerBinaryStatus::Failed + } + }; + cx.emit(LspStoreEvent::LanguageServerUpdate { + // Binary updates are about the binary that might not have any language server id at that point. + // Reuse `LanguageServerUpdate` for them and provide a fake id that won't be used on the receiver side. + language_server_id: LanguageServerId(0), + name: Some(server_name), + message: proto::update_language_server::Variant::StatusUpdate( + proto::StatusUpdate { + message, + status: Some(proto::status_update::Status::Binary( + binary_status as i32, + )), + }, + ), + }); + }) + .is_err() + { + break; + } + } + }) +} + fn lsp_workspace_diagnostics_refresh( server: Arc, cx: &mut Context<'_, LspStore>, @@ -11286,7 +11690,7 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate { fn update_status(&self, server_name: LanguageServerName, status: language::BinaryStatus) { self.language_registry - .update_lsp_status(server_name, LanguageServerStatusUpdate::Binary(status)); + .update_lsp_binary_status(server_name, status); } fn registered_lsp_adapters(&self) -> Vec> { diff --git a/crates/project/src/lsp_store/rust_analyzer_ext.rs b/crates/project/src/lsp_store/rust_analyzer_ext.rs index 78401ac79773a57dade345b332932832740904a6..d78715d38579c24b6aa0f5c1841c8c0298ddd9d7 100644 --- a/crates/project/src/lsp_store/rust_analyzer_ext.rs +++ b/crates/project/src/lsp_store/rust_analyzer_ext.rs @@ -1,11 +1,11 @@ use ::serde::{Deserialize, Serialize}; use anyhow::Context as _; -use gpui::{App, Entity, SharedString, Task, WeakEntity}; -use language::{LanguageServerStatusUpdate, ServerHealth}; +use gpui::{App, Entity, Task, WeakEntity}; +use language::ServerHealth; use lsp::LanguageServer; use rpc::proto; -use crate::{LspStore, Project, ProjectPath, lsp_store}; +use crate::{LspStore, LspStoreEvent, Project, ProjectPath, lsp_store}; pub const RUST_ANALYZER_NAME: &str = "rust-analyzer"; pub const CARGO_DIAGNOSTICS_SOURCE_NAME: &str = "rustc"; @@ -36,24 +36,45 @@ pub fn register_notifications(lsp_store: WeakEntity, language_server: .on_notification::({ let name = name.clone(); move |params, cx| { - let status = params.message; - let log_message = - format!("Language server {name} (id {server_id}) status update: {status:?}"); - match ¶ms.health { - ServerHealth::Ok => log::info!("{log_message}"), - ServerHealth::Warning => log::warn!("{log_message}"), - 
ServerHealth::Error => log::error!("{log_message}"), - } + let message = params.message; + let log_message = message.as_ref().map(|message| { + format!("Language server {name} (id {server_id}) status update: {message}") + }); + let status = match ¶ms.health { + ServerHealth::Ok => { + if let Some(log_message) = log_message { + log::info!("{log_message}"); + } + proto::ServerHealth::Ok + } + ServerHealth::Warning => { + if let Some(log_message) = log_message { + log::warn!("{log_message}"); + } + proto::ServerHealth::Warning + } + ServerHealth::Error => { + if let Some(log_message) = log_message { + log::error!("{log_message}"); + } + proto::ServerHealth::Error + } + }; lsp_store - .update(cx, |lsp_store, _| { - lsp_store.languages.update_lsp_status( - name.clone(), - LanguageServerStatusUpdate::Health( - params.health, - status.map(SharedString::from), + .update(cx, |_, cx| { + cx.emit(LspStoreEvent::LanguageServerUpdate { + language_server_id: server_id, + name: Some(name.clone()), + message: proto::update_language_server::Variant::StatusUpdate( + proto::StatusUpdate { + message, + status: Some(proto::status_update::Status::Health( + status as i32, + )), + }, ), - ); + }); }) .ok(); } diff --git a/crates/project/src/manifest_tree/server_tree.rs b/crates/project/src/manifest_tree/server_tree.rs index 1ac990a5084945848c79ab4cf89c67fb56267f9f..0283f06eec0f2859f99bddb0e5be10bb8f4197fa 100644 --- a/crates/project/src/manifest_tree/server_tree.rs +++ b/crates/project/src/manifest_tree/server_tree.rs @@ -74,6 +74,7 @@ impl LanguageServerTreeNode { pub(crate) fn server_id(&self) -> Option { self.0.upgrade()?.id.get().copied() } + /// Returns a language server ID for this node if it has already been initialized; otherwise runs the provided closure to initialize the language server node in a tree. /// May return None if the node no longer belongs to the server tree it was created in. pub(crate) fn server_id_or_init( @@ -87,6 +88,11 @@ impl LanguageServerTreeNode { .get_or_init(|| init(LaunchDisposition::from(&*this))), ) } + + /// Returns a language server name as the language server adapter would return. 
+ pub fn name(&self) -> Option { + self.0.upgrade().map(|node| node.name.clone()) + } } impl From> for LanguageServerTreeNode { diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index e8b38148502fe161e0abb5b35dc5dd93ee331373..cdf66610633178d24637b752ea04de97c205ebca 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -81,7 +81,7 @@ use language::{ }; use lsp::{ CodeActionKind, CompletionContext, CompletionItemKind, DocumentHighlightKind, InsertTextMode, - LanguageServerId, LanguageServerName, MessageActionItem, + LanguageServerId, LanguageServerName, LanguageServerSelector, MessageActionItem, }; use lsp_command::*; use lsp_store::{CompletionDocumentation, LspFormatTarget, OpenLspBufferHandle}; @@ -251,6 +251,7 @@ enum BufferOrderedMessage { LanguageServerUpdate { language_server_id: LanguageServerId, message: proto::update_language_server::Variant, + name: Option, }, Resync, } @@ -1790,7 +1791,7 @@ impl Project { pub fn has_open_buffer(&self, path: impl Into, cx: &App) -> bool { self.buffer_store .read(cx) - .get_by_path(&path.into(), cx) + .get_by_path(&path.into()) .is_some() } @@ -2500,7 +2501,7 @@ impl Project { cx: &mut App, ) -> OpenLspBufferHandle { self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.register_buffer_with_language_servers(&buffer, false, cx) + lsp_store.register_buffer_with_language_servers(&buffer, HashSet::default(), false, cx) }) } @@ -2590,7 +2591,7 @@ impl Project { } pub fn get_open_buffer(&self, path: &ProjectPath, cx: &App) -> Option> { - self.buffer_store.read(cx).get_by_path(path, cx) + self.buffer_store.read(cx).get_by_path(path) } fn register_buffer(&mut self, buffer: &Entity, cx: &mut Context) -> Result<()> { @@ -2640,7 +2641,7 @@ impl Project { } async fn send_buffer_ordered_messages( - this: WeakEntity, + project: WeakEntity, rx: UnboundedReceiver, cx: &mut AsyncApp, ) -> Result<()> { @@ -2677,7 +2678,7 @@ impl Project { let mut changes = rx.ready_chunks(MAX_BATCH_SIZE); while let Some(changes) = changes.next().await { - let is_local = this.read_with(cx, |this, _| this.is_local())?; + let is_local = project.read_with(cx, |this, _| this.is_local())?; for change in changes { match change { @@ -2697,7 +2698,7 @@ impl Project { BufferOrderedMessage::Resync => { operations_by_buffer_id.clear(); - if this + if project .update(cx, |this, cx| this.synchronize_remote_buffers(cx))? 
.await .is_ok() @@ -2709,9 +2710,10 @@ impl Project { BufferOrderedMessage::LanguageServerUpdate { language_server_id, message, + name, } => { flush_operations( - &this, + &project, &mut operations_by_buffer_id, &mut needs_resync_with_host, is_local, @@ -2719,12 +2721,14 @@ impl Project { ) .await?; - this.read_with(cx, |this, _| { - if let Some(project_id) = this.remote_id() { - this.client + project.read_with(cx, |project, _| { + if let Some(project_id) = project.remote_id() { + project + .client .send(proto::UpdateLanguageServer { project_id, - language_server_id: language_server_id.0 as u64, + server_name: name.map(|name| String::from(name.0)), + language_server_id: language_server_id.to_proto(), variant: Some(message), }) .log_err(); @@ -2735,7 +2739,7 @@ impl Project { } flush_operations( - &this, + &project, &mut operations_by_buffer_id, &mut needs_resync_with_host, is_local, @@ -2856,12 +2860,14 @@ impl Project { LspStoreEvent::LanguageServerUpdate { language_server_id, message, + name, } => { if self.is_local() { self.enqueue_buffer_ordered_message( BufferOrderedMessage::LanguageServerUpdate { language_server_id: *language_server_id, message: message.clone(), + name: name.clone(), }, ) .ok(); @@ -3140,20 +3146,22 @@ impl Project { pub fn restart_language_servers_for_buffers( &mut self, buffers: Vec>, + only_restart_servers: HashSet, cx: &mut Context, ) { self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.restart_language_servers_for_buffers(buffers, cx) + lsp_store.restart_language_servers_for_buffers(buffers, only_restart_servers, cx) }) } pub fn stop_language_servers_for_buffers( &mut self, buffers: Vec>, + also_restart_servers: HashSet, cx: &mut Context, ) { self.lsp_store.update(cx, |lsp_store, cx| { - lsp_store.stop_language_servers_for_buffers(buffers, cx) + lsp_store.stop_language_servers_for_buffers(buffers, also_restart_servers, cx) }) } diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 19029cdb1d1c6b567a1d651a9aadfb8c7f8808c7..d2a4e5126c973bf9a1454c0a96c17e17c4c593e2 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -49,6 +49,10 @@ pub struct ProjectSettings { #[serde(default)] pub lsp: HashMap, + /// Common language server settings. + #[serde(default)] + pub global_lsp_settings: GlobalLspSettings, + /// Configuration for Debugger-related features #[serde(default)] pub dap: HashMap, @@ -110,6 +114,16 @@ pub enum ContextServerSettings { }, } +/// Common language server settings. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema)] +pub struct GlobalLspSettings { + /// Whether to show the LSP servers button in the status bar. 
+ /// + /// Default: `true` + #[serde(default = "default_true")] + pub button: bool, +} + impl ContextServerSettings { pub fn default_extension() -> Self { Self::Extension { @@ -271,6 +285,14 @@ impl Default for InlineDiagnosticsSettings { } } +impl Default for GlobalLspSettings { + fn default() -> Self { + Self { + button: default_true(), + } + } +} + #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)] pub struct CargoDiagnosticsSettings { /// When enabled, Zed disables rust-analyzer's check on save and starts to query diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index cab6ccc0fb95d285d75bca2deea76394ed4b51da..19b88c069554a483c0412bc313dec6f4f0350055 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -918,6 +918,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { project.update(cx, |project, cx| { project.restart_language_servers_for_buffers( vec![rust_buffer.clone(), json_buffer.clone()], + HashSet::default(), cx, ); }); @@ -1715,12 +1716,16 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC // Restart the server before the diagnostics finish updating. project.update(cx, |project, cx| { - project.restart_language_servers_for_buffers(vec![buffer], cx); + project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx); }); let mut events = cx.events(&project); // Simulate the newly started server sending more diagnostics. let fake_server = fake_servers.next().await.unwrap(); + assert_eq!( + events.next().await.unwrap(), + Event::LanguageServerRemoved(LanguageServerId(0)) + ); assert_eq!( events.next().await.unwrap(), Event::LanguageServerAdded( @@ -1820,7 +1825,7 @@ async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAp }); project.update(cx, |project, cx| { - project.restart_language_servers_for_buffers(vec![buffer.clone()], cx); + project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx); }); // The diagnostics are cleared. 
@@ -1875,7 +1880,7 @@ async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::T }); cx.executor().run_until_parked(); project.update(cx, |project, cx| { - project.restart_language_servers_for_buffers(vec![buffer.clone()], cx); + project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx); }); let mut fake_server = fake_servers.next().await.unwrap(); diff --git a/crates/proto/proto/lsp.proto b/crates/proto/proto/lsp.proto index 71831759e55c89c3113f49be692c47cd3d5a5008..0743b94e55a2169161f37a411d064a0687c90c4c 100644 --- a/crates/proto/proto/lsp.proto +++ b/crates/proto/proto/lsp.proto @@ -534,12 +534,15 @@ message DiagnosticSummary { message UpdateLanguageServer { uint64 project_id = 1; uint64 language_server_id = 2; + optional string server_name = 8; oneof variant { LspWorkStart work_start = 3; LspWorkProgress work_progress = 4; LspWorkEnd work_end = 5; LspDiskBasedDiagnosticsUpdating disk_based_diagnostics_updating = 6; LspDiskBasedDiagnosticsUpdated disk_based_diagnostics_updated = 7; + StatusUpdate status_update = 9; + RegisteredForBuffer registered_for_buffer = 10; } } @@ -566,6 +569,34 @@ message LspDiskBasedDiagnosticsUpdating {} message LspDiskBasedDiagnosticsUpdated {} +message StatusUpdate { + optional string message = 1; + oneof status { + ServerBinaryStatus binary = 2; + ServerHealth health = 3; + } +} + +enum ServerHealth { + OK = 0; + WARNING = 1; + ERROR = 2; +} + +enum ServerBinaryStatus { + NONE = 0; + CHECKING_FOR_UPDATE = 1; + DOWNLOADING = 2; + STARTING = 3; + STOPPING = 4; + STOPPED = 5; + FAILED = 6; +} + +message RegisteredForBuffer { + string buffer_abs_path = 1; +} + message LanguageServerLog { uint64 project_id = 1; uint64 language_server_id = 2; @@ -593,6 +624,7 @@ message ApplyCodeActionKindResponse { message RegisterBufferWithLanguageServers { uint64 project_id = 1; uint64 buffer_id = 2; + repeated LanguageServerSelector only_servers = 3; } enum FormatTrigger { @@ -730,14 +762,25 @@ message MultiLspQuery { message AllLanguageServers {} +message LanguageServerSelector { + oneof selector { + uint64 server_id = 1; + string name = 2; + } +} + message RestartLanguageServers { uint64 project_id = 1; repeated uint64 buffer_ids = 2; + repeated LanguageServerSelector only_servers = 3; + bool all = 4; } message StopLanguageServers { uint64 project_id = 1; repeated uint64 buffer_ids = 2; + repeated LanguageServerSelector also_servers = 3; + bool all = 4; } message MultiLspQueryResponse { diff --git a/crates/remote_server/src/headless_project.rs b/crates/remote_server/src/headless_project.rs index c12d8dd37cd23c02b8d5f16c178a78aa752b29a0..fa5f2617dff7db1b56e434ef8ce37ffa0dded110 100644 --- a/crates/remote_server/src/headless_project.rs +++ b/crates/remote_server/src/headless_project.rs @@ -301,11 +301,13 @@ impl HeadlessProject { match event { LspStoreEvent::LanguageServerUpdate { language_server_id, + name, message, } => { self.session .send(proto::UpdateLanguageServer { project_id: SSH_PROJECT_ID, + server_name: name.as_ref().map(|name| name.to_string()), language_server_id: language_server_id.to_proto(), variant: Some(message.clone()), }) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 1e3d648d4245c175c026f4587902f7b3eb099bf2..38532292435fdfa5948835cdf0d332c70bec3aa8 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -5617,7 +5617,6 @@ impl Workspace { } else if let Some((notification_id, _)) = self.notifications.pop() { 
dismiss_app_notification(¬ification_id, cx); } else { - cx.emit(Event::ClearActivityIndicator); cx.propagate(); } } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 62e29eb7e2ace3c8da78815c2a6c16e30bb7e0cc..510cdb2b46e64678af7e051fc4db71a5452d56a3 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -30,6 +30,7 @@ use gpui::{ px, retain_all, }; use image_viewer::ImageInfo; +use language_tools::lsp_tool::LspTool; use migrate::{MigrationBanner, MigrationEvent, MigrationNotification, MigrationType}; use migrator::{migrate_keymap, migrate_settings}; pub use open_listener::*; @@ -295,7 +296,7 @@ pub fn initialize_workspace( let popover_menu_handle = PopoverMenuHandle::default(); - let inline_completion_button = cx.new(|cx| { + let edit_prediction_button = cx.new(|cx| { inline_completion_button::InlineCompletionButton::new( app_state.fs.clone(), app_state.user_store.clone(), @@ -315,7 +316,7 @@ pub fn initialize_workspace( cx.new(|cx| diagnostics::items::DiagnosticIndicator::new(workspace, cx)); let activity_indicator = activity_indicator::ActivityIndicator::new( workspace, - app_state.languages.clone(), + workspace.project().read(cx).languages().clone(), window, cx, ); @@ -325,13 +326,16 @@ pub fn initialize_workspace( cx.new(|cx| toolchain_selector::ActiveToolchain::new(workspace, window, cx)); let vim_mode_indicator = cx.new(|cx| vim::ModeIndicator::new(window, cx)); let image_info = cx.new(|_cx| ImageInfo::new(workspace)); + let lsp_tool = cx.new(|cx| LspTool::new(workspace, window, cx)); + let cursor_position = cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace)); workspace.status_bar().update(cx, |status_bar, cx| { status_bar.add_left_item(search_button, window, cx); status_bar.add_left_item(diagnostic_summary, window, cx); + status_bar.add_left_item(lsp_tool, window, cx); status_bar.add_left_item(activity_indicator, window, cx); - status_bar.add_right_item(inline_completion_button, window, cx); + status_bar.add_right_item(edit_prediction_button, window, cx); status_bar.add_right_item(active_buffer_language, window, cx); status_bar.add_right_item(active_toolchain_language, window, cx); status_bar.add_right_item(vim_mode_indicator, window, cx); @@ -4300,6 +4304,7 @@ mod tests { "jj", "journal", "language_selector", + "lsp_tool", "markdown", "menu", "notebook", From 224de2ec6cb35b506db87b91e293389da46f7d19 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 25 Jun 2025 19:05:29 +0200 Subject: [PATCH 41/56] settings: Remove version fields (#33372) This cleans up our settings to not include any `version` fields, as we have an actual settings migrator now. This PR removes `language_models > anthropic > version`, `language_models > openai > version` and `agent > version`. We had migration paths in the code for a long time, so in practice almost everyone should be using the latest version of these settings. Release Notes: - Remove `version` fields in settings for `agent`, `language_models > anthropic`, `language_models > openai`. Your settings will automatically be migrated. 
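To make the migration concrete, here is a rough sketch of a settings file before the migrator runs. Only the three removed `version` keys come from this patch's description; every other field name and value below is an illustrative placeholder, not something taken from this PR:

```jsonc
// Illustrative example only; values are placeholders.
{
  "agent": {
    "version": "2", // key removed by the migration
    "default_model": { "provider": "zed.dev", "model": "example-model" }
  },
  "language_models": {
    "anthropic": { "version": "1" }, // "version" key removed by the migration
    "openai": { "version": "1" }     // "version" key removed by the migration
  }
}
```

After migration the same file is rewritten with those three `version` keys dropped; all other fields are left untouched.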
If you're running into issues with this open an issue [here](https://github.com/zed-industries/zed/issues) --- Cargo.lock | 7 - crates/agent_settings/Cargo.toml | 7 - crates/agent_settings/src/agent_settings.rs | 747 ++---------------- .../src/agent_configuration/tool_picker.rs | 65 +- .../assistant_tools/src/edit_agent/evals.rs | 2 +- crates/eval/src/eval.rs | 2 +- crates/language_models/Cargo.toml | 1 - crates/language_models/src/language_models.rs | 5 +- .../language_models/src/provider/anthropic.rs | 1 - .../language_models/src/provider/mistral.rs | 1 - .../language_models/src/provider/open_ai.rs | 46 +- crates/language_models/src/provider/vercel.rs | 1 - crates/language_models/src/settings.rs | 188 +---- crates/migrator/src/migrations.rs | 6 + .../src/migrations/m_2025_06_25/settings.rs | 133 ++++ crates/migrator/src/migrator.rs | 79 ++ crates/zed/src/main.rs | 7 +- crates/zed/src/zed.rs | 7 +- 18 files changed, 332 insertions(+), 973 deletions(-) create mode 100644 crates/migrator/src/migrations/m_2025_06_25/settings.rs diff --git a/Cargo.lock b/Cargo.lock index 4684bec47e32b478dd6208c2c974852c2d308fce..16ccb89fc6895b1f24802cf66f45c6bbe55a2612 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -110,18 +110,11 @@ dependencies = [ name = "agent_settings" version = "0.1.0" dependencies = [ - "anthropic", "anyhow", "collections", - "deepseek", "fs", "gpui", "language_model", - "lmstudio", - "log", - "mistral", - "ollama", - "open_ai", "paths", "schemars", "serde", diff --git a/crates/agent_settings/Cargo.toml b/crates/agent_settings/Cargo.toml index c6a4bedbb5e848d48a03b1d7cbb4329322d1c99b..3afe5ae54757953a43a6bdd465c095dc70c27288 100644 --- a/crates/agent_settings/Cargo.toml +++ b/crates/agent_settings/Cargo.toml @@ -12,17 +12,10 @@ workspace = true path = "src/agent_settings.rs" [dependencies] -anthropic = { workspace = true, features = ["schemars"] } anyhow.workspace = true collections.workspace = true gpui.workspace = true language_model.workspace = true -lmstudio = { workspace = true, features = ["schemars"] } -log.workspace = true -ollama = { workspace = true, features = ["schemars"] } -open_ai = { workspace = true, features = ["schemars"] } -deepseek = { workspace = true, features = ["schemars"] } -mistral = { workspace = true, features = ["schemars"] } schemars.workspace = true serde.workspace = true settings.workspace = true diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index a1162b8066c03d9ca3ee10eddedeba91d45fab54..294d793e79ec534e2318f03db5fbc9a75821ecc0 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -2,16 +2,10 @@ mod agent_profile; use std::sync::Arc; -use ::open_ai::Model as OpenAiModel; -use anthropic::Model as AnthropicModel; use anyhow::{Result, bail}; use collections::IndexMap; -use deepseek::Model as DeepseekModel; use gpui::{App, Pixels, SharedString}; use language_model::LanguageModel; -use lmstudio::Model as LmStudioModel; -use mistral::Model as MistralModel; -use ollama::Model as OllamaModel; use schemars::{JsonSchema, schema::Schema}; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsSources}; @@ -48,45 +42,6 @@ pub enum NotifyWhenAgentWaiting { Never, } -#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)] -#[serde(tag = "name", rename_all = "snake_case")] -#[schemars(deny_unknown_fields)] -pub enum AgentProviderContentV1 { - #[serde(rename = "zed.dev")] - ZedDotDev { default_model: Option }, - #[serde(rename = 
"openai")] - OpenAi { - default_model: Option, - api_url: Option, - available_models: Option>, - }, - #[serde(rename = "anthropic")] - Anthropic { - default_model: Option, - api_url: Option, - }, - #[serde(rename = "ollama")] - Ollama { - default_model: Option, - api_url: Option, - }, - #[serde(rename = "lmstudio")] - LmStudio { - default_model: Option, - api_url: Option, - }, - #[serde(rename = "deepseek")] - DeepSeek { - default_model: Option, - api_url: Option, - }, - #[serde(rename = "mistral")] - Mistral { - default_model: Option, - api_url: Option, - }, -} - #[derive(Default, Clone, Debug)] pub struct AgentSettings { pub enabled: bool, @@ -168,366 +123,56 @@ impl LanguageModelParameters { } } -/// Agent panel settings -#[derive(Clone, Serialize, Deserialize, Debug, Default)] -pub struct AgentSettingsContent { - #[serde(flatten)] - pub inner: Option, -} - -#[derive(Clone, Serialize, Deserialize, Debug)] -#[serde(untagged)] -pub enum AgentSettingsContentInner { - Versioned(Box), - Legacy(LegacyAgentSettingsContent), -} - -impl AgentSettingsContentInner { - fn for_v2(content: AgentSettingsContentV2) -> Self { - AgentSettingsContentInner::Versioned(Box::new(VersionedAgentSettingsContent::V2(content))) - } -} - -impl JsonSchema for AgentSettingsContent { - fn schema_name() -> String { - VersionedAgentSettingsContent::schema_name() - } - - fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema { - VersionedAgentSettingsContent::json_schema(r#gen) - } - - fn is_referenceable() -> bool { - VersionedAgentSettingsContent::is_referenceable() - } -} - impl AgentSettingsContent { - pub fn is_version_outdated(&self) -> bool { - match &self.inner { - Some(AgentSettingsContentInner::Versioned(settings)) => match **settings { - VersionedAgentSettingsContent::V1(_) => true, - VersionedAgentSettingsContent::V2(_) => false, - }, - Some(AgentSettingsContentInner::Legacy(_)) => true, - None => false, - } - } - - fn upgrade(&self) -> AgentSettingsContentV2 { - match &self.inner { - Some(AgentSettingsContentInner::Versioned(settings)) => match **settings { - VersionedAgentSettingsContent::V1(ref settings) => AgentSettingsContentV2 { - enabled: settings.enabled, - button: settings.button, - dock: settings.dock, - default_width: settings.default_width, - default_height: settings.default_width, - default_model: settings - .provider - .clone() - .and_then(|provider| match provider { - AgentProviderContentV1::ZedDotDev { default_model } => default_model - .map(|model| LanguageModelSelection { - provider: "zed.dev".into(), - model, - }), - AgentProviderContentV1::OpenAi { default_model, .. } => default_model - .map(|model| LanguageModelSelection { - provider: "openai".into(), - model: model.id().to_string(), - }), - AgentProviderContentV1::Anthropic { default_model, .. } => { - default_model.map(|model| LanguageModelSelection { - provider: "anthropic".into(), - model: model.id().to_string(), - }) - } - AgentProviderContentV1::Ollama { default_model, .. } => default_model - .map(|model| LanguageModelSelection { - provider: "ollama".into(), - model: model.id().to_string(), - }), - AgentProviderContentV1::LmStudio { default_model, .. } => default_model - .map(|model| LanguageModelSelection { - provider: "lmstudio".into(), - model: model.id().to_string(), - }), - AgentProviderContentV1::DeepSeek { default_model, .. } => default_model - .map(|model| LanguageModelSelection { - provider: "deepseek".into(), - model: model.id().to_string(), - }), - AgentProviderContentV1::Mistral { default_model, .. 
} => default_model - .map(|model| LanguageModelSelection { - provider: "mistral".into(), - model: model.id().to_string(), - }), - }), - inline_assistant_model: None, - commit_message_model: None, - thread_summary_model: None, - inline_alternatives: None, - default_profile: None, - default_view: None, - profiles: None, - always_allow_tool_actions: None, - notify_when_agent_waiting: None, - stream_edits: None, - single_file_review: None, - model_parameters: Vec::new(), - preferred_completion_mode: None, - enable_feedback: None, - play_sound_when_agent_done: None, - }, - VersionedAgentSettingsContent::V2(ref settings) => settings.clone(), - }, - Some(AgentSettingsContentInner::Legacy(settings)) => AgentSettingsContentV2 { - enabled: None, - button: settings.button, - dock: settings.dock, - default_width: settings.default_width, - default_height: settings.default_height, - default_model: Some(LanguageModelSelection { - provider: "openai".into(), - model: settings - .default_open_ai_model - .clone() - .unwrap_or_default() - .id() - .to_string(), - }), - inline_assistant_model: None, - commit_message_model: None, - thread_summary_model: None, - inline_alternatives: None, - default_profile: None, - default_view: None, - profiles: None, - always_allow_tool_actions: None, - notify_when_agent_waiting: None, - stream_edits: None, - single_file_review: None, - model_parameters: Vec::new(), - preferred_completion_mode: None, - enable_feedback: None, - play_sound_when_agent_done: None, - }, - None => AgentSettingsContentV2::default(), - } - } - pub fn set_dock(&mut self, dock: AgentDockPosition) { - match &mut self.inner { - Some(AgentSettingsContentInner::Versioned(settings)) => match **settings { - VersionedAgentSettingsContent::V1(ref mut settings) => { - settings.dock = Some(dock); - } - VersionedAgentSettingsContent::V2(ref mut settings) => { - settings.dock = Some(dock); - } - }, - Some(AgentSettingsContentInner::Legacy(settings)) => { - settings.dock = Some(dock); - } - None => { - self.inner = Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 { - dock: Some(dock), - ..Default::default() - })) - } - } + self.dock = Some(dock); } pub fn set_model(&mut self, language_model: Arc) { let model = language_model.id().0.to_string(); let provider = language_model.provider_id().0.to_string(); - match &mut self.inner { - Some(AgentSettingsContentInner::Versioned(settings)) => match **settings { - VersionedAgentSettingsContent::V1(ref mut settings) => match provider.as_ref() { - "zed.dev" => { - log::warn!("attempted to set zed.dev model on outdated settings"); - } - "anthropic" => { - let api_url = match &settings.provider { - Some(AgentProviderContentV1::Anthropic { api_url, .. }) => { - api_url.clone() - } - _ => None, - }; - settings.provider = Some(AgentProviderContentV1::Anthropic { - default_model: AnthropicModel::from_id(&model).ok(), - api_url, - }); - } - "ollama" => { - let api_url = match &settings.provider { - Some(AgentProviderContentV1::Ollama { api_url, .. }) => api_url.clone(), - _ => None, - }; - settings.provider = Some(AgentProviderContentV1::Ollama { - default_model: Some(ollama::Model::new( - &model, - None, - None, - Some(language_model.supports_tools()), - Some(language_model.supports_images()), - None, - )), - api_url, - }); - } - "lmstudio" => { - let api_url = match &settings.provider { - Some(AgentProviderContentV1::LmStudio { api_url, .. 
}) => { - api_url.clone() - } - _ => None, - }; - settings.provider = Some(AgentProviderContentV1::LmStudio { - default_model: Some(lmstudio::Model::new( - &model, None, None, false, false, - )), - api_url, - }); - } - "openai" => { - let (api_url, available_models) = match &settings.provider { - Some(AgentProviderContentV1::OpenAi { - api_url, - available_models, - .. - }) => (api_url.clone(), available_models.clone()), - _ => (None, None), - }; - settings.provider = Some(AgentProviderContentV1::OpenAi { - default_model: OpenAiModel::from_id(&model).ok(), - api_url, - available_models, - }); - } - "deepseek" => { - let api_url = match &settings.provider { - Some(AgentProviderContentV1::DeepSeek { api_url, .. }) => { - api_url.clone() - } - _ => None, - }; - settings.provider = Some(AgentProviderContentV1::DeepSeek { - default_model: DeepseekModel::from_id(&model).ok(), - api_url, - }); - } - _ => {} - }, - VersionedAgentSettingsContent::V2(ref mut settings) => { - settings.default_model = Some(LanguageModelSelection { - provider: provider.into(), - model, - }); - } - }, - Some(AgentSettingsContentInner::Legacy(settings)) => { - if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) { - settings.default_open_ai_model = Some(model); - } - } - None => { - self.inner = Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 { - default_model: Some(LanguageModelSelection { - provider: provider.into(), - model, - }), - ..Default::default() - })); - } - } + self.default_model = Some(LanguageModelSelection { + provider: provider.into(), + model, + }); } pub fn set_inline_assistant_model(&mut self, provider: String, model: String) { - self.v2_setting(|setting| { - setting.inline_assistant_model = Some(LanguageModelSelection { - provider: provider.into(), - model, - }); - Ok(()) - }) - .ok(); + self.inline_assistant_model = Some(LanguageModelSelection { + provider: provider.into(), + model, + }); } pub fn set_commit_message_model(&mut self, provider: String, model: String) { - self.v2_setting(|setting| { - setting.commit_message_model = Some(LanguageModelSelection { - provider: provider.into(), - model, - }); - Ok(()) - }) - .ok(); - } - - pub fn v2_setting( - &mut self, - f: impl FnOnce(&mut AgentSettingsContentV2) -> anyhow::Result<()>, - ) -> anyhow::Result<()> { - match self.inner.get_or_insert_with(|| { - AgentSettingsContentInner::for_v2(AgentSettingsContentV2 { - ..Default::default() - }) - }) { - AgentSettingsContentInner::Versioned(boxed) => { - if let VersionedAgentSettingsContent::V2(ref mut settings) = **boxed { - f(settings) - } else { - Ok(()) - } - } - _ => Ok(()), - } + self.commit_message_model = Some(LanguageModelSelection { + provider: provider.into(), + model, + }); } pub fn set_thread_summary_model(&mut self, provider: String, model: String) { - self.v2_setting(|setting| { - setting.thread_summary_model = Some(LanguageModelSelection { - provider: provider.into(), - model, - }); - Ok(()) - }) - .ok(); + self.thread_summary_model = Some(LanguageModelSelection { + provider: provider.into(), + model, + }); } pub fn set_always_allow_tool_actions(&mut self, allow: bool) { - self.v2_setting(|setting| { - setting.always_allow_tool_actions = Some(allow); - Ok(()) - }) - .ok(); + self.always_allow_tool_actions = Some(allow); } pub fn set_play_sound_when_agent_done(&mut self, allow: bool) { - self.v2_setting(|setting| { - setting.play_sound_when_agent_done = Some(allow); - Ok(()) - }) - .ok(); + self.play_sound_when_agent_done = Some(allow); } pub fn 
set_single_file_review(&mut self, allow: bool) { - self.v2_setting(|setting| { - setting.single_file_review = Some(allow); - Ok(()) - }) - .ok(); + self.single_file_review = Some(allow); } pub fn set_profile(&mut self, profile_id: AgentProfileId) { - self.v2_setting(|setting| { - setting.default_profile = Some(profile_id); - Ok(()) - }) - .ok(); + self.default_profile = Some(profile_id); } pub fn create_profile( @@ -535,79 +180,39 @@ impl AgentSettingsContent { profile_id: AgentProfileId, profile_settings: AgentProfileSettings, ) -> Result<()> { - self.v2_setting(|settings| { - let profiles = settings.profiles.get_or_insert_default(); - if profiles.contains_key(&profile_id) { - bail!("profile with ID '{profile_id}' already exists"); - } - - profiles.insert( - profile_id, - AgentProfileContent { - name: profile_settings.name.into(), - tools: profile_settings.tools, - enable_all_context_servers: Some(profile_settings.enable_all_context_servers), - context_servers: profile_settings - .context_servers - .into_iter() - .map(|(server_id, preset)| { - ( - server_id, - ContextServerPresetContent { - tools: preset.tools, - }, - ) - }) - .collect(), - }, - ); - - Ok(()) - }) - } -} + let profiles = self.profiles.get_or_insert_default(); + if profiles.contains_key(&profile_id) { + bail!("profile with ID '{profile_id}' already exists"); + } -#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] -#[serde(tag = "version")] -#[schemars(deny_unknown_fields)] -pub enum VersionedAgentSettingsContent { - #[serde(rename = "1")] - V1(AgentSettingsContentV1), - #[serde(rename = "2")] - V2(AgentSettingsContentV2), -} + profiles.insert( + profile_id, + AgentProfileContent { + name: profile_settings.name.into(), + tools: profile_settings.tools, + enable_all_context_servers: Some(profile_settings.enable_all_context_servers), + context_servers: profile_settings + .context_servers + .into_iter() + .map(|(server_id, preset)| { + ( + server_id, + ContextServerPresetContent { + tools: preset.tools, + }, + ) + }) + .collect(), + }, + ); -impl Default for VersionedAgentSettingsContent { - fn default() -> Self { - Self::V2(AgentSettingsContentV2 { - enabled: None, - button: None, - dock: None, - default_width: None, - default_height: None, - default_model: None, - inline_assistant_model: None, - commit_message_model: None, - thread_summary_model: None, - inline_alternatives: None, - default_profile: None, - default_view: None, - profiles: None, - always_allow_tool_actions: None, - notify_when_agent_waiting: None, - stream_edits: None, - single_file_review: None, - model_parameters: Vec::new(), - preferred_completion_mode: None, - enable_feedback: None, - play_sound_when_agent_done: None, - }) + Ok(()) } } #[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)] #[schemars(deny_unknown_fields)] -pub struct AgentSettingsContentV2 { +pub struct AgentSettingsContent { /// Whether the Agent is enabled. /// /// Default: true @@ -779,65 +384,6 @@ pub struct ContextServerPresetContent { pub tools: IndexMap, bool>, } -#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] -#[schemars(deny_unknown_fields)] -pub struct AgentSettingsContentV1 { - /// Whether the Agent is enabled. - /// - /// Default: true - enabled: Option, - /// Whether to show the Agent panel button in the status bar. - /// - /// Default: true - button: Option, - /// Where to dock the Agent. - /// - /// Default: right - dock: Option, - /// Default width in pixels when the Agent is docked to the left or right. 
- /// - /// Default: 640 - default_width: Option, - /// Default height in pixels when the Agent is docked to the bottom. - /// - /// Default: 320 - default_height: Option, - /// The provider of the Agent service. - /// - /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev" - /// each with their respective default models and configurations. - provider: Option, -} - -#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)] -#[schemars(deny_unknown_fields)] -pub struct LegacyAgentSettingsContent { - /// Whether to show the Agent panel button in the status bar. - /// - /// Default: true - pub button: Option, - /// Where to dock the Agent. - /// - /// Default: right - pub dock: Option, - /// Default width in pixels when the Agent is docked to the left or right. - /// - /// Default: 640 - pub default_width: Option, - /// Default height in pixels when the Agent is docked to the bottom. - /// - /// Default: 320 - pub default_height: Option, - /// The default OpenAI model to use when creating new chats. - /// - /// Default: gpt-4-1106-preview - pub default_open_ai_model: Option, - /// OpenAI API base URL to use when creating new chats. - /// - /// Default: - pub openai_api_url: Option, -} - impl Settings for AgentSettings { const KEY: Option<&'static str> = Some("agent"); @@ -854,11 +400,6 @@ impl Settings for AgentSettings { let mut settings = AgentSettings::default(); for value in sources.defaults_and_customizations() { - if value.is_version_outdated() { - settings.using_outdated_settings_version = true; - } - - let value = value.upgrade(); merge(&mut settings.enabled, value.enabled); merge(&mut settings.button, value.button); merge(&mut settings.dock, value.dock); @@ -870,17 +411,23 @@ impl Settings for AgentSettings { &mut settings.default_height, value.default_height.map(Into::into), ); - merge(&mut settings.default_model, value.default_model); + merge(&mut settings.default_model, value.default_model.clone()); settings.inline_assistant_model = value .inline_assistant_model + .clone() .or(settings.inline_assistant_model.take()); settings.commit_message_model = value + .clone() .commit_message_model .or(settings.commit_message_model.take()); settings.thread_summary_model = value + .clone() .thread_summary_model .or(settings.thread_summary_model.take()); - merge(&mut settings.inline_alternatives, value.inline_alternatives); + merge( + &mut settings.inline_alternatives, + value.inline_alternatives.clone(), + ); merge( &mut settings.always_allow_tool_actions, value.always_allow_tool_actions, @@ -895,7 +442,7 @@ impl Settings for AgentSettings { ); merge(&mut settings.stream_edits, value.stream_edits); merge(&mut settings.single_file_review, value.single_file_review); - merge(&mut settings.default_profile, value.default_profile); + merge(&mut settings.default_profile, value.default_profile.clone()); merge(&mut settings.default_view, value.default_view); merge( &mut settings.preferred_completion_mode, @@ -907,24 +454,24 @@ impl Settings for AgentSettings { .model_parameters .extend_from_slice(&value.model_parameters); - if let Some(profiles) = value.profiles { + if let Some(profiles) = value.profiles.as_ref() { settings .profiles .extend(profiles.into_iter().map(|(id, profile)| { ( - id, + id.clone(), AgentProfileSettings { - name: profile.name.into(), - tools: profile.tools, + name: profile.name.clone().into(), + tools: profile.tools.clone(), enable_all_context_servers: profile .enable_all_context_servers .unwrap_or_default(), context_servers: profile 
.context_servers - .into_iter() + .iter() .map(|(context_server_id, preset)| { ( - context_server_id, + context_server_id.clone(), ContextServerPreset { tools: preset.tools.clone(), }, @@ -945,28 +492,8 @@ impl Settings for AgentSettings { .read_value("chat.agent.enabled") .and_then(|b| b.as_bool()) { - match &mut current.inner { - Some(AgentSettingsContentInner::Versioned(versioned)) => match versioned.as_mut() { - VersionedAgentSettingsContent::V1(setting) => { - setting.enabled = Some(b); - setting.button = Some(b); - } - - VersionedAgentSettingsContent::V2(setting) => { - setting.enabled = Some(b); - setting.button = Some(b); - } - }, - Some(AgentSettingsContentInner::Legacy(setting)) => setting.button = Some(b), - None => { - current.inner = - Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 { - enabled: Some(b), - button: Some(b), - ..Default::default() - })); - } - } + current.enabled = Some(b); + current.button = Some(b); } } } @@ -976,149 +503,3 @@ fn merge(target: &mut T, value: Option) { *target = value; } } - -#[cfg(test)] -mod tests { - use fs::Fs; - use gpui::{ReadGlobal, TestAppContext}; - use settings::SettingsStore; - - use super::*; - - #[gpui::test] - async fn test_deserialize_agent_settings_with_version(cx: &mut TestAppContext) { - let fs = fs::FakeFs::new(cx.executor().clone()); - fs.create_dir(paths::settings_file().parent().unwrap()) - .await - .unwrap(); - - cx.update(|cx| { - let test_settings = settings::SettingsStore::test(cx); - cx.set_global(test_settings); - AgentSettings::register(cx); - }); - - cx.update(|cx| { - assert!(!AgentSettings::get_global(cx).using_outdated_settings_version); - assert_eq!( - AgentSettings::get_global(cx).default_model, - LanguageModelSelection { - provider: "zed.dev".into(), - model: "claude-sonnet-4".into(), - } - ); - }); - - cx.update(|cx| { - settings::SettingsStore::global(cx).update_settings_file::( - fs.clone(), - |settings, _| { - *settings = AgentSettingsContent { - inner: Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 { - default_model: Some(LanguageModelSelection { - provider: "test-provider".into(), - model: "gpt-99".into(), - }), - inline_assistant_model: None, - commit_message_model: None, - thread_summary_model: None, - inline_alternatives: None, - enabled: None, - button: None, - dock: None, - default_width: None, - default_height: None, - default_profile: None, - default_view: None, - profiles: None, - always_allow_tool_actions: None, - play_sound_when_agent_done: None, - notify_when_agent_waiting: None, - stream_edits: None, - single_file_review: None, - enable_feedback: None, - model_parameters: Vec::new(), - preferred_completion_mode: None, - })), - } - }, - ); - }); - - cx.run_until_parked(); - - let raw_settings_value = fs.load(paths::settings_file()).await.unwrap(); - assert!(raw_settings_value.contains(r#""version": "2""#)); - - #[derive(Debug, Deserialize)] - struct AgentSettingsTest { - agent: AgentSettingsContent, - } - - let agent_settings: AgentSettingsTest = - serde_json_lenient::from_str(&raw_settings_value).unwrap(); - - assert!(!agent_settings.agent.is_version_outdated()); - } - - #[gpui::test] - async fn test_load_settings_from_old_key(cx: &mut TestAppContext) { - let fs = fs::FakeFs::new(cx.executor().clone()); - fs.create_dir(paths::settings_file().parent().unwrap()) - .await - .unwrap(); - - cx.update(|cx| { - let mut test_settings = settings::SettingsStore::test(cx); - let user_settings_content = r#"{ - "assistant": { - "enabled": true, - "version": "2", - 
"default_model": { - "provider": "zed.dev", - "model": "gpt-99" - }, - }}"#; - test_settings - .set_user_settings(user_settings_content, cx) - .unwrap(); - cx.set_global(test_settings); - AgentSettings::register(cx); - }); - - cx.run_until_parked(); - - let agent_settings = cx.update(|cx| AgentSettings::get_global(cx).clone()); - assert!(agent_settings.enabled); - assert!(!agent_settings.using_outdated_settings_version); - assert_eq!(agent_settings.default_model.model, "gpt-99"); - - cx.update_global::(|settings_store, cx| { - settings_store.update_user_settings::(cx, |settings| { - *settings = AgentSettingsContent { - inner: Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 { - enabled: Some(false), - default_model: Some(LanguageModelSelection { - provider: "xai".to_owned().into(), - model: "grok".to_owned(), - }), - ..Default::default() - })), - }; - }); - }); - - cx.run_until_parked(); - - let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone()); - - #[derive(Debug, Deserialize)] - struct AgentSettingsTest { - assistant: AgentSettingsContent, - agent: Option, - } - - let agent_settings: AgentSettingsTest = serde_json::from_value(settings).unwrap(); - assert!(agent_settings.agent.is_none()); - } -} diff --git a/crates/agent_ui/src/agent_configuration/tool_picker.rs b/crates/agent_ui/src/agent_configuration/tool_picker.rs index 7c3d20457e2b9138e49f3c61e867b2f15b54bb84..8f1e0d71c0bd8ef56a71c1a88db1bf67929b060c 100644 --- a/crates/agent_ui/src/agent_configuration/tool_picker.rs +++ b/crates/agent_ui/src/agent_configuration/tool_picker.rs @@ -272,42 +272,35 @@ impl PickerDelegate for ToolPickerDelegate { let server_id = server_id.clone(); let tool_name = tool_name.clone(); move |settings: &mut AgentSettingsContent, _cx| { - settings - .v2_setting(|v2_settings| { - let profiles = v2_settings.profiles.get_or_insert_default(); - let profile = - profiles - .entry(profile_id) - .or_insert_with(|| AgentProfileContent { - name: default_profile.name.into(), - tools: default_profile.tools, - enable_all_context_servers: Some( - default_profile.enable_all_context_servers, - ), - context_servers: default_profile - .context_servers - .into_iter() - .map(|(server_id, preset)| { - ( - server_id, - ContextServerPresetContent { - tools: preset.tools, - }, - ) - }) - .collect(), - }); - - if let Some(server_id) = server_id { - let preset = profile.context_servers.entry(server_id).or_default(); - *preset.tools.entry(tool_name).or_default() = !is_currently_enabled; - } else { - *profile.tools.entry(tool_name).or_default() = !is_currently_enabled; - } - - Ok(()) - }) - .ok(); + let profiles = settings.profiles.get_or_insert_default(); + let profile = profiles + .entry(profile_id) + .or_insert_with(|| AgentProfileContent { + name: default_profile.name.into(), + tools: default_profile.tools, + enable_all_context_servers: Some( + default_profile.enable_all_context_servers, + ), + context_servers: default_profile + .context_servers + .into_iter() + .map(|(server_id, preset)| { + ( + server_id, + ContextServerPresetContent { + tools: preset.tools, + }, + ) + }) + .collect(), + }); + + if let Some(server_id) = server_id { + let preset = profile.context_servers.entry(server_id).or_default(); + *preset.tools.entry(tool_name).or_default() = !is_currently_enabled; + } else { + *profile.tools.entry(tool_name).or_default() = !is_currently_enabled; + } } }); } diff --git a/crates/assistant_tools/src/edit_agent/evals.rs b/crates/assistant_tools/src/edit_agent/evals.rs index 
116654e38276ce677d54380155ee0e6d93a15fa9..7beb2ec9190c4e6e65ed7d48211328dc51073ea4 100644 --- a/crates/assistant_tools/src/edit_agent/evals.rs +++ b/crates/assistant_tools/src/edit_agent/evals.rs @@ -1470,7 +1470,7 @@ impl EditAgentTest { Project::init_settings(cx); language::init(cx); language_model::init(client.clone(), cx); - language_models::init(user_store.clone(), client.clone(), fs.clone(), cx); + language_models::init(user_store.clone(), client.clone(), cx); crate::init(client.http_client(), cx); }); diff --git a/crates/eval/src/eval.rs b/crates/eval/src/eval.rs index e5132b0f33c6494807c65c2ed6df95e3e2d016e8..5e8dd8961c8c3416fa84303eff722c22c31738e6 100644 --- a/crates/eval/src/eval.rs +++ b/crates/eval/src/eval.rs @@ -417,7 +417,7 @@ pub fn init(cx: &mut App) -> Arc { debug_adapter_extension::init(extension_host_proxy.clone(), cx); language_extension::init(extension_host_proxy.clone(), languages.clone()); language_model::init(client.clone(), cx); - language_models::init(user_store.clone(), client.clone(), fs.clone(), cx); + language_models::init(user_store.clone(), client.clone(), cx); languages::init(languages.clone(), node_runtime.clone(), cx); prompt_store::init(cx); terminal_view::init(cx); diff --git a/crates/language_models/Cargo.toml b/crates/language_models/Cargo.toml index 80412cb5d24910d2b8c1567025063102d1ccea41..d6aff380aab1696b0f71f0b83ed876cc1e756ecb 100644 --- a/crates/language_models/Cargo.toml +++ b/crates/language_models/Cargo.toml @@ -42,7 +42,6 @@ open_ai = { workspace = true, features = ["schemars"] } open_router = { workspace = true, features = ["schemars"] } vercel = { workspace = true, features = ["schemars"] } partial-json-fixer.workspace = true -project.workspace = true proto.workspace = true release_channel.workspace = true schemars.workspace = true diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 78dbc33c51cf3e74fd641028b5f84099a7ddbef3..c7324732c9bbf88698a1a7280ff80cea077a1d2f 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -1,7 +1,6 @@ use std::sync::Arc; use client::{Client, UserStore}; -use fs::Fs; use gpui::{App, Context, Entity}; use language_model::LanguageModelRegistry; use provider::deepseek::DeepSeekLanguageModelProvider; @@ -23,8 +22,8 @@ use crate::provider::open_router::OpenRouterLanguageModelProvider; use crate::provider::vercel::VercelLanguageModelProvider; pub use crate::settings::*; -pub fn init(user_store: Entity, client: Arc, fs: Arc, cx: &mut App) { - crate::settings::init(fs, cx); +pub fn init(user_store: Entity, client: Arc, cx: &mut App) { + crate::settings::init(cx); let registry = LanguageModelRegistry::global(cx); registry.update(cx, |registry, cx| { register_language_model_providers(registry, user_store, client, cx); diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index d19348eed6dcf8c65c06c20bfe5cdab4a2b41ddd..48bea47fec02a0cb5b51b59883492caf00d1c982 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -41,7 +41,6 @@ pub struct AnthropicSettings { pub api_url: String, /// Extend Zed's list of Anthropic models. 
pub available_models: Vec, - pub needs_setting_migration: bool, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 5e46c41746d1404d1061da47fce32bb5ad74d048..171ce058968afe2f8bc16326fc841e3ea6b804de 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -36,7 +36,6 @@ const PROVIDER_NAME: &str = "Mistral"; pub struct MistralSettings { pub api_url: String, pub available_models: Vec, - pub needs_setting_migration: bool, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 56a81d36e955ee8fadece0fea59a240215759965..ad4203ff81c5ec28e98bbf6eab0e4f3e23b7f604 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -28,6 +28,7 @@ use ui::{ElevationIndex, List, Tooltip, prelude::*}; use ui_input::SingleLineInput; use util::ResultExt; +use crate::OpenAiSettingsContent; use crate::{AllLanguageModelSettings, ui::InstructionListItem}; const PROVIDER_ID: &str = "openai"; @@ -37,7 +38,6 @@ const PROVIDER_NAME: &str = "OpenAI"; pub struct OpenAiSettings { pub api_url: String, pub available_models: Vec, - pub needs_setting_migration: bool, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] @@ -803,30 +803,13 @@ impl ConfigurationView { if !api_url.is_empty() && api_url != effective_current_url { let fs = ::global(cx); update_settings_file::(fs, cx, move |settings, _| { - use crate::settings::{OpenAiSettingsContent, VersionedOpenAiSettingsContent}; - - if settings.openai.is_none() { - settings.openai = Some(OpenAiSettingsContent::Versioned( - VersionedOpenAiSettingsContent::V1( - crate::settings::OpenAiSettingsContentV1 { - api_url: Some(api_url.clone()), - available_models: None, - }, - ), - )); + if let Some(settings) = settings.openai.as_mut() { + settings.api_url = Some(api_url.clone()); } else { - if let Some(openai) = settings.openai.as_mut() { - match openai { - OpenAiSettingsContent::Versioned(versioned) => match versioned { - VersionedOpenAiSettingsContent::V1(v1) => { - v1.api_url = Some(api_url.clone()); - } - }, - OpenAiSettingsContent::Legacy(legacy) => { - legacy.api_url = Some(api_url.clone()); - } - } - } + settings.openai = Some(OpenAiSettingsContent { + api_url: Some(api_url.clone()), + available_models: None, + }); } }); } @@ -840,19 +823,8 @@ impl ConfigurationView { }); let fs = ::global(cx); update_settings_file::(fs, cx, |settings, _cx| { - use crate::settings::{OpenAiSettingsContent, VersionedOpenAiSettingsContent}; - - if let Some(openai) = settings.openai.as_mut() { - match openai { - OpenAiSettingsContent::Versioned(versioned) => match versioned { - VersionedOpenAiSettingsContent::V1(v1) => { - v1.api_url = None; - } - }, - OpenAiSettingsContent::Legacy(legacy) => { - legacy.api_url = None; - } - } + if let Some(settings) = settings.openai.as_mut() { + settings.api_url = None; } }); cx.notify(); diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index c86902fe76538fdb9ad857657a880d6dc6faf834..2f64115d2096c4bd4214d43d0a010995fb2edd15 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -32,7 +32,6 @@ const PROVIDER_NAME: &str = "Vercel"; pub struct 
VercelSettings { pub api_url: String, pub available_models: Vec, - pub needs_setting_migration: bool, } #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)] diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index 644e59d397dcab684d03a0026bb797dc04f5803c..f96a2c0a66cfe698738deec177b5f82cde274df7 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -1,12 +1,8 @@ -use std::sync::Arc; - use anyhow::Result; use gpui::App; -use language_model::LanguageModelCacheConfiguration; -use project::Fs; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use settings::{Settings, SettingsSources, update_settings_file}; +use settings::{Settings, SettingsSources}; use crate::provider::{ self, @@ -24,36 +20,8 @@ use crate::provider::{ }; /// Initializes the language model settings. -pub fn init(fs: Arc, cx: &mut App) { +pub fn init(cx: &mut App) { AllLanguageModelSettings::register(cx); - - if AllLanguageModelSettings::get_global(cx) - .openai - .needs_setting_migration - { - update_settings_file::(fs.clone(), cx, move |setting, _| { - if let Some(settings) = setting.openai.clone() { - let (newest_version, _) = settings.upgrade(); - setting.openai = Some(OpenAiSettingsContent::Versioned( - VersionedOpenAiSettingsContent::V1(newest_version), - )); - } - }); - } - - if AllLanguageModelSettings::get_global(cx) - .anthropic - .needs_setting_migration - { - update_settings_file::(fs, cx, move |setting, _| { - if let Some(settings) = setting.anthropic.clone() { - let (newest_version, _) = settings.upgrade(); - setting.anthropic = Some(AnthropicSettingsContent::Versioned( - VersionedAnthropicSettingsContent::V1(newest_version), - )); - } - }); - } } #[derive(Default)] @@ -90,78 +58,7 @@ pub struct AllLanguageModelSettingsContent { } #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] -#[serde(untagged)] -pub enum AnthropicSettingsContent { - Versioned(VersionedAnthropicSettingsContent), - Legacy(LegacyAnthropicSettingsContent), -} - -impl AnthropicSettingsContent { - pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) { - match self { - AnthropicSettingsContent::Legacy(content) => ( - AnthropicSettingsContentV1 { - api_url: content.api_url, - available_models: content.available_models.map(|models| { - models - .into_iter() - .filter_map(|model| match model { - anthropic::Model::Custom { - name, - display_name, - max_tokens, - tool_override, - cache_configuration, - max_output_tokens, - default_temperature, - extra_beta_headers, - mode, - } => Some(provider::anthropic::AvailableModel { - name, - display_name, - max_tokens, - tool_override, - cache_configuration: cache_configuration.as_ref().map( - |config| LanguageModelCacheConfiguration { - max_cache_anchors: config.max_cache_anchors, - should_speculate: config.should_speculate, - min_total_token: config.min_total_token, - }, - ), - max_output_tokens, - default_temperature, - extra_beta_headers, - mode: Some(mode.into()), - }), - _ => None, - }) - .collect() - }), - }, - true, - ), - AnthropicSettingsContent::Versioned(content) => match content { - VersionedAnthropicSettingsContent::V1(content) => (content, false), - }, - } - } -} - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] -pub struct LegacyAnthropicSettingsContent { - pub api_url: Option, - pub available_models: Option>, -} - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] -#[serde(tag = "version")] -pub enum 
VersionedAnthropicSettingsContent { - #[serde(rename = "1")] - V1(AnthropicSettingsContentV1), -} - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] -pub struct AnthropicSettingsContentV1 { +pub struct AnthropicSettingsContent { pub api_url: Option, pub available_models: Option>, } @@ -200,64 +97,7 @@ pub struct MistralSettingsContent { } #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] -#[serde(untagged)] -pub enum OpenAiSettingsContent { - Versioned(VersionedOpenAiSettingsContent), - Legacy(LegacyOpenAiSettingsContent), -} - -impl OpenAiSettingsContent { - pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) { - match self { - OpenAiSettingsContent::Legacy(content) => ( - OpenAiSettingsContentV1 { - api_url: content.api_url, - available_models: content.available_models.map(|models| { - models - .into_iter() - .filter_map(|model| match model { - open_ai::Model::Custom { - name, - display_name, - max_tokens, - max_output_tokens, - max_completion_tokens, - } => Some(provider::open_ai::AvailableModel { - name, - max_tokens, - max_output_tokens, - display_name, - max_completion_tokens, - }), - _ => None, - }) - .collect() - }), - }, - true, - ), - OpenAiSettingsContent::Versioned(content) => match content { - VersionedOpenAiSettingsContent::V1(content) => (content, false), - }, - } - } -} - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] -pub struct LegacyOpenAiSettingsContent { - pub api_url: Option, - pub available_models: Option>, -} - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] -#[serde(tag = "version")] -pub enum VersionedOpenAiSettingsContent { - #[serde(rename = "1")] - V1(OpenAiSettingsContentV1), -} - -#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)] -pub struct OpenAiSettingsContentV1 { +pub struct OpenAiSettingsContent { pub api_url: Option, pub available_models: Option>, } @@ -303,15 +143,7 @@ impl settings::Settings for AllLanguageModelSettings { for value in sources.defaults_and_customizations() { // Anthropic - let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) { - Some((content, upgraded)) => (Some(content), upgraded), - None => (None, false), - }; - - if upgraded { - settings.anthropic.needs_setting_migration = true; - } - + let anthropic = value.anthropic.clone(); merge( &mut settings.anthropic.api_url, anthropic.as_ref().and_then(|s| s.api_url.clone()), @@ -377,15 +209,7 @@ impl settings::Settings for AllLanguageModelSettings { ); // OpenAI - let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) { - Some((content, upgraded)) => (Some(content), upgraded), - None => (None, false), - }; - - if upgraded { - settings.openai.needs_setting_migration = true; - } - + let openai = value.openai.clone(); merge( &mut settings.openai.api_url, openai.as_ref().and_then(|s| s.api_url.clone()), diff --git a/crates/migrator/src/migrations.rs b/crates/migrator/src/migrations.rs index 281ae93123ebc42f59cb600a2b8831da2dea9185..d43521faa958782f902151b99233f511732786fb 100644 --- a/crates/migrator/src/migrations.rs +++ b/crates/migrator/src/migrations.rs @@ -81,3 +81,9 @@ pub(crate) mod m_2025_06_16 { pub(crate) use settings::SETTINGS_PATTERNS; } + +pub(crate) mod m_2025_06_25 { + mod settings; + + pub(crate) use settings::SETTINGS_PATTERNS; +} diff --git a/crates/migrator/src/migrations/m_2025_06_25/settings.rs b/crates/migrator/src/migrations/m_2025_06_25/settings.rs new file mode 100644 index 
0000000000000000000000000000000000000000..5dd6c3093a43b00acff3db6c1e316a3fc6664175 --- /dev/null +++ b/crates/migrator/src/migrations/m_2025_06_25/settings.rs @@ -0,0 +1,133 @@ +use std::ops::Range; +use tree_sitter::{Query, QueryMatch}; + +use crate::MigrationPatterns; + +pub const SETTINGS_PATTERNS: MigrationPatterns = &[ + (SETTINGS_VERSION_PATTERN, remove_version_fields), + ( + SETTINGS_NESTED_VERSION_PATTERN, + remove_nested_version_fields, + ), +]; + +const SETTINGS_VERSION_PATTERN: &str = r#"(document + (object + (pair + key: (string (string_content) @key) + value: (object + (pair + key: (string (string_content) @version_key) + value: (_) @version_value + ) @version_pair + ) + ) + ) + (#eq? @key "agent") + (#eq? @version_key "version") +)"#; + +const SETTINGS_NESTED_VERSION_PATTERN: &str = r#"(document + (object + (pair + key: (string (string_content) @language_models) + value: (object + (pair + key: (string (string_content) @provider) + value: (object + (pair + key: (string (string_content) @version_key) + value: (_) @version_value + ) @version_pair + ) + ) + ) + ) + ) + (#eq? @language_models "language_models") + (#match? @provider "^(anthropic|openai)$") + (#eq? @version_key "version") +)"#; + +fn remove_version_fields( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let version_pair_ix = query.capture_index_for_name("version_pair")?; + let version_pair_node = mat.nodes_for_capture_index(version_pair_ix).next()?; + + remove_pair_with_whitespace(contents, version_pair_node) +} + +fn remove_nested_version_fields( + contents: &str, + mat: &QueryMatch, + query: &Query, +) -> Option<(Range, String)> { + let version_pair_ix = query.capture_index_for_name("version_pair")?; + let version_pair_node = mat.nodes_for_capture_index(version_pair_ix).next()?; + + remove_pair_with_whitespace(contents, version_pair_node) +} + +fn remove_pair_with_whitespace( + contents: &str, + pair_node: tree_sitter::Node, +) -> Option<(Range, String)> { + let mut range_to_remove = pair_node.byte_range(); + + // Check if there's a comma after this pair + if let Some(next_sibling) = pair_node.next_sibling() { + if next_sibling.kind() == "," { + range_to_remove.end = next_sibling.end_byte(); + } + } else { + // If no next sibling, check if there's a comma before + if let Some(prev_sibling) = pair_node.prev_sibling() { + if prev_sibling.kind() == "," { + range_to_remove.start = prev_sibling.start_byte(); + } + } + } + + // Include any leading whitespace/newline, including comments + let text_before = &contents[..range_to_remove.start]; + if let Some(last_newline) = text_before.rfind('\n') { + let whitespace_start = last_newline + 1; + let potential_whitespace = &contents[whitespace_start..range_to_remove.start]; + + // Check if it's only whitespace or comments + let mut is_whitespace_or_comment = true; + let mut in_comment = false; + let mut chars = potential_whitespace.chars().peekable(); + + while let Some(ch) = chars.next() { + if in_comment { + if ch == '\n' { + in_comment = false; + } + } else if ch == '/' && chars.peek() == Some(&'/') { + in_comment = true; + chars.next(); // Skip the second '/' + } else if !ch.is_whitespace() { + is_whitespace_or_comment = false; + break; + } + } + + if is_whitespace_or_comment { + range_to_remove.start = whitespace_start; + } + } + + // Also check if we need to include trailing whitespace up to the next line + let text_after = &contents[range_to_remove.end..]; + if let Some(newline_pos) = text_after.find('\n') { + if 
text_after[..newline_pos].chars().all(|c| c.is_whitespace()) { + range_to_remove.end += newline_pos + 1; + } + } + + Some((range_to_remove, String::new())) +} diff --git a/crates/migrator/src/migrator.rs b/crates/migrator/src/migrator.rs index b45744b766b7cfe3af01252ce20e17bf7a9f4782..bcd41836e6f8d1d3dabf1f37c5ba456d475f12e1 100644 --- a/crates/migrator/src/migrator.rs +++ b/crates/migrator/src/migrator.rs @@ -152,6 +152,10 @@ pub fn migrate_settings(text: &str) -> Result> { migrations::m_2025_06_16::SETTINGS_PATTERNS, &SETTINGS_QUERY_2025_06_16, ), + ( + migrations::m_2025_06_25::SETTINGS_PATTERNS, + &SETTINGS_QUERY_2025_06_25, + ), ]; run_migrations(text, migrations) } @@ -254,6 +258,10 @@ define_query!( SETTINGS_QUERY_2025_06_16, migrations::m_2025_06_16::SETTINGS_PATTERNS ); +define_query!( + SETTINGS_QUERY_2025_06_25, + migrations::m_2025_06_25::SETTINGS_PATTERNS +); // custom query static EDIT_PREDICTION_SETTINGS_MIGRATION_QUERY: LazyLock = LazyLock::new(|| { @@ -1052,4 +1060,75 @@ mod tests { }"#; assert_migrate_settings(settings, None); } + + #[test] + fn test_remove_version_fields() { + assert_migrate_settings( + r#"{ + "language_models": { + "anthropic": { + "version": "1", + "api_url": "https://api.anthropic.com" + }, + "openai": { + "version": "1", + "api_url": "https://api.openai.com/v1" + } + }, + "agent": { + "version": "2", + "enabled": true, + "preferred_completion_mode": "normal", + "button": true, + "dock": "right", + "default_width": 640, + "default_height": 320, + "default_model": { + "provider": "zed.dev", + "model": "claude-sonnet-4" + } + } +}"#, + Some( + r#"{ + "language_models": { + "anthropic": { + "api_url": "https://api.anthropic.com" + }, + "openai": { + "api_url": "https://api.openai.com/v1" + } + }, + "agent": { + "enabled": true, + "preferred_completion_mode": "normal", + "button": true, + "dock": "right", + "default_width": 640, + "default_height": 320, + "default_model": { + "provider": "zed.dev", + "model": "claude-sonnet-4" + } + } +}"#, + ), + ); + + // Test that version fields in other contexts are not removed + assert_migrate_settings( + r#"{ + "language_models": { + "other_provider": { + "version": "1", + "api_url": "https://api.example.com" + } + }, + "other_section": { + "version": "1" + } +}"#, + None, + ); + } } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 1b8b1d697d5e32a9e285c8a258598e14adcb73d1..0e08b304f7c09d225c1da8449de1fd093512bf74 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -516,12 +516,7 @@ pub fn main() { ); supermaven::init(app_state.client.clone(), cx); language_model::init(app_state.client.clone(), cx); - language_models::init( - app_state.user_store.clone(), - app_state.client.clone(), - app_state.fs.clone(), - cx, - ); + language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); web_search::init(cx); web_search_providers::init(app_state.client.clone(), cx); snippet_provider::init(cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 510cdb2b46e64678af7e051fc4db71a5452d56a3..c57a9b576aa09139ec039de01b9569438a086f3a 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -4441,12 +4441,7 @@ mod tests { ); image_viewer::init(cx); language_model::init(app_state.client.clone(), cx); - language_models::init( - app_state.user_store.clone(), - app_state.client.clone(), - app_state.fs.clone(), - cx, - ); + language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); web_search::init(cx); 
web_search_providers::init(app_state.client.clone(), cx); let prompt_builder = PromptBuilder::load(app_state.fs.clone(), false, cx); From cc62125244e383a748cd2b5d052e7e1c1e98a67e Mon Sep 17 00:00:00 2001 From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com> Date: Wed, 25 Jun 2025 22:44:49 +0530 Subject: [PATCH 42/56] agent: Add GEMINI.md as a supported rules file name (#33381) Gemini cli creates GEMINI.md file. This PR adds support for it. Release Notes: - agent: Add GEMINI.md as a supported rules file name --- crates/agent/src/thread_store.rs | 3 ++- docs/src/ai/rules.md | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index 3c9150ff75f53241120b45c3418288e5033489e2..516151e9ff90dd6dc4a3e4b3dd5eff37522db7f2 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -71,7 +71,7 @@ impl Column for DataType { } } -const RULES_FILE_NAMES: [&'static str; 8] = [ +const RULES_FILE_NAMES: [&'static str; 9] = [ ".rules", ".cursorrules", ".windsurfrules", @@ -80,6 +80,7 @@ const RULES_FILE_NAMES: [&'static str; 8] = [ "CLAUDE.md", "AGENT.md", "AGENTS.md", + "GEMINI.md", ]; pub fn init(cx: &mut App) { diff --git a/docs/src/ai/rules.md b/docs/src/ai/rules.md index 81b8480bd963017af4af8b542fb742ef4ed7d3d5..ed916874cadb957ca45d02af00d3a4047ebd3246 100644 --- a/docs/src/ai/rules.md +++ b/docs/src/ai/rules.md @@ -16,6 +16,7 @@ Other names for this file are also supported for compatibility with other agents - `AGENT.md` - `AGENTS.md` - `CLAUDE.md` +- `GEMINI.md` ## Rules Library {#rules-library} From 2a5a1814cd1073408fcb71ea790909f61e295ad2 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Jun 2025 13:26:24 -0400 Subject: [PATCH 43/56] text_thread: Improve roles after `assistant::Split` (shift-enter) (#33215) Default to `You` when triggering `assistant::Split` at the end of a thread Release Notes: - agent_thread: Improved roles when triggering `assistant::Split` (`shift-enter`) --- crates/assistant_context/src/assistant_context.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/assistant_context/src/assistant_context.rs b/crates/assistant_context/src/assistant_context.rs index a692502a9c390ec168aad2a6448c020428c0f5b1..cef9d2f0fd60c842883fcff80766416ca3db66de 100644 --- a/crates/assistant_context/src/assistant_context.rs +++ b/crates/assistant_context/src/assistant_context.rs @@ -2523,6 +2523,12 @@ impl AssistantContext { } let message = start_message; + let at_end = range.end >= message.offset_range.end.saturating_sub(1); + let role_after = if range.start == range.end || at_end { + Role::User + } else { + message.role + }; let role = message.role; let mut edited_buffer = false; @@ -2557,7 +2563,7 @@ impl AssistantContext { }; let suffix_metadata = MessageMetadata { - role, + role: role_after, status: MessageStatus::Done, timestamp: suffix.id.0, cache: None, From 3740eec5bf7e5426cd3f0bab167219a47e1aa56f Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 25 Jun 2025 13:28:06 -0400 Subject: [PATCH 44/56] Do not show update "View Release Notes" notification in nightly builds (#33394) These are useless in nightly, as the link within the notification simply directs us to a commit view on GitHub. We update frequently on nightly; dismissing this after every update is annoying. 
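A minimal sketch of the kind of early-return guard described here, using a stand-in `ReleaseChannel` enum rather than Zed's real `release_channel` crate (the actual change below reads the channel from the GPUI `App` context before deciding whether to surface the toast):

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ReleaseChannel {
    Dev,
    Nightly,
    Preview,
    Stable,
}

fn notify_if_app_was_updated(channel: ReleaseChannel) {
    // On Nightly the release-notes link only points at a commit view on GitHub
    // and updates land frequently, so skip the notification entirely.
    if channel == ReleaseChannel::Nightly {
        return;
    }
    println!("showing the \"View Release Notes\" notification");
}

fn main() {
    notify_if_app_was_updated(ReleaseChannel::Nightly); // prints nothing
    notify_if_app_was_updated(ReleaseChannel::Stable); // prints the notification line
}
```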
Release Notes: - N/A --- crates/auto_update_ui/src/auto_update_ui.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/crates/auto_update_ui/src/auto_update_ui.rs b/crates/auto_update_ui/src/auto_update_ui.rs index 25d64bc3e8245a446c1f55fa31a506d40f3e9bd9..30c1cddec2935d82f2ecc9fe0cfc569999d80d7b 100644 --- a/crates/auto_update_ui/src/auto_update_ui.rs +++ b/crates/auto_update_ui/src/auto_update_ui.rs @@ -132,6 +132,11 @@ pub fn notify_if_app_was_updated(cx: &mut App) { let Some(updater) = AutoUpdater::get(cx) else { return; }; + + if let ReleaseChannel::Nightly = ReleaseChannel::global(cx) { + return; + } + let should_show_notification = updater.read(cx).should_show_update_notification(cx); cx.spawn(async move |cx| { let should_show_notification = should_show_notification.await?; From 8e831ced5b4c0b42711d8de763b40822c52c21d3 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Jun 2025 13:44:43 -0400 Subject: [PATCH 45/56] ci: Remove community_delete_comments (#33396) This was a temporary mitigation against a spam campaign, I don't think this is required any longer. We can easily revert if it's still active. See: - https://github.com/zed-industries/zed/pull/16886 Release Notes: - N/A --- .../workflows/community_delete_comments.yml | 34 ------------------- 1 file changed, 34 deletions(-) delete mode 100644 .github/workflows/community_delete_comments.yml diff --git a/.github/workflows/community_delete_comments.yml b/.github/workflows/community_delete_comments.yml deleted file mode 100644 index 0ebe1ac3acea5fcc2aa572946ac8ae90ac6f94ed..0000000000000000000000000000000000000000 --- a/.github/workflows/community_delete_comments.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Delete Mediafire Comments - -on: - issue_comment: - types: [created] - -permissions: - issues: write - -jobs: - delete_comment: - if: github.repository_owner == 'zed-industries' - runs-on: ubuntu-latest - steps: - - name: Check for specific strings in comment - id: check_comment - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7 - with: - script: | - const comment = context.payload.comment.body; - const triggerStrings = ['www.mediafire.com']; - return triggerStrings.some(triggerString => comment.includes(triggerString)); - - - name: Delete comment if it contains any of the specific strings - if: steps.check_comment.outputs.result == 'true' - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7 - with: - script: | - const commentId = context.payload.comment.id; - await github.rest.issues.deleteComment({ - owner: context.repo.owner, - repo: context.repo.repo, - comment_id: commentId - }); From 4516b099e712af9dddb2d4a373a6500a32dd4f6c Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Wed, 25 Jun 2025 14:10:48 -0400 Subject: [PATCH 46/56] Reduce segment cloning when rendering messages (#33340) While working on retries, I discovered some opportunities to reduce cloning of message segments. These segments have full `String`s (not `SharedString`s), so cloning them means copying cloning all the bytes of all the strings in the message, which would be nice to avoid! 
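To make the cost concrete: cloning a `String` duplicates every byte, while a `&str` borrow or an `Arc<str>`-style shared string (roughly what GPUI's `SharedString` wraps) is passed around by pointer. A standalone illustration of the distinction, not code from the Zed tree:

```rust
use std::sync::Arc;

fn main() {
    let segment_text: String = "a fairly long assistant reply ".repeat(10_000);

    // Deep copy: allocates a new buffer and copies roughly 300 KB of bytes.
    let cloned: String = segment_text.clone();

    // Borrow: no allocation, just a pointer + length into the existing buffer.
    let borrowed: &str = segment_text.as_str();

    // Shared string: the bytes are copied once into the Arc allocation here...
    let shared: Arc<str> = Arc::from(segment_text.as_str());
    // ...but every further clone only bumps a reference count.
    let shared_again: Arc<str> = Arc::clone(&shared);

    assert_eq!(cloned.len(), borrowed.len());
    assert_eq!(shared.len(), shared_again.len());
}
```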
Release Notes: - N/A --- crates/agent/src/thread.rs | 7 ++ crates/agent_ui/src/active_thread.rs | 177 ++++++++++++++------------- 2 files changed, 98 insertions(+), 86 deletions(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 4494446a6dcbcdeb1f4aec510cecc7f2c527ba56..33b9209f0ccd199d28d7aad4f19b81286eb7dfac 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -198,6 +198,13 @@ impl MessageSegment { Self::RedactedThinking(_) => false, } } + + pub fn text(&self) -> Option<&str> { + match self { + MessageSegment::Text(text) => Some(text), + _ => None, + } + } } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] diff --git a/crates/agent_ui/src/active_thread.rs b/crates/agent_ui/src/active_thread.rs index 8df1c88e8a57615d11f43b8098538130a8fafc24..0e7ca9aa897d1962742e660d2d29e43f8dfe6593 100644 --- a/crates/agent_ui/src/active_thread.rs +++ b/crates/agent_ui/src/active_thread.rs @@ -809,7 +809,12 @@ impl ActiveThread { }; for message in thread.read(cx).messages().cloned().collect::>() { - this.push_message(&message.id, &message.segments, window, cx); + let rendered_message = RenderedMessage::from_segments( + &message.segments, + this.language_registry.clone(), + cx, + ); + this.push_rendered_message(message.id, rendered_message); for tool_use in thread.read(cx).tool_uses_for_message(message.id, cx) { this.render_tool_use_markdown( @@ -875,36 +880,11 @@ impl ActiveThread { &self.text_thread_store } - fn push_message( - &mut self, - id: &MessageId, - segments: &[MessageSegment], - _window: &mut Window, - cx: &mut Context, - ) { + fn push_rendered_message(&mut self, id: MessageId, rendered_message: RenderedMessage) { let old_len = self.messages.len(); - self.messages.push(*id); + self.messages.push(id); self.list_state.splice(old_len..old_len, 1); - - let rendered_message = - RenderedMessage::from_segments(segments, self.language_registry.clone(), cx); - self.rendered_messages_by_id.insert(*id, rendered_message); - } - - fn edited_message( - &mut self, - id: &MessageId, - segments: &[MessageSegment], - _window: &mut Window, - cx: &mut Context, - ) { - let Some(index) = self.messages.iter().position(|message_id| message_id == id) else { - return; - }; - self.list_state.splice(index..index + 1, 1); - let rendered_message = - RenderedMessage::from_segments(segments, self.language_registry.clone(), cx); - self.rendered_messages_by_id.insert(*id, rendered_message); + self.rendered_messages_by_id.insert(id, rendered_message); } fn deleted_message(&mut self, id: &MessageId) { @@ -1037,31 +1017,43 @@ impl ActiveThread { } } ThreadEvent::MessageAdded(message_id) => { - if let Some(message_segments) = self - .thread - .read(cx) - .message(*message_id) - .map(|message| message.segments.clone()) - { - self.push_message(message_id, &message_segments, window, cx); + if let Some(rendered_message) = self.thread.update(cx, |thread, cx| { + thread.message(*message_id).map(|message| { + RenderedMessage::from_segments( + &message.segments, + self.language_registry.clone(), + cx, + ) + }) + }) { + self.push_rendered_message(*message_id, rendered_message); } self.save_thread(cx); cx.notify(); } ThreadEvent::MessageEdited(message_id) => { - if let Some(message_segments) = self - .thread - .read(cx) - .message(*message_id) - .map(|message| message.segments.clone()) - { - self.edited_message(message_id, &message_segments, window, cx); + if let Some(index) = self.messages.iter().position(|id| id == message_id) { + if let Some(rendered_message) = 
self.thread.update(cx, |thread, cx| { + thread.message(*message_id).map(|message| { + let mut rendered_message = RenderedMessage { + language_registry: self.language_registry.clone(), + segments: Vec::with_capacity(message.segments.len()), + }; + for segment in &message.segments { + rendered_message.push_segment(segment, cx); + } + rendered_message + }) + }) { + self.list_state.splice(index..index + 1, 1); + self.rendered_messages_by_id + .insert(*message_id, rendered_message); + self.scroll_to_bottom(cx); + self.save_thread(cx); + cx.notify(); + } } - - self.scroll_to_bottom(cx); - self.save_thread(cx); - cx.notify(); } ThreadEvent::MessageDeleted(message_id) => { self.deleted_message(message_id); @@ -1311,17 +1303,11 @@ impl ActiveThread { fn start_editing_message( &mut self, message_id: MessageId, - message_segments: &[MessageSegment], + message_text: impl Into>, message_creases: &[MessageCrease], window: &mut Window, cx: &mut Context, ) { - // User message should always consist of a single text segment, - // therefore we can skip returning early if it's not a text segment. - let Some(MessageSegment::Text(message_text)) = message_segments.first() else { - return; - }; - let editor = crate::message_editor::create_editor( self.workspace.clone(), self.context_store.downgrade(), @@ -1333,7 +1319,7 @@ impl ActiveThread { cx, ); editor.update(cx, |editor, cx| { - editor.set_text(message_text.clone(), window, cx); + editor.set_text(message_text, window, cx); insert_message_creases(editor, message_creases, &self.context_store, window, cx); editor.focus_handle(cx).focus(window); editor.move_to_end(&editor::actions::MoveToEnd, window, cx); @@ -1828,8 +1814,6 @@ impl ActiveThread { return div().children(loading_dots).into_any(); } - let message_creases = message.creases.clone(); - let Some(rendered_message) = self.rendered_messages_by_id.get(&message_id) else { return Empty.into_any(); }; @@ -2144,15 +2128,30 @@ impl ActiveThread { }), ) .on_click(cx.listener({ - let message_segments = message.segments.clone(); + let message_creases = message.creases.clone(); move |this, _, window, cx| { - this.start_editing_message( - message_id, - &message_segments, - &message_creases, - window, - cx, - ); + if let Some(message_text) = + this.thread.read(cx).message(message_id).and_then(|message| { + message.segments.first().and_then(|segment| { + match segment { + MessageSegment::Text(message_text) => { + Some(Into::>::into(message_text.as_str())) + } + _ => { + None + } + } + }) + }) + { + this.start_editing_message( + message_id, + message_text, + &message_creases, + window, + cx, + ); + } } })), ), @@ -3826,13 +3825,15 @@ mod tests { }); active_thread.update_in(cx, |active_thread, window, cx| { - active_thread.start_editing_message( - message.id, - message.segments.as_slice(), - message.creases.as_slice(), - window, - cx, - ); + if let Some(message_text) = message.segments.first().and_then(MessageSegment::text) { + active_thread.start_editing_message( + message.id, + message_text, + message.creases.as_slice(), + window, + cx, + ); + } let editor = active_thread .editing_message .as_ref() @@ -3847,13 +3848,15 @@ mod tests { let message = thread.update(cx, |thread, _| thread.message(message.id).cloned().unwrap()); active_thread.update_in(cx, |active_thread, window, cx| { - active_thread.start_editing_message( - message.id, - message.segments.as_slice(), - message.creases.as_slice(), - window, - cx, - ); + if let Some(message_text) = message.segments.first().and_then(MessageSegment::text) { + 
active_thread.start_editing_message( + message.id, + message_text, + message.creases.as_slice(), + window, + cx, + ); + } let editor = active_thread .editing_message .as_ref() @@ -3935,13 +3938,15 @@ mod tests { // Edit the message while the completion is still running active_thread.update_in(cx, |active_thread, window, cx| { - active_thread.start_editing_message( - message.id, - message.segments.as_slice(), - message.creases.as_slice(), - window, - cx, - ); + if let Some(message_text) = message.segments.first().and_then(MessageSegment::text) { + active_thread.start_editing_message( + message.id, + message_text, + message.creases.as_slice(), + window, + cx, + ); + } let editor = active_thread .editing_message .as_ref() From 294147f4738bc3bbe678d83e67f9d194146e9bf4 Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Jun 2025 14:24:47 -0400 Subject: [PATCH 47/56] ci: Skip build_docs more often (#33398) Don't run `build_docs` when the only change is: `.github/{workflows,ISSUE_TEMPLATE}/**`. Example [extra run](https://github.com/zed-industries/zed/actions/runs/15883155767). Release Notes: - N/A --- .github/workflows/ci.yml | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b0293cfddddd98be98c3957298c527b3a71ad1e2..600956c379144d1fcf8d101bfc8b9f85b5e6d4e1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,6 +29,7 @@ jobs: outputs: run_tests: ${{ steps.filter.outputs.run_tests }} run_license: ${{ steps.filter.outputs.run_license }} + run_docs: ${{ steps.filter.outputs.run_docs }} runs-on: - ubuntu-latest steps: @@ -58,6 +59,11 @@ jobs: else echo "run_tests=false" >> $GITHUB_OUTPUT fi + if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep '^docs/') ]]; then + echo "run_docs=true" >> $GITHUB_OUTPUT + else + echo "run_docs=false" >> $GITHUB_OUTPUT + fi if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep '^Cargo.lock') ]]; then echo "run_license=true" >> $GITHUB_OUTPUT else @@ -198,7 +204,9 @@ jobs: timeout-minutes: 60 name: Check docs needs: [job_spec] - if: github.repository_owner == 'zed-industries' + if: | + github.repository_owner == 'zed-industries' && + (needs.job_spec.outputs.run_tests == 'true' || needs.job_spec.outputs.run_docs == 'true') runs-on: - buildjet-8vcpu-ubuntu-2204 steps: From e5c812fbcbad552e01597a87db82bb0db65781e9 Mon Sep 17 00:00:00 2001 From: Matin Aniss <76515905+MatinAniss@users.noreply.github.com> Date: Thu, 26 Jun 2025 05:29:13 +1000 Subject: [PATCH 48/56] gpui: Dynamic element arena (#32079) Implements a chunking strategy for the element arena that allows it to grow dynamically based on allocations, it is initialised with a single chunk of a total size of 1 mebibyte. On allocation of data with a size greater than the remaining space of the current chunk a new chunk is created. This reduces the memory allocation from the static 32 mebibytes, this especially helps GPUI applications that don't need such a large element arena and even Zed in most cases. This also prevents the panic when allocations ever exceed the element arena. 
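A simplified, safe-Rust sketch of that growth strategy, using byte offsets into `Vec<u8>` chunks instead of the raw pointers and alignment handling in the real `Arena`; the 1 MiB chunk size and the reuse-after-`clear` behavior mirror the patch, everything else is illustrative:

```rust
struct Chunk {
    storage: Vec<u8>, // backing bytes; capacity == chunk_size
    offset: usize,    // next free byte within this chunk
}

struct Arena {
    chunks: Vec<Chunk>,
    current: usize,
    chunk_size: usize,
}

impl Arena {
    fn new(chunk_size: usize) -> Self {
        assert!(chunk_size > 0);
        let first = Chunk { storage: vec![0; chunk_size], offset: 0 };
        Self { chunks: vec![first], current: 0, chunk_size }
    }

    /// Reserve `size` bytes and return a (chunk index, byte offset) handle.
    fn alloc(&mut self, size: usize) -> (usize, usize) {
        assert!(size <= self.chunk_size, "allocation larger than a whole chunk");
        if self.chunks[self.current].offset + size > self.chunk_size {
            // The current chunk cannot fit this allocation: advance to the next
            // chunk, creating it only if we have never grown this far before
            // (chunks are kept and reused across `clear` calls).
            self.current += 1;
            if self.current == self.chunks.len() {
                self.chunks.push(Chunk { storage: vec![0; self.chunk_size], offset: 0 });
            }
        }
        let chunk = &mut self.chunks[self.current];
        let start = chunk.offset;
        chunk.offset += size;
        (self.current, start)
    }

    /// Forget all allocations but keep the chunks for the next frame.
    fn clear(&mut self) {
        for chunk in &mut self.chunks {
            chunk.offset = 0;
        }
        self.current = 0;
    }
}

fn main() {
    let mut arena = Arena::new(1024 * 1024); // 1 MiB chunks, as in the patch
    let _a = arena.alloc(512 * 1024);
    let _b = arena.alloc(768 * 1024); // does not fit in chunk 0, so chunk 1 is created
    assert_eq!(arena.chunks.len(), 2);
    arena.clear(); // both chunks stick around for reuse
}
```

Keeping already-allocated chunks across `clear` means a frame that once needed N chunks never pays for those allocations again, which matches the patch's approach of resetting each chunk's offset rather than freeing it.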
Release Notes: - N/A --------- Co-authored-by: Michael Sloan --- crates/gpui/src/arena.rs | 143 ++++++++++++++++++++++++++++---------- crates/gpui/src/window.rs | 9 +-- 2 files changed, 109 insertions(+), 43 deletions(-) diff --git a/crates/gpui/src/arena.rs b/crates/gpui/src/arena.rs index f30f4b6480cc7487ed8a384306c632cd188531d8..2448746a8867b88cc7e6b22b27a6ef5eae6c40aa 100644 --- a/crates/gpui/src/arena.rs +++ b/crates/gpui/src/arena.rs @@ -1,5 +1,5 @@ use std::{ - alloc, + alloc::{self, handle_alloc_error}, cell::Cell, ops::{Deref, DerefMut}, ptr, @@ -20,43 +20,98 @@ impl Drop for ArenaElement { } } -pub struct Arena { +struct Chunk { start: *mut u8, end: *mut u8, offset: *mut u8, - elements: Vec, - valid: Rc>, } -impl Arena { - pub fn new(size_in_bytes: usize) -> Self { +impl Drop for Chunk { + fn drop(&mut self) { unsafe { - let layout = alloc::Layout::from_size_align(size_in_bytes, 1).unwrap(); + let chunk_size = self.end.offset_from_unsigned(self.start); + // this never fails as it succeeded during allocation + let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap(); + alloc::dealloc(self.start, layout); + } + } +} + +impl Chunk { + fn new(chunk_size: usize) -> Self { + unsafe { + // this only fails if chunk_size is unreasonably huge + let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap(); let start = alloc::alloc(layout); - let end = start.add(size_in_bytes); + if start.is_null() { + handle_alloc_error(layout); + } + let end = start.add(chunk_size); Self { start, end, offset: start, - elements: Vec::new(), - valid: Rc::new(Cell::new(true)), } } } - pub fn len(&self) -> usize { - self.offset as usize - self.start as usize + fn allocate(&mut self, layout: alloc::Layout) -> Option<*mut u8> { + unsafe { + let aligned = self.offset.add(self.offset.align_offset(layout.align())); + let next = aligned.add(layout.size()); + + if next <= self.end { + self.offset = next; + Some(aligned) + } else { + None + } + } + } + + fn reset(&mut self) { + self.offset = self.start; + } +} + +pub struct Arena { + chunks: Vec, + elements: Vec, + valid: Rc>, + current_chunk_index: usize, + chunk_size: usize, +} + +impl Drop for Arena { + fn drop(&mut self) { + self.clear(); + } +} + +impl Arena { + pub fn new(chunk_size: usize) -> Self { + assert!(chunk_size > 0); + Self { + chunks: vec![Chunk::new(chunk_size)], + elements: Vec::new(), + valid: Rc::new(Cell::new(true)), + current_chunk_index: 0, + chunk_size, + } } pub fn capacity(&self) -> usize { - self.end as usize - self.start as usize + self.chunks.len() * self.chunk_size } pub fn clear(&mut self) { self.valid.set(false); self.valid = Rc::new(Cell::new(true)); self.elements.clear(); - self.offset = self.start; + for chunk_index in 0..=self.current_chunk_index { + self.chunks[chunk_index].reset(); + } + self.current_chunk_index = 0; } #[inline(always)] @@ -79,33 +134,45 @@ impl Arena { unsafe { let layout = alloc::Layout::new::(); - let offset = self.offset.add(self.offset.align_offset(layout.align())); - let next_offset = offset.add(layout.size()); - assert!(next_offset <= self.end, "not enough space in Arena"); - - let result = ArenaBox { - ptr: offset.cast(), - valid: self.valid.clone(), + let mut current_chunk = &mut self.chunks[self.current_chunk_index]; + let ptr = if let Some(ptr) = current_chunk.allocate(layout) { + ptr + } else { + self.current_chunk_index += 1; + if self.current_chunk_index >= self.chunks.len() { + self.chunks.push(Chunk::new(self.chunk_size)); + assert_eq!(self.current_chunk_index, self.chunks.len() 
- 1); + log::info!( + "increased element arena capacity to {}kb", + self.capacity() / 1024, + ); + } + current_chunk = &mut self.chunks[self.current_chunk_index]; + if let Some(ptr) = current_chunk.allocate(layout) { + ptr + } else { + panic!( + "Arena chunk_size of {} is too small to allocate {} bytes", + self.chunk_size, + layout.size() + ); + } }; - inner_writer(result.ptr, f); + inner_writer(ptr.cast(), f); self.elements.push(ArenaElement { - value: offset, + value: ptr, drop: drop::, }); - self.offset = next_offset; - result + ArenaBox { + ptr: ptr.cast(), + valid: self.valid.clone(), + } } } } -impl Drop for Arena { - fn drop(&mut self) { - self.clear(); - } -} - pub struct ArenaBox { ptr: *mut T, valid: Rc>, @@ -215,13 +282,17 @@ mod tests { } #[test] - #[should_panic(expected = "not enough space in Arena")] - fn test_arena_overflow() { - let mut arena = Arena::new(16); + fn test_arena_grow() { + let mut arena = Arena::new(8); arena.alloc(|| 1u64); arena.alloc(|| 2u64); - // This should panic. - arena.alloc(|| 3u64); + + assert_eq!(arena.capacity(), 16); + + arena.alloc(|| 3u32); + arena.alloc(|| 4u32); + + assert_eq!(arena.capacity(), 24); } #[test] diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 0e3f5763dad3a92a7910b424a7f2f04d2074e3fb..be3b753d6ad487eec203a7ea321ea52818a8cad2 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -206,8 +206,7 @@ slotmap::new_key_type! { } thread_local! { - /// 8MB wasn't quite enough... - pub(crate) static ELEMENT_ARENA: RefCell = RefCell::new(Arena::new(32 * 1024 * 1024)); + pub(crate) static ELEMENT_ARENA: RefCell = RefCell::new(Arena::new(1024 * 1024)); } /// Returned when the element arena has been used and so must be cleared before the next draw. @@ -218,12 +217,8 @@ impl ArenaClearNeeded { /// Clear the element arena. pub fn clear(self) { ELEMENT_ARENA.with_borrow_mut(|element_arena| { - let percentage = (element_arena.len() as f32 / element_arena.capacity() as f32) * 100.; - if percentage >= 80. { - log::warn!("elevated element arena occupation: {}.", percentage); - } element_arena.clear(); - }) + }); } } From aae4778b4e455630af1a084f0f235babf2d05a01 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Wed, 25 Jun 2025 13:46:15 -0600 Subject: [PATCH 49/56] gpui: Add more flushing of x11 requests (#33407) Flushes should happen after sending messages to X11 when effects should be applied quickly. This is not needed for requests that return replies since it automatically flushes in that case. 
Release Notes: - N/A --- crates/gpui/src/platform/linux/x11/client.rs | 26 ++++++++------ crates/gpui/src/platform/linux/x11/window.rs | 36 +++++++++++--------- 2 files changed, 34 insertions(+), 28 deletions(-) diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index dddff8566102be44b63680999be7bed30439e7a5..f0ad8b8cf416498f3a1719180f3d6cc7327dceef 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -1,4 +1,4 @@ -use crate::Capslock; +use crate::{Capslock, xcb_flush}; use core::str; use std::{ cell::RefCell, @@ -378,6 +378,7 @@ impl X11Client { xcb_connection .xkb_use_extension(XKB_X11_MIN_MAJOR_XKB_VERSION, XKB_X11_MIN_MINOR_XKB_VERSION), )?; + assert!(xkb.supported); let events = xkb::EventType::STATE_NOTIFY | xkb::EventType::MAP_NOTIFY @@ -401,7 +402,6 @@ impl X11Client { &xkb::SelectEventsAux::new(), ), )?; - assert!(xkb.supported); let xkb_context = xkbc::Context::new(xkbc::CONTEXT_NO_FLAGS); let xkb_device_id = xkbc::x11::get_core_keyboard_device_id(&xcb_connection); @@ -484,6 +484,8 @@ impl X11Client { }) .map_err(|err| anyhow!("Failed to initialize XDP event source: {err:?}"))?; + xcb_flush(&xcb_connection); + Ok(X11Client(Rc::new(RefCell::new(X11ClientState { modifiers: Modifiers::default(), capslock: Capslock::default(), @@ -1523,6 +1525,7 @@ impl LinuxClient for X11Client { ), ) .log_err(); + xcb_flush(&state.xcb_connection); let window_ref = WindowRef { window: window.0.clone(), @@ -1554,19 +1557,18 @@ impl LinuxClient for X11Client { }; state.cursor_styles.insert(focused_window, style); - state - .xcb_connection - .change_window_attributes( + check_reply( + || "Failed to set cursor style", + state.xcb_connection.change_window_attributes( focused_window, &ChangeWindowAttributesAux { cursor: Some(cursor), ..Default::default() }, - ) - .anyhow() - .and_then(|cookie| cookie.check().anyhow()) - .context("X11: Failed to set cursor style") - .log_err(); + ), + ) + .log_err(); + state.xcb_connection.flush().log_err(); } fn open_uri(&self, uri: &str) { @@ -2087,6 +2089,7 @@ fn xdnd_send_finished( xcb_connection.send_event(false, target, EventMask::default(), message), ) .log_err(); + xcb_connection.flush().log_err(); } fn xdnd_send_status( @@ -2109,6 +2112,7 @@ fn xdnd_send_status( xcb_connection.send_event(false, target, EventMask::default(), message), ) .log_err(); + xcb_connection.flush().log_err(); } /// Recomputes `pointer_device_states` by querying all pointer devices. 
@@ -2262,6 +2266,6 @@ fn create_invisible_cursor( connection.free_pixmap(empty_pixmap)?; - connection.flush()?; + xcb_flush(connection); Ok(cursor) } diff --git a/crates/gpui/src/platform/linux/x11/window.rs b/crates/gpui/src/platform/linux/x11/window.rs index 2b6028f1ccc2e258f80e33f533fcf5bbf69f881a..673c04a3e5edfcdbb4efd508bffd50b0c50891c5 100644 --- a/crates/gpui/src/platform/linux/x11/window.rs +++ b/crates/gpui/src/platform/linux/x11/window.rs @@ -320,6 +320,13 @@ impl rwh::HasDisplayHandle for X11Window { } } +pub(crate) fn xcb_flush(xcb: &XCBConnection) { + xcb.flush() + .map_err(handle_connection_error) + .context("X11 flush failed") + .log_err(); +} + pub(crate) fn check_reply( failure_context: F, result: Result, ConnectionError>, @@ -597,7 +604,7 @@ impl X11WindowState { ), )?; - xcb.flush()?; + xcb_flush(&xcb); let renderer = { let raw_window = RawWindow { @@ -657,7 +664,7 @@ impl X11WindowState { || "X11 DestroyWindow failed while cleaning it up after setup failure.", xcb.destroy_window(x_window), )?; - xcb.flush()?; + xcb_flush(&xcb); } setup_result @@ -685,7 +692,7 @@ impl Drop for X11WindowHandle { || "X11 DestroyWindow failed while dropping X11WindowHandle.", self.xcb.destroy_window(self.id), )?; - self.xcb.flush()?; + xcb_flush(&self.xcb); anyhow::Ok(()) }) .log_err(); @@ -704,7 +711,7 @@ impl Drop for X11Window { || "X11 DestroyWindow failure.", self.0.xcb.destroy_window(self.0.x_window), )?; - self.0.xcb.flush()?; + xcb_flush(&self.0.xcb); anyhow::Ok(()) }) @@ -799,7 +806,9 @@ impl X11Window { xproto::EventMask::SUBSTRUCTURE_REDIRECT | xproto::EventMask::SUBSTRUCTURE_NOTIFY, message, ), - ) + )?; + xcb_flush(&self.0.xcb); + Ok(()) } fn get_root_position( @@ -852,15 +861,8 @@ impl X11Window { ), )?; - self.flush() - } - - fn flush(&self) -> anyhow::Result<()> { - self.0 - .xcb - .flush() - .map_err(handle_connection_error) - .context("X11 flush failed") + xcb_flush(&self.0.xcb); + Ok(()) } } @@ -1198,7 +1200,7 @@ impl PlatformWindow for X11Window { ), ) .log_err(); - self.flush().log_err(); + xcb_flush(&self.0.xcb); } fn scale_factor(&self) -> f32 { @@ -1289,7 +1291,7 @@ impl PlatformWindow for X11Window { xproto::Time::CURRENT_TIME, ) .log_err(); - self.flush().log_err(); + xcb_flush(&self.0.xcb); } fn is_active(&self) -> bool { @@ -1324,7 +1326,7 @@ impl PlatformWindow for X11Window { ), ) .log_err(); - self.flush().log_err(); + xcb_flush(&self.0.xcb); } fn set_app_id(&mut self, app_id: &str) { From 8f9817173d749183966e1c9ac526baf5d720730d Mon Sep 17 00:00:00 2001 From: vipex <101529155+vipexv@users.noreply.github.com> Date: Wed, 25 Jun 2025 21:52:15 +0200 Subject: [PATCH 50/56] pane: Update pinned tab count when it exceeds actual tab count (#33405) ## Summary This PR improves the workaround introduced in #33335 that handles cases where the pinned tab count exceeds the actual tab count during workspace deserialization. ## Problem The original workaround in #33335 successfully prevented the panic but had two issues: 1. **Console spam**: The warning message was logged repeatedly because `self.pinned_tab_count` wasn't updated to match the actual tab count 2. **Auto-pinning behavior**: New tabs up until you exceed the old safe tab count were automatically pinned after the workaround was triggered. ## Solution Updates the defensive code to set `self.pinned_tab_count = tab_count` when the mismatch is detected, ensuring: - The warning is only logged once when encountered. 
- New tabs behave normally (aren't auto-pinned) - The workspace remains in a consistent state This is an immediate fix for the workaround. I'll attempt to open up a follow-up PR when i get the chance that will address the root cause by implementing serialization for empty untitled tabs, as discussed in #33342. Release Notes: - N/A --- crates/workspace/src/pane.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 9644ef9e7967529098129a73d30442f800c391ad..5c04912d6b07e236652d04a220f00038287a76e6 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -2784,7 +2784,7 @@ impl Pane { }) .collect::>(); let tab_count = tab_items.len(); - let safe_pinned_count = if self.pinned_tab_count > tab_count { + if self.pinned_tab_count > tab_count { log::warn!( "Pinned tab count ({}) exceeds actual tab count ({}). \ This should not happen. If possible, add reproduction steps, \ @@ -2792,11 +2792,9 @@ impl Pane { self.pinned_tab_count, tab_count ); - tab_count - } else { - self.pinned_tab_count - }; - let unpinned_tabs = tab_items.split_off(safe_pinned_count); + self.pinned_tab_count = tab_count; + } + let unpinned_tabs = tab_items.split_off(self.pinned_tab_count); let pinned_tabs = tab_items; TabBar::new("tab_bar") .when( From 6fb5500ef221c5aedd891fee74bbfb7650b113e6 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Wed, 25 Jun 2025 15:06:14 -0500 Subject: [PATCH 51/56] collab: Save Customer name and billing address to Customer on checkout (#33385) We are collecting billing address and name on checkout now (for tax) but we're not saving it back to the Customer level. Updating the Checkout Session code to make`customer_update.address` equal to `auto`, instead of the default `never`, as well as the same for `customer_update.name`. 
Release Notes: - N/A --- crates/collab/src/stripe_billing.rs | 16 +++++- crates/collab/src/stripe_client.rs | 26 +++++++++ .../src/stripe_client/fake_stripe_client.rs | 9 ++- .../src/stripe_client/real_stripe_client.rs | 57 +++++++++++++++++-- .../collab/src/tests/stripe_billing_tests.rs | 29 +++++++++- 5 files changed, 125 insertions(+), 12 deletions(-) diff --git a/crates/collab/src/stripe_billing.rs b/crates/collab/src/stripe_billing.rs index 28eaf4de0885ca58c9aa81183a0cf5d5f0b2fd8b..8bf6c08158b9fa742f0f9e59711c7df80013614d 100644 --- a/crates/collab/src/stripe_billing.rs +++ b/crates/collab/src/stripe_billing.rs @@ -16,9 +16,9 @@ use crate::stripe_client::{ StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, StripeCreateMeterEventPayload, StripeCreateSubscriptionItems, StripeCreateSubscriptionParams, - StripeCustomerId, StripeMeter, StripePrice, StripePriceId, StripeSubscription, - StripeSubscriptionId, StripeSubscriptionTrialSettings, - StripeSubscriptionTrialSettingsEndBehavior, + StripeCustomerId, StripeCustomerUpdate, StripeCustomerUpdateAddress, StripeCustomerUpdateName, + StripeMeter, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId, + StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior, StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionItems, UpdateSubscriptionParams, }; @@ -247,6 +247,11 @@ impl StripeBilling { }]); params.success_url = Some(success_url); params.billing_address_collection = Some(StripeBillingAddressCollection::Required); + params.customer_update = Some(StripeCustomerUpdate { + address: Some(StripeCustomerUpdateAddress::Auto), + name: Some(StripeCustomerUpdateName::Auto), + shipping: None, + }); let session = self.client.create_checkout_session(params).await?; Ok(session.url.context("no checkout session URL")?) @@ -301,6 +306,11 @@ impl StripeBilling { }]); params.success_url = Some(success_url); params.billing_address_collection = Some(StripeBillingAddressCollection::Required); + params.customer_update = Some(StripeCustomerUpdate { + address: Some(StripeCustomerUpdateAddress::Auto), + name: Some(StripeCustomerUpdateName::Auto), + shipping: None, + }); let session = self.client.create_checkout_session(params).await?; Ok(session.url.context("no checkout session URL")?) 
diff --git a/crates/collab/src/stripe_client.rs b/crates/collab/src/stripe_client.rs index 48158e7cd95998a9dbed379d39a7bd66f42db498..9ffcb2ba6c9fde13ebc84b9e7c509851158e0a1e 100644 --- a/crates/collab/src/stripe_client.rs +++ b/crates/collab/src/stripe_client.rs @@ -154,6 +154,31 @@ pub enum StripeBillingAddressCollection { Required, } +#[derive(Debug, PartialEq, Clone)] +pub struct StripeCustomerUpdate { + pub address: Option, + pub name: Option, + pub shipping: Option, +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum StripeCustomerUpdateAddress { + Auto, + Never, +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum StripeCustomerUpdateName { + Auto, + Never, +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum StripeCustomerUpdateShipping { + Auto, + Never, +} + #[derive(Debug, Default)] pub struct StripeCreateCheckoutSessionParams<'a> { pub customer: Option<&'a StripeCustomerId>, @@ -164,6 +189,7 @@ pub struct StripeCreateCheckoutSessionParams<'a> { pub subscription_data: Option, pub success_url: Option<&'a str>, pub billing_address_collection: Option, + pub customer_update: Option, } #[derive(Debug, PartialEq, Eq, Clone, Copy)] diff --git a/crates/collab/src/stripe_client/fake_stripe_client.rs b/crates/collab/src/stripe_client/fake_stripe_client.rs index 96596aa4141b156f00d855c00bcde352c1a99f30..11b210dd0e7aba54148d26de0670f23415ae7cea 100644 --- a/crates/collab/src/stripe_client/fake_stripe_client.rs +++ b/crates/collab/src/stripe_client/fake_stripe_client.rs @@ -12,9 +12,10 @@ use crate::stripe_client::{ StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, StripeClient, StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, - StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeMeter, StripeMeterId, - StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId, StripeSubscriptionItem, - StripeSubscriptionItemId, UpdateCustomerParams, UpdateSubscriptionParams, + StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeCustomerUpdate, + StripeMeter, StripeMeterId, StripePrice, StripePriceId, StripeSubscription, + StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, UpdateCustomerParams, + UpdateSubscriptionParams, }; #[derive(Debug, Clone)] @@ -36,6 +37,7 @@ pub struct StripeCreateCheckoutSessionCall { pub subscription_data: Option, pub success_url: Option, pub billing_address_collection: Option, + pub customer_update: Option, } pub struct FakeStripeClient { @@ -233,6 +235,7 @@ impl StripeClient for FakeStripeClient { subscription_data: params.subscription_data, success_url: params.success_url.map(|url| url.to_string()), billing_address_collection: params.billing_address_collection, + customer_update: params.customer_update, }); Ok(StripeCheckoutSession { diff --git a/crates/collab/src/stripe_client/real_stripe_client.rs b/crates/collab/src/stripe_client/real_stripe_client.rs index 917e23cac360aad5d27ecfc852775a8b352eaea7..7108e8d7597a3afd235c2ae48a4b05c5fc5de014 100644 --- a/crates/collab/src/stripe_client/real_stripe_client.rs +++ b/crates/collab/src/stripe_client/real_stripe_client.rs @@ -22,10 +22,11 @@ use crate::stripe_client::{ StripeCheckoutSessionPaymentMethodCollection, StripeClient, StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams, StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams, - StripeCreateSubscriptionParams, 
StripeCustomer, StripeCustomerId, StripeMeter, StripePrice, - StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId, - StripeSubscriptionItem, StripeSubscriptionItemId, StripeSubscriptionTrialSettings, - StripeSubscriptionTrialSettingsEndBehavior, + StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeCustomerUpdate, + StripeCustomerUpdateAddress, StripeCustomerUpdateName, StripeCustomerUpdateShipping, + StripeMeter, StripePrice, StripePriceId, StripePriceRecurring, StripeSubscription, + StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, + StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior, StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateCustomerParams, UpdateSubscriptionParams, }; @@ -446,6 +447,7 @@ impl<'a> TryFrom> for CreateCheckoutSessio subscription_data: value.subscription_data.map(Into::into), success_url: value.success_url, billing_address_collection: value.billing_address_collection.map(Into::into), + customer_update: value.customer_update.map(Into::into), ..Default::default() }) } @@ -541,3 +543,50 @@ impl From for stripe::CheckoutSessionBillingAddr } } } + +impl From for stripe::CreateCheckoutSessionCustomerUpdateAddress { + fn from(value: StripeCustomerUpdateAddress) -> Self { + match value { + StripeCustomerUpdateAddress::Auto => { + stripe::CreateCheckoutSessionCustomerUpdateAddress::Auto + } + StripeCustomerUpdateAddress::Never => { + stripe::CreateCheckoutSessionCustomerUpdateAddress::Never + } + } + } +} + +impl From for stripe::CreateCheckoutSessionCustomerUpdateName { + fn from(value: StripeCustomerUpdateName) -> Self { + match value { + StripeCustomerUpdateName::Auto => stripe::CreateCheckoutSessionCustomerUpdateName::Auto, + StripeCustomerUpdateName::Never => { + stripe::CreateCheckoutSessionCustomerUpdateName::Never + } + } + } +} + +impl From for stripe::CreateCheckoutSessionCustomerUpdateShipping { + fn from(value: StripeCustomerUpdateShipping) -> Self { + match value { + StripeCustomerUpdateShipping::Auto => { + stripe::CreateCheckoutSessionCustomerUpdateShipping::Auto + } + StripeCustomerUpdateShipping::Never => { + stripe::CreateCheckoutSessionCustomerUpdateShipping::Never + } + } + } +} + +impl From for stripe::CreateCheckoutSessionCustomerUpdate { + fn from(value: StripeCustomerUpdate) -> Self { + stripe::CreateCheckoutSessionCustomerUpdate { + address: value.address.map(Into::into), + name: value.name.map(Into::into), + shipping: value.shipping.map(Into::into), + } + } +} diff --git a/crates/collab/src/tests/stripe_billing_tests.rs b/crates/collab/src/tests/stripe_billing_tests.rs index 941669362d6b7988c7165661834bece61ea00e73..c19eb0a23432fb835b99007b0ebca2e4a5a8f2e6 100644 --- a/crates/collab/src/tests/stripe_billing_tests.rs +++ b/crates/collab/src/tests/stripe_billing_tests.rs @@ -8,8 +8,9 @@ use crate::stripe_billing::StripeBilling; use crate::stripe_client::{ FakeStripeClient, StripeBillingAddressCollection, StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, StripeCreateCheckoutSessionLineItems, - StripeCreateCheckoutSessionSubscriptionData, StripeCustomerId, StripeMeter, StripeMeterId, - StripePrice, StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId, + StripeCreateCheckoutSessionSubscriptionData, StripeCustomerId, StripeCustomerUpdate, + StripeCustomerUpdateAddress, StripeCustomerUpdateName, StripeMeter, StripeMeterId, StripePrice, + StripePriceId, StripePriceRecurring, StripeSubscription, 
StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior, StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionItems, @@ -431,6 +432,14 @@ async fn test_checkout_with_zed_pro() { call.billing_address_collection, Some(StripeBillingAddressCollection::Required) ); + assert_eq!( + call.customer_update, + Some(StripeCustomerUpdate { + address: Some(StripeCustomerUpdateAddress::Auto), + name: Some(StripeCustomerUpdateName::Auto), + shipping: None, + }) + ); } } @@ -516,6 +525,14 @@ async fn test_checkout_with_zed_pro_trial() { call.billing_address_collection, Some(StripeBillingAddressCollection::Required) ); + assert_eq!( + call.customer_update, + Some(StripeCustomerUpdate { + address: Some(StripeCustomerUpdateAddress::Auto), + name: Some(StripeCustomerUpdateName::Auto), + shipping: None, + }) + ); } // Successful checkout with extended trial. @@ -574,5 +591,13 @@ async fn test_checkout_with_zed_pro_trial() { call.billing_address_collection, Some(StripeBillingAddressCollection::Required) ); + assert_eq!( + call.customer_update, + Some(StripeCustomerUpdate { + address: Some(StripeCustomerUpdateAddress::Auto), + name: Some(StripeCustomerUpdateName::Auto), + shipping: None, + }) + ); } } From dae4e84bc58bbccdcda24dd4c148a4f0d7e078fd Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Jun 2025 16:29:44 -0400 Subject: [PATCH 52/56] Explicitly associate files as JSONC (#33410) Fixes an issue when the zed repo was checked out to folder other than `zed` (e.g. `zed2`) files were incorrectly identified as JSON instead of JSONC. Release Notes: - N/A --- .zed/settings.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.zed/settings.json b/.zed/settings.json index b20d741659af99f5c5df83d8b4444f991596de1c..1ef6bc28f7dffb3fd7b25489f3f6ff0c1b0f74c9 100644 --- a/.zed/settings.json +++ b/.zed/settings.json @@ -40,6 +40,7 @@ }, "file_types": { "Dockerfile": ["Dockerfile*[!dockerignore]"], + "JSONC": ["assets/**/*.json", "renovate.json"], "Git Ignore": ["dockerignore"] }, "hard_tabs": false, From 1330cb7a1f704600794f414a7cbc99d6018081ec Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 25 Jun 2025 17:52:23 -0300 Subject: [PATCH 53/56] docs: Update instructions to use Vercel's v0 model (#33415) To make sure this reflects the current reality as of today's preview/stable version. Release Notes: - N/A --- docs/src/ai/configuration.md | 29 ++++++++++++----------------- 1 file changed, 12 insertions(+), 17 deletions(-) diff --git a/docs/src/ai/configuration.md b/docs/src/ai/configuration.md index 94ca8b90b824c5257a89a22f42bcd8338c1726fd..5c49cde598a71a3592bf96c2660dc4b31dfa8c30 100644 --- a/docs/src/ai/configuration.md +++ b/docs/src/ai/configuration.md @@ -445,23 +445,6 @@ Zed supports using OpenAI compatible APIs by specifying a custom `endpoint` and You can add a custom API URL for OpenAI either via the UI or by editing your `settings.json`. Here are a few model examples you can plug in by using this feature: -#### Vercel v0 - -[Vercel v0](https://vercel.com/docs/v0/api) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel. -It supports text and image inputs and provides fast streaming responses. - -To use it with Zed, ensure you have first created a [v0 API key](https://v0.dev/chat/settings/keys). 
-Once that's done, insert that into the OpenAI API key section, and add this endpoint URL: - -```json - "language_models": { - "openai": { - "api_url": "https://api.v0.dev/v1", - "version": "1" - }, - } -``` - #### X.ai Grok Example configuration for using X.ai Grok with Zed: @@ -540,6 +523,18 @@ You can find available models and their specifications on the [OpenRouter models Custom models will be listed in the model dropdown in the Agent Panel. +### Vercel v0 + +[Vercel v0](https://vercel.com/docs/v0/api) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel. +It supports text and image inputs and provides fast streaming responses. + +The v0 models are [OpenAI-compatible models](/#openai-api-compatible), but Vercel is listed as first-class provider in the panel's settings view. + +To start using it with Zed, ensure you have first created a [v0 API key](https://v0.dev/chat/settings/keys). +Once you have it, paste it directly into the Vercel provider section in the panel's settings view. + +You should then find it as `v0-1.5-md` in the model dropdown in the Agent Panel. + ## Advanced Configuration {#advanced-configuration} ### Custom Provider Endpoints {#custom-provider-endpoint} From 1af9f98c1d58fa06dcdcad5c68ff1be1a79d032a Mon Sep 17 00:00:00 2001 From: David Barsky Date: Wed, 25 Jun 2025 15:24:51 -0700 Subject: [PATCH 54/56] lsp-log: Avoid trimming leading space in language server logs (#33418) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Not sure what the full intention/right fix for this is, but https://github.com/zed-industries/zed/pull/32659 re-introduced trimming of leading spaces. rust-analyzer has [a custom tracing formatter](https://github.com/rust-lang/rust-analyzer/blob/317542c1e4a3ec3467d21d1c25f6a43b80d83e7d/crates/rust-analyzer/src/tracing/hprof.rs) that is _super_ useful for profiling what the heck rust-analyzer is doing. It makes prodigious use of whitespace to delineate to create a tree-shaped structure. This change reintroduces the leading whitespace. I made a previous change similar to this that removed a `stderr:` in https://github.com/zed-industries/zed/pull/27213/. If this is a direction y'all are happy to go with, I'd be happy to add a test for this!
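To make the one-line change below concrete, here is the difference in plain Rust — the sample log line is made up, but the indentation stands in for the tree structure hprof emits:

```rust
fn main() {
    // Hypothetical, indented line in the style of rust-analyzer's hprof output.
    let line = "    1.23ms  handle_completion  \n";
    // `trim()` (the old behavior) strips the leading spaces and flattens the tree:
    assert_eq!(line.trim(), "1.23ms  handle_completion");
    // `trim_end()` (this patch) only strips the trailing whitespace/newline:
    assert_eq!(line.trim_end(), "    1.23ms  handle_completion");
}
```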
[Screenshot: before]
[Screenshot: after]
cc: @mgsloan. Release Notes: - Fixed the removal of leading whitespace in a language server's stderr logs. --- crates/language_tools/src/lsp_log.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/language_tools/src/lsp_log.rs b/crates/language_tools/src/lsp_log.rs index de474c1d9f3a272407c72be52b6b2e2dd4dbb0db..a3827218c3b76c3b373492ca2092128b27462c40 100644 --- a/crates/language_tools/src/lsp_log.rs +++ b/crates/language_tools/src/lsp_log.rs @@ -430,7 +430,7 @@ impl LogStore { log_lines, id, LogMessage { - message: message.trim().to_string(), + message: message.trim_end().to_string(), typ, }, language_server_state.log_level, From b1450b6d716d4bf7ac0a96172b1225f8db8c6949 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Wed, 25 Jun 2025 16:29:30 -0600 Subject: [PATCH 55/56] Remove `git_panel::GenerateCommitMessage` in favor of `git::GenerateCommitMessage` (#33421) `git_panel::GenerateCommitMessage` has no handler, `git::GenerateCommitMessage` should be preferred. Could add a `#[action(deprecated_aliases = ["git_panel::GenerateCommitMessage"])]`, but decided not to because that action didn't work. So instead uses of it will show up as keymap errors. Closes #32667 Release Notes: - N/A --- crates/git_ui/src/git_panel.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 3cc94f84d325e89f2f5f6a9322460b5de07f45ca..dce3a52e0a567301f4b3b387ee71f89014ef5083 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -83,7 +83,6 @@ actions!( FocusEditor, FocusChanges, ToggleFillCoAuthors, - GenerateCommitMessage ] ); From b9f81c7ce75cf61225006a96cee6ca6b06b957f2 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Wed, 25 Jun 2025 15:48:40 -0700 Subject: [PATCH 56/56] Restore missing initialization of text thread actions (#33422) Fixes a regression introduced in https://github.com/zed-industries/zed/pull/33289 Release Notes: - Fixed a bug where some text thread actions were accidentally removed. --- crates/agent_ui/src/agent_ui.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index a1439620b62208b5778671836655acba141c40dd..4babe4f676054740ea88645235ccbcb834d3fc18 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -48,7 +48,7 @@ pub use crate::agent_panel::{AgentPanel, ConcreteAssistantPanelDelegate}; pub use crate::inline_assistant::InlineAssistant; use crate::slash_command_settings::SlashCommandSettings; pub use agent_diff::{AgentDiffPane, AgentDiffToolbar}; -pub use text_thread_editor::AgentPanelDelegate; +pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor}; pub use ui::preview::{all_agent_previews, get_agent_preview}; actions!( @@ -157,6 +157,7 @@ pub fn init( agent::init(cx); agent_panel::init(cx); context_server_configuration::init(language_registry.clone(), fs.clone(), cx); + TextThreadEditor::init(cx); register_slash_commands(cx); inline_assistant::init(