Detailed changes
@@ -26,6 +26,8 @@ If you're looking for concrete ideas:
- [Triaged bugs with confirmed steps to reproduce](https://github.com/zed-industries/zed/issues?q=is%3Aissue%20state%3Aopen%20type%3ABug%20label%3Astate%3Areproducible).
- [Area labels](https://github.com/zed-industries/zed/labels?q=area%3A*) to browse bugs in a specific part of the product you care about (after clicking on an area label, add type:Bug to the search).
+If you're thinking about proposing or building a larger feature, read the [Zed Feature Process](./docs/src/development/feature-process.md) for how we think about feature design — what context to provide, what integration points to consider, and how to put together a strong proposal.
+
## Sending changes
The Zed culture values working code and synchronous conversations over long
@@ -3697,6 +3697,7 @@ dependencies = [
name = "copilot_chat"
version = "0.1.0"
dependencies = [
+ "anthropic",
"anyhow",
"collections",
"dirs 4.0.0",
@@ -10815,7 +10816,7 @@ dependencies = [
[[package]]
name = "naga"
version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=0343151f535c8386df3c1db014cd42f44470e4c0#0343151f535c8386df3c1db014cd42f44470e4c0"
+source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
dependencies = [
"arrayvec",
"bit-set",
@@ -20095,7 +20096,7 @@ checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3"
[[package]]
name = "wgpu"
version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=0343151f535c8386df3c1db014cd42f44470e4c0#0343151f535c8386df3c1db014cd42f44470e4c0"
+source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
dependencies = [
"arrayvec",
"bitflags 2.10.0",
@@ -20124,7 +20125,7 @@ dependencies = [
[[package]]
name = "wgpu-core"
version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=0343151f535c8386df3c1db014cd42f44470e4c0#0343151f535c8386df3c1db014cd42f44470e4c0"
+source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
dependencies = [
"arrayvec",
"bit-set",
@@ -20155,7 +20156,7 @@ dependencies = [
[[package]]
name = "wgpu-core-deps-apple"
version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=0343151f535c8386df3c1db014cd42f44470e4c0#0343151f535c8386df3c1db014cd42f44470e4c0"
+source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
dependencies = [
"wgpu-hal",
]
@@ -20163,7 +20164,7 @@ dependencies = [
[[package]]
name = "wgpu-core-deps-emscripten"
version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=0343151f535c8386df3c1db014cd42f44470e4c0#0343151f535c8386df3c1db014cd42f44470e4c0"
+source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
dependencies = [
"wgpu-hal",
]
@@ -20171,7 +20172,7 @@ dependencies = [
[[package]]
name = "wgpu-core-deps-windows-linux-android"
version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=0343151f535c8386df3c1db014cd42f44470e4c0#0343151f535c8386df3c1db014cd42f44470e4c0"
+source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
dependencies = [
"wgpu-hal",
]
@@ -20179,7 +20180,7 @@ dependencies = [
[[package]]
name = "wgpu-hal"
version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=0343151f535c8386df3c1db014cd42f44470e4c0#0343151f535c8386df3c1db014cd42f44470e4c0"
+source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
dependencies = [
"android_system_properties",
"arrayvec",
@@ -20226,7 +20227,7 @@ dependencies = [
[[package]]
name = "wgpu-types"
version = "28.0.1"
-source = "git+https://github.com/zed-industries/wgpu?rev=0343151f535c8386df3c1db014cd42f44470e4c0#0343151f535c8386df3c1db014cd42f44470e4c0"
+source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4"
dependencies = [
"bitflags 2.10.0",
"bytemuck",
@@ -783,7 +783,7 @@ wax = "0.7"
which = "6.0.0"
wasm-bindgen = "0.2.113"
web-time = "1.1.0"
-wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "0343151f535c8386df3c1db014cd42f44470e4c0" }
+wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "465557eccfe77c840a9b4936f1408da9503372c4" }
windows-core = "0.61"
yawc = "0.2.5"
zeroize = "1.8"
@@ -1423,7 +1423,7 @@ impl EditAgentTest {
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
settings::init(cx);
- language_model::init(client.clone(), cx);
+ language_model::init(user_store.clone(), client.clone(), cx);
language_models::init(user_store, client.clone(), cx);
});
@@ -3167,7 +3167,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
let clock = Arc::new(clock::FakeSystemClock::new());
let client = Client::new(clock, http_client, cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(client.clone(), cx);
+ language_model::init(user_store.clone(), client.clone(), cx);
language_models::init(user_store, client.clone(), cx);
LanguageModelRegistry::test(cx);
});
@@ -3791,7 +3791,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
cx.set_http_client(Arc::new(http_client));
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(client.clone(), cx);
+ language_model::init(user_store.clone(), client.clone(), cx);
language_models::init(user_store, client.clone(), cx);
}
};
@@ -2,6 +2,7 @@ use crate::{AgentServer, AgentServerDelegate};
use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus};
use agent_client_protocol as acp;
use futures::{FutureExt, StreamExt, channel::mpsc, select};
+use gpui::AppContext;
use gpui::{Entity, TestAppContext};
use indoc::indoc;
use project::{FakeFs, Project};
@@ -408,7 +409,8 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap();
cx.set_http_client(Arc::new(http_client));
let client = client::Client::production(cx);
- language_model::init(client, cx);
+ let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
+ language_model::init(user_store, client, cx);
#[cfg(test)]
project::agent_server_store::AllAgentServersSettings::override_global(
@@ -3685,7 +3685,7 @@ impl AgentPanel {
h_flex()
.gap_1()
.child(agent_icon_element)
- .child(Label::new(selected_agent_label).color(label_color))
+ .child(Label::new(selected_agent_label).color(label_color).ml_0p5())
.child(
Icon::new(chevron_icon)
.color(icon_color)
@@ -2715,6 +2715,31 @@ impl ThreadView {
(IconName::Maximize, "Expand Message Editor")
};
+ if v2_empty_state {
+ self.message_editor.update(cx, |editor, cx| {
+ editor.set_mode(
+ EditorMode::Full {
+ scale_ui_elements_with_buffer_font_size: false,
+ show_active_line_background: false,
+ sizing_behavior: SizingBehavior::Default,
+ },
+ cx,
+ );
+ });
+ } else {
+ self.message_editor.update(cx, |editor, cx| {
+ editor.set_mode(
+ EditorMode::AutoHeight {
+ min_lines: AgentSettings::get_global(cx).message_editor_min_lines,
+ max_lines: Some(
+ AgentSettings::get_global(cx).set_message_editor_max_lines(),
+ ),
+ },
+ cx,
+ );
+ });
+ }
+
v_flex()
.on_action(cx.listener(Self::expand_message_editor))
.p_2()
@@ -2731,6 +2756,7 @@ impl ThreadView {
v_flex()
.relative()
.size_full()
+ .when(v2_empty_state, |this| this.flex_1())
.pt_1()
.pr_2p5()
.child(self.message_editor.clone())
@@ -2120,7 +2120,7 @@ pub mod test {
client::init(&client, cx);
workspace::init(app_state.clone(), cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
- language_model::init(client.clone(), cx);
+ language_model::init(user_store.clone(), client.clone(), cx);
language_models::init(user_store, client.clone(), cx);
cx.set_global(inline_assistant);
@@ -1222,8 +1222,10 @@ impl MessageEditor {
pub fn set_mode(&mut self, mode: EditorMode, cx: &mut Context<Self>) {
self.editor.update(cx, |editor, cx| {
- editor.set_mode(mode);
- cx.notify()
+ if *editor.mode() != mode {
+ editor.set_mode(mode);
+ cx.notify()
+ }
});
}
@@ -995,7 +995,7 @@ pub enum Speed {
}
#[derive(Debug, Serialize, Deserialize)]
-struct StreamingRequest {
+pub struct StreamingRequest {
#[serde(flatten)]
pub base: Request,
pub stream: bool,
@@ -34,4 +34,7 @@ pub enum CliResponse {
/// When Zed started not as an *.app but as a binary (e.g. local development),
/// there's a possibility to tell it to behave "regularly".
+///
+/// Note that in the main zed binary, this variable is unset after it's read for the first time,
+/// therefore it should always be accessed through the `FORCE_CLI_MODE` static.
pub const FORCE_CLI_MODE_ENV_VAR_NAME: &str = "ZED_FORCE_CLI_MODE";
@@ -140,6 +140,7 @@ pub enum Event {
ParticipantIndicesChanged,
PrivateUserInfoUpdated,
PlanUpdated,
+ OrganizationChanged,
}
#[derive(Clone, Copy)]
@@ -694,8 +695,21 @@ impl UserStore {
self.current_organization.clone()
}
- pub fn set_current_organization(&mut self, organization: Arc<Organization>) {
- self.current_organization.replace(organization);
+ pub fn set_current_organization(
+ &mut self,
+ organization: Arc<Organization>,
+ cx: &mut Context<Self>,
+ ) {
+ let is_same_organization = self
+ .current_organization
+ .as_ref()
+ .is_some_and(|current| current.id == organization.id);
+
+ if !is_same_organization {
+ self.current_organization.replace(organization);
+ cx.emit(Event::OrganizationChanged);
+ cx.notify();
+ }
}
pub fn organizations(&self) -> &Vec<Arc<Organization>> {
@@ -21,6 +21,7 @@ test-support = [
]
[dependencies]
+anthropic.workspace = true
anyhow.workspace = true
collections.workspace = true
dirs.workspace = true
@@ -52,6 +52,10 @@ impl CopilotChatConfiguration {
format!("{}/responses", api_endpoint)
}
+ pub fn messages_url(&self, api_endpoint: &str) -> String {
+ format!("{}/v1/messages", api_endpoint)
+ }
+
pub fn models_url(&self, api_endpoint: &str) -> String {
format!("{}/models", api_endpoint)
}
@@ -77,6 +81,30 @@ pub enum Role {
System,
}
+#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
+pub enum ChatLocation {
+ #[default]
+ Panel,
+ Editor,
+ EditingSession,
+ Terminal,
+ Agent,
+ Other,
+}
+
+impl ChatLocation {
+ pub fn to_intent_string(self) -> &'static str {
+ match self {
+ ChatLocation::Panel => "conversation-panel",
+ ChatLocation::Editor => "conversation-inline",
+ ChatLocation::EditingSession => "conversation-edits",
+ ChatLocation::Terminal => "conversation-terminal",
+ ChatLocation::Agent => "conversation-agent",
+ ChatLocation::Other => "conversation-other",
+ }
+ }
+}
+
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq)]
pub enum ModelSupportedEndpoint {
#[serde(rename = "/chat/completions")]
@@ -179,6 +207,16 @@ struct ModelSupportedFeatures {
parallel_tool_calls: bool,
#[serde(default)]
vision: bool,
+ #[serde(default)]
+ thinking: bool,
+ #[serde(default)]
+ adaptive_thinking: bool,
+ #[serde(default)]
+ max_thinking_budget: Option<u32>,
+ #[serde(default)]
+ min_thinking_budget: Option<u32>,
+ #[serde(default)]
+ reasoning_effort: Vec<String>,
}
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
@@ -226,6 +264,10 @@ impl Model {
self.capabilities.limits.max_context_window_tokens as u64
}
+ pub fn max_output_tokens(&self) -> usize {
+ self.capabilities.limits.max_output_tokens
+ }
+
pub fn supports_tools(&self) -> bool {
self.capabilities.supports.tool_calls
}
@@ -256,6 +298,41 @@ impl Model {
.contains(&ModelSupportedEndpoint::Responses)
}
+ pub fn supports_messages(&self) -> bool {
+ self.supported_endpoints
+ .contains(&ModelSupportedEndpoint::Messages)
+ }
+
+ pub fn supports_thinking(&self) -> bool {
+ self.capabilities.supports.thinking
+ }
+
+ pub fn supports_adaptive_thinking(&self) -> bool {
+ self.capabilities.supports.adaptive_thinking
+ }
+
+ pub fn can_think(&self) -> bool {
+ self.supports_thinking()
+ || self.supports_adaptive_thinking()
+ || self.max_thinking_budget().is_some()
+ }
+
+ pub fn max_thinking_budget(&self) -> Option<u32> {
+ self.capabilities.supports.max_thinking_budget
+ }
+
+ pub fn min_thinking_budget(&self) -> Option<u32> {
+ self.capabilities.supports.min_thinking_budget
+ }
+
+ pub fn reasoning_effort_levels(&self) -> &[String] {
+ &self.capabilities.supports.reasoning_effort
+ }
+
+ pub fn family(&self) -> &str {
+ &self.capabilities.family
+ }
+
pub fn multiplier(&self) -> f64 {
self.billing.multiplier
}
@@ -263,7 +340,6 @@ impl Model {
#[derive(Serialize, Deserialize)]
pub struct Request {
- pub intent: bool,
pub n: usize,
pub stream: bool,
pub temperature: f32,
@@ -273,6 +349,8 @@ pub struct Request {
pub tools: Vec<Tool>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tool_choice: Option<ToolChoice>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub thinking_budget: Option<u32>,
}
#[derive(Serialize, Deserialize)]
@@ -550,6 +628,7 @@ impl CopilotChat {
pub async fn stream_completion(
request: Request,
+ location: ChatLocation,
is_user_initiated: bool,
mut cx: AsyncApp,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
@@ -563,12 +642,14 @@ impl CopilotChat {
api_url.into(),
request,
is_user_initiated,
+ location,
)
.await
}
pub async fn stream_response(
request: responses::Request,
+ location: ChatLocation,
is_user_initiated: bool,
mut cx: AsyncApp,
) -> Result<BoxStream<'static, Result<responses::StreamEvent>>> {
@@ -582,6 +663,30 @@ impl CopilotChat {
api_url,
request,
is_user_initiated,
+ location,
+ )
+ .await
+ }
+
+ pub async fn stream_messages(
+ body: String,
+ location: ChatLocation,
+ is_user_initiated: bool,
+ anthropic_beta: Option<String>,
+ mut cx: AsyncApp,
+ ) -> Result<BoxStream<'static, Result<anthropic::Event, anthropic::AnthropicError>>> {
+ let (client, oauth_token, api_endpoint, configuration) =
+ Self::get_auth_details(&mut cx).await?;
+
+ let api_url = configuration.messages_url(&api_endpoint);
+ stream_messages(
+ client.clone(),
+ oauth_token,
+ api_url,
+ body,
+ is_user_initiated,
+ location,
+ anthropic_beta,
)
.await
}
@@ -755,6 +860,7 @@ pub(crate) fn copilot_request_headers(
builder: http_client::Builder,
oauth_token: &str,
is_user_initiated: Option<bool>,
+ location: Option<ChatLocation>,
) -> http_client::Builder {
builder
.header("Authorization", format!("Bearer {}", oauth_token))
@@ -766,12 +872,19 @@ pub(crate) fn copilot_request_headers(
option_env!("CARGO_PKG_VERSION").unwrap_or("unknown")
),
)
+ .header("X-GitHub-Api-Version", "2025-10-01")
.when_some(is_user_initiated, |builder, is_user_initiated| {
builder.header(
"X-Initiator",
if is_user_initiated { "user" } else { "agent" },
)
})
+ .when_some(location, |builder, loc| {
+ let interaction_type = loc.to_intent_string();
+ builder
+ .header("X-Interaction-Type", interaction_type)
+ .header("OpenAI-Intent", interaction_type)
+ })
}
async fn request_models(
@@ -785,8 +898,8 @@ async fn request_models(
.uri(models_url.as_ref()),
&oauth_token,
None,
- )
- .header("x-github-api-version", "2025-05-01");
+ None,
+ );
let request = request_builder.body(AsyncBody::empty())?;
@@ -830,6 +943,7 @@ async fn stream_completion(
completion_url: Arc<str>,
request: Request,
is_user_initiated: bool,
+ location: ChatLocation,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
let is_vision_request = request.messages.iter().any(|message| match message {
ChatMessage::User { content }
@@ -846,6 +960,7 @@ async fn stream_completion(
.uri(completion_url.as_ref()),
&oauth_token,
Some(is_user_initiated),
+ Some(location),
)
.when(is_vision_request, |builder| {
builder.header("Copilot-Vision-Request", is_vision_request.to_string())
@@ -905,6 +1020,65 @@ async fn stream_completion(
}
}
+async fn stream_messages(
+ client: Arc<dyn HttpClient>,
+ oauth_token: String,
+ api_url: String,
+ body: String,
+ is_user_initiated: bool,
+ location: ChatLocation,
+ anthropic_beta: Option<String>,
+) -> Result<BoxStream<'static, Result<anthropic::Event, anthropic::AnthropicError>>> {
+ let mut request_builder = copilot_request_headers(
+ HttpRequest::builder().method(Method::POST).uri(&api_url),
+ &oauth_token,
+ Some(is_user_initiated),
+ Some(location),
+ );
+
+ if let Some(beta) = &anthropic_beta {
+ request_builder = request_builder.header("anthropic-beta", beta.as_str());
+ }
+
+ let request = request_builder.body(AsyncBody::from(body))?;
+ let mut response = client.send(request).await?;
+
+ if !response.status().is_success() {
+ let mut body = String::new();
+ response.body_mut().read_to_string(&mut body).await?;
+ anyhow::bail!("Failed to connect to API: {} {}", response.status(), body);
+ }
+
+ let reader = BufReader::new(response.into_body());
+ Ok(reader
+ .lines()
+ .filter_map(|line| async move {
+ match line {
+ Ok(line) => {
+ let line = line
+ .strip_prefix("data: ")
+ .or_else(|| line.strip_prefix("data:"))?;
+ if line.starts_with("[DONE]") || line.is_empty() {
+ return None;
+ }
+ match serde_json::from_str(line) {
+ Ok(event) => Some(Ok(event)),
+ Err(error) => {
+ log::error!(
+ "Failed to parse Copilot messages stream event: `{}`\nResponse: `{}`",
+ error,
+ line,
+ );
+ Some(Err(anthropic::AnthropicError::DeserializeResponse(error)))
+ }
+ }
+ }
+ Err(error) => Some(Err(anthropic::AnthropicError::ReadResponse(error))),
+ }
+ })
+ .boxed())
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -1513,6 +1687,11 @@ mod tests {
tool_calls: true,
parallel_tool_calls: false,
vision: false,
+ thinking: false,
+ adaptive_thinking: false,
+ max_thinking_budget: None,
+ min_thinking_budget: None,
+ reasoning_effort: vec![],
},
model_type: "chat".to_string(),
tokenizer: None,
@@ -1,9 +1,9 @@
use std::sync::Arc;
-use super::copilot_request_headers;
+use super::{ChatLocation, copilot_request_headers};
use anyhow::{Result, anyhow};
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
-use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
+use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use serde_json::Value;
pub use settings::OpenAiReasoningEffort as ReasoningEffort;
@@ -24,6 +24,7 @@ pub struct Request {
pub reasoning: Option<ReasoningConfig>,
#[serde(skip_serializing_if = "Option::is_none")]
pub include: Option<Vec<ResponseIncludable>>,
+ pub store: bool,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -280,6 +281,7 @@ pub async fn stream_response(
api_url: String,
request: Request,
is_user_initiated: bool,
+ location: ChatLocation,
) -> Result<BoxStream<'static, Result<StreamEvent>>> {
let is_vision_request = request.input.iter().any(|item| match item {
ResponseInputItem::Message {
@@ -295,13 +297,11 @@ pub async fn stream_response(
HttpRequest::builder().method(Method::POST).uri(&api_url),
&oauth_token,
Some(is_user_initiated),
- );
-
- let request_builder = if is_vision_request {
- request_builder.header("Copilot-Vision-Request", "true")
- } else {
- request_builder
- };
+ Some(location),
+ )
+ .when(is_vision_request, |builder| {
+ builder.header("Copilot-Vision-Request", "true")
+ });
let is_streaming = request.stream;
let json = serde_json::to_string(&request)?;
@@ -533,8 +533,8 @@ mod tests {
zlog::init_test();
let http_client = FakeHttpClient::with_404_response();
let client = Client::new(Arc::new(FakeSystemClock::new()), http_client, cx);
- language_model::init(client.clone(), cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
+ language_model::init(user_store.clone(), client.clone(), cx);
EditPredictionStore::global(&client, &user_store, cx);
})
}
@@ -1850,9 +1850,8 @@ fn init_test_with_fake_client(
let client = client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx);
client.cloud_client().set_credentials(1, "test".into());
- language_model::init(client.clone(), cx);
-
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
+ language_model::init(user_store.clone(), client.clone(), cx);
let ep_store = EditPredictionStore::global(&client, &user_store, cx);
(
@@ -2218,8 +2217,9 @@ async fn make_test_ep_store(
});
let client = cx.update(|cx| Client::new(Arc::new(FakeSystemClock::new()), http_client, cx));
+ let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx)));
cx.update(|cx| {
- RefreshLlmTokenListener::register(client.clone(), cx);
+ RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
});
let _server = FakeServer::for_client(42, &client, cx).await;
@@ -2301,8 +2301,9 @@ async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut
let client =
cx.update(|cx| client::Client::new(Arc::new(FakeSystemClock::new()), http_client, cx));
+ let user_store = cx.update(|cx| cx.new(|cx| client::UserStore::new(client.clone(), cx)));
cx.update(|cx| {
- language_model::RefreshLlmTokenListener::register(client.clone(), cx);
+ language_model::RefreshLlmTokenListener::register(client.clone(), user_store.clone(), cx);
});
let ep_store = cx.new(|cx| EditPredictionStore::new(client, project.read(cx).user_store(), cx));
@@ -19,7 +19,7 @@ use settings::EditPredictionPromptFormat;
use text::{Anchor, Bias};
use ui::SharedString;
use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification};
-use zeta_prompt::ZetaPromptInput;
+use zeta_prompt::{ParsedOutput, ZetaPromptInput};
use std::{env, ops::Range, path::Path, sync::Arc, time::Instant};
use zeta_prompt::{
@@ -175,13 +175,12 @@ pub fn request_prediction_with_zeta(
let request_id = EditPredictionId(request_id.into());
let output_text = zeta1::clean_zeta1_model_output(&response_text);
+ let parsed_output = output_text.map(|text| ParsedOutput {
+ new_editable_region: text,
+ range_in_excerpt: editable_range_in_excerpt,
+ });
- (
- request_id,
- Some(editable_range_in_excerpt).zip(output_text),
- None,
- None,
- )
+ (request_id, parsed_output, None, None)
}
EditPredictionPromptFormat::Zeta2 => {
let prompt = format_zeta_prompt(&prompt_input, zeta_version);
@@ -271,20 +270,23 @@ pub fn request_prediction_with_zeta(
let request_id = EditPredictionId(response.request_id.into());
let output_text = Some(response.output).filter(|s| !s.is_empty());
let model_version = response.model_version;
+ let parsed_output = ParsedOutput {
+ new_editable_region: output_text.unwrap_or_default(),
+ range_in_excerpt: response.editable_range,
+ };
- (
- request_id,
- Some(response.editable_range).zip(output_text),
- model_version,
- usage,
- )
+ (request_id, Some(parsed_output), model_version, usage)
};
let received_response_at = Instant::now();
log::trace!("Got edit prediction response");
- let Some((editable_range_in_excerpt, mut output_text)) = output else {
+ let Some(ParsedOutput {
+ new_editable_region: mut output_text,
+ range_in_excerpt: editable_range_in_excerpt,
+ }) = output
+ else {
return Ok(((request_id, None), None));
};
@@ -105,7 +105,7 @@ pub fn init(cx: &mut App) -> EpAppState {
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone());
- language_model::init(client.clone(), cx);
+ language_model::init(user_store.clone(), client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);
prompt_store::init(cx);
@@ -738,6 +738,21 @@ async fn load_examples(
examples.append(&mut requested_examples);
}
+ if !captured_after_timestamps.is_empty() {
+ captured_after_timestamps.sort();
+
+ let mut captured_examples = pull_examples::fetch_captured_examples_after(
+ http_client.clone(),
+ &captured_after_timestamps,
+ max_rows_per_timestamp,
+ remaining_offset,
+ background_executor.clone(),
+ Some(MIN_CAPTURE_VERSION),
+ )
+ .await?;
+ examples.append(&mut captured_examples);
+ }
+
if !settled_after_timestamps.is_empty() {
settled_after_timestamps.sort();
@@ -6,7 +6,7 @@ use crate::{
};
use anyhow::{Context as _, Result};
use edit_prediction::example_spec::encode_cursor_in_patch;
-use zeta_prompt::{CURSOR_MARKER, ZetaFormat, output_end_marker_for_format, resolve_cursor_region};
+use zeta_prompt::{CURSOR_MARKER, ZetaFormat, parse_zeta2_model_output};
pub fn run_parse_output(example: &mut Example) -> Result<()> {
example
@@ -60,10 +60,13 @@ fn parse_zeta2_output(
.as_ref()
.context("prompt_inputs required")?;
- let (context, editable_range, _, _) = resolve_cursor_region(prompt_inputs, format);
- let old_text = context[editable_range].to_string();
+ let parsed = parse_zeta2_model_output(actual_output, format, prompt_inputs)?;
+ let range_in_excerpt = parsed.range_in_excerpt;
+
+ let excerpt = prompt_inputs.cursor_excerpt.as_ref();
+ let old_text = excerpt[range_in_excerpt.clone()].to_string();
+ let mut new_text = parsed.new_editable_region;
- let mut new_text = actual_output.to_string();
let cursor_offset = if let Some(offset) = new_text.find(CURSOR_MARKER) {
new_text.replace_range(offset..offset + CURSOR_MARKER.len(), "");
Some(offset)
@@ -71,14 +74,8 @@ fn parse_zeta2_output(
None
};
- if let Some(marker) = output_end_marker_for_format(format) {
- new_text = new_text
- .strip_suffix(marker)
- .unwrap_or(&new_text)
- .to_string();
- }
-
- let mut old_text_normalized = old_text.clone();
+ // Normalize trailing newlines for diff generation
+ let mut old_text_normalized = old_text;
if !new_text.is_empty() && !new_text.ends_with('\n') {
new_text.push('\n');
}
@@ -86,22 +83,10 @@ fn parse_zeta2_output(
old_text_normalized.push('\n');
}
- let old_text_trimmed = old_text.trim_end_matches('\n');
- let excerpt = prompt_inputs.cursor_excerpt.as_ref();
- let (editable_region_offset, _) = excerpt
- .match_indices(old_text_trimmed)
- .min_by_key(|(index, _)| index.abs_diff(prompt_inputs.cursor_offset_in_excerpt))
- .with_context(|| {
- format!(
- "could not find editable region in content.\nLooking for:\n{}\n\nIn content:\n{}",
- old_text_trimmed, excerpt
- )
- })?;
-
+ let editable_region_offset = range_in_excerpt.start;
let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count();
-
- // Use full context so cursor offset (relative to editable region start) aligns with diff content
let editable_region_lines = old_text_normalized.lines().count() as u32;
+
let diff = language::unified_diff_with_context(
&old_text_normalized,
&new_text,
@@ -565,6 +565,101 @@ pub async fn fetch_requested_examples_after(
Ok(all_examples)
}
+pub async fn fetch_captured_examples_after(
+ http_client: Arc<dyn HttpClient>,
+ after_timestamps: &[String],
+ max_rows_per_timestamp: usize,
+ offset: usize,
+ background_executor: BackgroundExecutor,
+ min_capture_version: Option<MinCaptureVersion>,
+) -> Result<Vec<Example>> {
+ if after_timestamps.is_empty() {
+ return Ok(Vec::new());
+ }
+
+ let progress = Progress::global();
+
+ let mut all_examples = Vec::new();
+
+ for after_date in after_timestamps.iter() {
+ let step_progress_name = format!("captured>{after_date}");
+ let step_progress = progress.start(Step::PullExamples, &step_progress_name);
+ step_progress.set_substatus("querying");
+
+ let min_minor_str = min_capture_version.map(|version| version.minor.to_string());
+ let min_patch_str = min_capture_version.map(|version| version.patch.to_string());
+ let min_minor_str_ref = min_minor_str.as_deref();
+ let min_patch_str_ref = min_patch_str.as_deref();
+
+ let statement = indoc! {r#"
+ SELECT
+ settled.event_properties:request_id::string AS request_id,
+ settled.device_id::string AS device_id,
+ settled.time::string AS time,
+ req.event_properties:input AS input,
+ settled.event_properties:settled_editable_region::string AS settled_editable_region,
+ settled.event_properties:example AS example,
+ req.event_properties:zed_version::string AS zed_version
+ FROM events settled
+ INNER JOIN events req
+ ON settled.event_properties:request_id::string = req.event_properties:request_id::string
+ WHERE settled.event_type = ?
+ AND req.event_type = ?
+ AND req.event_properties:version = 'V3'
+ AND req.event_properties:input:can_collect_data = true
+ AND settled.event_properties:example IS NOT NULL
+ AND TYPEOF(settled.event_properties:example) != 'NULL_VALUE'
+ AND settled.time > TRY_TO_TIMESTAMP_NTZ(?)
+ AND (? IS NULL OR (
+ TRY_CAST(SPLIT_PART(req.event_properties:zed_version::string, '.', 2) AS INTEGER) > ?
+ OR (
+ TRY_CAST(SPLIT_PART(req.event_properties:zed_version::string, '.', 2) AS INTEGER) = ?
+ AND TRY_CAST(SPLIT_PART(SPLIT_PART(req.event_properties:zed_version::string, '.', 3), '+', 1) AS INTEGER) >= ?
+ )
+ ))
+ ORDER BY settled.time ASC
+ LIMIT ?
+ OFFSET ?
+ "#};
+
+ let bindings = json!({
+ "1": { "type": "TEXT", "value": EDIT_PREDICTION_SETTLED_EVENT },
+ "2": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT },
+ "3": { "type": "TEXT", "value": after_date },
+ "4": { "type": "FIXED", "value": min_minor_str_ref },
+ "5": { "type": "FIXED", "value": min_minor_str_ref },
+ "6": { "type": "FIXED", "value": min_minor_str_ref },
+ "7": { "type": "FIXED", "value": min_patch_str_ref },
+ "8": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() },
+ "9": { "type": "FIXED", "value": offset.to_string() }
+ });
+
+ let examples = fetch_examples_with_query(
+ http_client.clone(),
+ &step_progress,
+ background_executor.clone(),
+ statement,
+ bindings,
+ DEFAULT_STATEMENT_TIMEOUT_SECONDS,
+ &[
+ "request_id",
+ "device_id",
+ "time",
+ "input",
+ "settled_editable_region",
+ "example",
+ "zed_version",
+ ],
+ captured_examples_from_response,
+ )
+ .await?;
+
+ all_examples.extend(examples);
+ }
+
+ Ok(all_examples)
+}
+
pub async fn fetch_settled_examples_after(
http_client: Arc<dyn HttpClient>,
after_timestamps: &[String],
@@ -1018,7 +1113,7 @@ fn settled_examples_from_response<'a>(
}
};
- let parse_json_value = |_: &str, raw: Option<&JsonValue>| -> Option<JsonValue> {
+ let parse_json_value = |raw: Option<&JsonValue>| -> Option<JsonValue> {
let value = raw?;
match value {
JsonValue::String(s) => serde_json::from_str::<JsonValue>(s).ok(),
@@ -1030,7 +1125,7 @@ fn settled_examples_from_response<'a>(
let device_id = get_string("device_id");
let time = get_string("time");
let input_raw = get_value("input");
- let input_json = parse_json_value("input", input_raw.as_ref());
+ let input_json = parse_json_value(input_raw.as_ref());
let input: Option<ZetaPromptInput> = input_json
.as_ref()
.and_then(|parsed| serde_json::from_value(parsed.clone()).ok());
@@ -1104,6 +1199,133 @@ fn settled_examples_from_response<'a>(
Ok(Box::new(iter))
}
+fn captured_examples_from_response<'a>(
+ response: &'a SnowflakeStatementResponse,
+ column_indices: &'a std::collections::HashMap<String, usize>,
+) -> Result<Box<dyn Iterator<Item = Example> + 'a>> {
+ if let Some(code) = &response.code {
+ if code != SNOWFLAKE_SUCCESS_CODE {
+ anyhow::bail!(
+ "snowflake sql api returned error code={code} message={}",
+ response.message.as_deref().unwrap_or("<no message>")
+ );
+ }
+ }
+
+ let iter = response
+ .data
+ .iter()
+ .enumerate()
+ .filter_map(move |(row_index, data_row)| {
+ let get_value = |name: &str| -> Option<JsonValue> {
+ let index = column_indices.get(name).copied()?;
+ let value = data_row.get(index)?;
+ if value.is_null() {
+ None
+ } else {
+ Some(value.clone())
+ }
+ };
+
+ let get_string = |name: &str| -> Option<String> {
+ match get_value(name)? {
+ JsonValue::String(s) => Some(s),
+ other => Some(other.to_string()),
+ }
+ };
+
+ let parse_json_value = |raw: Option<&JsonValue>| -> Option<JsonValue> {
+ let value = raw?;
+ match value {
+ JsonValue::String(s) => serde_json::from_str::<JsonValue>(s).ok(),
+ other => Some(other.clone()),
+ }
+ };
+
+ let request_id = get_string("request_id");
+ let device_id = get_string("device_id");
+ let time = get_string("time");
+ let input_raw = get_value("input");
+ let input_json = parse_json_value(input_raw.as_ref());
+ let input: Option<ZetaPromptInput> = input_json
+ .as_ref()
+ .and_then(|parsed| serde_json::from_value(parsed.clone()).ok());
+ let example_raw = get_value("example");
+ let example_json = parse_json_value(example_raw.as_ref());
+ let example_spec: Option<ExampleSpec> = example_json.as_ref().and_then(|parsed| {
+ serde_json::from_value(parsed.clone())
+ .or_else(|_| {
+ parsed
+ .as_str()
+ .and_then(|markdown| ExampleSpec::from_markdown(markdown).ok())
+ .ok_or_else(|| {
+ serde_json::Error::io(std::io::Error::other("not markdown"))
+ })
+ })
+ .ok()
+ });
+ let has_example_spec = example_spec.is_some();
+ let settled_editable_region = get_string("settled_editable_region");
+ let zed_version = get_string("zed_version");
+
+ match (
+ request_id.clone(),
+ device_id.clone(),
+ time.clone(),
+ input.clone(),
+ example_spec,
+ settled_editable_region.clone(),
+ ) {
+ (
+ Some(request_id),
+ Some(device_id),
+ Some(time),
+ Some(input),
+ Some(example_spec),
+ Some(settled_editable_region),
+ ) => Some(build_captured_example(
+ request_id,
+ device_id,
+ time,
+ input,
+ example_spec,
+ settled_editable_region,
+ zed_version,
+ )),
+ _ => {
+ let mut missing_fields = Vec::new();
+
+ if request_id.is_none() {
+ missing_fields.push("request_id");
+ }
+ if device_id.is_none() {
+ missing_fields.push("device_id");
+ }
+ if time.is_none() {
+ missing_fields.push("time");
+ }
+ if input_raw.is_none() || input_json.is_none() || input.is_none() {
+ missing_fields.push("input");
+ }
+ if example_raw.is_none() || !has_example_spec {
+ missing_fields.push("example");
+ }
+ if settled_editable_region.is_none() {
+ missing_fields.push("settled_editable_region");
+ }
+
+ log::warn!(
+ "skipping captured row {row_index}: [{}]",
+ missing_fields.join(", "),
+ );
+ None
+ }
+ }
+ });
+
+ Ok(Box::new(iter))
+}
+
fn build_settled_example(
request_id: String,
device_id: String,
@@ -1160,6 +1382,43 @@ fn build_settled_example(
example
}
+fn build_captured_example(
+ request_id: String,
+ device_id: String,
+ time: String,
+ input: ZetaPromptInput,
+ mut example_spec: ExampleSpec,
+ settled_editable_region: String,
+ zed_version: Option<String>,
+) -> Example {
+ let expected_patch = build_output_patch(
+ &input.cursor_path,
+ input.cursor_excerpt.as_ref(),
+ &input.excerpt_ranges.editable_350,
+ settled_editable_region.as_str(),
+ );
+
+ example_spec.expected_patches = vec![expected_patch];
+ example_spec.telemetry = Some(TelemetrySource {
+ request_id,
+ device_id,
+ time,
+ rejection_reason: String::new(),
+ was_shown: false,
+ });
+
+ Example {
+ spec: example_spec,
+ zed_version,
+ prompt_inputs: Some(input),
+ prompt: None,
+ predictions: Vec::new(),
+ score: Vec::new(),
+ qa: Vec::new(),
+ state: None,
+ }
+}
+
fn rejected_examples_from_response<'a>(
response: &'a SnowflakeStatementResponse,
column_indices: &'a std::collections::HashMap<String, usize>,
@@ -227,16 +227,17 @@ pub fn needs_repair(example: &Example, confidence_threshold: u8) -> bool {
/// Handles the `KEEP_PREVIOUS` sentinel by copying the teacher's prediction,
/// and delegates normal output to `TeacherPrompt::parse`.
pub fn parse(example: &Example, actual_output: &str) -> Result<(String, Option<ActualCursor>)> {
- if let Some(last_codeblock) = extract_last_codeblock(actual_output) {
- if last_codeblock.trim() == KEEP_PREVIOUS {
- let original = example
- .predictions
- .first()
- .context("no original prediction to keep")?;
- let patch = original.actual_patch.clone().unwrap_or_default();
- let cursor = original.actual_cursor.clone();
- return Ok((patch, cursor));
- }
+ let last_codeblock =
+ extract_last_codeblock(actual_output).unwrap_or_else(|| actual_output.to_string());
+
+ if last_codeblock.contains(KEEP_PREVIOUS) {
+ let original = example
+ .predictions
+ .first()
+ .context("no original prediction to keep")?;
+ let patch = original.actual_patch.clone().unwrap_or_default();
+ let cursor = original.actual_cursor.clone();
+ return Ok((patch, cursor));
}
TeacherPrompt::parse(example, actual_output)
@@ -1233,6 +1233,7 @@ pub struct Editor {
autoindent_mode: Option<AutoindentMode>,
workspace: Option<(WeakEntity<Workspace>, Option<WorkspaceId>)>,
input_enabled: bool,
+ expects_character_input: bool,
use_modal_editing: bool,
read_only: bool,
leader_id: Option<CollaboratorId>,
@@ -2469,6 +2470,7 @@ impl Editor {
collapse_matches: false,
workspace: None,
input_enabled: !is_minimap,
+ expects_character_input: !is_minimap,
use_modal_editing: full_mode,
read_only: is_minimap,
use_autoclose: true,
@@ -3365,6 +3367,10 @@ impl Editor {
self.input_enabled = input_enabled;
}
+ pub fn set_expects_character_input(&mut self, expects_character_input: bool) {
+ self.expects_character_input = expects_character_input;
+ }
+
pub fn set_edit_predictions_hidden_for_vim_mode(
&mut self,
hidden: bool,
@@ -28409,7 +28415,7 @@ impl EntityInputHandler for Editor {
}
fn accepts_text_input(&self, _window: &mut Window, _cx: &mut Context<Self>) -> bool {
- self.input_enabled
+ self.expects_character_input
}
}
@@ -429,7 +429,7 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
let extension_host_proxy = ExtensionHostProxy::global(cx);
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone());
- language_model::init(client.clone(), cx);
+ language_model::init(user_store.clone(), client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);
prompt_store::init(cx);
@@ -104,7 +104,7 @@ pub fn init(cx: &mut App) -> Arc<AgentCliAppState> {
let extension_host_proxy = ExtensionHostProxy::global(cx);
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone());
- language_model::init(client.clone(), cx);
+ language_model::init(user_store.clone(), client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);
prompt_store::init(cx);
@@ -80,6 +80,18 @@ pub trait Extension: Send + Sync + 'static {
worktree: Arc<dyn WorktreeDelegate>,
) -> Result<Option<String>>;
+ async fn language_server_initialization_options_schema(
+ &self,
+ language_server_id: LanguageServerName,
+ worktree: Arc<dyn WorktreeDelegate>,
+ ) -> Result<Option<String>>;
+
+ async fn language_server_workspace_configuration_schema(
+ &self,
+ language_server_id: LanguageServerName,
+ worktree: Arc<dyn WorktreeDelegate>,
+ ) -> Result<Option<String>>;
+
async fn language_server_additional_initialization_options(
&self,
language_server_id: LanguageServerName,
@@ -100,6 +100,28 @@ pub trait Extension: Send + Sync {
Ok(None)
}
+ /// Returns the JSON schema for the initialization options.
+ ///
+ /// The schema must conform to the JSON Schema specification.
+ fn language_server_initialization_options_schema(
+ &mut self,
+ _language_server_id: &LanguageServerId,
+ _worktree: &Worktree,
+ ) -> Option<serde_json::Value> {
+ None
+ }
+
+ /// Returns the JSON schema for the workspace configuration.
+ ///
+ /// The schema must conform to the JSON Schema specification.
+ fn language_server_workspace_configuration_schema(
+ &mut self,
+ _language_server_id: &LanguageServerId,
+ _worktree: &Worktree,
+ ) -> Option<serde_json::Value> {
+ None
+ }
+
/// Returns the initialization options to pass to the other language server.
fn language_server_additional_initialization_options(
&mut self,
@@ -370,6 +392,26 @@ impl wit::Guest for Component {
.and_then(|value| serde_json::to_string(&value).ok()))
}
+ fn language_server_initialization_options_schema(
+ language_server_id: String,
+ worktree: &Worktree,
+ ) -> Option<String> {
+ let language_server_id = LanguageServerId(language_server_id);
+ extension()
+ .language_server_initialization_options_schema(&language_server_id, worktree)
+ .and_then(|value| serde_json::to_string(&value).ok())
+ }
+
+ fn language_server_workspace_configuration_schema(
+ language_server_id: String,
+ worktree: &Worktree,
+ ) -> Option<String> {
+ let language_server_id = LanguageServerId(language_server_id);
+ extension()
+ .language_server_workspace_configuration_schema(&language_server_id, worktree)
+ .and_then(|value| serde_json::to_string(&value).ok())
+ }
+
fn language_server_additional_initialization_options(
language_server_id: String,
target_language_server_id: String,
@@ -101,6 +101,16 @@ world extension {
/// Returns the workspace configuration options to pass to the language server.
export language-server-workspace-configuration: func(language-server-id: string, worktree: borrow<worktree>) -> result<option<string>, string>;
+ /// Returns the JSON schema for the initialization options.
+ ///
+ /// The schema is represented as a JSON string conforming to the JSON Schema specification.
+ export language-server-initialization-options-schema: func(language-server-id: string, worktree: borrow<worktree>) -> option<string>;
+
+ /// Returns the JSON schema for the workspace configuration.
+ ///
+ /// The schema is represented as a JSON string conforming to the JSON Schema specification.
+ export language-server-workspace-configuration-schema: func(language-server-id: string, worktree: borrow<worktree>) -> option<string>;
+
/// Returns the initialization options to pass to the other language server.
export language-server-additional-initialization-options: func(language-server-id: string, target-language-server-id: string, worktree: borrow<worktree>) -> result<option<string>, string>;
@@ -159,6 +159,48 @@ impl extension::Extension for WasmExtension {
.await?
}
+ async fn language_server_initialization_options_schema(
+ &self,
+ language_server_id: LanguageServerName,
+ worktree: Arc<dyn WorktreeDelegate>,
+ ) -> Result<Option<String>> {
+ self.call(|extension, store| {
+ async move {
+ let resource = store.data_mut().table().push(worktree)?;
+ extension
+ .call_language_server_initialization_options_schema(
+ store,
+ &language_server_id,
+ resource,
+ )
+ .await
+ }
+ .boxed()
+ })
+ .await?
+ }
+
+ async fn language_server_workspace_configuration_schema(
+ &self,
+ language_server_id: LanguageServerName,
+ worktree: Arc<dyn WorktreeDelegate>,
+ ) -> Result<Option<String>> {
+ self.call(|extension, store| {
+ async move {
+ let resource = store.data_mut().table().push(worktree)?;
+ extension
+ .call_language_server_workspace_configuration_schema(
+ store,
+ &language_server_id,
+ resource,
+ )
+ .await
+ }
+ .boxed()
+ })
+ .await?
+ }
+
async fn language_server_additional_initialization_options(
&self,
language_server_id: LanguageServerName,
@@ -465,6 +465,60 @@ impl Extension {
}
}
+ pub async fn call_language_server_initialization_options_schema(
+ &self,
+ store: &mut Store<WasmState>,
+ language_server_id: &LanguageServerName,
+ resource: Resource<Arc<dyn WorktreeDelegate>>,
+ ) -> Result<Option<String>> {
+ match self {
+ Extension::V0_8_0(ext) => {
+ ext.call_language_server_initialization_options_schema(
+ store,
+ &language_server_id.0,
+ resource,
+ )
+ .await
+ }
+ Extension::V0_6_0(_)
+ | Extension::V0_5_0(_)
+ | Extension::V0_4_0(_)
+ | Extension::V0_3_0(_)
+ | Extension::V0_2_0(_)
+ | Extension::V0_1_0(_)
+ | Extension::V0_0_6(_)
+ | Extension::V0_0_4(_)
+ | Extension::V0_0_1(_) => Ok(None),
+ }
+ }
+
+ pub async fn call_language_server_workspace_configuration_schema(
+ &self,
+ store: &mut Store<WasmState>,
+ language_server_id: &LanguageServerName,
+ resource: Resource<Arc<dyn WorktreeDelegate>>,
+ ) -> Result<Option<String>> {
+ match self {
+ Extension::V0_8_0(ext) => {
+ ext.call_language_server_workspace_configuration_schema(
+ store,
+ &language_server_id.0,
+ resource,
+ )
+ .await
+ }
+ Extension::V0_6_0(_)
+ | Extension::V0_5_0(_)
+ | Extension::V0_4_0(_)
+ | Extension::V0_3_0(_)
+ | Extension::V0_2_0(_)
+ | Extension::V0_1_0(_)
+ | Extension::V0_0_6(_)
+ | Extension::V0_0_4(_)
+ | Extension::V0_0_1(_) => Ok(None),
+ }
+ }
+
pub async fn call_language_server_additional_initialization_options(
&self,
store: &mut Store<WasmState>,
@@ -517,7 +517,11 @@ impl ProjectDiff {
fn move_to_beginning(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.editor.update(cx, |editor, cx| {
editor.rhs_editor().update(cx, |editor, cx| {
- editor.move_to_beginning(&Default::default(), window, cx);
+ editor.change_selections(Default::default(), window, cx, |s| {
+ s.select_ranges(vec![
+ multi_buffer::Anchor::min()..multi_buffer::Anchor::min(),
+ ]);
+ });
});
});
}
@@ -151,6 +151,7 @@ rand.workspace = true
scheduler = { workspace = true, features = ["test-support"] }
unicode-segmentation.workspace = true
gpui_util = { workspace = true }
+proptest = { workspace = true }
[target.'cfg(not(target_family = "wasm"))'.dev-dependencies]
http_client = { workspace = true, features = ["test-support"] }
@@ -744,9 +744,11 @@ impl App {
}));
platform.on_quit(Box::new({
- let cx = app.clone();
+ let cx = Rc::downgrade(&app);
move || {
- cx.borrow_mut().shutdown();
+ if let Some(cx) = cx.upgrade() {
+ cx.borrow_mut().shutdown();
+ }
}
}));
@@ -2613,13 +2615,6 @@ impl<'a, T> Drop for GpuiBorrow<'a, T> {
}
}
-impl Drop for App {
- fn drop(&mut self) {
- self.foreground_executor.close();
- self.background_executor.close();
- }
-}
-
#[cfg(test)]
mod test {
use std::{cell::RefCell, rc::Rc};
@@ -129,11 +129,6 @@ impl BackgroundExecutor {
}
}
- /// Close this executor. Tasks will not run after this is called.
- pub fn close(&self) {
- self.inner.close();
- }
-
/// Enqueues the given future to be run to completion on a background thread.
#[track_caller]
pub fn spawn<R>(&self, future: impl Future<Output = R> + Send + 'static) -> Task<R>
@@ -173,7 +168,6 @@ impl BackgroundExecutor {
{
use crate::RunnableMeta;
use parking_lot::{Condvar, Mutex};
- use std::sync::{Arc, atomic::AtomicBool};
struct NotifyOnDrop<'a>(&'a (Condvar, Mutex<bool>));
@@ -197,14 +191,13 @@ impl BackgroundExecutor {
let dispatcher = self.dispatcher.clone();
let location = core::panic::Location::caller();
- let closed = Arc::new(AtomicBool::new(false));
let pair = &(Condvar::new(), Mutex::new(false));
let _wait_guard = WaitOnDrop(pair);
let (runnable, task) = unsafe {
async_task::Builder::new()
- .metadata(RunnableMeta { location, closed })
+ .metadata(RunnableMeta { location })
.spawn_unchecked(
move |_| async {
let _notify_guard = NotifyOnDrop(pair);
@@ -404,11 +397,6 @@ impl ForegroundExecutor {
}
}
- /// Close this executor. Tasks will not run after this is called.
- pub fn close(&self) {
- self.inner.close();
- }
-
/// Enqueues the given Task to run on the main thread.
#[track_caller]
pub fn spawn<R>(&self, future: impl Future<Output = R> + 'static) -> Task<R>
@@ -595,144 +583,4 @@ mod test {
"Task should run normally when app is alive"
);
}
-
- #[test]
- fn test_task_cancelled_when_app_dropped() {
- let (dispatcher, _background_executor, app) = create_test_app();
- let foreground_executor = app.borrow().foreground_executor.clone();
- let app_weak = Rc::downgrade(&app);
-
- let task_ran = Rc::new(RefCell::new(false));
- let task_ran_clone = Rc::clone(&task_ran);
-
- foreground_executor
- .spawn(async move {
- *task_ran_clone.borrow_mut() = true;
- })
- .detach();
-
- drop(app);
-
- assert!(app_weak.upgrade().is_none(), "App should have been dropped");
-
- dispatcher.run_until_parked();
-
- // The task should have been cancelled, not run
- assert!(
- !*task_ran.borrow(),
- "Task should have been cancelled when app was dropped, but it ran!"
- );
- }
-
- #[test]
- fn test_nested_tasks_both_cancel() {
- let (dispatcher, _background_executor, app) = create_test_app();
- let foreground_executor = app.borrow().foreground_executor.clone();
- let app_weak = Rc::downgrade(&app);
-
- let outer_completed = Rc::new(RefCell::new(false));
- let inner_completed = Rc::new(RefCell::new(false));
- let reached_await = Rc::new(RefCell::new(false));
-
- let outer_flag = Rc::clone(&outer_completed);
- let inner_flag = Rc::clone(&inner_completed);
- let await_flag = Rc::clone(&reached_await);
-
- // Channel to block the inner task until we're ready
- let (tx, rx) = futures::channel::oneshot::channel::<()>();
-
- let inner_executor = foreground_executor.clone();
-
- foreground_executor
- .spawn(async move {
- let inner_task = inner_executor.spawn({
- let inner_flag = Rc::clone(&inner_flag);
- async move {
- rx.await.ok();
- *inner_flag.borrow_mut() = true;
- }
- });
-
- *await_flag.borrow_mut() = true;
-
- inner_task.await;
-
- *outer_flag.borrow_mut() = true;
- })
- .detach();
-
- // Run dispatcher until outer task reaches the await point
- // The inner task will be blocked on the channel
- dispatcher.run_until_parked();
-
- // Verify we actually reached the await point before dropping the app
- assert!(
- *reached_await.borrow(),
- "Outer task should have reached the await point"
- );
-
- // Neither task should have completed yet
- assert!(
- !*outer_completed.borrow(),
- "Outer task should not have completed yet"
- );
- assert!(
- !*inner_completed.borrow(),
- "Inner task should not have completed yet"
- );
-
- // Drop the channel sender and app while outer is awaiting inner
- drop(tx);
- drop(app);
- assert!(app_weak.upgrade().is_none(), "App should have been dropped");
-
- // Run dispatcher - both tasks should be cancelled
- dispatcher.run_until_parked();
-
- // Neither task should have completed (both were cancelled)
- assert!(
- !*outer_completed.borrow(),
- "Outer task should have been cancelled, not completed"
- );
- assert!(
- !*inner_completed.borrow(),
- "Inner task should have been cancelled, not completed"
- );
- }
-
- #[test]
- #[should_panic]
- fn test_polling_cancelled_task_panics() {
- let (dispatcher, _background_executor, app) = create_test_app();
- let foreground_executor = app.borrow().foreground_executor.clone();
- let app_weak = Rc::downgrade(&app);
-
- let task = foreground_executor.spawn(async move { 42 });
-
- drop(app);
-
- assert!(app_weak.upgrade().is_none(), "App should have been dropped");
-
- dispatcher.run_until_parked();
-
- foreground_executor.block_on(task);
- }
-
- #[test]
- fn test_polling_cancelled_task_returns_none_with_fallible() {
- let (dispatcher, _background_executor, app) = create_test_app();
- let foreground_executor = app.borrow().foreground_executor.clone();
- let app_weak = Rc::downgrade(&app);
-
- let task = foreground_executor.spawn(async move { 42 }).fallible();
-
- drop(app);
-
- assert!(app_weak.upgrade().is_none(), "App should have been dropped");
-
- dispatcher.run_until_parked();
-
- let result = foreground_executor.block_on(task);
- assert_eq!(result, None, "Cancelled task should return None");
- }
}
@@ -1062,6 +1062,13 @@ impl PlatformInputHandler {
pub fn accepts_text_input(&mut self, window: &mut Window, cx: &mut App) -> bool {
self.handler.accepts_text_input(window, cx)
}
+
+ #[allow(dead_code)]
+ pub fn query_accepts_text_input(&mut self) -> bool {
+ self.cx
+ .update(|window, cx| self.handler.accepts_text_input(window, cx))
+ .unwrap_or(true)
+ }
}
/// A struct representing a selection in a text buffer, in UTF16 characters.
@@ -109,16 +109,13 @@ impl Scheduler for PlatformScheduler {
#[track_caller]
fn timer(&self, duration: Duration) -> Timer {
- use std::sync::{Arc, atomic::AtomicBool};
-
let (tx, rx) = oneshot::channel();
let dispatcher = self.dispatcher.clone();
// Create a runnable that will send the completion signal
let location = std::panic::Location::caller();
- let closed = Arc::new(AtomicBool::new(false));
let (runnable, _task) = async_task::Builder::new()
- .metadata(RunnableMeta { location, closed })
+ .metadata(RunnableMeta { location })
.spawn(
move |_| async move {
let _ = tx.send(());
@@ -44,11 +44,6 @@ impl LinuxDispatcher {
.name(format!("Worker-{i}"))
.spawn(move || {
for runnable in receiver.iter() {
- // Check if the executor that spawned this task was closed
- if runnable.metadata().is_closed() {
- continue;
- }
-
let start = Instant::now();
let location = runnable.metadata().location;
@@ -94,11 +89,6 @@ impl LinuxDispatcher {
calloop::timer::Timer::from_duration(timer.duration),
move |_, _, _| {
if let Some(runnable) = runnable.take() {
- // Check if the executor that spawned this task was closed
- if runnable.metadata().is_closed() {
- return TimeoutAction::Drop;
- }
-
let start = Instant::now();
let location = runnable.metadata().location;
let mut timing = TaskTiming {
@@ -221,6 +221,7 @@ pub(crate) struct WaylandClientState {
// Output to scale mapping
outputs: HashMap<ObjectId, Output>,
in_progress_outputs: HashMap<ObjectId, InProgressOutput>,
+ wl_outputs: HashMap<ObjectId, wl_output::WlOutput>,
keyboard_layout: LinuxKeyboardLayout,
keymap_state: Option<xkb::State>,
compose_state: Option<xkb::compose::State>,
@@ -463,6 +464,8 @@ impl WaylandClient {
let mut seat: Option<wl_seat::WlSeat> = None;
#[allow(clippy::mutable_key_type)]
let mut in_progress_outputs = HashMap::default();
+ #[allow(clippy::mutable_key_type)]
+ let mut wl_outputs: HashMap<ObjectId, wl_output::WlOutput> = HashMap::default();
globals.contents().with_list(|list| {
for global in list {
match &global.interface[..] {
@@ -482,6 +485,7 @@ impl WaylandClient {
(),
);
in_progress_outputs.insert(output.id(), InProgressOutput::default());
+ wl_outputs.insert(output.id(), output);
}
_ => {}
}
@@ -589,6 +593,7 @@ impl WaylandClient {
composing: false,
outputs: HashMap::default(),
in_progress_outputs,
+ wl_outputs,
windows: HashMap::default(),
common,
keyboard_layout: LinuxKeyboardLayout::new(UNKNOWN_KEYBOARD_LAYOUT_NAME),
@@ -720,6 +725,15 @@ impl LinuxClient for WaylandClient {
let parent = state.keyboard_focused_window.clone();
+ let target_output = params.display_id.and_then(|display_id| {
+ let target_protocol_id: u32 = display_id.into();
+ state
+ .wl_outputs
+ .iter()
+ .find(|(id, _)| id.protocol_id() == target_protocol_id)
+ .map(|(_, output)| output.clone())
+ });
+
let appearance = state.common.appearance;
let compositor_gpu = state.compositor_gpu.take();
let (window, surface_id) = WaylandWindow::new(
@@ -731,6 +745,7 @@ impl LinuxClient for WaylandClient {
params,
appearance,
parent,
+ target_output,
)?;
state.windows.insert(surface_id, window.0.clone());
@@ -1020,6 +1035,7 @@ impl Dispatch<wl_registry::WlRegistry, GlobalListContents> for WaylandClientStat
state
.in_progress_outputs
.insert(output.id(), InProgressOutput::default());
+ state.wl_outputs.insert(output.id(), output);
}
_ => {}
},
@@ -12,7 +12,10 @@ use futures::channel::oneshot::Receiver;
use raw_window_handle as rwh;
use wayland_backend::client::ObjectId;
use wayland_client::WEnum;
-use wayland_client::{Proxy, protocol::wl_surface};
+use wayland_client::{
+ Proxy,
+ protocol::{wl_output, wl_surface},
+};
use wayland_protocols::wp::viewporter::client::wp_viewport;
use wayland_protocols::xdg::decoration::zv1::client::zxdg_toplevel_decoration_v1;
use wayland_protocols::xdg::shell::client::xdg_surface;
@@ -129,6 +132,7 @@ impl WaylandSurfaceState {
globals: &Globals,
params: &WindowParams,
parent: Option<WaylandWindowStatePtr>,
+ target_output: Option<wl_output::WlOutput>,
) -> anyhow::Result<Self> {
// For layer_shell windows, create a layer surface instead of an xdg surface
if let WindowKind::LayerShell(options) = &params.kind {
@@ -138,7 +142,7 @@ impl WaylandSurfaceState {
let layer_surface = layer_shell.get_layer_surface(
&surface,
- None,
+ target_output.as_ref(),
super::layer_shell::wayland_layer(options.layer),
options.namespace.clone(),
&globals.qh,
@@ -494,9 +498,11 @@ impl WaylandWindow {
params: WindowParams,
appearance: WindowAppearance,
parent: Option<WaylandWindowStatePtr>,
+ target_output: Option<wl_output::WlOutput>,
) -> anyhow::Result<(Self, ObjectId)> {
let surface = globals.compositor.create_surface(&globals.qh, ());
- let surface_state = WaylandSurfaceState::new(&surface, &globals, &params, parent.clone())?;
+ let surface_state =
+ WaylandSurfaceState::new(&surface, &globals, &params, parent.clone(), target_output)?;
if let Some(fractional_scale_manager) = globals.fractional_scale_manager.as_ref() {
fractional_scale_manager.get_fractional_scale(&surface, &globals.qh, surface.id());
@@ -201,14 +201,7 @@ extern "C" fn trampoline(context: *mut c_void) {
let runnable =
unsafe { Runnable::<RunnableMeta>::from_raw(NonNull::new_unchecked(context as *mut ())) };
- let metadata = runnable.metadata();
-
- // Check if the executor that spawned this task was closed
- if metadata.is_closed() {
- return;
- }
-
- let location = metadata.location;
+ let location = runnable.metadata().location;
let start = Instant::now();
let timing = TaskTiming {
@@ -184,10 +184,6 @@ impl WebDispatcher {
}
};
- if runnable.metadata().is_closed() {
- continue;
- }
-
runnable.run();
}
})
@@ -263,9 +259,7 @@ impl PlatformDispatcher for WebDispatcher {
let millis = duration.as_millis().min(i32::MAX as u128) as i32;
if self.on_main_thread() {
let callback = Closure::once_into_js(move || {
- if !runnable.metadata().is_closed() {
- runnable.run();
- }
+ runnable.run();
});
self.browser_window
.set_timeout_with_callback_and_timeout_and_arguments_0(
@@ -300,15 +294,11 @@ impl PlatformDispatcher for WebDispatcher {
fn execute_on_main_thread(window: &web_sys::Window, item: MainThreadItem) {
match item {
MainThreadItem::Runnable(runnable) => {
- if !runnable.metadata().is_closed() {
- runnable.run();
- }
+ runnable.run();
}
MainThreadItem::Delayed { runnable, millis } => {
let callback = Closure::once_into_js(move || {
- if !runnable.metadata().is_closed() {
- runnable.run();
- }
+ runnable.run();
});
window
.set_timeout_with_callback_and_timeout_and_arguments_0(
@@ -325,9 +315,7 @@ fn execute_on_main_thread(window: &web_sys::Window, item: MainThreadItem) {
fn schedule_runnable(window: &web_sys::Window, runnable: RunnableVariant, priority: Priority) {
let callback = Closure::once_into_js(move || {
- if !runnable.metadata().is_closed() {
- runnable.run();
- }
+ runnable.run();
});
let callback: &js_sys::Function = callback.unchecked_ref();
@@ -58,10 +58,6 @@ impl WindowsDispatcher {
let mut task_wrapper = Some(runnable);
WorkItemHandler::new(move |_| {
let runnable = task_wrapper.take().unwrap();
- // Check if the executor that spawned this task was closed
- if runnable.metadata().is_closed() {
- return Ok(());
- }
Self::execute_runnable(runnable);
Ok(())
})
@@ -75,10 +71,6 @@ impl WindowsDispatcher {
let mut task_wrapper = Some(runnable);
TimerElapsedHandler::new(move |_| {
let runnable = task_wrapper.take().unwrap();
- // Check if the executor that spawned this task was closed
- if runnable.metadata().is_closed() {
- return Ok(());
- }
Self::execute_runnable(runnable);
Ok(())
})
@@ -593,33 +593,63 @@ impl WindowsWindowInner {
}
pub(crate) fn update_ime_position(&self, handle: HWND, caret_position: POINT) {
+ let Some(ctx) = ImeContext::get(handle) else {
+ return;
+ };
unsafe {
- let ctx = ImmGetContext(handle);
- if ctx.is_invalid() {
- return;
- }
+ ImmSetCompositionWindow(
+ *ctx,
+ &COMPOSITIONFORM {
+ dwStyle: CFS_POINT,
+ ptCurrentPos: caret_position,
+ ..Default::default()
+ },
+ )
+ .ok()
+ .log_err();
- let config = COMPOSITIONFORM {
- dwStyle: CFS_POINT,
- ptCurrentPos: caret_position,
- ..Default::default()
- };
- ImmSetCompositionWindow(ctx, &config).ok().log_err();
- let config = CANDIDATEFORM {
- dwStyle: CFS_CANDIDATEPOS,
- ptCurrentPos: caret_position,
- ..Default::default()
- };
- ImmSetCandidateWindow(ctx, &config).ok().log_err();
- ImmReleaseContext(handle, ctx).ok().log_err();
+ ImmSetCandidateWindow(
+ *ctx,
+ &CANDIDATEFORM {
+ dwStyle: CFS_CANDIDATEPOS,
+ ptCurrentPos: caret_position,
+ ..Default::default()
+ },
+ )
+ .ok()
+ .log_err();
+ }
+ }
+
+ fn update_ime_enabled(&self, handle: HWND) {
+ let ime_enabled = self
+ .with_input_handler(|input_handler| input_handler.query_accepts_text_input())
+ .unwrap_or(false);
+ if ime_enabled == self.state.ime_enabled.get() {
+ return;
+ }
+ self.state.ime_enabled.set(ime_enabled);
+ unsafe {
+ if ime_enabled {
+ ImmAssociateContextEx(handle, HIMC::default(), IACE_DEFAULT)
+ .ok()
+ .log_err();
+ } else {
+ if let Some(ctx) = ImeContext::get(handle) {
+ ImmNotifyIME(*ctx, NI_COMPOSITIONSTR, CPS_COMPLETE, 0)
+ .ok()
+ .log_err();
+ }
+ ImmAssociateContextEx(handle, HIMC::default(), 0)
+ .ok()
+ .log_err();
+ }
}
}
fn handle_ime_composition(&self, handle: HWND, lparam: LPARAM) -> Option<isize> {
- let ctx = unsafe { ImmGetContext(handle) };
- let result = self.handle_ime_composition_inner(ctx, lparam);
- unsafe { ImmReleaseContext(handle, ctx).ok().log_err() };
- result
+ let ctx = ImeContext::get(handle)?;
+ self.handle_ime_composition_inner(*ctx, lparam)
}
fn handle_ime_composition_inner(&self, ctx: HIMC, lparam: LPARAM) -> Option<isize> {
@@ -1123,6 +1153,7 @@ impl WindowsWindowInner {
});
self.state.callbacks.request_frame.set(Some(request_frame));
+ self.update_ime_enabled(handle);
unsafe { ValidateRect(Some(handle), None).ok().log_err() };
Some(0)
@@ -1205,6 +1236,36 @@ impl WindowsWindowInner {
}
}
+struct ImeContext {
+ hwnd: HWND,
+ himc: HIMC,
+}
+
+impl ImeContext {
+ fn get(hwnd: HWND) -> Option<Self> {
+ let himc = unsafe { ImmGetContext(hwnd) };
+ if himc.is_invalid() {
+ return None;
+ }
+ Some(Self { hwnd, himc })
+ }
+}
+
+impl std::ops::Deref for ImeContext {
+ type Target = HIMC;
+ fn deref(&self) -> &HIMC {
+ &self.himc
+ }
+}
+
+impl Drop for ImeContext {
+ fn drop(&mut self) {
+ unsafe {
+ ImmReleaseContext(self.hwnd, self.himc).ok().log_err();
+ }
+ }
+}
+
fn handle_key_event<F>(
wparam: WPARAM,
lparam: LPARAM,
@@ -52,6 +52,7 @@ pub struct WindowsWindowState {
pub callbacks: Callbacks,
pub input_handler: Cell<Option<PlatformInputHandler>>,
+ pub ime_enabled: Cell<bool>,
pub pending_surrogate: Cell<Option<u16>>,
pub last_reported_modifiers: Cell<Option<Modifiers>>,
pub last_reported_capslock: Cell<Option<Capslock>>,
@@ -142,6 +143,7 @@ impl WindowsWindowState {
min_size,
callbacks,
input_handler: Cell::new(input_handler),
+ ime_enabled: Cell::new(true),
pending_surrogate: Cell::new(pending_surrogate),
last_reported_modifiers: Cell::new(last_reported_modifiers),
last_reported_capslock: Cell::new(last_reported_capslock),
@@ -67,25 +67,22 @@ pub fn init(cx: &mut App) {
.detach();
if let Some(extension_events) = extension::ExtensionEvents::try_global(cx) {
- cx.subscribe(&extension_events, move |_, evt, cx| {
- match evt {
- extension::Event::ExtensionInstalled(_)
- | extension::Event::ExtensionUninstalled(_)
- | extension::Event::ConfigureExtensionRequested(_) => return,
- extension::Event::ExtensionsInstalledChanged => {}
+ cx.subscribe(&extension_events, move |_, evt, cx| match evt {
+ extension::Event::ExtensionsInstalledChanged => {
+ cx.update_global::<SchemaStore, _>(|schema_store, cx| {
+ schema_store.notify_schema_changed(ChangedSchemas::Settings, cx);
+ });
}
- cx.update_global::<SchemaStore, _>(|schema_store, cx| {
- schema_store.notify_schema_changed(&format!("{SCHEMA_URI_PREFIX}settings"), cx);
- schema_store
- .notify_schema_changed(&format!("{SCHEMA_URI_PREFIX}project_settings"), cx);
- });
+ extension::Event::ExtensionUninstalled(_)
+ | extension::Event::ExtensionInstalled(_)
+ | extension::Event::ConfigureExtensionRequested(_) => {}
})
.detach();
}
cx.observe_global::<dap::DapRegistry>(move |cx| {
cx.update_global::<SchemaStore, _>(|schema_store, cx| {
- schema_store.notify_schema_changed(&format!("{SCHEMA_URI_PREFIX}debug_tasks"), cx);
+ schema_store.notify_schema_changed(ChangedSchemas::DebugTasks, cx);
});
})
.detach();
@@ -98,18 +95,42 @@ pub struct SchemaStore {
impl gpui::Global for SchemaStore {}
+enum ChangedSchemas {
+ Settings,
+ DebugTasks,
+}
+
impl SchemaStore {
- fn notify_schema_changed(&mut self, uri: &str, cx: &mut App) {
- DYNAMIC_SCHEMA_CACHE.write().remove(uri);
+ fn notify_schema_changed(&mut self, changed_schemas: ChangedSchemas, cx: &mut App) {
+ let uris_to_invalidate = match changed_schemas {
+ ChangedSchemas::Settings => {
+ let settings_uri_prefix = &format!("{SCHEMA_URI_PREFIX}settings");
+ let project_settings_uri = &format!("{SCHEMA_URI_PREFIX}project_settings");
+ DYNAMIC_SCHEMA_CACHE
+ .write()
+ .extract_if(|uri, _| {
+ uri == project_settings_uri || uri.starts_with(settings_uri_prefix)
+ })
+ .map(|(url, _)| url)
+ .collect()
+ }
+ ChangedSchemas::DebugTasks => DYNAMIC_SCHEMA_CACHE
+ .write()
+ .remove_entry(&format!("{SCHEMA_URI_PREFIX}debug_tasks"))
+ .map_or_else(Vec::new, |(uri, _)| vec![uri]),
+ };
+
+ if uris_to_invalidate.is_empty() {
+ return;
+ }
- let uri = uri.to_string();
self.lsp_stores.retain(|lsp_store| {
let Some(lsp_store) = lsp_store.upgrade() else {
return false;
};
- project::lsp_store::json_language_server_ext::notify_schema_changed(
+ project::lsp_store::json_language_server_ext::notify_schemas_changed(
lsp_store,
- uri.clone(),
+ &uris_to_invalidate,
cx,
);
true
@@ -238,7 +259,8 @@ async fn resolve_dynamic_schema(
(adapter_name, LspSchemaKind::Settings)
} else {
anyhow::bail!(
- "Invalid LSP schema path: expected '{{adapter}}/initialization_options' or '{{adapter}}/settings', got '{}'",
+ "Invalid LSP schema path: \
+ Expected '{{adapter}}/initialization_options' or '{{adapter}}/settings', got '{}'",
lsp_path
);
};
@@ -484,7 +506,7 @@ pub fn all_schema_file_associations(
let file_name = normalized_action_name_to_file_name(normalized_name.clone());
serde_json::json!({
"fileMatch": [file_name],
- "url": format!("{}action/{normalized_name}", SCHEMA_URI_PREFIX)
+ "url": format!("{SCHEMA_URI_PREFIX}action/{normalized_name}")
})
}));
@@ -350,6 +350,44 @@ impl LspAdapter for ExtensionLspAdapter {
})
}
+ async fn initialization_options_schema(
+ self: Arc<Self>,
+ delegate: &Arc<dyn LspAdapterDelegate>,
+ _cached_binary: OwnedMutexGuard<Option<(bool, LanguageServerBinary)>>,
+ _cx: &mut AsyncApp,
+ ) -> Option<serde_json::Value> {
+ let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _;
+ let json_schema: Option<String> = self
+ .extension
+ .language_server_initialization_options_schema(
+ self.language_server_id.clone(),
+ delegate,
+ )
+ .await
+ .ok()
+ .flatten();
+ json_schema.and_then(|s| serde_json::from_str(&s).ok())
+ }
+
+ async fn settings_schema(
+ self: Arc<Self>,
+ delegate: &Arc<dyn LspAdapterDelegate>,
+ _cached_binary: OwnedMutexGuard<Option<(bool, LanguageServerBinary)>>,
+ _cx: &mut AsyncApp,
+ ) -> Option<serde_json::Value> {
+ let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _;
+ let json_schema: Option<String> = self
+ .extension
+ .language_server_workspace_configuration_schema(
+ self.language_server_id.clone(),
+ delegate,
+ )
+ .await
+ .ok()
+ .flatten();
+ json_schema.and_then(|s| serde_json::from_str(&s).ok())
+ }
+
async fn additional_initialization_options(
self: Arc<Self>,
target_language_server_id: LanguageServerName,
@@ -13,10 +13,11 @@ pub mod fake_provider;
use anthropic::{AnthropicError, parse_prompt_too_long};
use anyhow::{Result, anyhow};
use client::Client;
+use client::UserStore;
use cloud_llm_client::CompletionRequestStatus;
use futures::FutureExt;
use futures::{StreamExt, future::BoxFuture, stream::BoxStream};
-use gpui::{AnyView, App, AsyncApp, SharedString, Task, Window};
+use gpui::{AnyView, App, AsyncApp, Entity, SharedString, Task, Window};
use http_client::{StatusCode, http};
use icons::IconName;
use open_router::OpenRouterError;
@@ -61,9 +62,9 @@ pub const ZED_CLOUD_PROVIDER_ID: LanguageModelProviderId = LanguageModelProvider
pub const ZED_CLOUD_PROVIDER_NAME: LanguageModelProviderName =
LanguageModelProviderName::new("Zed");
-pub fn init(client: Arc<Client>, cx: &mut App) {
+pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
init_settings(cx);
- RefreshLlmTokenListener::register(client, cx);
+ RefreshLlmTokenListener::register(client, user_store, cx);
}
pub fn init_settings(cx: &mut App) {
@@ -3,11 +3,14 @@ use std::sync::Arc;
use anyhow::{Context as _, Result};
use client::Client;
+use client::UserStore;
use cloud_api_client::ClientApiError;
use cloud_api_types::OrganizationId;
use cloud_api_types::websocket_protocol::MessageToClient;
use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME};
-use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _};
+use gpui::{
+ App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription,
+};
use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
use thiserror::Error;
@@ -101,13 +104,15 @@ impl Global for GlobalRefreshLlmTokenListener {}
pub struct RefreshLlmTokenEvent;
-pub struct RefreshLlmTokenListener;
+pub struct RefreshLlmTokenListener {
+ _subscription: Subscription,
+}
impl EventEmitter<RefreshLlmTokenEvent> for RefreshLlmTokenListener {}
impl RefreshLlmTokenListener {
- pub fn register(client: Arc<Client>, cx: &mut App) {
- let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, cx));
+ pub fn register(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
+ let listener = cx.new(|cx| RefreshLlmTokenListener::new(client, user_store, cx));
cx.set_global(GlobalRefreshLlmTokenListener(listener));
}
@@ -115,7 +120,7 @@ impl RefreshLlmTokenListener {
GlobalRefreshLlmTokenListener::global(cx).0.clone()
}
- fn new(client: Arc<Client>, cx: &mut Context<Self>) -> Self {
+ fn new(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut Context<Self>) -> Self {
client.add_message_to_client_handler({
let this = cx.entity();
move |message, cx| {
@@ -123,7 +128,15 @@ impl RefreshLlmTokenListener {
}
});
- Self
+ let subscription = cx.subscribe(&user_store, |_this, _user_store, event, cx| {
+ if matches!(event, client::user::Event::OrganizationChanged) {
+ cx.emit(RefreshLlmTokenEvent);
+ }
+ });
+
+ Self {
+ _subscription: subscription,
+ }
}
fn handle_refresh_llm_token(this: Entity<Self>, message: &MessageToClient, cx: &mut App) {
@@ -2,15 +2,17 @@ use std::pin::Pin;
use std::str::FromStr as _;
use std::sync::Arc;
+use anthropic::AnthropicModelMode;
use anyhow::{Result, anyhow};
use cloud_llm_client::CompletionIntent;
use collections::HashMap;
use copilot::{GlobalCopilotAuth, Status};
use copilot_chat::responses as copilot_responses;
use copilot_chat::{
- ChatMessage, ChatMessageContent, ChatMessagePart, CopilotChat, CopilotChatConfiguration,
- Function, FunctionContent, ImageUrl, Model as CopilotChatModel, ModelVendor,
- Request as CopilotChatRequest, ResponseEvent, Tool, ToolCall, ToolCallContent, ToolChoice,
+ ChatLocation, ChatMessage, ChatMessageContent, ChatMessagePart, CopilotChat,
+ CopilotChatConfiguration, Function, FunctionContent, ImageUrl, Model as CopilotChatModel,
+ ModelVendor, Request as CopilotChatRequest, ResponseEvent, Tool, ToolCall, ToolCallContent,
+ ToolChoice,
};
use futures::future::BoxFuture;
use futures::stream::BoxStream;
@@ -20,8 +22,8 @@ use http_client::StatusCode;
use language::language_settings::all_language_settings;
use language_model::{
AuthenticateError, IconOrSvg, LanguageModel, LanguageModelCompletionError,
- LanguageModelCompletionEvent, LanguageModelCostInfo, LanguageModelId, LanguageModelName,
- LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
+ LanguageModelCompletionEvent, LanguageModelCostInfo, LanguageModelEffortLevel, LanguageModelId,
+ LanguageModelName, LanguageModelProvider, LanguageModelProviderId, LanguageModelProviderName,
LanguageModelProviderState, LanguageModelRequest, LanguageModelRequestMessage,
LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolSchemaFormat,
LanguageModelToolUse, MessageContent, RateLimiter, Role, StopReason, TokenUsage,
@@ -30,6 +32,7 @@ use settings::SettingsStore;
use ui::prelude::*;
use util::debug_panic;
+use crate::provider::anthropic::{AnthropicEventMapper, into_anthropic};
use crate::provider::util::parse_tool_arguments;
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("copilot_chat");
@@ -254,6 +257,33 @@ impl LanguageModel for CopilotChatLanguageModel {
self.model.supports_vision()
}
+ fn supports_thinking(&self) -> bool {
+ self.model.can_think()
+ }
+
+ fn supported_effort_levels(&self) -> Vec<LanguageModelEffortLevel> {
+ let levels = self.model.reasoning_effort_levels();
+ if levels.is_empty() {
+ return vec![];
+ }
+ levels
+ .iter()
+ .map(|level| {
+ let name: SharedString = match level.as_str() {
+ "low" => "Low".into(),
+ "medium" => "Medium".into(),
+ "high" => "High".into(),
+ _ => SharedString::from(level.clone()),
+ };
+ LanguageModelEffortLevel {
+ name,
+ value: SharedString::from(level.clone()),
+ is_default: level == "high",
+ }
+ })
+ .collect()
+ }
+
fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
match self.model.vendor() {
ModelVendor::OpenAI | ModelVendor::Anthropic => {
@@ -333,12 +363,94 @@ impl LanguageModel for CopilotChatLanguageModel {
| CompletionIntent::EditFile => false,
});
+ if self.model.supports_messages() {
+ let location = intent_to_chat_location(request.intent);
+ let model = self.model.clone();
+ let request_limiter = self.request_limiter.clone();
+ let future = cx.spawn(async move |cx| {
+ let effort = request
+ .thinking_effort
+ .as_ref()
+ .and_then(|e| anthropic::Effort::from_str(e).ok());
+
+ let mut anthropic_request = into_anthropic(
+ request,
+ model.id().to_string(),
+ 0.0,
+ model.max_output_tokens() as u64,
+ if model.supports_adaptive_thinking() {
+ AnthropicModelMode::Thinking {
+ budget_tokens: None,
+ }
+ } else if model.can_think() {
+ AnthropicModelMode::Thinking {
+ budget_tokens: compute_thinking_budget(
+ model.min_thinking_budget(),
+ model.max_thinking_budget(),
+ model.max_output_tokens() as u32,
+ ),
+ }
+ } else {
+ AnthropicModelMode::Default
+ },
+ );
+
+ anthropic_request.temperature = None;
+
+ // The Copilot proxy doesn't support eager_input_streaming on tools.
+ for tool in &mut anthropic_request.tools {
+ tool.eager_input_streaming = false;
+ }
+
+ if model.supports_adaptive_thinking() {
+ if anthropic_request.thinking.is_some() {
+ anthropic_request.thinking = Some(anthropic::Thinking::Adaptive);
+ anthropic_request.output_config = Some(anthropic::OutputConfig { effort });
+ }
+ }
+
+ let anthropic_beta = if !model.supports_adaptive_thinking() && model.can_think() {
+ Some("interleaved-thinking-2025-05-14".to_string())
+ } else {
+ None
+ };
+
+ let body = serde_json::to_string(&anthropic::StreamingRequest {
+ base: anthropic_request,
+ stream: true,
+ })
+ .map_err(|e| anyhow::anyhow!(e))?;
+
+ let stream = CopilotChat::stream_messages(
+ body,
+ location,
+ is_user_initiated,
+ anthropic_beta,
+ cx.clone(),
+ );
+
+ request_limiter
+ .stream(async move {
+ let events = stream.await?;
+ let mapper = AnthropicEventMapper::new();
+ Ok(mapper.map_stream(events).boxed())
+ })
+ .await
+ });
+ return async move { Ok(future.await?.boxed()) }.boxed();
+ }
+
if self.model.supports_response() {
+ let location = intent_to_chat_location(request.intent);
let responses_request = into_copilot_responses(&self.model, request);
let request_limiter = self.request_limiter.clone();
let future = cx.spawn(async move |cx| {
- let request =
- CopilotChat::stream_response(responses_request, is_user_initiated, cx.clone());
+ let request = CopilotChat::stream_response(
+ responses_request,
+ location,
+ is_user_initiated,
+ cx.clone(),
+ );
request_limiter
.stream(async move {
let stream = request.await?;
@@ -350,6 +462,7 @@ impl LanguageModel for CopilotChatLanguageModel {
return async move { Ok(future.await?.boxed()) }.boxed();
}
+ let location = intent_to_chat_location(request.intent);
let copilot_request = match into_copilot_chat(&self.model, request) {
Ok(request) => request,
Err(err) => return futures::future::ready(Err(err.into())).boxed(),
@@ -358,8 +471,12 @@ impl LanguageModel for CopilotChatLanguageModel {
let request_limiter = self.request_limiter.clone();
let future = cx.spawn(async move |cx| {
- let request =
- CopilotChat::stream_completion(copilot_request, is_user_initiated, cx.clone());
+ let request = CopilotChat::stream_completion(
+ copilot_request,
+ location,
+ is_user_initiated,
+ cx.clone(),
+ );
request_limiter
.stream(async move {
let response = request.await?;
@@ -761,6 +878,9 @@ fn into_copilot_chat(
model: &CopilotChatModel,
request: LanguageModelRequest,
) -> Result<CopilotChatRequest> {
+ let temperature = request.temperature;
+ let tool_choice = request.tool_choice;
+
let mut request_messages: Vec<LanguageModelRequestMessage> = Vec::new();
for message in request.messages {
if let Some(last_message) = request_messages.last_mut() {
@@ -859,10 +979,9 @@ fn into_copilot_chat(
let text_content = {
let mut buffer = String::new();
for string in message.content.iter().filter_map(|content| match content {
- MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
- Some(text.as_str())
- }
- MessageContent::ToolUse(_)
+ MessageContent::Text(text) => Some(text.as_str()),
+ MessageContent::Thinking { .. }
+ | MessageContent::ToolUse(_)
| MessageContent::RedactedThinking(_)
| MessageContent::ToolResult(_)
| MessageContent::Image(_) => None,
@@ -919,21 +1038,52 @@ fn into_copilot_chat(
.collect::<Vec<_>>();
Ok(CopilotChatRequest {
- intent: true,
n: 1,
stream: model.uses_streaming(),
- temperature: 0.1,
+ temperature: temperature.unwrap_or(0.1),
model: model.id().to_string(),
messages,
tools,
- tool_choice: request.tool_choice.map(|choice| match choice {
+ tool_choice: tool_choice.map(|choice| match choice {
LanguageModelToolChoice::Auto => ToolChoice::Auto,
LanguageModelToolChoice::Any => ToolChoice::Any,
LanguageModelToolChoice::None => ToolChoice::None,
}),
+ thinking_budget: None,
})
}
+fn compute_thinking_budget(
+ min_budget: Option<u32>,
+ max_budget: Option<u32>,
+ max_output_tokens: u32,
+) -> Option<u32> {
+ let configured_budget: u32 = 16000;
+ let min_budget = min_budget.unwrap_or(1024);
+ let max_budget = max_budget.unwrap_or(max_output_tokens.saturating_sub(1));
+ let normalized = configured_budget.max(min_budget);
+ Some(
+ normalized
+ .min(max_budget)
+ .min(max_output_tokens.saturating_sub(1)),
+ )
+}
+
+fn intent_to_chat_location(intent: Option<CompletionIntent>) -> ChatLocation {
+ match intent {
+ Some(CompletionIntent::UserPrompt) => ChatLocation::Agent,
+ Some(CompletionIntent::ToolResults) => ChatLocation::Agent,
+ Some(CompletionIntent::ThreadSummarization) => ChatLocation::Panel,
+ Some(CompletionIntent::ThreadContextSummarization) => ChatLocation::Panel,
+ Some(CompletionIntent::CreateFile) => ChatLocation::Agent,
+ Some(CompletionIntent::EditFile) => ChatLocation::Agent,
+ Some(CompletionIntent::InlineAssist) => ChatLocation::Editor,
+ Some(CompletionIntent::TerminalInlineAssist) => ChatLocation::Terminal,
+ Some(CompletionIntent::GenerateGitCommitMessage) => ChatLocation::Other,
+ None => ChatLocation::Panel,
+ }
+}
+
fn into_copilot_responses(
model: &CopilotChatModel,
request: LanguageModelRequest,
@@ -949,7 +1099,7 @@ fn into_copilot_responses(
tool_choice,
stop: _,
temperature,
- thinking_allowed: _,
+ thinking_allowed,
thinking_effort: _,
speed: _,
} = request;
@@ -1128,10 +1278,18 @@ fn into_copilot_responses(
temperature,
tools: converted_tools,
tool_choice: mapped_tool_choice,
- reasoning: None, // We would need to add support for setting from user settings.
+ reasoning: if thinking_allowed {
+ Some(copilot_responses::ReasoningConfig {
+ effort: copilot_responses::ReasoningEffort::Medium,
+ summary: Some(copilot_responses::ReasoningSummary::Detailed),
+ })
+ } else {
+ None
+ },
include: Some(vec![
copilot_responses::ResponseIncludable::ReasoningEncryptedContent,
]),
+ store: false,
}
}
@@ -42,8 +42,8 @@ impl lsp::notification::Notification for SchemaContentsChanged {
type Params = String;
}
-pub fn notify_schema_changed(lsp_store: Entity<LspStore>, uri: String, cx: &App) {
- zlog::trace!(LOGGER => "Notifying schema changed for URI: {:?}", uri);
+pub fn notify_schemas_changed(lsp_store: Entity<LspStore>, uris: &[String], cx: &App) {
+ zlog::trace!(LOGGER => "Notifying schema changes for URIs: {:?}", uris);
let servers = lsp_store.read_with(cx, |lsp_store, _| {
let mut servers = Vec::new();
let Some(local) = lsp_store.as_local() else {
@@ -63,16 +63,18 @@ pub fn notify_schema_changed(lsp_store: Entity<LspStore>, uri: String, cx: &App)
servers
});
for server in servers {
- zlog::trace!(LOGGER => "Notifying server {NAME} (id {ID:?}) of schema change for URI: {uri:?}",
- NAME = server.name(),
- ID = server.server_id()
- );
- if let Err(error) = server.notify::<SchemaContentsChanged>(uri.clone()) {
- zlog::error!(
- LOGGER => "Failed to notify server {NAME} (id {ID:?}) of schema change for URI {uri:?}: {error:#}",
- NAME = server.name(),
- ID = server.server_id(),
+ for uri in uris {
+ zlog::trace!(LOGGER => "Notifying server {NAME} (id {ID:?}) of schema change for URI: {uri:?}",
+ NAME = server.name(),
+ ID = server.server_id()
);
+ if let Err(error) = server.notify::<SchemaContentsChanged>(uri.clone()) {
+ zlog::error!(
+ LOGGER => "Failed to notify server {NAME} (id {ID:?}) of schema change for URI {uri:?}: {error:#}",
+ NAME = server.name(),
+ ID = server.server_id(),
+ );
+ }
}
}
}
@@ -448,7 +448,9 @@ where
TT: Fn(&mut Window, &mut App) -> AnyView + 'static,
{
fn render(self, window: &mut Window, cx: &mut App) -> impl IntoElement {
- let store = ReplStore::global(cx).read(cx);
+ let store = ReplStore::global(cx);
+ store.update(cx, |store, cx| store.ensure_kernelspecs(cx));
+ let store = store.read(cx);
let all_entries = build_grouped_entries(store, self.worktree_id);
let selected_kernelspec = store.active_kernelspec(self.worktree_id, None, cx);
@@ -46,11 +46,9 @@ fn zed_dispatcher(cx: &mut App) -> impl Dispatcher {
impl Dispatcher for ZedDispatcher {
#[track_caller]
fn dispatch(&self, runnable: Runnable) {
- use std::sync::{Arc, atomic::AtomicBool};
let location = core::panic::Location::caller();
- let closed = Arc::new(AtomicBool::new(false));
let (wrapper, task) = async_task::Builder::new()
- .metadata(RunnableMeta { location, closed })
+ .metadata(RunnableMeta { location })
.spawn(|_| async move { runnable.run() }, {
let dispatcher = self.dispatcher.clone();
move |r| dispatcher.dispatch(r, Priority::default())
@@ -61,11 +59,9 @@ fn zed_dispatcher(cx: &mut App) -> impl Dispatcher {
#[track_caller]
fn dispatch_after(&self, duration: Duration, runnable: Runnable) {
- use std::sync::{Arc, atomic::AtomicBool};
let location = core::panic::Location::caller();
- let closed = Arc::new(AtomicBool::new(false));
let (wrapper, task) = async_task::Builder::new()
- .metadata(RunnableMeta { location, closed })
+ .metadata(RunnableMeta { location })
.spawn(|_| async move { runnable.run() }, {
let dispatcher = self.dispatcher.clone();
move |r| dispatcher.dispatch_after(duration, r)
@@ -191,6 +191,7 @@ pub fn run(
if !store.read(cx).is_enabled() {
return Ok(());
}
+ store.update(cx, |store, cx| store.ensure_kernelspecs(cx));
let editor = editor.upgrade().context("editor was dropped")?;
let selected_range = editor
@@ -204,7 +204,8 @@ impl Render for ReplSessionsPage {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let store = ReplStore::global(cx);
- let (kernel_specifications, sessions) = store.update(cx, |store, _cx| {
+ let (kernel_specifications, sessions) = store.update(cx, |store, cx| {
+ store.ensure_kernelspecs(cx);
(
store
.pure_jupyter_kernel_specifications()
@@ -27,6 +27,7 @@ pub struct ReplStore {
enabled: bool,
sessions: HashMap<EntityId, Entity<Session>>,
kernel_specifications: Vec<KernelSpecification>,
+ kernelspecs_initialized: bool,
selected_kernel_for_worktree: HashMap<WorktreeId, KernelSpecification>,
kernel_specifications_for_worktree: HashMap<WorktreeId, Vec<KernelSpecification>>,
active_python_toolchain_for_worktree: HashMap<WorktreeId, SharedString>,
@@ -39,12 +40,6 @@ impl ReplStore {
pub(crate) fn init(fs: Arc<dyn Fs>, cx: &mut App) {
let store = cx.new(move |cx| Self::new(fs, cx));
-
- #[cfg(not(feature = "test-support"))]
- store
- .update(cx, |store, cx| store.refresh_kernelspecs(cx))
- .detach_and_log_err(cx);
-
cx.set_global(GlobalReplStore(store))
}
@@ -65,6 +60,7 @@ impl ReplStore {
enabled: JupyterSettings::enabled(cx),
sessions: HashMap::default(),
kernel_specifications: Vec::new(),
+ kernelspecs_initialized: false,
_subscriptions: subscriptions,
kernel_specifications_for_worktree: HashMap::default(),
selected_kernel_for_worktree: HashMap::default(),
@@ -216,10 +212,17 @@ impl ReplStore {
}
}
+ pub fn ensure_kernelspecs(&mut self, cx: &mut Context<Self>) {
+ if self.kernelspecs_initialized {
+ return;
+ }
+ self.kernelspecs_initialized = true;
+ self.refresh_kernelspecs(cx).detach_and_log_err(cx);
+ }
+
pub fn refresh_kernelspecs(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let local_kernel_specifications = local_kernel_specifications(self.fs.clone());
let wsl_kernel_specifications = wsl_kernel_specifications(cx.background_executor().clone());
-
let remote_kernel_specifications = self.get_remote_kernel_specifications(cx);
let all_specs = cx.background_spawn(async move {
@@ -6,10 +6,7 @@ use std::{
panic::Location,
pin::Pin,
rc::Rc,
- sync::{
- Arc,
- atomic::{AtomicBool, Ordering},
- },
+ sync::Arc,
task::{Context, Poll},
thread::{self, ThreadId},
time::Duration,
@@ -19,7 +16,6 @@ use std::{
pub struct ForegroundExecutor {
session_id: SessionId,
scheduler: Arc<dyn Scheduler>,
- closed: Arc<AtomicBool>,
not_send: PhantomData<Rc<()>>,
}
@@ -28,7 +24,6 @@ impl ForegroundExecutor {
Self {
session_id,
scheduler,
- closed: Arc::new(AtomicBool::new(false)),
not_send: PhantomData,
}
}
@@ -41,16 +36,6 @@ impl ForegroundExecutor {
&self.scheduler
}
- /// Returns the closed flag for this executor.
- pub fn closed(&self) -> &Arc<AtomicBool> {
- &self.closed
- }
-
- /// Close this executor. Tasks will not run after this is called.
- pub fn close(&self) {
- self.closed.store(true, Ordering::SeqCst);
- }
-
#[track_caller]
pub fn spawn<F>(&self, future: F) -> Task<F::Output>
where
@@ -60,13 +45,12 @@ impl ForegroundExecutor {
let session_id = self.session_id;
let scheduler = Arc::clone(&self.scheduler);
let location = Location::caller();
- let closed = self.closed.clone();
let (runnable, task) = spawn_local_with_source_location(
future,
move |runnable| {
scheduler.schedule_foreground(session_id, runnable);
},
- RunnableMeta { location, closed },
+ RunnableMeta { location },
);
runnable.schedule();
Task(TaskState::Spawned(task))
@@ -129,25 +113,11 @@ impl ForegroundExecutor {
#[derive(Clone)]
pub struct BackgroundExecutor {
scheduler: Arc<dyn Scheduler>,
- closed: Arc<AtomicBool>,
}
impl BackgroundExecutor {
pub fn new(scheduler: Arc<dyn Scheduler>) -> Self {
- Self {
- scheduler,
- closed: Arc::new(AtomicBool::new(false)),
- }
- }
-
- /// Returns the closed flag for this executor.
- pub fn closed(&self) -> &Arc<AtomicBool> {
- &self.closed
- }
-
- /// Close this executor. Tasks will not run after this is called.
- pub fn close(&self) {
- self.closed.store(true, Ordering::SeqCst);
+ Self { scheduler }
}
#[track_caller]
@@ -167,9 +137,8 @@ impl BackgroundExecutor {
{
let scheduler = Arc::clone(&self.scheduler);
let location = Location::caller();
- let closed = self.closed.clone();
let (runnable, task) = async_task::Builder::new()
- .metadata(RunnableMeta { location, closed })
+ .metadata(RunnableMeta { location })
.spawn(
move |_| future,
move |runnable| {
@@ -188,20 +157,16 @@ impl BackgroundExecutor {
F::Output: Send + 'static,
{
let location = Location::caller();
- let closed = self.closed.clone();
let (tx, rx) = flume::bounded::<async_task::Runnable<RunnableMeta>>(1);
self.scheduler.spawn_realtime(Box::new(move || {
while let Ok(runnable) = rx.recv() {
- if runnable.metadata().is_closed() {
- continue;
- }
runnable.run();
}
}));
let (runnable, task) = async_task::Builder::new()
- .metadata(RunnableMeta { location, closed })
+ .metadata(RunnableMeta { location })
.spawn(
move |_| future,
move |runnable| {
@@ -14,10 +14,7 @@ use std::{
future::Future,
panic::Location,
pin::Pin,
- sync::{
- Arc,
- atomic::{AtomicBool, Ordering},
- },
+ sync::Arc,
task::{Context, Poll},
time::Duration,
};
@@ -62,23 +59,12 @@ impl Priority {
pub struct RunnableMeta {
/// The source location where the task was spawned.
pub location: &'static Location<'static>,
- /// Shared flag indicating whether the scheduler has been closed.
- /// When true, tasks should be dropped without running.
- pub closed: Arc<AtomicBool>,
-}
-
-impl RunnableMeta {
- /// Returns true if the scheduler has been closed and this task should not run.
- pub fn is_closed(&self) -> bool {
- self.closed.load(Ordering::SeqCst)
- }
}
impl std::fmt::Debug for RunnableMeta {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("RunnableMeta")
.field("location", &self.location)
- .field("closed", &self.is_closed())
.finish()
}
}
@@ -320,10 +320,6 @@ impl TestScheduler {
};
if let Some(runnable) = runnable {
- // Check if the executor that spawned this task was closed
- if runnable.runnable.metadata().is_closed() {
- return true;
- }
let is_foreground = runnable.session_id.is_some();
let was_main_thread = self.state.lock().is_main_thread;
self.state.lock().is_main_thread = is_foreground;
@@ -73,6 +73,7 @@ enum ListEntry {
label: SharedString,
workspace: Entity<Workspace>,
highlight_positions: Vec<usize>,
+ has_threads: bool,
},
Thread {
session_info: acp_thread::AgentSessionInfo,
@@ -322,10 +323,15 @@ impl Sidebar {
window,
|this, agent_panel, event: &AgentPanelEvent, _window, cx| match event {
AgentPanelEvent::ActiveViewChanged => {
- if let Some(thread) = agent_panel.read(cx).active_connection_view()
- && let Some(session_id) = thread.read(cx).parent_id(cx)
- {
- this.focused_thread = Some(session_id);
+ match agent_panel.read(cx).active_connection_view() {
+ Some(thread) => {
+ if let Some(session_id) = thread.read(cx).parent_id(cx) {
+ this.focused_thread = Some(session_id);
+ }
+ }
+ None => {
+ this.focused_thread = None;
+ }
}
this.update_entries(cx);
}
@@ -334,7 +340,7 @@ impl Sidebar {
.read(cx)
.active_connection_view()
.and_then(|thread| thread.read(cx).parent_id(cx));
- if new_focused != this.focused_thread {
+ if new_focused.is_some() && new_focused != this.focused_thread {
this.focused_thread = new_focused;
this.update_entries(cx);
}
@@ -522,6 +528,7 @@ impl Sidebar {
}
if !query.is_empty() {
+ let has_threads = !threads.is_empty();
let mut matched_threads = Vec::new();
for mut thread in threads {
if let ListEntry::Thread {
@@ -554,14 +561,17 @@ impl Sidebar {
label,
workspace: workspace.clone(),
highlight_positions: workspace_highlight_positions,
+ has_threads,
});
entries.extend(matched_threads);
} else {
+ let has_threads = !threads.is_empty();
entries.push(ListEntry::ProjectHeader {
path_list: path_list.clone(),
label,
workspace: workspace.clone(),
highlight_positions: Vec::new(),
+ has_threads,
});
if is_collapsed {
@@ -677,12 +687,14 @@ impl Sidebar {
label,
workspace,
highlight_positions,
+ has_threads,
} => self.render_project_header(
ix,
path_list,
label,
workspace,
highlight_positions,
+ *has_threads,
is_selected,
cx,
),
@@ -736,12 +748,12 @@ impl Sidebar {
label: &SharedString,
workspace: &Entity<Workspace>,
highlight_positions: &[usize],
+ has_threads: bool,
is_selected: bool,
cx: &mut Context<Self>,
) -> AnyElement {
let id = SharedString::from(format!("project-header-{}", ix));
let ib_id = SharedString::from(format!("project-header-new-thread-{}", ix));
- let group = SharedString::from(format!("group-{}", ix));
let is_collapsed = self.collapsed_groups.contains(path_list);
let disclosure_icon = if is_collapsed {
@@ -774,20 +786,19 @@ impl Sidebar {
.into_any_element()
};
- // TODO: if is_selected, draw a blue border around the item.
-
ListItem::new(id)
- .selection_outlined(is_selected)
- .group_name(&group)
.toggle_state(is_active_workspace)
+ .focused(is_selected)
.child(
- h_flex().px_1().py_1p5().gap_0p5().child(label).child(
- div().visible_on_hover(group).child(
+ h_flex()
+ .p_1()
+ .gap_1p5()
+ .child(
Icon::new(disclosure_icon)
.size(IconSize::Small)
- .color(Color::Muted),
- ),
- ),
+ .color(Color::Custom(cx.theme().colors().icon_muted.opacity(0.6))),
+ )
+ .child(label),
)
.end_hover_slot(
h_flex()
@@ -808,18 +819,21 @@ impl Sidebar {
)),
)
})
- .child(
- IconButton::new(ib_id, IconName::NewThread)
- .icon_size(IconSize::Small)
- .icon_color(Color::Muted)
- .tooltip(Tooltip::text("New Thread"))
- .on_click(cx.listener(move |this, _, window, cx| {
- this.selection = None;
- this.create_new_thread(&workspace_for_new_thread, window, cx);
- })),
- ),
+ .when(has_threads, |this| {
+ this.child(
+ IconButton::new(ib_id, IconName::NewThread)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .tooltip(Tooltip::text("New Thread"))
+ .on_click(cx.listener(move |this, _, window, cx| {
+ this.selection = None;
+ this.create_new_thread(&workspace_for_new_thread, window, cx);
+ })),
+ )
+ }),
)
.on_click(cx.listener(move |this, _, window, cx| {
+ this.selection = None;
this.toggle_collapse(&path_list_for_toggle, window, cx);
}))
// TODO: Decide if we really want the header to be activating different workspaces
@@ -887,12 +901,7 @@ impl Sidebar {
self.update_entries(cx);
}
- fn focus_in(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
- if self.selection.is_none() && !self.contents.entries.is_empty() {
- self.selection = Some(0);
- cx.notify();
- }
- }
+ fn focus_in(&mut self, _window: &mut Window, _cx: &mut Context<Self>) {}
fn cancel(&mut self, _: &Cancel, window: &mut Window, cx: &mut Context<Self>) {
if self.reset_filter_editor_text(window, cx) {
@@ -1122,7 +1131,7 @@ impl Sidebar {
.status(status)
.notified(has_notification)
.selected(self.focused_thread.as_ref() == Some(&session_info.session_id))
- .outlined(is_selected)
+ .focused(is_selected)
.on_click(cx.listener(move |this, _, window, cx| {
this.selection = None;
this.activate_thread(session_info.clone(), &workspace, window, cx);
@@ -1168,7 +1177,7 @@ impl Sidebar {
let count = format!("({})", remaining_count);
ListItem::new(id)
- .selection_outlined(is_selected)
+ .focused(is_selected)
.child(
h_flex()
.px_1()
@@ -1319,52 +1328,45 @@ impl Render for Sidebar {
.justify_between()
.border_b_1()
.border_color(cx.theme().colors().border)
- .child(
- h_flex()
- .gap_1()
- .child({
- let focus_handle_toggle = self.focus_handle.clone();
- let focus_handle_focus = self.focus_handle.clone();
- IconButton::new("close-sidebar", IconName::WorkspaceNavOpen)
- .icon_size(IconSize::Small)
- .tooltip(Tooltip::element(move |_, cx| {
- v_flex()
- .gap_1()
- .child(
- h_flex()
- .gap_2()
- .justify_between()
- .child(Label::new("Close Sidebar"))
- .child(KeyBinding::for_action_in(
- &ToggleWorkspaceSidebar,
- &focus_handle_toggle,
- cx,
- )),
- )
- .child(
- h_flex()
- .pt_1()
- .gap_2()
- .border_t_1()
- .border_color(
- cx.theme().colors().border_variant,
- )
- .justify_between()
- .child(Label::new(focus_tooltip_label))
- .child(KeyBinding::for_action_in(
- &FocusWorkspaceSidebar,
- &focus_handle_focus,
- cx,
- )),
- )
- .into_any_element()
- }))
- .on_click(cx.listener(|_this, _, _window, cx| {
- cx.emit(SidebarEvent::Close);
- }))
- })
- .child(Label::new("Threads").size(LabelSize::Small)),
- )
+ .child({
+ let focus_handle_toggle = self.focus_handle.clone();
+ let focus_handle_focus = self.focus_handle.clone();
+ IconButton::new("close-sidebar", IconName::WorkspaceNavOpen)
+ .icon_size(IconSize::Small)
+ .tooltip(Tooltip::element(move |_, cx| {
+ v_flex()
+ .gap_1()
+ .child(
+ h_flex()
+ .gap_2()
+ .justify_between()
+ .child(Label::new("Close Sidebar"))
+ .child(KeyBinding::for_action_in(
+ &ToggleWorkspaceSidebar,
+ &focus_handle_toggle,
+ cx,
+ )),
+ )
+ .child(
+ h_flex()
+ .pt_1()
+ .gap_2()
+ .border_t_1()
+ .border_color(cx.theme().colors().border_variant)
+ .justify_between()
+ .child(Label::new(focus_tooltip_label))
+ .child(KeyBinding::for_action_in(
+ &FocusWorkspaceSidebar,
+ &focus_handle_focus,
+ cx,
+ )),
+ )
+ .into_any_element()
+ }))
+ .on_click(cx.listener(|_this, _, _window, cx| {
+ cx.emit(SidebarEvent::Close);
+ }))
+ })
.child(
IconButton::new("open-project", IconName::OpenFolder)
.icon_size(IconSize::Small)
@@ -1852,6 +1854,7 @@ mod tests {
label: "expanded-project".into(),
workspace: workspace.clone(),
highlight_positions: Vec::new(),
+ has_threads: true,
},
// Thread with default (Completed) status, not active
ListEntry::Thread {
@@ -1954,6 +1957,7 @@ mod tests {
label: "collapsed-project".into(),
workspace: workspace.clone(),
highlight_positions: Vec::new(),
+ has_threads: true,
},
];
// Select the Running thread (index 2)
@@ -2014,11 +2018,16 @@ mod tests {
cx.run_until_parked();
// Entries: [header, thread3, thread2, thread1]
- // Focusing the sidebar triggers focus_in, which selects the first entry
+ // Focusing the sidebar does not set a selection; select_next/select_previous
+ // handle None gracefully by starting from the first or last entry.
open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
+ assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None);
+
+ // First SelectNext from None starts at index 0
+ cx.dispatch_action(SelectNext);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0));
- // Move down through all entries
+ // Move down through remaining entries
cx.dispatch_action(SelectNext);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1));
@@ -2072,7 +2081,7 @@ mod tests {
}
#[gpui::test]
- async fn test_keyboard_focus_in_selects_first(cx: &mut TestAppContext) {
+ async fn test_keyboard_focus_in_does_not_set_selection(cx: &mut TestAppContext) {
let project = init_test_project("/my-project", cx).await;
let (multi_workspace, cx) =
cx.add_window_view(|window, cx| MultiWorkspace::test_new(project, window, cx));
@@ -2081,11 +2090,16 @@ mod tests {
// Initially no selection
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None);
- // Open the sidebar so it's rendered, then focus it to trigger focus_in
+ // Open the sidebar so it's rendered, then focus it to trigger focus_in.
+ // focus_in no longer sets a default selection.
open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
- assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0));
+ assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None);
+
+ // Manually set a selection, blur, then refocus — selection should be preserved
+ sidebar.update_in(cx, |sidebar, _window, _cx| {
+ sidebar.selection = Some(0);
+ });
- // Blur the sidebar, then refocus — existing selection should be preserved
cx.update(|window, _cx| {
window.blur();
});
@@ -2135,9 +2149,11 @@ mod tests {
1
);
- // Focus the sidebar — focus_in selects the header (index 0)
+ // Focus the sidebar and manually select the header (index 0)
open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
- assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0));
+ sidebar.update_in(cx, |sidebar, _window, _cx| {
+ sidebar.selection = Some(0);
+ });
// Press confirm on project header (workspace 0) to activate it.
cx.dispatch_action(Confirm);
@@ -2176,9 +2192,9 @@ mod tests {
assert_eq!(entries.len(), 7);
assert!(entries.iter().any(|e| e.contains("View More (3)")));
- // Focus sidebar (selects index 0), then navigate down to the "View More" entry (index 6)
+ // Focus sidebar (selection starts at None), then navigate down to the "View More" entry (index 6)
open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
- for _ in 0..6 {
+ for _ in 0..7 {
cx.dispatch_action(SelectNext);
}
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(6));
@@ -2210,9 +2226,11 @@ mod tests {
vec!["v [my-project]", " Thread 1"]
);
- // Focus sidebar — focus_in selects the header (index 0). Press left to collapse.
+ // Focus sidebar and manually select the header (index 0). Press left to collapse.
open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
- assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0));
+ sidebar.update_in(cx, |sidebar, _window, _cx| {
+ sidebar.selection = Some(0);
+ });
cx.dispatch_action(CollapseSelectedEntry);
cx.run_until_parked();
@@ -2248,9 +2266,10 @@ mod tests {
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
- // Focus sidebar (selects header at index 0), then navigate down to the thread (child)
+ // Focus sidebar (selection starts at None), then navigate down to the thread (child)
open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
cx.dispatch_action(SelectNext);
+ cx.dispatch_action(SelectNext);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1));
assert_eq!(
@@ -2282,8 +2301,12 @@ mod tests {
vec!["v [empty-project]", " [+ New Thread]"]
);
- // Focus sidebar — focus_in selects the first entry (header at 0)
+ // Focus sidebar — focus_in does not set a selection
open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
+ assert_eq!(sidebar.read_with(cx, |s, _| s.selection), None);
+
+ // First SelectNext from None starts at index 0 (header)
+ cx.dispatch_action(SelectNext);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0));
// SelectNext moves to the new thread button
@@ -2311,9 +2334,10 @@ mod tests {
multi_workspace.update_in(cx, |_, _window, cx| cx.notify());
cx.run_until_parked();
- // Focus sidebar (selects header at 0), navigate down to the thread (index 1)
+ // Focus sidebar (selection starts at None), navigate down to the thread (index 1)
open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
cx.dispatch_action(SelectNext);
+ cx.dispatch_action(SelectNext);
assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(1));
// Collapse the group, which removes the thread from the list
@@ -2935,9 +2959,11 @@ mod tests {
cx.run_until_parked();
// User focuses the sidebar and collapses the group using keyboard:
- // select the header, then press CollapseSelectedEntry to collapse.
+ // manually select the header, then press CollapseSelectedEntry to collapse.
open_and_focus_sidebar(&sidebar, &multi_workspace, cx);
- assert_eq!(sidebar.read_with(cx, |s, _| s.selection), Some(0));
+ sidebar.update_in(cx, |sidebar, _window, _cx| {
+ sidebar.selection = Some(0);
+ });
cx.dispatch_action(CollapseSelectedEntry);
cx.run_until_parked();
@@ -3151,15 +3177,12 @@ mod tests {
});
assert_eq!(sidebar.read_with(cx, |sidebar, _| sidebar.selection), None);
- // When the user tabs back into the sidebar, focus_in restores
- // selection to the first entry for keyboard navigation.
+ // When the user tabs back into the sidebar, focus_in no longer
+ // restores selection — it stays None.
sidebar.update_in(cx, |sidebar, window, cx| {
sidebar.focus_in(window, cx);
});
- assert_eq!(
- sidebar.read_with(cx, |sidebar, _| sidebar.selection),
- Some(0)
- );
+ assert_eq!(sidebar.read_with(cx, |sidebar, _| sidebar.selection), None);
}
#[gpui::test]
@@ -1014,9 +1014,9 @@ impl TitleBar {
let user_store = user_store.clone();
let organization = organization.clone();
move |_window, cx| {
- user_store.update(cx, |user_store, _cx| {
+ user_store.update(cx, |user_store, cx| {
user_store
- .set_current_organization(organization.clone());
+ .set_current_organization(organization.clone(), cx);
});
}
},
@@ -3,7 +3,7 @@ use crate::{
prelude::*,
};
-use gpui::{AnyView, ClickEvent, Hsla, SharedString};
+use gpui::{AnyView, ClickEvent, Hsla, SharedString, linear_color_stop, linear_gradient};
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub enum AgentThreadStatus {
@@ -24,7 +24,7 @@ pub struct ThreadItem {
notified: bool,
status: AgentThreadStatus,
selected: bool,
- outlined: bool,
+ focused: bool,
hovered: bool,
added: Option<usize>,
removed: Option<usize>,
@@ -48,7 +48,7 @@ impl ThreadItem {
notified: false,
status: AgentThreadStatus::default(),
selected: false,
- outlined: false,
+ focused: false,
hovered: false,
added: None,
removed: None,
@@ -92,8 +92,8 @@ impl ThreadItem {
self
}
- pub fn outlined(mut self, outlined: bool) -> Self {
- self.outlined = outlined;
+ pub fn focused(mut self, focused: bool) -> Self {
+ self.focused = focused;
self
}
@@ -153,7 +153,7 @@ impl ThreadItem {
impl RenderOnce for ThreadItem {
fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement {
- let clr = cx.theme().colors();
+ let color = cx.theme().colors();
// let dot_separator = || {
// Label::new("•")
// .size(LabelSize::Small)
@@ -161,7 +161,7 @@ impl RenderOnce for ThreadItem {
// .alpha(0.5)
// };
- let icon_container = || h_flex().size_4().justify_center();
+ let icon_container = || h_flex().size_4().flex_none().justify_center();
let agent_icon = if let Some(custom_svg) = self.custom_icon_from_external_svg {
Icon::from_external_svg(custom_svg)
.color(Color::Muted)
@@ -189,7 +189,7 @@ impl RenderOnce for ThreadItem {
} else if self.status == AgentThreadStatus::Error {
Some(decoration(IconDecorationKind::X, cx.theme().status().error))
} else if self.notified {
- Some(decoration(IconDecorationKind::Dot, clr.text_accent))
+ Some(decoration(IconDecorationKind::Dot, color.text_accent))
} else {
None
};
@@ -209,15 +209,41 @@ impl RenderOnce for ThreadItem {
let title = self.title;
let highlight_positions = self.highlight_positions;
let title_label = if highlight_positions.is_empty() {
- Label::new(title).truncate().into_any_element()
+ Label::new(title).into_any_element()
} else {
- HighlightedLabel::new(title, highlight_positions)
- .truncate()
- .into_any_element()
+ HighlightedLabel::new(title, highlight_positions).into_any_element()
};
+ let base_bg = if self.selected {
+ color.element_active
+ } else {
+ color.panel_background
+ };
+
+ let gradient_overlay = div()
+ .absolute()
+ .top_0()
+ .right(px(-10.0))
+ .w_12()
+ .h_full()
+ .bg(linear_gradient(
+ 90.,
+ linear_color_stop(base_bg, 0.6),
+ linear_color_stop(base_bg.opacity(0.0), 0.),
+ ))
+ .group_hover("thread-item", |s| {
+ s.bg(linear_gradient(
+ 90.,
+ linear_color_stop(color.element_hover, 0.6),
+ linear_color_stop(color.element_hover.opacity(0.0), 0.),
+ ))
+ });
+
v_flex()
.id(self.id.clone())
+ .group("thread-item")
+ .relative()
+ .overflow_hidden()
.cursor_pointer()
.w_full()
.map(|this| {
@@ -227,11 +253,11 @@ impl RenderOnce for ThreadItem {
this.px_2().py_1()
}
})
- .when(self.selected, |s| s.bg(clr.element_active))
+ .when(self.selected, |s| s.bg(color.element_active))
.border_1()
.border_color(gpui::transparent_black())
- .when(self.outlined, |s| s.border_color(clr.panel_focused_border))
- .hover(|s| s.bg(clr.element_hover))
+ .when(self.focused, |s| s.border_color(color.panel_focused_border))
+ .hover(|s| s.bg(color.element_hover))
.on_hover(self.on_hover)
.child(
h_flex()
@@ -249,6 +275,7 @@ impl RenderOnce for ThreadItem {
.child(title_label)
.when_some(self.tooltip, |this, tooltip| this.tooltip(tooltip)),
)
+ .child(gradient_overlay)
.when(running_or_action, |this| {
this.child(
h_flex()
@@ -271,7 +298,6 @@ impl RenderOnce for ThreadItem {
Label::new(worktree)
.size(LabelSize::Small)
.color(Color::Muted)
- .truncate_start()
.into_any_element()
} else {
HighlightedLabel::new(worktree, worktree_highlight_positions)
@@ -420,25 +446,25 @@ impl Component for ThreadItem {
.into_any_element(),
),
single_example(
- "Outlined Item (Keyboard Selection)",
+ "Focused Item (Keyboard Selection)",
container()
.child(
ThreadItem::new("ti-7", "Implement keyboard navigation")
.icon(IconName::AiClaude)
.timestamp("4:00 PM")
- .outlined(true),
+ .focused(true),
)
.into_any_element(),
),
single_example(
- "Selected + Outlined",
+ "Selected + Focused",
container()
.child(
ThreadItem::new("ti-8", "Active and keyboard-focused thread")
.icon(IconName::AiGemini)
.timestamp("5:00 PM")
.selected(true)
- .outlined(true),
+ .focused(true),
)
.into_any_element(),
),
@@ -42,7 +42,6 @@ pub struct ListItem {
selectable: bool,
always_show_disclosure_icon: bool,
outlined: bool,
- selection_outlined: Option<bool>,
rounded: bool,
overflow_x: bool,
focused: Option<bool>,
@@ -72,7 +71,6 @@ impl ListItem {
selectable: true,
always_show_disclosure_icon: false,
outlined: false,
- selection_outlined: None,
rounded: false,
overflow_x: false,
focused: None,
@@ -173,11 +171,6 @@ impl ListItem {
self
}
- pub fn selection_outlined(mut self, outlined: bool) -> Self {
- self.selection_outlined = Some(outlined);
- self
- }
-
pub fn rounded(mut self) -> Self {
self.rounded = true;
self
@@ -248,13 +241,6 @@ impl RenderOnce for ListItem {
})
})
.when(self.rounded, |this| this.rounded_sm())
- .when_some(self.selection_outlined, |this, outlined| {
- this.border_1()
- .border_color(gpui::transparent_black())
- .when(outlined, |this| {
- this.border_color(cx.theme().colors().panel_focused_border)
- })
- })
.when_some(self.on_hover, |this, on_hover| this.on_hover(on_hover))
.child(
h_flex()
@@ -978,6 +978,7 @@ impl Vim {
editor.set_clip_at_line_ends(false, cx);
editor.set_collapse_matches(false);
editor.set_input_enabled(true);
+ editor.set_expects_character_input(true);
editor.set_autoindent(true);
editor.selections.set_line_mode(false);
editor.unregister_addon::<VimAddon>();
@@ -1346,6 +1347,15 @@ impl Vim {
}
}
+ fn expects_character_input(&self) -> bool {
+ if let Some(operator) = self.operator_stack.last() {
+ if operator.is_waiting(self.mode) {
+ return true;
+ }
+ }
+ self.editor_input_enabled()
+ }
+
pub fn editor_input_enabled(&self) -> bool {
match self.mode {
Mode::Insert => {
@@ -2058,6 +2068,7 @@ impl Vim {
clip_at_line_ends: self.clip_at_line_ends(),
collapse_matches: !HelixModeSetting::get_global(cx).0,
input_enabled: self.editor_input_enabled(),
+ expects_character_input: self.expects_character_input(),
autoindent: self.should_autoindent(),
cursor_offset_on_selection: self.mode.is_visual(),
line_mode: matches!(self.mode, Mode::VisualLine),
@@ -2075,6 +2086,7 @@ impl Vim {
editor.set_clip_at_line_ends(state.clip_at_line_ends, cx);
editor.set_collapse_matches(state.collapse_matches);
editor.set_input_enabled(state.input_enabled);
+ editor.set_expects_character_input(state.expects_character_input);
editor.set_autoindent(state.autoindent);
editor.set_cursor_offset_on_selection(state.cursor_offset_on_selection);
editor.selections.set_line_mode(state.line_mode);
@@ -2087,6 +2099,7 @@ struct VimEditorSettingsState {
clip_at_line_ends: bool,
collapse_matches: bool,
input_enabled: bool,
+ expects_character_input: bool,
autoindent: bool,
cursor_offset_on_selection: bool,
line_mode: bool,
@@ -48,7 +48,7 @@ use std::{
path::{Path, PathBuf},
process,
rc::Rc,
- sync::{Arc, OnceLock},
+ sync::{Arc, LazyLock, OnceLock},
time::Instant,
};
use theme::{ActiveTheme, GlobalTheme, ThemeRegistry};
@@ -666,7 +666,7 @@ fn main() {
);
copilot_ui::init(&app_state, cx);
- language_model::init(app_state.client.clone(), cx);
+ language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx);
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
acp_tools::init(cx);
zed::telemetry_log::init(cx);
@@ -1586,8 +1586,14 @@ fn init_paths() -> HashMap<io::ErrorKind, Vec<&'static Path>> {
})
}
+pub(crate) static FORCE_CLI_MODE: LazyLock<bool> = LazyLock::new(|| {
+ let env_var = std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_some();
+ unsafe { std::env::remove_var(FORCE_CLI_MODE_ENV_VAR_NAME) };
+ env_var
+});
+
fn stdout_is_a_pty() -> bool {
- std::env::var(FORCE_CLI_MODE_ENV_VAR_NAME).ok().is_none() && io::stdout().is_terminal()
+ !*FORCE_CLI_MODE && io::stdout().is_terminal()
}
#[derive(Parser, Debug)]
@@ -200,7 +200,7 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()>
});
prompt_store::init(cx);
let prompt_builder = prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx);
- language_model::init(app_state.client.clone(), cx);
+ language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx);
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
git_ui::init(cx);
project::AgentRegistryStore::init_global(
@@ -2008,13 +2008,29 @@ fn open_local_file(
}
fn open_bundled_file(
- workspace: &Workspace,
+ workspace: &mut Workspace,
text: Cow<'static, str>,
title: &'static str,
language: &'static str,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
+ let existing = workspace.items_of_type::<Editor>(cx).find(|editor| {
+ editor.read_with(cx, |editor, cx| {
+ editor.read_only(cx)
+ && editor.title(cx).as_ref() == title
+ && editor
+ .buffer()
+ .read(cx)
+ .as_singleton()
+ .is_some_and(|buffer| buffer.read(cx).file().is_none())
+ })
+ });
+ if let Some(existing) = existing {
+ workspace.activate_item(&existing, true, true, window, cx);
+ return;
+ }
+
let language = workspace.app_state().languages.language_for_name(language);
cx.spawn_in(window, async move |workspace, cx| {
let language = language.await.log_err();
@@ -4965,6 +4981,54 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_bundled_files_reuse_existing_editor(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ cx.update(init);
+
+ let project = Project::test(app_state.fs.clone(), [], cx).await;
+ let _window = cx.add_window(|window, cx| MultiWorkspace::test_new(project, window, cx));
+
+ cx.update(|cx| {
+ cx.dispatch_action(&OpenDefaultSettings);
+ });
+ cx.run_until_parked();
+
+ let multi_workspace = cx.windows()[0].downcast::<MultiWorkspace>().unwrap();
+ let first_item_id = multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.workspace().update(cx, |workspace, cx| {
+ workspace
+ .active_item(cx)
+ .expect("default settings should be open")
+ .item_id()
+ })
+ })
+ .unwrap();
+
+ cx.update(|cx| {
+ cx.dispatch_action(&OpenDefaultSettings);
+ });
+ cx.run_until_parked();
+
+ let (second_item_id, item_count) = multi_workspace
+ .update(cx, |multi_workspace, _, cx| {
+ multi_workspace.workspace().update(cx, |workspace, cx| {
+ let pane = workspace.active_pane().read(cx);
+ (
+ pane.active_item()
+ .expect("default settings should still be open")
+ .item_id(),
+ pane.items_len(),
+ )
+ })
+ })
+ .unwrap();
+
+ assert_eq!(first_item_id, second_item_id);
+ assert_eq!(item_count, 1);
+ }
+
#[gpui::test]
async fn test_bundled_languages(cx: &mut TestAppContext) {
let fs = fs::FakeFs::new(cx.background_executor.clone());
@@ -5024,7 +5088,7 @@ mod tests {
cx,
);
image_viewer::init(cx);
- language_model::init(app_state.client.clone(), cx);
+ language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx);
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
web_search::init(cx);
git_graph::init(cx);
@@ -316,7 +316,7 @@ mod tests {
let app_state = cx.update(|cx| {
let app_state = AppState::test(cx);
client::init(&app_state.client, cx);
- language_model::init(app_state.client.clone(), cx);
+ language_model::init(app_state.user_store.clone(), app_state.client.clone(), cx);
editor::init(cx);
app_state
});
@@ -470,12 +470,19 @@ pub fn encode_patch_as_output_for_format(
}
}
+pub struct ParsedOutput {
+ /// Text that should replace the editable region
+ pub new_editable_region: String,
+ /// The byte range within `cursor_excerpt` that this replacement applies to
+ pub range_in_excerpt: Range<usize>,
+}
+
/// Parse model output for the given zeta format
pub fn parse_zeta2_model_output(
output: &str,
format: ZetaFormat,
prompt_inputs: &ZetaPromptInput,
-) -> Result<(Range<usize>, String)> {
+) -> Result<ParsedOutput> {
let output = match output_end_marker_for_format(format) {
Some(marker) => output.strip_suffix(marker).unwrap_or(output),
None => output,
@@ -509,7 +516,11 @@ pub fn parse_zeta2_model_output(
let range_in_excerpt =
range_in_context.start + context_start..range_in_context.end + context_start;
- Ok((range_in_excerpt, output))
+
+ Ok(ParsedOutput {
+ new_editable_region: output,
+ range_in_excerpt,
+ })
}
pub fn excerpt_range_for_format(
@@ -4612,9 +4623,12 @@ mod tests {
assert_eq!(cleaned, "");
}
- fn apply_edit(excerpt: &str, range: &Range<usize>, new_text: &str) -> String {
+ fn apply_edit(excerpt: &str, parsed_output: &ParsedOutput) -> String {
let mut result = excerpt.to_string();
- result.replace_range(range.clone(), new_text);
+ result.replace_range(
+ parsed_output.range_in_excerpt.clone(),
+ &parsed_output.new_editable_region,
+ );
result
}
@@ -4632,7 +4646,7 @@ mod tests {
editable_start,
);
- let (range, text) = parse_zeta2_model_output(
+ let output = parse_zeta2_model_output(
"editable new\n>>>>>>> UPDATED\n",
ZetaFormat::V0131GitMergeMarkersPrefix,
&input,
@@ -4640,7 +4654,7 @@ mod tests {
.unwrap();
assert_eq!(
- apply_edit(excerpt, &range, &text),
+ apply_edit(excerpt, &output),
"before ctx\nctx start\neditable new\nctx end\nafter ctx\n"
);
}
@@ -4658,10 +4672,10 @@ mod tests {
);
let format = ZetaFormat::V0131GitMergeMarkersPrefix;
- let (range, text) =
+ let output =
parse_zeta2_model_output("bbb\nccc\n>>>>>>> UPDATED\n", format, &input).unwrap();
- assert_eq!(apply_edit(excerpt, &range, &text), excerpt);
+ assert_eq!(apply_edit(excerpt, &output), excerpt);
}
#[test]
@@ -4670,14 +4684,11 @@ mod tests {
let input = make_input_with_context_range(excerpt, 0..excerpt.len(), 0..excerpt.len(), 0);
let format = ZetaFormat::V0131GitMergeMarkersPrefix;
- let (range1, text1) =
+ let output1 =
parse_zeta2_model_output("new content\n>>>>>>> UPDATED\n", format, &input).unwrap();
- let (range2, text2) = parse_zeta2_model_output("new content\n", format, &input).unwrap();
+ let output2 = parse_zeta2_model_output("new content\n", format, &input).unwrap();
- assert_eq!(
- apply_edit(excerpt, &range1, &text1),
- apply_edit(excerpt, &range2, &text2)
- );
- assert_eq!(apply_edit(excerpt, &range1, &text1), "new content\n");
+ assert_eq!(apply_edit(excerpt, &output1), apply_edit(excerpt, &output2));
+ assert_eq!(apply_edit(excerpt, &output1), "new content\n");
}
}
@@ -1,6 +1,6 @@
---
title: AI Agent Settings - Zed
-description: Customize Zed's AI agent: default models, temperature, tool approval, auto-run commands, notifications, and panel options.
+description: "Customize Zed's AI agent: default models, temperature, tool approval, auto-run commands, notifications, and panel options."
---
# Agent Settings
@@ -1,6 +1,6 @@
---
title: AI Privacy and Security - Zed
-description: Zed's approach to AI privacy: opt-in data sharing by default, zero-data retention with providers, and full open-source transparency.
+description: "Zed's approach to AI privacy: opt-in data sharing by default, zero-data retention with providers, and full open-source transparency."
---
# Privacy and Security
@@ -0,0 +1,51 @@
+# Zed's Feature Development Process
+
+This is for moderate-to-large features — new UI, behavior changes, or work that cuts across multiple parts of Zed. Small keybindings or settings tweaks don't need all of this.
+
+> **Before you start:** If you're an external contributor, make sure the feature is something the team wants before investing significant effort. That said, coming prepared with background research makes it much easier for the team to understand and approve the proposal. Read the [Contributing guide](../../../CONTRIBUTING.md#sending-changes) — if there isn't already a GitHub issue with staff confirmation, start with a GitHub Discussion or a Discord message rather than a PR.
+
+## 1. Why does this matter?
+
+Every feature starts as an idea. Before writing any code, ground it:
+
+- **What problem does this solve?**
+- **What's the evidence?** GitHub issues, Discord requests, thumbs-up counts, blog posts.
+- **Is there prior art?** If it's in VS Code, JetBrains, Neovim, or a wildly popular plugin, that's a strong signal. If the idea is more novel, name what it's based on — "This is X, adapted for Zed's multi-buffers" is far more useful than "I think this would be cool."
+
+## 2. What is it?
+
+Write a short, concrete feature statement, then back it up with the context gathered above. If you can't describe the feature in a few sentences, it might be too big or too vague.
+
+Here's an example format, though adapt it to whatever your feature needs:
+
+> **Feature:** Inline Git Blame
+ > **Purpose:** Show the last commit author and message for each line directly after the editor text, so developers can understand code history without opening a separate git blame view.
+> **Background:**
+ > This is standard across all major code editors:
+ > \[screenshot of VS Code]
+ > \[screenshot of IntelliJ]
+ > \[screenshot of Neovim]
+ > and has 146 thumbs up on the [GitHub issue](https://github.com).
+> **Decisions:**
+> We have to decide whether to use the git CLI or a git library. Zed uses a git library but its blame implementation is too slow for a code editor, so we should use the CLI's porcelain interface.
+
+## 3. What else does this affect?
+
+Walk through this list before you start building. Not everything will apply:
+
+- **Actions & keybindings.** What actions does your feature define? Do the default keybindings conflict with existing ones?
+- **Settings.** Is any behavior configurable? Per-user vs. per-project vs. per-language? Don't forget to add new settings to the Settings UI.
+- **Themes & styling.** Does this need a new semantic token? Does it look right in both light and dark mode?
+- **Vim mode.** Vim users might have different expectations for this feature.
+- **Remote development.** Does your feature work with remote projects? File paths, shell commands, and environment variables all might behave differently.
+- **Persistence across restarts.** Should your feature's state persist across restarts?
+- **Accessibility.** Is it keyboard-navigable? Are focus states clear?
+- **Platform differences.** Does behavior differ on macOS, Linux, or Windows?
+- **Performance.** How does it behave with large files or big projects? Are interactions instant?
+- **Security.** How does this feature interact with Workspace Trust? Does it open new attack surfaces in Zed?
+
+If your feature touches the **editor** specifically: the editor has a lot of coexisting features — gutter elements, inline blocks, multiple cursors, folding, edit predictions, code intelligence popovers, the minimap. Test your changes with different combinations of them active. Features that work in a normal buffer might need to be disabled in a multi-buffer.
+
+## 4. Ship it
+
+Use this as the basis for your GitHub Discussion, issue, or PR description. Good product research gets everyone aligned on goals, the state of the art, and any tradeoffs we might need to consider.
@@ -1,5 +1,5 @@
---
-title: Zed Development: Glossary
+title: "Zed Development: Glossary"
description: "Guide to zed development: glossary for Zed development."
---