From cf0f4428692a2fff5bafee5a30d92dbf8c03d5a6 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 29 Oct 2025 17:58:22 -0300 Subject: [PATCH 01/82] settings_ui: Fix links for edit prediction items (#41492) Follow up to the bonus commit we added in https://github.com/zed-industries/zed/pull/41172/. Release Notes: - N/A --- crates/settings_ui/src/page_data.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 915d34e1087823841e985bb141879bf781db64fb..8075e6396ae0011d00f7a9a65fc3732c08823787 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -5724,7 +5724,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { title: "Display Mode", description: "When to show edit predictions previews in buffer. The eager mode displays them inline, while the subtle mode displays them only when holding a modifier key.", field: Box::new(SettingField { - json_path: Some("edit_prediction_mode"), + json_path: Some("edit_prediction.display_mode"), pick: |settings_content| { settings_content.project.all_languages.edit_predictions.as_ref()?.mode.as_ref() }, @@ -5739,7 +5739,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { title: "In Text Threads", description: "Whether edit predictions are enabled when editing text threads in the agent panel.", field: Box::new(SettingField { - json_path: Some("edit_prediction_in_text_threads"), + json_path: Some("edit_prediction.in_text_threads"), pick: |settings_content| { settings_content.project.all_languages.edit_predictions.as_ref()?.enabled_in_text_threads.as_ref() }, @@ -5752,10 +5752,10 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Copilot Provider", - description: "Set up GitHub Copilot as your edit prediction provider. 
You can toggle between it and Zed's default provider.", + description: "Use GitHub Copilot as your edit prediction provider.", field: Box::new( SettingField { - json_path: Some("languages.$(language).wrap_guides"), + json_path: Some("edit_prediction.copilot_provider"), pick: |settings_content| { settings_content.project.all_languages.edit_predictions.as_ref()?.copilot.as_ref() }, @@ -5770,10 +5770,10 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }), SettingsPageItem::SettingItem(SettingItem { title: "Codestral Provider", - description: "Set up Mistral's Codestral as your edit prediction provider. You can toggle between it and Zed's default provider.", + description: "Use Mistral's Codestral as your edit prediction provider.", field: Box::new( SettingField { - json_path: Some("languages.$(language).wrap_guides"), + json_path: Some("edit_prediction.codestral_provider"), pick: |settings_content| { settings_content.project.all_languages.edit_predictions.as_ref()?.codestral.as_ref() }, From 12dae071089137285a2b5f6f995dfcad3cb235f3 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 29 Oct 2025 17:58:31 -0300 Subject: [PATCH 02/82] agent_ui: Fix history view background color when zoomed in (#41493) Release Notes: - N/A --- crates/agent_ui/src/acp/thread_history.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/agent_ui/src/acp/thread_history.rs b/crates/agent_ui/src/acp/thread_history.rs index d96c3b3219717b3ffa7310d207a323bc5fb222b0..9cfe30278e1e46d95c00b3c881358a4b00786801 100644 --- a/crates/agent_ui/src/acp/thread_history.rs +++ b/crates/agent_ui/src/acp/thread_history.rs @@ -450,6 +450,7 @@ impl Render for AcpThreadHistory { v_flex() .key_context("ThreadHistory") .size_full() + .bg(cx.theme().colors().panel_background) .on_action(cx.listener(Self::select_previous)) .on_action(cx.listener(Self::select_next)) .on_action(cx.listener(Self::select_first)) From 87f9ba380f231778aa04d544952ae3a0a528ba52 Mon Sep 
17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 29 Oct 2025 19:19:36 -0300 Subject: [PATCH 03/82] settings_ui: Close the settings window when going to the JSON file (#41491) Release Notes: - N/A --- crates/settings_ui/src/settings_ui.rs | 50 +++++++++++++++------------ 1 file changed, 28 insertions(+), 22 deletions(-) diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 14e2eaf688f39f6d50fbdcb1102df28e3fa0975e..f78d7cd41a137d01fee09b5ef7dbee669c2d7e54 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -1989,6 +1989,21 @@ impl SettingsWindow { let this = cx.entity(); + let selected_file_ix = self + .files + .iter() + .enumerate() + .skip(OVERFLOW_LIMIT) + .find_map(|(ix, (file, _))| { + if file == &self.current_file { + Some(ix) + } else { + None + } + }) + .unwrap_or(OVERFLOW_LIMIT); + let edit_in_json_id = SharedString::new(format!("edit-in-json-{}", selected_file_ix)); + h_flex() .w_full() .gap_1() @@ -2005,20 +2020,6 @@ impl SettingsWindow { ), ) .when(self.files.len() > OVERFLOW_LIMIT, |div| { - let selected_file_ix = self - .files - .iter() - .enumerate() - .skip(OVERFLOW_LIMIT) - .find_map(|(ix, (file, _))| { - if file == &self.current_file { - Some(ix) - } else { - None - } - }) - .unwrap_or(OVERFLOW_LIMIT); - let (file, focus_handle) = &self.files[selected_file_ix]; div.child(file_button(selected_file_ix, file, focus_handle, cx)) @@ -2081,11 +2082,11 @@ impl SettingsWindow { }), ) .child( - Button::new("edit-in-json", "Edit in settings.json") + Button::new(edit_in_json_id, "Edit in settings.json") .tab_index(0_isize) .style(ButtonStyle::OutlinedGhost) - .on_click(cx.listener(|this, _, _, cx| { - this.open_current_settings_file(cx); + .on_click(cx.listener(|this, _, window, cx| { + this.open_current_settings_file(window, cx); })), ) } @@ -2801,8 +2802,8 @@ impl SettingsWindow { Button::new("fix-in-json", "Fix in 
settings.json") .tab_index(0_isize) .style(ButtonStyle::Tinted(ui::TintColor::Warning)) - .on_click(cx.listener(|this, _, _, cx| { - this.open_current_settings_file(cx); + .on_click(cx.listener(|this, _, window, cx| { + this.open_current_settings_file(window, cx); })), ), ) @@ -2941,7 +2942,7 @@ impl SettingsWindow { /// This function will create a new settings file if one doesn't exist /// if the current file is a project settings with a valid worktree id /// We do this because the settings ui allows initializing project settings - fn open_current_settings_file(&mut self, cx: &mut Context) { + fn open_current_settings_file(&mut self, window: &mut Window, cx: &mut Context) { match &self.current_file { SettingsUiFile::User => { let Some(original_window) = self.original_window else { @@ -2983,6 +2984,8 @@ impl SettingsWindow { .detach(); }) .ok(); + + window.remove_window(); } SettingsUiFile::Project((worktree_id, path)) => { let settings_path = path.join(paths::local_settings_file_relative_path()); @@ -3066,8 +3069,11 @@ impl SettingsWindow { .detach(); }) .ok(); + + window.remove_window(); } SettingsUiFile::Server(_) => { + // Server files are not editable return; } }; @@ -3186,8 +3192,8 @@ impl Render for SettingsWindow { .id("settings-window") .key_context("SettingsWindow") .track_focus(&self.focus_handle) - .on_action(cx.listener(|this, _: &OpenCurrentFile, _, cx| { - this.open_current_settings_file(cx); + .on_action(cx.listener(|this, _: &OpenCurrentFile, window, cx| { + this.open_current_settings_file(window, cx); })) .on_action(|_: &Minimize, window, _cx| { window.minimize_window(); From b8cdd38efb3796302a88270f62be342d5afde794 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Wed, 29 Oct 2025 20:06:49 -0300 Subject: [PATCH 04/82] zeta2: Improve context search performance (#41501) We'll now perform all searches from the context model concurrently, and combine queries for the same glob into one reducing the total number of project searches. 
For better readability, the debug context view now displays each top-level regex alternation individually, grouped by its corresponding glob: CleanShot 2025-10-29 at 19 56 03@2x Release Notes: - N/A --- Cargo.lock | 1 + crates/zeta2/src/related_excerpts.rs | 495 +++++++++++-------- crates/zeta2/src/zeta2.rs | 1 + crates/zeta2_tools/Cargo.toml | 1 + crates/zeta2_tools/src/zeta2_context_view.rs | 122 +++-- 5 files changed, 388 insertions(+), 232 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1e1ccad1a7dcb3257c9cf471c166655c23056a07..78c972865a4e01ba66357142ff8737b634639b27 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21702,6 +21702,7 @@ dependencies = [ "ordered-float 2.10.1", "pretty_assertions", "project", + "regex-syntax", "serde", "serde_json", "settings", diff --git a/crates/zeta2/src/related_excerpts.rs b/crates/zeta2/src/related_excerpts.rs index 7434dbed9e48bb2dcf98131177dc65b2f3930094..d8fff7e0201716be45451c302c4f83b667727bc2 100644 --- a/crates/zeta2/src/related_excerpts.rs +++ b/crates/zeta2/src/related_excerpts.rs @@ -1,4 +1,7 @@ -use std::{cmp::Reverse, fmt::Write, ops::Range, path::PathBuf, sync::Arc, time::Instant}; +use std::{ + cmp::Reverse, collections::hash_map::Entry, fmt::Write, ops::Range, path::PathBuf, sync::Arc, + time::Instant, +}; use crate::{ ZetaContextRetrievalDebugInfo, ZetaDebugInfo, ZetaSearchQueryDebugInfo, @@ -7,22 +10,31 @@ use crate::{ use anyhow::{Result, anyhow}; use collections::HashMap; use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions, Line}; -use futures::{StreamExt, channel::mpsc, stream::BoxStream}; -use gpui::{App, AsyncApp, Entity, Task}; +use futures::{ + StreamExt, + channel::mpsc::{self, UnboundedSender}, + stream::BoxStream, +}; +use gpui::{App, AppContext, AsyncApp, Entity, Task}; use indoc::indoc; -use language::{Anchor, Bias, Buffer, OffsetRangeExt, Point, TextBufferSnapshot, ToPoint as _}; +use language::{ + Anchor, Bias, Buffer, BufferSnapshot, OffsetRangeExt, Point, 
TextBufferSnapshot, ToPoint as _, +}; use language_model::{ LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelRequestTool, LanguageModelToolResult, MessageContent, Role, + LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role, }; use project::{ Project, WorktreeSettings, search::{SearchQuery, SearchResult}, }; use schemars::JsonSchema; -use serde::Deserialize; -use util::paths::{PathMatcher, PathStyle}; +use serde::{Deserialize, Serialize}; +use util::{ + ResultExt as _, + paths::{PathMatcher, PathStyle}, +}; use workspace::item::Settings as _; const SEARCH_PROMPT: &str = indoc! {r#" @@ -64,22 +76,19 @@ const SEARCH_TOOL_NAME: &str = "search"; /// Search for relevant code /// /// For the best results, run multiple queries at once with a single invocation of this tool. -#[derive(Clone, Deserialize, JsonSchema)] +#[derive(Clone, Deserialize, Serialize, JsonSchema)] pub struct SearchToolInput { /// An array of queries to run for gathering context relevant to the next prediction #[schemars(length(max = 5))] pub queries: Box<[SearchToolQuery]>, } -#[derive(Debug, Clone, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] pub struct SearchToolQuery { /// A glob pattern to match file paths in the codebase pub glob: String, /// A regular expression to match content within the files matched by the glob pattern pub regex: String, - /// Whether the regex is case-sensitive. Defaults to false (case-insensitive). - #[serde(default)] - pub case_sensitive: bool, } const RESULTS_MESSAGE: &str = indoc! 
{" @@ -209,6 +218,8 @@ pub fn find_related_excerpts<'a>( let mut select_request_messages = Vec::with_capacity(5); // initial prompt, LLM response/thinking, tool use, tool result, select prompt select_request_messages.push(initial_prompt_message); + + let mut regex_by_glob: HashMap = HashMap::default(); let mut search_calls = Vec::new(); while let Some(event) = search_stream.next().await { @@ -219,7 +230,18 @@ pub fn find_related_excerpts<'a>( } if tool_use.name.as_ref() == SEARCH_TOOL_NAME { - search_calls.push((select_request_messages.len(), tool_use)); + let input = + serde_json::from_value::(tool_use.input.clone())?; + + for query in input.queries { + let regex = regex_by_glob.entry(query.glob).or_default(); + if !regex.is_empty() { + regex.push('|'); + } + regex.push_str(&query.regex); + } + + search_calls.push(tool_use); } else { log::warn!( "context gathering model tried to use unknown tool: {}", @@ -303,19 +325,35 @@ pub fn find_related_excerpts<'a>( } } - struct ResultBuffer { - buffer: Entity, - snapshot: TextBufferSnapshot, - } - - let search_queries = search_calls - .iter() - .map(|(_, tool_use)| { - Ok(serde_json::from_value::( - tool_use.input.clone(), - )?) + let search_tool_use = if search_calls.is_empty() { + log::warn!("context model ran 0 searches"); + return anyhow::Ok(Default::default()); + } else if search_calls.len() == 1 { + search_calls.swap_remove(0) + } else { + // In theory, the model could perform multiple search calls + // Dealing with them separately is not worth it when it doesn't happen in practice. + // If it were to happen, here we would combine them into one. 
+ // The second request doesn't need to know it was actually two different calls ;) + let input = serde_json::to_value(&SearchToolInput { + queries: regex_by_glob + .iter() + .map(|(glob, regex)| SearchToolQuery { + glob: glob.clone(), + regex: regex.clone(), + }) + .collect(), }) - .collect::>>()?; + .unwrap_or_default(); + + LanguageModelToolUse { + id: search_calls.swap_remove(0).id, + name: SELECT_TOOL_NAME.into(), + raw_input: serde_json::to_string(&input).unwrap_or_default(), + input, + is_input_complete: true, + } + }; if let Some(debug_tx) = &debug_tx { debug_tx @@ -323,114 +361,132 @@ pub fn find_related_excerpts<'a>( ZetaSearchQueryDebugInfo { project: project.clone(), timestamp: Instant::now(), - queries: search_queries + queries: regex_by_glob .iter() - .flat_map(|call| call.queries.iter().cloned()) + .map(|(glob, regex)| SearchToolQuery { + glob: glob.clone(), + regex: regex.clone(), + }) .collect(), }, )) .ok(); } - let mut result_buffers_by_path = HashMap::default(); - - for ((index, tool_use), call) in search_calls.into_iter().zip(search_queries).rev() { - let mut excerpts_by_buffer = HashMap::default(); - - for query in call.queries { - // TODO [zeta2] parallelize? 
+ let (results_tx, mut results_rx) = mpsc::unbounded(); + for (glob, regex) in regex_by_glob { + let exclude_matcher = exclude_matcher.clone(); + let results_tx = results_tx.clone(); + let project = project.clone(); + cx.spawn(async move |cx| { run_query( - query, - &mut excerpts_by_buffer, + &glob, + ®ex, + results_tx.clone(), path_style, - exclude_matcher.clone(), + exclude_matcher, &project, cx, ) - .await?; - } - - if excerpts_by_buffer.is_empty() { - continue; - } - - let mut merged_result = RESULTS_MESSAGE.to_string(); - - for (buffer_entity, mut excerpts_for_buffer) in excerpts_by_buffer { - excerpts_for_buffer.sort_unstable_by_key(|range| (range.start, Reverse(range.end))); - - buffer_entity - .clone() - .read_with(cx, |buffer, cx| { - let Some(file) = buffer.file() else { - return; - }; - - let path = file.full_path(cx); - - writeln!(&mut merged_result, "`````filename={}", path.display()).unwrap(); + .await + .log_err(); + }) + .detach() + } + drop(results_tx); - let snapshot = buffer.snapshot(); + struct ResultBuffer { + buffer: Entity, + snapshot: TextBufferSnapshot, + } - write_merged_excerpts( - &snapshot, - excerpts_for_buffer, - &[], - &mut merged_result, - ); + let (result_buffers_by_path, merged_result) = cx + .background_spawn(async move { + let mut excerpts_by_buffer: HashMap, MatchedBuffer> = + HashMap::default(); + + while let Some((buffer, matched)) = results_rx.next().await { + match excerpts_by_buffer.entry(buffer) { + Entry::Occupied(mut entry) => { + let entry = entry.get_mut(); + entry.full_path = matched.full_path; + entry.snapshot = matched.snapshot; + entry.line_ranges.extend(matched.line_ranges); + } + Entry::Vacant(entry) => { + entry.insert(matched); + } + } + } - merged_result.push_str("`````\n\n"); + let mut result_buffers_by_path = HashMap::default(); + let mut merged_result = RESULTS_MESSAGE.to_string(); + + for (buffer, mut matched) in excerpts_by_buffer { + matched + .line_ranges + .sort_unstable_by_key(|range| (range.start, 
Reverse(range.end))); + + writeln!( + &mut merged_result, + "`````filename={}", + matched.full_path.display() + ) + .unwrap(); + write_merged_excerpts( + &matched.snapshot, + matched.line_ranges, + &[], + &mut merged_result, + ); + merged_result.push_str("`````\n\n"); + + result_buffers_by_path.insert( + matched.full_path, + ResultBuffer { + buffer, + snapshot: matched.snapshot.text, + }, + ); + } - result_buffers_by_path.insert( - path, - ResultBuffer { - buffer: buffer_entity, - snapshot: snapshot.text, - }, - ); - }) - .ok(); - } + (result_buffers_by_path, merged_result) + }) + .await; - let tool_result = LanguageModelToolResult { - tool_use_id: tool_use.id.clone(), - tool_name: SEARCH_TOOL_NAME.into(), - is_error: false, - content: merged_result.into(), - output: None, - }; - - // Almost always appends at the end, but in theory, the model could return some text after the tool call - // or perform parallel tool calls, so we splice at the message index for correctness. - select_request_messages.splice( - index..index, - [ - LanguageModelRequestMessage { - role: Role::Assistant, - content: vec![MessageContent::ToolUse(tool_use)], - cache: false, - }, - LanguageModelRequestMessage { - role: Role::User, - content: vec![MessageContent::ToolResult(tool_result)], - cache: false, + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(ZetaDebugInfo::SearchQueriesExecuted( + ZetaContextRetrievalDebugInfo { + project: project.clone(), + timestamp: Instant::now(), }, - ], - ); - - if let Some(debug_tx) = &debug_tx { - debug_tx - .unbounded_send(ZetaDebugInfo::SearchQueriesExecuted( - ZetaContextRetrievalDebugInfo { - project: project.clone(), - timestamp: Instant::now(), - }, - )) - .ok(); - } + )) + .ok(); } + let tool_result = LanguageModelToolResult { + tool_use_id: search_tool_use.id.clone(), + tool_name: SEARCH_TOOL_NAME.into(), + is_error: false, + content: merged_result.into(), + output: None, + }; + + select_request_messages.extend([ + 
LanguageModelRequestMessage { + role: Role::Assistant, + content: vec![MessageContent::ToolUse(search_tool_use)], + cache: false, + }, + LanguageModelRequestMessage { + role: Role::User, + content: vec![MessageContent::ToolResult(tool_result)], + cache: false, + }, + ]); + if result_buffers_by_path.is_empty() { log::trace!("context gathering queries produced no results"); return anyhow::Ok(HashMap::default()); @@ -449,73 +505,85 @@ pub fn find_related_excerpts<'a>( cx, ) .await?; - let mut selected_ranges = Vec::new(); - while let Some(event) = select_stream.next().await { - match event? { - LanguageModelCompletionEvent::ToolUse(tool_use) => { - if !tool_use.is_input_complete { - continue; - } + cx.background_spawn(async move { + let mut selected_ranges = Vec::new(); - if tool_use.name.as_ref() == SELECT_TOOL_NAME { - let call = - serde_json::from_value::(tool_use.input.clone())?; - selected_ranges.extend(call.ranges); - } else { - log::warn!( - "context gathering model tried to use unknown tool: {}", - tool_use.name - ); + while let Some(event) = select_stream.next().await { + match event? { + LanguageModelCompletionEvent::ToolUse(tool_use) => { + if !tool_use.is_input_complete { + continue; + } + + if tool_use.name.as_ref() == SELECT_TOOL_NAME { + let call = + serde_json::from_value::(tool_use.input.clone())?; + selected_ranges.extend(call.ranges); + } else { + log::warn!( + "context gathering model tried to use unknown tool: {}", + tool_use.name + ); + } + } + ev @ LanguageModelCompletionEvent::ToolUseJsonParseError { .. } => { + log::error!("{ev:?}"); + } + ev => { + log::trace!("context select event: {ev:?}") } - } - ev @ LanguageModelCompletionEvent::ToolUseJsonParseError { .. 
} => { - log::error!("{ev:?}"); - } - ev => { - log::trace!("context select event: {ev:?}") } } - } - if selected_ranges.is_empty() { - log::trace!("context gathering selected no ranges") - } + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(ZetaDebugInfo::SearchResultsFiltered( + ZetaContextRetrievalDebugInfo { + project: project.clone(), + timestamp: Instant::now(), + }, + )) + .ok(); + } - let mut related_excerpts_by_buffer: HashMap<_, Vec<_>> = HashMap::default(); - - for selected_range in selected_ranges { - if let Some(ResultBuffer { buffer, snapshot }) = - result_buffers_by_path.get(&selected_range.path) - { - let start_point = Point::new(selected_range.start_line.saturating_sub(1), 0); - let end_point = - snapshot.clip_point(Point::new(selected_range.end_line, 0), Bias::Left); - let range = snapshot.anchor_after(start_point)..snapshot.anchor_before(end_point); - - related_excerpts_by_buffer - .entry(buffer.clone()) - .or_default() - .push(range); - } else { - log::warn!( - "selected path that wasn't included in search results: {}", - selected_range.path.display() - ); + if selected_ranges.is_empty() { + log::trace!("context gathering selected no ranges") } - } - for (buffer, ranges) in &mut related_excerpts_by_buffer { - buffer.read_with(cx, |buffer, _cx| { - ranges.sort_unstable_by(|a, b| { - a.start - .cmp(&b.start, buffer) - .then(b.end.cmp(&a.end, buffer)) - }); - })?; - } + selected_ranges.sort_unstable_by(|a, b| { + a.start_line + .cmp(&b.start_line) + .then(b.end_line.cmp(&a.end_line)) + }); + + let mut related_excerpts_by_buffer: HashMap<_, Vec<_>> = HashMap::default(); + + for selected_range in selected_ranges { + if let Some(ResultBuffer { buffer, snapshot }) = + result_buffers_by_path.get(&selected_range.path) + { + let start_point = Point::new(selected_range.start_line.saturating_sub(1), 0); + let end_point = + snapshot.clip_point(Point::new(selected_range.end_line, 0), Bias::Left); + let range = + 
snapshot.anchor_after(start_point)..snapshot.anchor_before(end_point); + + related_excerpts_by_buffer + .entry(buffer.clone()) + .or_default() + .push(range); + } else { + log::warn!( + "selected path that wasn't included in search results: {}", + selected_range.path.display() + ); + } + } - anyhow::Ok(related_excerpts_by_buffer) + anyhow::Ok(related_excerpts_by_buffer) + }) + .await }) } @@ -549,20 +617,27 @@ const MIN_EXCERPT_LEN: usize = 16; const MAX_EXCERPT_LEN: usize = 768; const MAX_RESULT_BYTES_PER_QUERY: usize = MAX_EXCERPT_LEN * 5; +struct MatchedBuffer { + snapshot: BufferSnapshot, + line_ranges: Vec>, + full_path: PathBuf, +} + async fn run_query( - args: SearchToolQuery, - excerpts_by_buffer: &mut HashMap, Vec>>, + glob: &str, + regex: &str, + results_tx: UnboundedSender<(Entity, MatchedBuffer)>, path_style: PathStyle, exclude_matcher: PathMatcher, project: &Entity, cx: &mut AsyncApp, ) -> Result<()> { - let include_matcher = PathMatcher::new(vec![args.glob], path_style)?; + let include_matcher = PathMatcher::new(vec![glob], path_style)?; let query = SearchQuery::regex( - &args.regex, + regex, false, - args.case_sensitive, + true, false, true, include_matcher, @@ -581,42 +656,56 @@ async fn run_query( continue; } - let excerpts_for_buffer = excerpts_by_buffer - .entry(buffer.clone()) - .or_insert_with(|| Vec::with_capacity(ranges.len())); + let Some((snapshot, full_path)) = buffer.read_with(cx, |buffer, cx| { + Some((buffer.snapshot(), buffer.file()?.full_path(cx))) + })? 
+ else { + continue; + }; - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?; + let results_tx = results_tx.clone(); + cx.background_spawn(async move { + let mut line_ranges = Vec::with_capacity(ranges.len()); - for range in ranges { - let offset_range = range.to_offset(&snapshot); - let query_point = (offset_range.start + offset_range.len() / 2).to_point(&snapshot); + for range in ranges { + let offset_range = range.to_offset(&snapshot); + let query_point = (offset_range.start + offset_range.len() / 2).to_point(&snapshot); - if total_bytes + MIN_EXCERPT_LEN >= MAX_RESULT_BYTES_PER_QUERY { - break; - } + if total_bytes + MIN_EXCERPT_LEN >= MAX_RESULT_BYTES_PER_QUERY { + break; + } + + let excerpt = EditPredictionExcerpt::select_from_buffer( + query_point, + &snapshot, + &EditPredictionExcerptOptions { + max_bytes: MAX_EXCERPT_LEN.min(MAX_RESULT_BYTES_PER_QUERY - total_bytes), + min_bytes: MIN_EXCERPT_LEN, + target_before_cursor_over_total_bytes: 0.5, + }, + None, + ); - let excerpt = EditPredictionExcerpt::select_from_buffer( - query_point, - &snapshot, - &EditPredictionExcerptOptions { - max_bytes: MAX_EXCERPT_LEN.min(MAX_RESULT_BYTES_PER_QUERY - total_bytes), - min_bytes: MIN_EXCERPT_LEN, - target_before_cursor_over_total_bytes: 0.5, - }, - None, - ); - - if let Some(excerpt) = excerpt { - total_bytes += excerpt.range.len(); - if !excerpt.line_range.is_empty() { - excerpts_for_buffer.push(excerpt.line_range); + if let Some(excerpt) = excerpt { + total_bytes += excerpt.range.len(); + if !excerpt.line_range.is_empty() { + line_ranges.push(excerpt.line_range); + } } } - } - if excerpts_for_buffer.is_empty() { - excerpts_by_buffer.remove(&buffer); - } + results_tx + .unbounded_send(( + buffer, + MatchedBuffer { + snapshot, + line_ranges, + full_path, + }, + )) + .log_err(); + }) + .detach(); } anyhow::Ok(()) diff --git a/crates/zeta2/src/zeta2.rs b/crates/zeta2/src/zeta2.rs index 
81fc4172592c59ca47527c545ee8d57398ae1247..b6311f9d25dfc91c078f6614b344eb91cabd51eb 100644 --- a/crates/zeta2/src/zeta2.rs +++ b/crates/zeta2/src/zeta2.rs @@ -138,6 +138,7 @@ pub enum ZetaDebugInfo { ContextRetrievalStarted(ZetaContextRetrievalDebugInfo), SearchQueriesGenerated(ZetaSearchQueryDebugInfo), SearchQueriesExecuted(ZetaContextRetrievalDebugInfo), + SearchResultsFiltered(ZetaContextRetrievalDebugInfo), ContextRetrievalFinished(ZetaContextRetrievalDebugInfo), EditPredicted(ZetaEditPredictionDebugInfo), } diff --git a/crates/zeta2_tools/Cargo.toml b/crates/zeta2_tools/Cargo.toml index 0877ee6f4661e7dcdbbae5241702951746b74725..703dbd08b20184c6cd09f68e41cabbc296309483 100644 --- a/crates/zeta2_tools/Cargo.toml +++ b/crates/zeta2_tools/Cargo.toml @@ -30,6 +30,7 @@ project.workspace = true serde.workspace = true telemetry.workspace = true text.workspace = true +regex-syntax = "0.8.8" ui.workspace = true ui_input.workspace = true util.workspace = true diff --git a/crates/zeta2_tools/src/zeta2_context_view.rs b/crates/zeta2_tools/src/zeta2_context_view.rs index 421328df2c3f39d61352290c0ca5fd34ff39bb78..0abca0fbf451955c285fe3a9df482c507dc4ff10 100644 --- a/crates/zeta2_tools/src/zeta2_context_view.rs +++ b/crates/zeta2_tools/src/zeta2_context_view.rs @@ -20,12 +20,11 @@ use project::Project; use text::OffsetRangeExt; use ui::{ ButtonCommon, Clickable, Color, Disableable, FluentBuilder as _, Icon, IconButton, IconName, - IconSize, InteractiveElement, IntoElement, ListItem, StyledTypography, div, h_flex, v_flex, + IconSize, InteractiveElement, IntoElement, ListHeader, ListItem, StyledTypography, div, h_flex, + v_flex, }; use workspace::{Item, ItemHandle as _}; -use zeta2::{ - SearchToolQuery, Zeta, ZetaContextRetrievalDebugInfo, ZetaDebugInfo, ZetaSearchQueryDebugInfo, -}; +use zeta2::{Zeta, ZetaContextRetrievalDebugInfo, ZetaDebugInfo, ZetaSearchQueryDebugInfo}; pub struct Zeta2ContextView { empty_focus_handle: FocusHandle, @@ -37,15 +36,22 @@ pub struct 
Zeta2ContextView { } #[derive(Debug)] -pub struct RetrievalRun { +struct RetrievalRun { editor: Entity, - search_queries: Vec, + search_queries: Vec, started_at: Instant, search_results_generated_at: Option, search_results_executed_at: Option, + search_results_filtered_at: Option, finished_at: Option, } +#[derive(Debug)] +struct GlobQueries { + glob: String, + alternations: Vec, +} + actions!( dev, [ @@ -108,6 +114,11 @@ impl Zeta2ContextView { self.handle_search_queries_executed(info, window, cx); } } + ZetaDebugInfo::SearchResultsFiltered(info) => { + if info.project == self.project { + self.handle_search_results_filtered(info, window, cx); + } + } ZetaDebugInfo::ContextRetrievalFinished(info) => { if info.project == self.project { self.handle_context_retrieval_finished(info, window, cx); @@ -145,6 +156,7 @@ impl Zeta2ContextView { started_at: info.timestamp, search_results_generated_at: None, search_results_executed_at: None, + search_results_filtered_at: None, finished_at: None, }); @@ -202,7 +214,23 @@ impl Zeta2ContextView { }; run.search_results_generated_at = Some(info.timestamp); - run.search_queries = info.queries; + run.search_queries = info + .queries + .into_iter() + .map(|query| { + let mut regex_parser = regex_syntax::ast::parse::Parser::new(); + + GlobQueries { + glob: query.glob, + alternations: match regex_parser.parse(&query.regex) { + Ok(regex_syntax::ast::Ast::Alternation(ref alt)) => { + alt.asts.iter().map(|ast| ast.to_string()).collect() + } + _ => vec![query.regex], + }, + } + }) + .collect(); cx.notify(); } @@ -225,6 +253,20 @@ impl Zeta2ContextView { cx.notify(); } + fn handle_search_results_filtered( + &mut self, + info: ZetaContextRetrievalDebugInfo, + _window: &mut Window, + cx: &mut Context, + ) { + let Some(run) = self.runs.back_mut() else { + return; + }; + + run.search_results_filtered_at = Some(info.timestamp); + cx.notify(); + } + fn handle_go_back( &mut self, _: &Zeta2ContextGoBack, @@ -255,28 +297,37 @@ impl Zeta2ContextView { 
let run = &self.runs[self.current_ix]; h_flex() + .p_2() .w_full() .font_buffer(cx) .text_xs() .border_t_1() + .gap_2() .child( - v_flex() - .h_full() - .flex_1() - .children(run.search_queries.iter().enumerate().map(|(ix, query)| { - ListItem::new(ix) - .start_slot( - Icon::new(IconName::MagnifyingGlass) - .color(Color::Muted) - .size(IconSize::Small), - ) - .child(query.regex.clone()) - })), + v_flex().h_full().flex_1().children( + run.search_queries + .iter() + .enumerate() + .flat_map(|(ix, query)| { + std::iter::once(ListHeader::new(query.glob.clone()).into_any_element()) + .chain(query.alternations.iter().enumerate().map( + move |(alt_ix, alt)| { + ListItem::new(ix * 100 + alt_ix) + .start_slot( + Icon::new(IconName::MagnifyingGlass) + .color(Color::Muted) + .size(IconSize::Small), + ) + .child(alt.clone()) + .into_any_element() + }, + )) + }), + ), ) .child( v_flex() .h_full() - .pr_2() .text_align(TextAlign::Right) .child( h_flex() @@ -325,25 +376,38 @@ impl Zeta2ContextView { ), ) .map(|mut div| { + let pending_message = |div: ui::Div, msg: &'static str| { + if is_latest { + return div.child(msg); + } else { + return div.child("Canceled"); + } + }; + let t0 = run.started_at; let Some(t1) = run.search_results_generated_at else { - return div.child("Planning search..."); + return pending_message(div, "Planning search..."); }; div = div.child(format!("Planned search: {:>5} ms", (t1 - t0).as_millis())); let Some(t2) = run.search_results_executed_at else { - return div.child("Running search..."); + return pending_message(div, "Running search..."); }; div = div.child(format!("Ran search: {:>5} ms", (t2 - t1).as_millis())); - let Some(t3) = run.finished_at else { - if is_latest { - return div.child("Filtering results..."); - } else { - return div.child("Canceled"); - } + let Some(t3) = run.search_results_filtered_at else { + return pending_message(div, "Filtering results..."); + }; + div = + div.child(format!("Filtered results: {:>5} ms", (t3 - t2).as_millis())); 
+ + let Some(t4) = run.finished_at else { + return pending_message(div, "Building excerpts"); }; - div.child(format!("Filtered results: {:>5} ms", (t3 - t2).as_millis())) + div = div + .child(format!("Build excerpts: {:>5} µs", (t4 - t3).as_micros())) + .child(format!("Total: {:>5} ms", (t4 - t0).as_millis())); + div }), ) } From 802b0e4968c275a368532172af2e143922167437 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 29 Oct 2025 22:07:38 -0300 Subject: [PATCH 05/82] docs: Add content about EP with Codestral (#41507) This was missing after we added support to Codestral as an edit prediction provider. Release Notes: - N/A --- docs/src/ai/edit-prediction.md | 35 +++++++++++++++++++++++++++------- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index 07f68dadce41ab89f210873aadd10d1ca65fcef3..4104bdbda3b4697fdcc1e30530c3a8e53111c102 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -4,8 +4,7 @@ Edit Prediction is Zed's mechanism for predicting the code you want to write thr Each keystroke sends a new request to the edit prediction provider, which returns individual or multi-line suggestions that can be quickly accepted by pressing `tab`. The default provider is [Zeta, a proprietary open source and open dataset model](https://huggingface.co/zed-industries/zeta), which [requires being signed into Zed](../authentication.md#what-features-require-signing-in). - -Alternatively, you can use other providers like [GitHub Copilot](#github-copilot) (or [Enterprise](#github-copilot-enterprise)) or [Supermaven](#supermaven). +Alternatively, you can also use [other providers](#other-providers) like GitHub Copilot and Codestral. 
## Configuring Zeta @@ -257,7 +256,12 @@ To completely turn off edit prediction across all providers, explicitly set the }, ``` -## Configuring GitHub Copilot {#github-copilot} +## Configuring Other Providers {#other-providers} + +Zed's Edit Prediction also works with other completion model providers aside from Zeta. +Learn about the available ones below. + +### GitHub Copilot {#github-copilot} To use GitHub Copilot as your provider, set this within `settings.json`: @@ -271,7 +275,7 @@ To use GitHub Copilot as your provider, set this within `settings.json`: You should be able to sign-in to GitHub Copilot by clicking on the Copilot icon in the status bar and following the setup instructions. -### Using GitHub Copilot Enterprise {#github-copilot-enterprise} +#### Using GitHub Copilot Enterprise If your organization uses GitHub Copilot Enterprise, you can configure Zed to use your enterprise instance by specifying the enterprise URI in your `settings.json`: @@ -287,14 +291,16 @@ If your organization uses GitHub Copilot Enterprise, you can configure Zed to us Replace `"https://your.enterprise.domain"` with the URL provided by your GitHub Enterprise administrator (e.g., `https://foo.ghe.com`). -Once set, Zed will route Copilot requests through your enterprise endpoint. When you sign in by clicking the Copilot icon in the status bar, you will be redirected to your configured enterprise URL to complete authentication. All other Copilot features and usage remain the same. +Once set, Zed will route Copilot requests through your enterprise endpoint. +When you sign in by clicking the Copilot icon in the status bar, you will be redirected to your configured enterprise URL to complete authentication. +All other Copilot features and usage remain the same. 
Copilot can provide multiple completion alternatives, and these can be navigated with the following actions: - {#action editor::NextEditPrediction} ({#kb editor::NextEditPrediction}): To cycle to the next edit prediction - {#action editor::PreviousEditPrediction} ({#kb editor::PreviousEditPrediction}): To cycle to the previous edit prediction -## Configuring Supermaven {#supermaven} +### Supermaven {#supermaven} To use Supermaven as your provider, set this within `settings.json`: @@ -308,6 +314,21 @@ To use Supermaven as your provider, set this within `settings.json`: You should be able to sign-in to Supermaven by clicking on the Supermaven icon in the status bar and following the setup instructions. +### Configuring Supermaven {#supermaven} + +To use Mistral's Codestral as your provider, start by going to the Agent Panel settings view by running the {#action agent::OpenSettings} action. +Look for the Mistral item and add a Codestral API key in the corresponding text input. + +After that, you should be able to switch your provider to it in your `settings.json` file: + +```json [settings] +{ + "features": { + "edit_prediction_provider": "codestral" + } +} +``` + ## See also -You may also use the [Agent Panel](./agent-panel.md) or the [Inline Assistant](./inline-assistant.md) to interact with language models, see the [AI documentation](./overview.md) for more information on the other AI features in Zed. +To learn about other ways to interact with AI in Zed, you may also want to see more about the [Agent Panel](./agent-panel.md) or the [Inline Assistant](./inline-assistant.md) feature. 
From 992448b560f0e8ee00323c177564ac8ecce7b509 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 29 Oct 2025 22:16:13 -0300 Subject: [PATCH 06/82] edit prediction: Add ability to switch providers from the status bar menu (#41504) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/zed-industries/zed/issues/41500 Screenshot 2025-10-29 at 9  43@2x Release Notes: - Added the ability to switch between configured edit prediction providers through the status bar menu. --- crates/codestral/src/codestral.rs | 8 + .../src/edit_prediction_button.rs | 164 ++++++++++++++---- .../src/provider/copilot_chat.rs | 5 +- .../language_models/src/provider/mistral.rs | 4 +- crates/zed/src/zed.rs | 1 + 5 files changed, 144 insertions(+), 38 deletions(-) diff --git a/crates/codestral/src/codestral.rs b/crates/codestral/src/codestral.rs index fe6b6678c99181facc4269df187c32c5a72ab565..e439cfb974fb55f4d30e5eb4be5c0dfa0d77c3d3 100644 --- a/crates/codestral/src/codestral.rs +++ b/crates/codestral/src/codestral.rs @@ -66,6 +66,14 @@ impl CodestralCompletionProvider { Self::api_key(cx).is_some() } + /// This is so we can immediately show Codestral as a provider users can + /// switch to in the edit prediction menu, if the API has been added + pub fn ensure_api_key_loaded(http_client: Arc, cx: &mut App) { + MistralLanguageModelProvider::global(http_client, cx) + .load_codestral_api_key(cx) + .detach(); + } + fn api_key(cx: &App) -> Option> { MistralLanguageModelProvider::try_global(cx) .and_then(|provider| provider.codestral_api_key(CODESTRAL_API_URL, cx)) diff --git a/crates/edit_prediction_button/src/edit_prediction_button.rs b/crates/edit_prediction_button/src/edit_prediction_button.rs index 8b9bfc1c50092b65892cfcee9f4da1aeb2a0993e..70c861ab1112630c2e3293cb54a4e96c6754b3bd 100644 --- a/crates/edit_prediction_button/src/edit_prediction_button.rs +++ 
b/crates/edit_prediction_button/src/edit_prediction_button.rs @@ -1,5 +1,5 @@ use anyhow::Result; -use client::{UserStore, zed_urls}; +use client::{Client, UserStore, zed_urls}; use cloud_llm_client::UsageLimit; use codestral::CodestralCompletionProvider; use copilot::{Copilot, Status}; @@ -192,6 +192,7 @@ impl Render for EditPredictionButton { Some(ContextMenu::build(window, cx, |menu, _, _| { let fs = fs.clone(); let activate_url = activate_url.clone(); + menu.entry("Sign In", None, move |_, cx| { cx.open_url(activate_url.as_str()) }) @@ -244,15 +245,8 @@ impl Render for EditPredictionButton { } else { Some(ContextMenu::build(window, cx, |menu, _, _| { let fs = fs.clone(); - menu.entry("Use Zed AI instead", None, move |_, cx| { - set_completion_provider( - fs.clone(), - cx, - EditPredictionProvider::Zed, - ) - }) - .separator() - .entry( + + menu.entry( "Configure Codestral API Key", None, move |window, cx| { @@ -262,6 +256,18 @@ impl Render for EditPredictionButton { ); }, ) + .separator() + .entry( + "Use Zed AI instead", + None, + move |_, cx| { + set_completion_provider( + fs.clone(), + cx, + EditPredictionProvider::Zed, + ) + }, + ) })) } }) @@ -412,6 +418,7 @@ impl EditPredictionButton { fs: Arc, user_store: Entity, popover_menu_handle: PopoverMenuHandle, + client: Arc, cx: &mut Context, ) -> Self { if let Some(copilot) = Copilot::global(cx) { @@ -421,6 +428,8 @@ impl EditPredictionButton { cx.observe_global::(move |_, cx| cx.notify()) .detach(); + CodestralCompletionProvider::ensure_api_key_loaded(client.http_client(), cx); + Self { editor_subscription: None, editor_enabled: None, @@ -435,6 +444,89 @@ impl EditPredictionButton { } } + fn get_available_providers(&self, cx: &App) -> Vec { + let mut providers = Vec::new(); + + providers.push(EditPredictionProvider::Zed); + + if let Some(copilot) = Copilot::global(cx) { + if matches!(copilot.read(cx).status(), Status::Authorized) { + providers.push(EditPredictionProvider::Copilot); + } + } + + if let 
Some(supermaven) = Supermaven::global(cx) { + if let Supermaven::Spawned(agent) = supermaven.read(cx) { + if matches!(agent.account_status, AccountStatus::Ready) { + providers.push(EditPredictionProvider::Supermaven); + } + } + } + + if CodestralCompletionProvider::has_api_key(cx) { + providers.push(EditPredictionProvider::Codestral); + } + + providers + } + + fn add_provider_switching_section( + &self, + mut menu: ContextMenu, + current_provider: EditPredictionProvider, + cx: &App, + ) -> ContextMenu { + let available_providers = self.get_available_providers(cx); + + let other_providers: Vec<_> = available_providers + .into_iter() + .filter(|p| *p != current_provider && *p != EditPredictionProvider::None) + .collect(); + + if !other_providers.is_empty() { + menu = menu.separator().header("Switch Providers"); + + for provider in other_providers { + let fs = self.fs.clone(); + + menu = match provider { + EditPredictionProvider::Zed => menu.item( + ContextMenuEntry::new("Zed AI") + .documentation_aside( + DocumentationSide::Left, + DocumentationEdge::Top, + |_| { + Label::new("Zed's edit prediction is powered by Zeta, an open-source, dataset mode.") + .into_any_element() + }, + ) + .handler(move |_, cx| { + set_completion_provider(fs.clone(), cx, provider); + }), + ), + EditPredictionProvider::Copilot => { + menu.entry("GitHub Copilot", None, move |_, cx| { + set_completion_provider(fs.clone(), cx, provider); + }) + } + EditPredictionProvider::Supermaven => { + menu.entry("Supermaven", None, move |_, cx| { + set_completion_provider(fs.clone(), cx, provider); + }) + } + EditPredictionProvider::Codestral => { + menu.entry("Codestral", None, move |_, cx| { + set_completion_provider(fs.clone(), cx, provider); + }) + } + EditPredictionProvider::None => continue, + }; + } + } + + menu + } + pub fn build_copilot_start_menu( &mut self, window: &mut Window, @@ -572,8 +664,10 @@ impl EditPredictionButton { } menu = menu.separator().header("Privacy"); + if let Some(provider) = 
&self.edit_prediction_provider { let data_collection = provider.data_collection_state(cx); + if data_collection.is_supported() { let provider = provider.clone(); let enabled = data_collection.is_enabled(); @@ -691,7 +785,7 @@ impl EditPredictionButton { } }), ).item( - ContextMenuEntry::new("View Documentation") + ContextMenuEntry::new("View Docs") .icon(IconName::FileGeneric) .icon_color(Color::Muted) .handler(move |_, cx| { @@ -711,6 +805,7 @@ impl EditPredictionButton { if let Some(editor_focus_handle) = self.editor_focus_handle.clone() { menu = menu .separator() + .header("Actions") .entry( "Predict Edit at Cursor", Some(Box::new(ShowEditPrediction)), @@ -721,7 +816,11 @@ impl EditPredictionButton { } }, ) - .context(editor_focus_handle); + .context(editor_focus_handle) + .when( + cx.has_flag::(), + |this| this.action("Rate Completions", RateCompletions.boxed_clone()), + ); } menu @@ -733,15 +832,11 @@ impl EditPredictionButton { cx: &mut Context, ) -> Entity { ContextMenu::build(window, cx, |menu, window, cx| { - self.build_language_settings_menu(menu, window, cx) - .separator() - .entry("Use Zed AI instead", None, { - let fs = self.fs.clone(); - move |_window, cx| { - set_completion_provider(fs.clone(), cx, EditPredictionProvider::Zed) - } - }) - .separator() + let menu = self.build_language_settings_menu(menu, window, cx); + let menu = + self.add_provider_switching_section(menu, EditPredictionProvider::Copilot, cx); + + menu.separator() .link( "Go to Copilot Settings", OpenBrowser { @@ -759,8 +854,11 @@ impl EditPredictionButton { cx: &mut Context, ) -> Entity { ContextMenu::build(window, cx, |menu, window, cx| { - self.build_language_settings_menu(menu, window, cx) - .separator() + let menu = self.build_language_settings_menu(menu, window, cx); + let menu = + self.add_provider_switching_section(menu, EditPredictionProvider::Supermaven, cx); + + menu.separator() .action("Sign Out", supermaven::SignOut.boxed_clone()) }) } @@ -770,14 +868,12 @@ impl 
EditPredictionButton { window: &mut Window, cx: &mut Context, ) -> Entity { - let fs = self.fs.clone(); ContextMenu::build(window, cx, |menu, window, cx| { - self.build_language_settings_menu(menu, window, cx) - .separator() - .entry("Use Zed AI instead", None, move |_, cx| { - set_completion_provider(fs.clone(), cx, EditPredictionProvider::Zed) - }) - .separator() + let menu = self.build_language_settings_menu(menu, window, cx); + let menu = + self.add_provider_switching_section(menu, EditPredictionProvider::Codestral, cx); + + menu.separator() .entry("Configure Codestral API Key", None, move |window, cx| { window.dispatch_action(zed_actions::agent::OpenSettings.boxed_clone(), cx); }) @@ -872,10 +968,10 @@ impl EditPredictionButton { .separator(); } - self.build_language_settings_menu(menu, window, cx).when( - cx.has_flag::(), - |this| this.action("Rate Completions", RateCompletions.boxed_clone()), - ) + let menu = self.build_language_settings_menu(menu, window, cx); + let menu = self.add_provider_switching_section(menu, EditPredictionProvider::Zed, cx); + + menu }) } diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index 1941bd903951420266ba5c4609cb34c15130224e..6c665a0c1f06aa44e2b86f96517f7998fc02f4d3 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -1377,11 +1377,12 @@ impl Render for ConfigurationView { v_flex().gap_2().child(Label::new(LABEL)).child( Button::new("sign_in", "Sign in to use GitHub Copilot") + .full_width() + .style(ButtonStyle::Outlined) .icon_color(Color::Muted) .icon(IconName::Github) .icon_position(IconPosition::Start) - .icon_size(IconSize::Medium) - .full_width() + .icon_size(IconSize::Small) .on_click(|_, window, cx| copilot::initiate_sign_in(window, cx)), ) } diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 
66527792ff0b82348457fd28ae04dba60d10de5b..acd4a1c768e0d6ffdffbc3d69dcdc2bfd37fa928 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -753,9 +753,9 @@ struct ConfigurationView { impl ConfigurationView { fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { let api_key_editor = - cx.new(|cx| InputField::new(window, cx, "0aBCDEFGhIjKLmNOpqrSTUVwxyzabCDE1f2")); + cx.new(|cx| InputField::new(window, cx, "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")); let codestral_api_key_editor = - cx.new(|cx| InputField::new(window, cx, "0aBCDEFGhIjKLmNOpqrSTUVwxyzabCDE1f2")); + cx.new(|cx| InputField::new(window, cx, "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")); cx.observe(&state, |_, _, cx| { cx.notify(); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index f71299997930040c848dd6f5c2819185cf8fee81..d712f782ca78745a94ce22c9a57900a8b8e42863 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -388,6 +388,7 @@ pub fn initialize_workspace( app_state.fs.clone(), app_state.user_store.clone(), edit_prediction_menu_handle.clone(), + app_state.client.clone(), cx, ) }); From 64fdc1d5b6917ccde630b2fa9754eb132dc78733 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 29 Oct 2025 22:18:43 -0300 Subject: [PATCH 07/82] docs: Fix Codestral section title in edit prediction page (#41509) Follow up to https://github.com/zed-industries/zed/pull/41507 as I realized I didn't change the title for this section. 
Release Notes: - N/A --- docs/src/ai/edit-prediction.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index 4104bdbda3b4697fdcc1e30530c3a8e53111c102..0ec26266438e4a6e0d8308efb5357cc73bb47c4b 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -314,7 +314,7 @@ To use Supermaven as your provider, set this within `settings.json`: You should be able to sign-in to Supermaven by clicking on the Supermaven icon in the status bar and following the setup instructions. -### Configuring Supermaven {#supermaven} +### Codestral {#codestral} To use Mistral's Codestral as your provider, start by going to the the Agent Panel settings view by running the {#action agent::OpenSettings} action. Look for the Mistral item and add a Codestral API key in the corresponding text input. From 277ae27ca29c680ec58aa8a925887355554c359f Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 29 Oct 2025 19:28:43 -0600 Subject: [PATCH 08/82] Use gh-workflow for tests (take 2) (#41420) This re-implements the reverted commit 8b051d6cc3c7c3bcda16702f30dc0fabe7b9f881. 
Closes #ISSUE Release Notes: - N/A --------- Co-authored-by: Ben Kunkle --- .github/workflows/ci.yml | 18 +- .github/workflows/nix_build.yml | 42 +- .github/workflows/release_nightly.yml | 10 +- .github/workflows/run_bundling.yml | 9 +- .github/workflows/run_tests.yml | 549 ++++++++++++++++++ .github/workflows/script_checks.yml | 21 - docs/src/vim.md | 2 +- script/prettier | 17 + tooling/xtask/src/tasks/workflows.rs | 5 +- tooling/xtask/src/tasks/workflows/danger.rs | 2 +- .../xtask/src/tasks/workflows/nix_build.rs | 46 -- .../src/tasks/workflows/release_nightly.rs | 33 +- .../xtask/src/tasks/workflows/run_bundling.rs | 12 +- .../xtask/src/tasks/workflows/run_tests.rs | 474 +++++++++++++++ tooling/xtask/src/tasks/workflows/runners.rs | 7 +- tooling/xtask/src/tasks/workflows/steps.rs | 113 +++- tooling/xtask/src/tasks/workflows/vars.rs | 53 +- 17 files changed, 1266 insertions(+), 147 deletions(-) create mode 100644 .github/workflows/run_tests.yml delete mode 100644 .github/workflows/script_checks.yml create mode 100755 script/prettier create mode 100644 tooling/xtask/src/tasks/workflows/run_tests.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8fe069c5e060ce5846cfd493f07148b8e4e8d2a5..4e1d5d59c551976c94272b682250e100ed3957ed 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,16 +2,9 @@ name: CI on: push: - branches: - - main - - "v[0-9]+.[0-9]+.x" tags: - "v*" - pull_request: - branches: - - "**" - concurrency: # Allow only one workflow per any non-`main` branch. 
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} @@ -273,15 +266,12 @@ jobs: uses: ./.github/actions/run_tests - name: Build collab + # we should do this on a linux x86 machine run: cargo build -p collab - name: Build other binaries and features run: | - cargo build --workspace --bins --all-features - cargo check -p gpui --features "macos-blade" - cargo check -p workspace - cargo build -p remote_server - cargo check -p gpui --examples + cargo build --workspace --bins --examples # Since the macOS runners are stateful, so we need to remove the config file to prevent potential bug. - name: Clean CI config file @@ -731,10 +721,6 @@ jobs: (contains(github.event.pull_request.labels.*.name, 'run-nix') || needs.job_spec.outputs.run_nix == 'true') secrets: inherit - with: - flake-output: debug - # excludes the final package to only cache dependencies - cachix-filter: "-zed-editor-[0-9.]*-nightly" bundle-windows-x64: timeout-minutes: 120 diff --git a/.github/workflows/nix_build.yml b/.github/workflows/nix_build.yml index cc6bac824f8374cd60bb5ce5dcfb19439ecdc22d..4dd45bd3a740a43785e0284f0b86b2cdef50c1c7 100644 --- a/.github/workflows/nix_build.yml +++ b/.github/workflows/nix_build.yml @@ -1,14 +1,31 @@ # Generated from xtask::workflows::nix_build # Rebuild with `cargo xtask workflows`. 
name: nix_build +env: + CARGO_TERM_COLOR: always + RUST_BACKTRACE: '1' + CARGO_INCREMENTAL: '0' on: - workflow_call: - inputs: - flake-output: - type: string - default: default - cachix-filter: - type: string + pull_request: + branches: + - '**' + paths: + - nix/** + - flake.* + - Cargo.* + - rust-toolchain.toml + - .cargo/config.toml + push: + branches: + - main + - v[0-9]+.[0-9]+.x + paths: + - nix/** + - flake.* + - Cargo.* + - rust-toolchain.toml + - .cargo/config.toml + workflow_call: {} jobs: build_nix_linux_x86_64: if: github.repository_owner == 'zed-industries' @@ -33,9 +50,9 @@ jobs: name: zed authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} cachixArgs: -v - pushFilter: ${{ inputs.cachix-filter }} + pushFilter: -zed-editor-[0-9.]*-nightly - name: nix_build::build - run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config + run: nix build .#debug -L --accept-flake-config shell: bash -euxo pipefail {0} timeout-minutes: 60 continue-on-error: true @@ -63,9 +80,9 @@ jobs: name: zed authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} cachixArgs: -v - pushFilter: ${{ inputs.cachix-filter }} + pushFilter: -zed-editor-[0-9.]*-nightly - name: nix_build::build - run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config + run: nix build .#debug -L --accept-flake-config shell: bash -euxo pipefail {0} - name: nix_build::limit_store run: |- @@ -75,3 +92,6 @@ jobs: shell: bash -euxo pipefail {0} timeout-minutes: 60 continue-on-error: true +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index c593fea4cdf8538704aa6bab3478c2a3d894af41..e341ab5ad0f56d77b23146f484e9033fe31cb03a 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -33,7 +33,7 @@ jobs: shell: bash -euxo pipefail {0} timeout-minutes: 60 run_tests_mac: - if: 
github.repository_owner == 'zed-industries' + if: 'false' runs-on: self-mini-macos steps: - name: steps::checkout_repo @@ -49,6 +49,9 @@ jobs: uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: node-version: '20' + - name: steps::clippy + run: ./script/clippy + shell: bash -euxo pipefail {0} - name: steps::cargo_install_nextest run: cargo install cargo-nextest --locked shell: bash -euxo pipefail {0} @@ -65,7 +68,7 @@ jobs: shell: bash -euxo pipefail {0} timeout-minutes: 60 run_tests_windows: - if: github.repository_owner == 'zed-industries' + if: 'false' runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo @@ -81,6 +84,9 @@ jobs: uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: node-version: '20' + - name: steps::clippy + run: ./script/clippy.ps1 + shell: pwsh - name: steps::cargo_install_nextest run: cargo install cargo-nextest --locked shell: pwsh diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index 98354d0b7895848196ccd58d0d953d2856cfa8c2..9766c7c14b64007692cfb1c68efead5b23382426 100644 --- a/.github/workflows/run_bundling.yml +++ b/.github/workflows/run_bundling.yml @@ -109,10 +109,10 @@ jobs: uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: ./script/linux + - name: steps::setup_linux run: ./script/linux shell: bash -euxo pipefail {0} - - name: ./script/install-mold + - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} - name: ./script/bundle-linux @@ -143,9 +143,12 @@ jobs: uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: ./script/linux + - name: steps::setup_linux run: ./script/linux shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} - name: ./script/bundle-linux run: ./script/bundle-linux shell: bash 
-euxo pipefail {0} diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml new file mode 100644 index 0000000000000000000000000000000000000000..a9d20e2d88aa5bf15396304fa0fb24a5d0655387 --- /dev/null +++ b/.github/workflows/run_tests.yml @@ -0,0 +1,549 @@ +# Generated from xtask::workflows::run_tests +# Rebuild with `cargo xtask workflows`. +name: run_tests +env: + CARGO_TERM_COLOR: always + RUST_BACKTRACE: '1' + CARGO_INCREMENTAL: '0' +on: + pull_request: + branches: + - '**' + push: + branches: + - main + - v[0-9]+.[0-9]+.x +jobs: + orchestrate: + if: github.repository_owner == 'zed-industries' + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }} + - id: filter + name: filter + run: | + if [ -z "$GITHUB_BASE_REF" ]; then + echo "Not in a PR context (i.e., push to main/stable/preview)" + COMPARE_REV="$(git rev-parse HEAD~1)" + else + echo "In a PR context comparing to pull_request.base.ref" + git fetch origin "$GITHUB_BASE_REF" --depth=350 + COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" + fi + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" + + check_pattern() { + local output_name="$1" + local pattern="$2" + local grep_arg="$3" + + echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \ + echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \ + echo "${output_name}=false" >> "$GITHUB_OUTPUT" + } + + check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask' -qP + check_pattern "run_docs" '^docs/' -qP + check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP + check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP + check_pattern "run_tests" 
'^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP + shell: bash -euxo pipefail {0} + outputs: + run_action_checks: ${{ steps.filter.outputs.run_action_checks }} + run_docs: ${{ steps.filter.outputs.run_docs }} + run_licenses: ${{ steps.filter.outputs.run_licenses }} + run_nix: ${{ steps.filter.outputs.run_nix }} + run_tests: ${{ steps.filter.outputs.run_tests }} + check_style: + if: github.repository_owner == 'zed-industries' + runs-on: namespace-profile-4x8-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_pnpm + uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 + with: + version: '9' + - name: ./script/prettier + run: ./script/prettier + shell: bash -euxo pipefail {0} + - name: ./script/check-todos + run: ./script/check-todos + shell: bash -euxo pipefail {0} + - name: ./script/check-keymaps + run: ./script/check-keymaps + shell: bash -euxo pipefail {0} + - name: run_tests::check_style::check_for_typos + uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 + with: + config: ./typos.toml + - name: steps::cargo_fmt + run: cargo fmt --all -- --check + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + run_tests_windows: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: self-32vcpu-windows-2022 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + New-Item -ItemType Directory -Path "./../.cargo" -Force + Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml" + shell: pwsh + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::clippy + run: ./script/clippy.ps1 + shell: pwsh + - name: steps::cargo_install_nextest 
+ run: cargo install cargo-nextest --locked + shell: pwsh + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than.ps1 250 + shell: pwsh + - name: steps::cargo_nextest + run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + shell: pwsh + - name: steps::cleanup_cargo_config + if: always() + run: | + Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue + shell: pwsh + timeout-minutes: 60 + run_tests_linux: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::clippy + run: ./script/clippy + shell: bash -euxo pipefail {0} + - name: steps::cargo_install_nextest + run: cargo install cargo-nextest --locked + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 100 + shell: bash -euxo pipefail {0} + - name: steps::cargo_nextest + run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + shell: bash -euxo pipefail {0} + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + run_tests_mac: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: self-mini-macos + steps: + - name: steps::checkout_repo + uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::clippy + run: ./script/clippy + shell: bash -euxo pipefail {0} + - name: steps::cargo_install_nextest + run: cargo install cargo-nextest --locked + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 + shell: bash -euxo pipefail {0} + - name: steps::cargo_nextest + run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + shell: bash -euxo pipefail {0} + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + doctests: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::cache_rust_dependencies + uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - id: run_doctests + name: run_tests::doctests::run_doctests + run: | + cargo test --workspace --doc --no-fail-fast + shell: bash -euxo pipefail {0} + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail 
{0} + timeout-minutes: 60 + check_workspace_binaries: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: namespace-profile-8x16-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: cargo build -p collab + run: cargo build -p collab + shell: bash -euxo pipefail {0} + - name: cargo build --workspace --bins --examples + run: cargo build --workspace --bins --examples + shell: bash -euxo pipefail {0} + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + check_postgres_and_protobuf_migrations: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: self-mini-macos + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + fetch-depth: 0 + - name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files + run: git clean -df + shell: bash -euxo pipefail {0} + - name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge + run: | + if [ -z "$GITHUB_BASE_REF" ]; + then + echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV" + else + git checkout -B temp + git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp" + echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV" + fi + shell: bash -euxo pipefail {0} + - name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action + uses: bufbuild/buf-setup-action@v1 + with: + version: v1.29.0 + - name: 
run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action + uses: bufbuild/buf-breaking-action@v1 + with: + input: crates/proto/proto/ + against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/ + timeout-minutes: 60 + check_dependencies: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: run_tests::check_dependencies::install_cargo_machete + uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 + with: + command: install + args: cargo-machete@0.7.0 + - name: run_tests::check_dependencies::run_cargo_machete + uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 + with: + command: machete + - name: run_tests::check_dependencies::check_cargo_lock + run: cargo update --locked --workspace + shell: bash -euxo pipefail {0} + - name: run_tests::check_dependencies::check_vulnerable_dependencies + if: github.event_name == 'pull_request' + uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8 + with: + license-check: false + timeout-minutes: 60 + check_docs: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_docs == 'true' + runs-on: namespace-profile-8x16-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies + uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - name: run_tests::check_docs::lychee_link_check + uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 + 
with: + args: --no-progress --exclude '^http' './docs/src/**/*' + fail: true + jobSummary: false + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: run_tests::check_docs::install_mdbook + uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 + with: + mdbook-version: 0.4.37 + - name: run_tests::check_docs::build_docs + run: | + mkdir -p target/deploy + mdbook build ./docs --dest-dir=../target/deploy/docs/ + shell: bash -euxo pipefail {0} + - name: run_tests::check_docs::lychee_link_check + uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 + with: + args: --no-progress --exclude '^http' 'target/deploy/docs' + fail: true + jobSummary: false + timeout-minutes: 60 + check_licenses: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_licenses == 'true' + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: ./script/check-licenses + run: ./script/check-licenses + shell: bash -euxo pipefail {0} + - name: ./script/generate-licenses + run: ./script/generate-licenses + shell: bash -euxo pipefail {0} + check_scripts: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_action_checks == 'true' + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: run_tests::check_scripts::run_shellcheck + run: ./script/shellcheck-scripts error + shell: bash -euxo pipefail {0} + - id: get_actionlint + name: run_tests::check_scripts::download_actionlint + run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) + shell: bash -euxo pipefail {0} + - name: run_tests::check_scripts::run_actionlint + run: | 
+ ${{ steps.get_actionlint.outputs.executable }} -color + shell: bash -euxo pipefail {0} + - name: run_tests::check_scripts::check_xtask_workflows + run: | + cargo xtask workflows + if ! git diff --exit-code .github; then + echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'" + echo "Please run 'cargo xtask workflows' locally and commit the changes" + exit 1 + fi + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + build_nix_linux_x86_64: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_nix == 'true' + runs-on: namespace-profile-32x64-ubuntu-2004 + env: + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} + GIT_LFS_SKIP_SMUDGE: '1' + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: nix_build::install_nix + uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f + with: + github_access_token: ${{ secrets.GITHUB_TOKEN }} + - name: nix_build::cachix_action + uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad + with: + name: zed + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + cachixArgs: -v + pushFilter: -zed-editor-[0-9.]*-nightly + - name: nix_build::build + run: nix build .#debug -L --accept-flake-config + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + continue-on-error: true + build_nix_mac_aarch64: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_nix == 'true' + runs-on: self-mini-macos + env: + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} + GIT_LFS_SKIP_SMUDGE: '1' + steps: + - name: 
steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: nix_build::set_path + run: | + echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" + echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" + shell: bash -euxo pipefail {0} + - name: nix_build::cachix_action + uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad + with: + name: zed + authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} + cachixArgs: -v + pushFilter: -zed-editor-[0-9.]*-nightly + - name: nix_build::build + run: nix build .#debug -L --accept-flake-config + shell: bash -euxo pipefail {0} + - name: nix_build::limit_store + run: |- + if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then + nix-collect-garbage -d || true + fi + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + continue-on-error: true + tests_pass: + needs: + - orchestrate + - check_style + - run_tests_windows + - run_tests_linux + - run_tests_mac + - doctests + - check_workspace_binaries + - check_postgres_and_protobuf_migrations + - check_dependencies + - check_docs + - check_licenses + - check_scripts + - build_nix_linux_x86_64 + - build_nix_mac_aarch64 + if: github.repository_owner == 'zed-industries' && always() + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: run_tests::tests_pass + run: | + set +x + EXIT_CODE=0 + + check_result() { + echo "* $1: $2" + if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi + } + + check_result "orchestrate" "${{ needs.orchestrate.result }}" + check_result "check_style" "${{ needs.check_style.result }}" + check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}" + check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}" + check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}" + check_result "doctests" "${{ needs.doctests.result }}" + check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}" + check_result 
"check_postgres_and_protobuf_migrations" "${{ needs.check_postgres_and_protobuf_migrations.result }}" + check_result "check_dependencies" "${{ needs.check_dependencies.result }}" + check_result "check_docs" "${{ needs.check_docs.result }}" + check_result "check_licenses" "${{ needs.check_licenses.result }}" + check_result "check_scripts" "${{ needs.check_scripts.result }}" + check_result "build_nix_linux_x86_64" "${{ needs.build_nix_linux_x86_64.result }}" + check_result "build_nix_mac_aarch64" "${{ needs.build_nix_mac_aarch64.result }}" + + exit $EXIT_CODE + shell: bash -euxo pipefail {0} +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/script_checks.yml b/.github/workflows/script_checks.yml deleted file mode 100644 index 5dbfc9cb7fa9a51b9e0aca972d125c2a27677584..0000000000000000000000000000000000000000 --- a/.github/workflows/script_checks.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Script - -on: - pull_request: - paths: - - "script/**" - push: - branches: - - main - -jobs: - shellcheck: - name: "ShellCheck Scripts" - if: github.repository_owner == 'zed-industries' - runs-on: namespace-profile-2x4-ubuntu-2404 - - steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - name: Shellcheck ./scripts - run: | - ./script/shellcheck-scripts error diff --git a/docs/src/vim.md b/docs/src/vim.md index 6af563d3555ab0bbc192b8521ce3eb0986c28988..b1c1e7a5853ef75f78612e06f437c3fa8aa6260f 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -628,7 +628,7 @@ Here's an example of these settings changed: // Allow the cursor to reach the edges of the screen "vertical_scroll_margin": 0, "gutter": { - // Disable line numbers completely: + // Disable line numbers completely "line_numbers": false }, "command_aliases": { diff --git a/script/prettier b/script/prettier new file mode 100755 index 
0000000000000000000000000000000000000000..b1d28fb66d70c08a6d03b21be6f168fd0b2da5dc --- /dev/null +++ b/script/prettier @@ -0,0 +1,17 @@ +#!/bin/bash +set -euxo pipefail + +PRETTIER_VERSION=3.5.0 + +pnpm dlx "prettier@${PRETTIER_VERSION}" assets/settings/default.json --check || { + echo "To fix, run from the root of the Zed repo:" + echo " pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --write" + false +} + +cd docs +pnpm dlx "prettier@${PRETTIER_VERSION}" . --check || { + echo "To fix, run from the root of the Zed repo:" + echo " cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .." + false +} diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index b86f31cbd26321998d1b1c26d94459d512e7d817..0fd17088c14d87812e49809461ea97d4f2456960 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -8,6 +8,7 @@ mod nix_build; mod release_nightly; mod run_bundling; +mod run_tests; mod runners; mod steps; mod vars; @@ -20,11 +21,9 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { let workflows = vec![ ("danger.yml", danger::danger()), - ("nix_build.yml", nix_build::nix_build()), ("run_bundling.yml", run_bundling::run_bundling()), ("release_nightly.yml", release_nightly::release_nightly()), - // ("run_tests.yml", run_tests::run_tests()), - // ("release.yml", release::release()), + ("run_tests.yml", run_tests::run_tests()), ]; fs::create_dir_all(dir) .with_context(|| format!("Failed to create directory: {}", dir.display()))?; diff --git a/tooling/xtask/src/tasks/workflows/danger.rs b/tooling/xtask/src/tasks/workflows/danger.rs index e4121d8f5c20852f1d7eda446a2742c82ef80fb9..6ae7543a76480b37ca53d96d3f682c06b3d073e8 100644 --- a/tooling/xtask/src/tasks/workflows/danger.rs +++ b/tooling/xtask/src/tasks/workflows/danger.rs @@ -21,7 +21,7 @@ pub fn danger() -> Workflow { .cond(Expression::new( "github.repository_owner == 'zed-industries'", )) - 
.runs_on(runners::LINUX_CHEAP) + .runs_on(runners::LINUX_SMALL) .add_step(steps::checkout_repo()) .add_step(steps::setup_pnpm()) .add_step( diff --git a/tooling/xtask/src/tasks/workflows/nix_build.rs b/tooling/xtask/src/tasks/workflows/nix_build.rs index 1406f2a1e12518c39d65d208205d6abd3ea38bb4..85d13ff62fdd3c62ba2db47dfb93c750f29c17c2 100644 --- a/tooling/xtask/src/tasks/workflows/nix_build.rs +++ b/tooling/xtask/src/tasks/workflows/nix_build.rs @@ -7,52 +7,6 @@ use super::{runners, steps, steps::named, vars}; use gh_workflow::*; use indoc::indoc; -/// Generates the nix.yml workflow -pub fn nix_build() -> Workflow { - // todo(ci) instead of having these as optional YAML inputs, - // should we just generate two copies of the job (one for release-nightly - // and one for CI?) - let (input_flake_output, flake_output) = vars::input( - "flake-output", - WorkflowCallInput { - input_type: "string".into(), - default: Some("default".into()), - ..Default::default() - }, - ); - let (input_cachix_filter, cachix_filter) = vars::input( - "cachix-filter", - WorkflowCallInput { - input_type: "string".into(), - ..Default::default() - }, - ); - - let linux_x86 = build_nix( - Platform::Linux, - Arch::X86_64, - &input_flake_output, - Some(&input_cachix_filter), - &[], - ); - let mac_arm = build_nix( - Platform::Mac, - Arch::ARM64, - &input_flake_output, - Some(&input_cachix_filter), - &[], - ); - - named::workflow() - .on(Event::default().workflow_call( - WorkflowCall::default() - .add_input(flake_output.0, flake_output.1) - .add_input(cachix_filter.0, cachix_filter.1), - )) - .add_job(linux_x86.name, linux_x86.job) - .add_job(mac_arm.name, mac_arm.job) -} - pub(crate) fn build_nix( platform: Platform, arch: Arch, diff --git a/tooling/xtask/src/tasks/workflows/release_nightly.rs b/tooling/xtask/src/tasks/workflows/release_nightly.rs index 4e203f6934d50ec869068d237bcf8fd193678736..7d7de5b289572c49eeccc103979a7518bec82d44 100644 --- 
a/tooling/xtask/src/tasks/workflows/release_nightly.rs +++ b/tooling/xtask/src/tasks/workflows/release_nightly.rs @@ -1,6 +1,7 @@ use crate::tasks::workflows::{ nix_build::build_nix, run_bundling::bundle_mac, + run_tests::run_platform_tests, runners::{Arch, Platform}, steps::NamedJob, vars::{mac_bundle_envs, windows_bundle_envs}, @@ -32,8 +33,8 @@ pub fn release_nightly() -> Workflow { .collect(); let style = check_style(); - let tests = run_tests(Platform::Mac); - let windows_tests = run_tests(Platform::Windows); + let tests = run_platform_tests(Platform::Mac); + let windows_tests = run_platform_tests(Platform::Windows); let bundle_mac_x86 = bundle_mac_nightly(Arch::X86_64, &[&style, &tests]); let bundle_mac_arm = bundle_mac_nightly(Arch::ARM64, &[&style, &tests]); let linux_x86 = bundle_linux_nightly(Arch::X86_64, &[&style, &tests]); @@ -111,26 +112,6 @@ fn release_job(deps: &[&NamedJob]) -> Job { } } -fn run_tests(platform: Platform) -> NamedJob { - let runner = match platform { - Platform::Windows => runners::WINDOWS_DEFAULT, - Platform::Linux => runners::LINUX_DEFAULT, - Platform::Mac => runners::MAC_DEFAULT, - }; - NamedJob { - name: format!("run_tests_{platform}"), - job: release_job(&[]) - .runs_on(runner) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_cargo_config(platform)) - .add_step(steps::setup_node()) - .add_step(steps::cargo_install_nextest(platform)) - .add_step(steps::clear_target_dir_if_large(platform)) - .add_step(steps::cargo_nextest(platform)) - .add_step(steps::cleanup_cargo_config(platform)), - } -} - fn bundle_mac_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { let platform = Platform::Mac; NamedJob { @@ -150,7 +131,7 @@ fn bundle_mac_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { fn bundle_linux_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { let platform = Platform::Linux; - let mut job = release_job(deps) + let mut job = steps::release_job(deps) .runs_on(arch.linux_bundler()) 
.add_step(steps::checkout_repo()) .add_step(steps::setup_sentry()) @@ -176,7 +157,7 @@ fn bundle_windows_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { let platform = Platform::Windows; NamedJob { name: format!("bundle_windows_nightly_{arch}"), - job: release_job(deps) + job: steps::release_job(deps) .runs_on(runners::WINDOWS_DEFAULT) .envs(windows_bundle_envs()) .add_step(steps::checkout_repo()) @@ -190,8 +171,8 @@ fn bundle_windows_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { fn update_nightly_tag_job(deps: &[&NamedJob]) -> NamedJob { NamedJob { name: "update_nightly_tag".to_owned(), - job: release_job(deps) - .runs_on(runners::LINUX_CHEAP) + job: steps::release_job(deps) + .runs_on(runners::LINUX_SMALL) .add_step(steps::checkout_repo().add_with(("fetch-depth", 0))) .add_step(update_nightly_tag()) .add_step(create_sentry_release()), diff --git a/tooling/xtask/src/tasks/workflows/run_bundling.rs b/tooling/xtask/src/tasks/workflows/run_bundling.rs index ee3d5b2a7558af7b8561952836badcd38d20f01e..2e83678967ca030ec64493ec0d802ba42664496b 100644 --- a/tooling/xtask/src/tasks/workflows/run_bundling.rs +++ b/tooling/xtask/src/tasks/workflows/run_bundling.rs @@ -1,5 +1,5 @@ use crate::tasks::workflows::{ - steps::named, + steps::{FluentBuilder, named}, vars::{mac_bundle_envs, windows_bundle_envs}, }; @@ -76,16 +76,12 @@ fn bundle_linux(arch: runners::Arch) -> Job { vars::GITHUB_SHA, arch.triple() ); - let mut job = bundle_job() + bundle_job() .runs_on(arch.linux_bundler()) .add_step(steps::checkout_repo()) .add_step(steps::setup_sentry()) - .add_step(steps::script("./script/linux")); - // todo(ci) can we do this on arm too? 
- if arch == runners::Arch::X86_64 { - job = job.add_step(steps::script("./script/install-mold")); - } - job.add_step(steps::script("./script/bundle-linux")) + .map(steps::install_linux_dependencies) + .add_step(steps::script("./script/bundle-linux")) .add_step(steps::upload_artifact( &artifact_name, "target/release/zed-*.tar.gz", diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs new file mode 100644 index 0000000000000000000000000000000000000000..8060bafa5aaabc34a3b20cdb29ff606298a82f64 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -0,0 +1,474 @@ +use gh_workflow::{ + Concurrency, Event, Expression, Job, PullRequest, Push, Run, Step, Use, Workflow, +}; +use indexmap::IndexMap; + +use crate::tasks::workflows::{ + nix_build::build_nix, runners::Arch, steps::BASH_SHELL, vars::PathCondition, +}; + +use super::{ + runners::{self, Platform}, + steps::{self, FluentBuilder, NamedJob, named, release_job}, +}; + +pub(crate) fn run_tests() -> Workflow { + // Specify anything which should potentially skip full test suite in this regex: + // - docs/ + // - script/update_top_ranking_issues/ + // - .github/ISSUE_TEMPLATE/ + // - .github/workflows/ (except .github/workflows/ci.yml) + let should_run_tests = PathCondition::inverted( + "run_tests", + r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))", + ); + let should_check_docs = PathCondition::new("run_docs", r"^docs/"); + let should_check_scripts = PathCondition::new( + "run_action_checks", + r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask", + ); + let should_check_licences = + PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)"); + let should_build_nix = PathCondition::new( + "run_nix", + r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)", + ); + + let orchestrate = orchestrate(&[ + &should_check_scripts, + &should_check_docs, + &should_check_licences, + 
&should_build_nix, + &should_run_tests, + ]); + + let jobs = [ + orchestrate, + check_style(), + should_run_tests.guard(run_platform_tests(Platform::Windows)), + should_run_tests.guard(run_platform_tests(Platform::Linux)), + should_run_tests.guard(run_platform_tests(Platform::Mac)), + should_run_tests.guard(doctests()), + should_run_tests.guard(check_workspace_binaries()), + should_run_tests.guard(check_postgres_and_protobuf_migrations()), // could be more specific here? + should_run_tests.guard(check_dependencies()), // could be more specific here? + should_check_docs.guard(check_docs()), + should_check_licences.guard(check_licenses()), + should_check_scripts.guard(check_scripts()), + should_build_nix.guard(build_nix( + Platform::Linux, + Arch::X86_64, + "debug", + // *don't* cache the built output + Some("-zed-editor-[0-9.]*-nightly"), + &[], + )), + should_build_nix.guard(build_nix( + Platform::Mac, + Arch::ARM64, + "debug", + // *don't* cache the built output + Some("-zed-editor-[0-9.]*-nightly"), + &[], + )), + ]; + let tests_pass = tests_pass(&jobs); + + let mut workflow = named::workflow() + .add_event(Event::default() + .push( + Push::default() + .add_branch("main") + .add_branch("v[0-9]+.[0-9]+.x") + ) + .pull_request(PullRequest::default().add_branch("**")) + ) + .concurrency(Concurrency::default() + .group("${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}") + .cancel_in_progress(true) + ) + .add_env(( "CARGO_TERM_COLOR", "always" )) + .add_env(( "RUST_BACKTRACE", 1 )) + .add_env(( "CARGO_INCREMENTAL", 0 )); + for job in jobs { + workflow = workflow.add_job(job.name, job.job) + } + workflow.add_job(tests_pass.name, tests_pass.job) +} + +// Generates a bash script that checks changed files against regex patterns +// and sets GitHub output variables accordingly +fn orchestrate(rules: &[&PathCondition]) -> NamedJob { + let name = "orchestrate".to_owned(); + let step_name = "filter".to_owned(); + let mut 
script = String::new(); + + script.push_str(indoc::indoc! {r#" + if [ -z "$GITHUB_BASE_REF" ]; then + echo "Not in a PR context (i.e., push to main/stable/preview)" + COMPARE_REV="$(git rev-parse HEAD~1)" + else + echo "In a PR context comparing to pull_request.base.ref" + git fetch origin "$GITHUB_BASE_REF" --depth=350 + COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" + fi + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" + + check_pattern() { + local output_name="$1" + local pattern="$2" + local grep_arg="$3" + + echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \ + echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \ + echo "${output_name}=false" >> "$GITHUB_OUTPUT" + } + + "#}); + + let mut outputs = IndexMap::new(); + + for rule in rules { + assert!( + rule.set_by_step + .borrow_mut() + .replace(name.clone()) + .is_none() + ); + assert!( + outputs + .insert( + rule.name.to_owned(), + format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name) + ) + .is_none() + ); + + let grep_arg = if rule.invert { "-qvP" } else { "-qP" }; + script.push_str(&format!( + "check_pattern \"{}\" '{}' {}\n", + rule.name, rule.pattern, grep_arg + )); + } + + let job = Job::default() + .runs_on(runners::LINUX_SMALL) + .cond(Expression::new( + "github.repository_owner == 'zed-industries'", + )) + .outputs(outputs) + .add_step(steps::checkout_repo().add_with(( + "fetch-depth", + "${{ github.ref == 'refs/heads/main' && 2 || 350 }}", + ))) + .add_step( + Step::new(step_name.clone()) + .run(script) + .id(step_name) + .shell(BASH_SHELL), + ); + + NamedJob { name, job } +} + +pub(crate) fn tests_pass(jobs: &[NamedJob]) -> NamedJob { + let mut script = String::from(indoc::indoc! 
{r#" + set +x + EXIT_CODE=0 + + check_result() { + echo "* $1: $2" + if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi + } + + "#}); + + script.push_str( + &jobs + .iter() + .map(|job| { + format!( + "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"", + job.name, job.name + ) + }) + .collect::>() + .join("\n"), + ); + + script.push_str("\n\nexit $EXIT_CODE\n"); + + let job = Job::default() + .runs_on(runners::LINUX_SMALL) + .needs( + jobs.iter() + .map(|j| j.name.to_string()) + .collect::>(), + ) + .cond(Expression::new( + "github.repository_owner == 'zed-industries' && always()", + )) + .add_step(named::bash(&script)); + + named::job(job) +} + +fn check_style() -> NamedJob { + fn check_for_typos() -> Step { + named::uses( + "crate-ci", + "typos", + "80c8a4945eec0f6d464eaf9e65ed98ef085283d1", + ) // v1.38.1 + .with(("config", "./typos.toml")) + } + named::job( + release_job(&[]) + .runs_on(runners::LINUX_MEDIUM) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_pnpm()) + .add_step(steps::script("./script/prettier")) + .add_step(steps::script("./script/check-todos")) + .add_step(steps::script("./script/check-keymaps")) + .add_step(check_for_typos()) + .add_step(steps::cargo_fmt()), + ) +} + +fn check_dependencies() -> NamedJob { + fn install_cargo_machete() -> Step { + named::uses( + "clechasseur", + "rs-cargo", + "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2 + ) + .add_with(("command", "install")) + .add_with(("args", "cargo-machete@0.7.0")) + } + + fn run_cargo_machete() -> Step { + named::uses( + "clechasseur", + "rs-cargo", + "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2 + ) + .add_with(("command", "machete")) + } + + fn check_cargo_lock() -> Step { + named::bash("cargo update --locked --workspace") + } + + fn check_vulnerable_dependencies() -> Step { + named::uses( + "actions", + "dependency-review-action", + "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4 + ) + .if_condition(Expression::new("github.event_name == 
'pull_request'")) + .with(("license-check", false)) + } + + named::job( + release_job(&[]) + .runs_on(runners::LINUX_SMALL) + .add_step(steps::checkout_repo()) + .add_step(install_cargo_machete()) + .add_step(run_cargo_machete()) + .add_step(check_cargo_lock()) + .add_step(check_vulnerable_dependencies()), + ) +} + +fn check_workspace_binaries() -> NamedJob { + named::job( + release_job(&[]) + .runs_on(runners::LINUX_LARGE) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(Platform::Linux)) + .map(steps::install_linux_dependencies) + .add_step(steps::script("cargo build -p collab")) + .add_step(steps::script("cargo build --workspace --bins --examples")) + .add_step(steps::cleanup_cargo_config(Platform::Linux)), + ) +} + +pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob { + let runner = match platform { + Platform::Windows => runners::WINDOWS_DEFAULT, + Platform::Linux => runners::LINUX_DEFAULT, + Platform::Mac => runners::MAC_DEFAULT, + }; + NamedJob { + name: format!("run_tests_{platform}"), + job: release_job(&[]) + .cond(Expression::new("false")) + .runs_on(runner) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(platform)) + .when( + platform == Platform::Linux, + steps::install_linux_dependencies, + ) + .add_step(steps::setup_node()) + .add_step(steps::clippy(platform)) + .add_step(steps::cargo_install_nextest(platform)) + .add_step(steps::clear_target_dir_if_large(platform)) + .add_step(steps::cargo_nextest(platform)) + .add_step(steps::cleanup_cargo_config(platform)), + } +} + +pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob { + fn remove_untracked_files() -> Step { + named::bash("git clean -df") + } + + fn ensure_fresh_merge() -> Step { + named::bash(indoc::indoc! 
{r#" + if [ -z "$GITHUB_BASE_REF" ]; + then + echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV" + else + git checkout -B temp + git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp" + echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV" + fi + "#}) + } + + fn bufbuild_setup_action() -> Step { + named::uses("bufbuild", "buf-setup-action", "v1").add_with(("version", "v1.29.0")) + } + + fn bufbuild_breaking_action() -> Step { + named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/")) + .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/")) + } + + named::job( + release_job(&[]) + .runs_on(runners::MAC_DEFAULT) + .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history + .add_step(remove_untracked_files()) + .add_step(ensure_fresh_merge()) + .add_step(bufbuild_setup_action()) + .add_step(bufbuild_breaking_action()), + ) +} + +fn doctests() -> NamedJob { + fn run_doctests() -> Step { + named::bash(indoc::indoc! 
{r#" + cargo test --workspace --doc --no-fail-fast + "#}) + .id("run_doctests") + } + + named::job( + release_job(&[]) + .runs_on(runners::LINUX_DEFAULT) + .add_step(steps::checkout_repo()) + .add_step(steps::cache_rust_dependencies()) + .map(steps::install_linux_dependencies) + .add_step(steps::setup_cargo_config(Platform::Linux)) + .add_step(run_doctests()) + .add_step(steps::cleanup_cargo_config(Platform::Linux)), + ) +} + +fn check_licenses() -> NamedJob { + named::job( + Job::default() + .runs_on(runners::LINUX_SMALL) + .add_step(steps::checkout_repo()) + .add_step(steps::script("./script/check-licenses")) + .add_step(steps::script("./script/generate-licenses")), + ) +} + +fn check_docs() -> NamedJob { + fn lychee_link_check(dir: &str) -> Step { + named::uses( + "lycheeverse", + "lychee-action", + "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332", + ) // v2.4.1 + .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'"))) + .add_with(("fail", true)) + .add_with(("jobSummary", false)) + } + + fn install_mdbook() -> Step { + named::uses( + "peaceiris", + "actions-mdbook", + "ee69d230fe19748b7abf22df32acaa93833fad08", // v2 + ) + .with(("mdbook-version", "0.4.37")) + } + + fn build_docs() -> Step { + named::bash(indoc::indoc! 
{r#" + mkdir -p target/deploy + mdbook build ./docs --dest-dir=../target/deploy/docs/ + "#}) + } + + named::job( + release_job(&[]) + .runs_on(runners::LINUX_LARGE) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(Platform::Linux)) + // todo(ci): un-inline build_docs/action.yml here + .add_step(steps::cache_rust_dependencies()) + .add_step( + lychee_link_check("./docs/src/**/*"), // check markdown links + ) + .map(steps::install_linux_dependencies) + .add_step(install_mdbook()) + .add_step(build_docs()) + .add_step( + lychee_link_check("target/deploy/docs"), // check links in generated html + ), + ) +} + +fn check_scripts() -> NamedJob { + fn download_actionlint() -> Step { + named::bash( + "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)", + ) + } + + fn run_actionlint() -> Step { + named::bash(indoc::indoc! {r#" + ${{ steps.get_actionlint.outputs.executable }} -color + "#}) + } + + fn run_shellcheck() -> Step { + named::bash("./script/shellcheck-scripts error") + } + + fn check_xtask_workflows() -> Step { + named::bash(indoc::indoc! {r#" + cargo xtask workflows + if ! 
git diff --exit-code .github; then + echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'" + echo "Please run 'cargo xtask workflows' locally and commit the changes" + exit 1 + fi + "#}) + } + + named::job( + release_job(&[]) + .runs_on(runners::LINUX_SMALL) + .add_step(steps::checkout_repo()) + .add_step(run_shellcheck()) + .add_step(download_actionlint().id("get_actionlint")) + .add_step(run_actionlint()) + .add_step(check_xtask_workflows()), + ) +} diff --git a/tooling/xtask/src/tasks/workflows/runners.rs b/tooling/xtask/src/tasks/workflows/runners.rs index 02263ef42bb54dc31c10bfa07a4dde76010fdd85..d001439b175789e709bd733f7660cc3200721d0a 100644 --- a/tooling/xtask/src/tasks/workflows/runners.rs +++ b/tooling/xtask/src/tasks/workflows/runners.rs @@ -1,5 +1,8 @@ -pub const LINUX_CHEAP: Runner = Runner("namespace-profile-2x4-ubuntu-2404"); -pub const LINUX_DEFAULT: Runner = Runner("namespace-profile-16x32-ubuntu-2204"); +pub const LINUX_SMALL: Runner = Runner("namespace-profile-2x4-ubuntu-2404"); +pub const LINUX_DEFAULT: Runner = LINUX_XL; +pub const LINUX_XL: Runner = Runner("namespace-profile-16x32-ubuntu-2204"); +pub const LINUX_LARGE: Runner = Runner("namespace-profile-8x16-ubuntu-2204"); +pub const LINUX_MEDIUM: Runner = Runner("namespace-profile-4x8-ubuntu-2204"); // Using Ubuntu 20.04 for minimal glibc version pub const LINUX_X86_BUNDLER: Runner = Runner("namespace-profile-32x64-ubuntu-2004"); diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index 235fcd64b1e40c8809c4c237f4bbcdcb37874acd..14ee5cc5b50a464d1fcc54d7497906926f85321e 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -2,9 +2,9 @@ use gh_workflow::*; use crate::tasks::workflows::{runners::Platform, vars}; -const BASH_SHELL: &str = "bash -euxo pipefail {0}"; +pub const BASH_SHELL: &str = "bash -euxo pipefail {0}"; // 
https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#jobsjob_idstepsshell -const PWSH_SHELL: &str = "pwsh"; +pub const PWSH_SHELL: &str = "pwsh"; pub fn checkout_repo() -> Step { named::uses( @@ -105,6 +105,34 @@ pub fn clear_target_dir_if_large(platform: Platform) -> Step { } } +pub(crate) fn clippy(platform: Platform) -> Step { + match platform { + Platform::Windows => named::pwsh("./script/clippy.ps1"), + _ => named::bash("./script/clippy"), + } +} + +pub(crate) fn cache_rust_dependencies() -> Step { + named::uses( + "swatinem", + "rust-cache", + "9d47c6ad4b02e050fd481d890b2ea34778fd09d6", // v2 + ) + .with(("save-if", "${{ github.ref == 'refs/heads/main' }}")) +} + +fn setup_linux() -> Step { + named::bash("./script/linux") +} + +fn install_mold() -> Step { + named::bash("./script/install-mold") +} + +pub(crate) fn install_linux_dependencies(job: Job) -> Job { + job.add_step(setup_linux()).add_step(install_mold()) +} + pub fn script(name: &str) -> Step { if name.ends_with(".ps1") { Step::new(name).run(name).shell(PWSH_SHELL) @@ -118,6 +146,87 @@ pub(crate) struct NamedJob { pub job: Job, } +// impl NamedJob { +// pub fn map(self, f: impl FnOnce(Job) -> Job) -> Self { +// NamedJob { +// name: self.name, +// job: f(self.job), +// } +// } +// } + +pub(crate) fn release_job(deps: &[&NamedJob]) -> Job { + let job = Job::default() + .cond(Expression::new( + "github.repository_owner == 'zed-industries'", + )) + .timeout_minutes(60u32); + if deps.len() > 0 { + job.needs(deps.iter().map(|j| j.name.clone()).collect::>()) + } else { + job + } +} + +impl FluentBuilder for Job {} +impl FluentBuilder for Workflow {} + +/// A helper trait for building complex objects with imperative conditionals in a fluent style. +/// Copied from GPUI to avoid adding GPUI as dependency +/// todo(ci) just put this in gh-workflow +#[allow(unused)] +pub(crate) trait FluentBuilder { + /// Imperatively modify self with the given closure. 
+ fn map(self, f: impl FnOnce(Self) -> U) -> U + where + Self: Sized, + { + f(self) + } + + /// Conditionally modify self with the given closure. + fn when(self, condition: bool, then: impl FnOnce(Self) -> Self) -> Self + where + Self: Sized, + { + self.map(|this| if condition { then(this) } else { this }) + } + + /// Conditionally modify self with the given closure. + fn when_else( + self, + condition: bool, + then: impl FnOnce(Self) -> Self, + else_fn: impl FnOnce(Self) -> Self, + ) -> Self + where + Self: Sized, + { + self.map(|this| if condition { then(this) } else { else_fn(this) }) + } + + /// Conditionally unwrap and modify self with the given closure, if the given option is Some. + fn when_some(self, option: Option, then: impl FnOnce(Self, T) -> Self) -> Self + where + Self: Sized, + { + self.map(|this| { + if let Some(value) = option { + then(this, value) + } else { + this + } + }) + } + /// Conditionally unwrap and modify self with the given closure, if the given option is None. + fn when_none(self, option: &Option, then: impl FnOnce(Self) -> Self) -> Self + where + Self: Sized, + { + self.map(|this| if option.is_some() { this } else { then(this) }) + } +} + // (janky) helper to generate steps with a name that corresponds // to the name of the calling function. pub(crate) mod named { diff --git a/tooling/xtask/src/tasks/workflows/vars.rs b/tooling/xtask/src/tasks/workflows/vars.rs index 6220e3960b091dc04798283ff7239a56ffef5eb0..257bf31b5e981cbfccfddfa77939b9a0f2c3f603 100644 --- a/tooling/xtask/src/tasks/workflows/vars.rs +++ b/tooling/xtask/src/tasks/workflows/vars.rs @@ -1,4 +1,8 @@ -use gh_workflow::{Env, WorkflowCallInput}; +use std::cell::RefCell; + +use gh_workflow::{Env, Expression}; + +use crate::tasks::workflows::steps::NamedJob; macro_rules! secret { ($secret_name:ident) => { @@ -12,10 +16,6 @@ macro_rules! 
var { }; } -pub fn input(name: &str, input: WorkflowCallInput) -> (String, (&str, WorkflowCallInput)) { - return (format!("${{{{ inputs.{name} }}}}"), (name, input)); -} - secret!(APPLE_NOTARIZATION_ISSUER_ID); secret!(APPLE_NOTARIZATION_KEY); secret!(APPLE_NOTARIZATION_KEY_ID); @@ -61,3 +61,46 @@ pub fn windows_bundle_envs() -> Env { .add("TIMESTAMP_DIGEST", "SHA256") .add("TIMESTAMP_SERVER", "http://timestamp.acs.microsoft.com") } + +// Represents a pattern to check for changed files and corresponding output variable +pub(crate) struct PathCondition { + pub name: &'static str, + pub pattern: &'static str, + pub invert: bool, + pub set_by_step: RefCell>, +} +impl PathCondition { + pub fn new(name: &'static str, pattern: &'static str) -> Self { + Self { + name, + pattern, + invert: false, + set_by_step: Default::default(), + } + } + pub fn inverted(name: &'static str, pattern: &'static str) -> Self { + Self { + name, + pattern, + invert: true, + set_by_step: Default::default(), + } + } + pub fn guard(&self, job: NamedJob) -> NamedJob { + let set_by_step = self + .set_by_step + .borrow() + .clone() + .unwrap_or_else(|| panic!("condition {},is never set", self.name)); + NamedJob { + name: job.name, + job: job + .job + .add_needs(set_by_step.clone()) + .cond(Expression::new(format!( + "needs.{}.outputs.{} == 'true'", + &set_by_step, self.name + ))), + } + } +} From e30d5998e47eaeb8915e6ab59a0381a9c622c52b Mon Sep 17 00:00:00 2001 From: claytonrcarter Date: Wed, 29 Oct 2025 23:55:02 -0400 Subject: [PATCH 09/82] bundle: Restore local install on macOS (#41482) I just pulled and ran a local build via `script/bundle-mac -l -i` but found that the resulting bundle wasn't installed as expected. (me: "ToggleAllDocks!! Wait! Where is it?!") Looking into it, it looks like the `-l` flag was removed in #41392, leaving the `$local_only` var orphaned, which then left the `-i/$local_install` flag unreachable.
I suspect that this was unintentional, so this PR re-adds the `-l/$local_only` flag to `script/bundle-mac`. I ran the build again and confirmed that local install seemed to work as expected. (ie "ToggleAllDocks!! :tada:") While here, I also removed the last reference to `$local_arch`, because all other references to that were removed in #41392. /cc @osiewicz Release Notes: - N/A --------- Co-authored-by: Conrad Irwin --- .github/workflows/run_tests.yml | 2 +- script/bundle-mac | 29 +++++-------------- .../xtask/src/tasks/workflows/run_tests.rs | 2 +- 3 files changed, 10 insertions(+), 23 deletions(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index a9d20e2d88aa5bf15396304fa0fb24a5d0655387..63c882bf7b0cf447bfd641002bcf67667bbea8b6 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -46,7 +46,7 @@ jobs: echo "${output_name}=false" >> "$GITHUB_OUTPUT" } - check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask' -qP + check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP check_pattern "run_docs" '^docs/' -qP check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP diff --git a/script/bundle-mac b/script/bundle-mac index 3f895ec14b72522abddb7548a6139729adcdfe8d..0b4c1285fb21915c24bfeafecd9d8f1e190681fc 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -6,8 +6,6 @@ source script/lib/blob-store.sh build_flag="--release" target_dir="release" open_result=false -local_arch=false -local_only=false local_install=false can_code_sign=false @@ -196,10 +194,6 @@ function sign_app_binaries() { /usr/bin/codesign --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "$IDENTITY" "${app_path}" -v else echo "One or more of the following variables are missing: 
MACOS_CERTIFICATE, MACOS_CERTIFICATE_PASSWORD, APPLE_NOTARIZATION_KEY, APPLE_NOTARIZATION_KEY_ID, APPLE_NOTARIZATION_ISSUER_ID" - if [[ "$local_only" = false ]]; then - echo "To create a self-signed local build use ./scripts/build.sh -ldf" - exit 1 - fi echo "====== WARNING ======" echo "This bundle is being signed without all entitlements, some features (e.g. universal links) will not work" @@ -215,7 +209,7 @@ function sign_app_binaries() { codesign --force --deep --entitlements "${app_path}/Contents/Resources/zed.entitlements" --sign ${MACOS_SIGNING_KEY:- -} "${app_path}" -v fi - if [[ "$target_dir" = "debug" && "$local_only" = false ]]; then + if [[ "$target_dir" = "debug" ]]; then if [ "$open_result" = true ]; then open "$app_path" else @@ -227,20 +221,13 @@ function sign_app_binaries() { bundle_name=$(basename "$app_path") - if [ "$local_only" = true ]; then - if [ "$local_install" = true ]; then - rm -rf "/Applications/$bundle_name" - mv "$app_path" "/Applications/$bundle_name" - echo "Installed application bundle: /Applications/$bundle_name" - if [ "$open_result" = true ]; then - echo "Opening /Applications/$bundle_name" - open "/Applications/$bundle_name" - fi - else - if [ "$open_result" = true ]; then - echo "Opening $app_path" - open "$app_path" - fi + if [ "$local_install" = true ]; then + rm -rf "/Applications/$bundle_name" + mv "$app_path" "/Applications/$bundle_name" + echo "Installed application bundle: /Applications/$bundle_name" + if [ "$open_result" = true ]; then + echo "Opening /Applications/$bundle_name" + open "/Applications/$bundle_name" fi else dmg_target_directory="target/${target_triple}/${target_dir}" diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 8060bafa5aaabc34a3b20cdb29ff606298a82f64..82b40f23d5b4adfa2ae04eb2aa14d0b92ff66285 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -25,7 +25,7 @@ pub(crate) fn 
run_tests() -> Workflow { let should_check_docs = PathCondition::new("run_docs", r"^docs/"); let should_check_scripts = PathCondition::new( "run_action_checks", - r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask", + r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/", ); let should_check_licences = PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)"); From 344f63c6cadef985cea86b9ea16d9440ace54a6e Mon Sep 17 00:00:00 2001 From: "A. Teo Welton" <76081718+teowelton@users.noreply.github.com> Date: Thu, 30 Oct 2025 04:47:44 -0600 Subject: [PATCH 10/82] Language: Fix minor C++ completion label formatting issue (#41544) Closes #39515 **Details:** - Improved logic for formatting completion labels, as some (such as `namespace`) were missing space characters. - Added extra logic as per stale PR #39533 [comment](https://github.com/zed-industries/zed/pull/39533#issuecomment-3368549433) ensuring that cases where extra spaces are not necessary (such as functions) are not affected - I will note, I was not able to figure out how to fix the coloring of `namespace` within completion labels as mentioned in that comment, if someone would provide me with direction I would be happy to look into that too. 
Previous: previous Fixed: fixed Release Notes: - Fixed minor issue where some `clangd` labels would be missing a space in formatting --- crates/languages/src/c.rs | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index 3463f4505044c83c9ba8a0e602cf5bfa82e93e3f..8e90cf821368c0c88781b2d10e82ad9eaa05989c 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -166,13 +166,24 @@ impl super::LspAdapter for CLspAdapter { None => "", }; - let label = completion + let mut label = completion .label .strip_prefix('•') .unwrap_or(&completion.label) .trim() - .to_owned() - + label_detail; + .to_owned(); + + if !label_detail.is_empty() { + let should_add_space = match completion.kind { + Some(lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD) => false, + _ => true, + }; + + if should_add_space && !label.ends_with(' ') && !label_detail.starts_with(' ') { + label.push(' '); + } + label.push_str(label_detail); + } match completion.kind { Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => { From 785b5ade6e3f43ace9d24ce12776de68b7c6ce49 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Thu, 30 Oct 2025 13:24:32 +0100 Subject: [PATCH 11/82] extension_host: Do not try auto installing suppressed extensions (#41551) Release Notes: - Fixed an issue where Zed would try to install extensions specified under `auto_install_extensions` which were moved into core. 
--- crates/extension_host/src/extension_host.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 3397f770c241ab0b29deab661e014811fed7f852..04b03352d83fd3323770a00a13c4377dc111535a 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -360,7 +360,7 @@ impl ExtensionStore { } extension_id = reload_rx.next() => { let Some(extension_id) = extension_id else { break; }; - this.update( cx, |this, _| { + this.update(cx, |this, _| { this.modified_extensions.extend(extension_id); })?; index_changed = true; @@ -608,7 +608,7 @@ impl ExtensionStore { .extension_index .extensions .contains_key(extension_id.as_ref()); - !is_already_installed + !is_already_installed && !SUPPRESSED_EXTENSIONS.contains(&extension_id.as_ref()) }) .cloned() .collect::>(); From 426040f08f7f010d4109b69493693aa36a830d49 Mon Sep 17 00:00:00 2001 From: Caleb Jasik Date: Thu, 30 Oct 2025 07:28:06 -0500 Subject: [PATCH 12/82] Add cmd-d shortcut for (terminal) pane::SplitRight (#41139) Add default keybinding for `pane::SplitRight` in the `Terminal` context for all platforms. 
Closes #ISSUE Release Notes: - Added VS Code's terminal split keybindings (`cmd` on MacOS, `ctrl-shift-5` on Windows and Linux) --------- Co-authored-by: dino --- assets/keymaps/default-linux.json | 3 ++- assets/keymaps/default-macos.json | 1 + assets/keymaps/default-windows.json | 3 ++- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index d745474e09e1730127522e8c3170356864fd83b2..19f9e3ca9199b883186e4100b174cad04f474bba 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -1134,7 +1134,8 @@ "ctrl-shift-space": "terminal::ToggleViMode", "ctrl-shift-r": "terminal::RerunTask", "ctrl-alt-r": "terminal::RerunTask", - "alt-t": "terminal::RerunTask" + "alt-t": "terminal::RerunTask", + "ctrl-shift-5": "pane::SplitRight" } }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 50fa44be02703e0a0935e14de501070c53c4df87..fc20b274c231a80397929ef89abd8ceea3fa6be3 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -1217,6 +1217,7 @@ "ctrl-alt-down": "pane::SplitDown", "ctrl-alt-left": "pane::SplitLeft", "ctrl-alt-right": "pane::SplitRight", + "cmd-d": "pane::SplitRight", "cmd-alt-r": "terminal::RerunTask" } }, diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index ef454ff12d2a437bda4b3fba0f214651a0c74396..944870d03761ac1c11d807a517245333785414a2 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -1160,7 +1160,8 @@ "ctrl-shift-space": "terminal::ToggleViMode", "ctrl-shift-r": "terminal::RerunTask", "ctrl-alt-r": "terminal::RerunTask", - "alt-t": "terminal::RerunTask" + "alt-t": "terminal::RerunTask", + "ctrl-shift-5": "pane::SplitRight" } }, { From 046b43f1358ce9df8487153e12f55e9201ae53aa Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Thu, 30 Oct 2025 10:10:19 -0300 Subject: [PATCH 13/82] collab 
panel: Open selected channel notes (#41560) Adds an action to open the notes for the currently selected channel in the collab panel, which is mapped to `alt-enter` in all platforms. Release Notes: - collab: Add `collab_panel::OpenSelectedChannelNotes` action (`alt-enter` by default) --- assets/keymaps/default-linux.json | 3 ++- assets/keymaps/default-macos.json | 3 ++- assets/keymaps/default-windows.json | 3 ++- crates/collab_ui/src/collab_panel.rs | 16 ++++++++++++++++ crates/workspace/src/workspace.rs | 3 +++ 5 files changed, 25 insertions(+), 3 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 19f9e3ca9199b883186e4100b174cad04f474bba..979e5a6ccc1d4520db65981fb3b8a01094f9c625 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -1020,7 +1020,8 @@ "context": "CollabPanel", "bindings": { "alt-up": "collab_panel::MoveChannelUp", - "alt-down": "collab_panel::MoveChannelDown" + "alt-down": "collab_panel::MoveChannelDown", + "alt-enter": "collab_panel::OpenSelectedChannelNotes" } }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index fc20b274c231a80397929ef89abd8ceea3fa6be3..4f9b85ff03790a8c9a59a657a3e0ca0710d41e25 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -1085,7 +1085,8 @@ "use_key_equivalents": true, "bindings": { "alt-up": "collab_panel::MoveChannelUp", - "alt-down": "collab_panel::MoveChannelDown" + "alt-down": "collab_panel::MoveChannelDown", + "alt-enter": "collab_panel::OpenSelectedChannelNotes" } }, { diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 944870d03761ac1c11d807a517245333785414a2..29146f3080d6ecad75bb9754503bb93c6710ff30 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -1038,7 +1038,8 @@ "use_key_equivalents": true, "bindings": { "alt-up": "collab_panel::MoveChannelUp", - "alt-down": 
"collab_panel::MoveChannelDown" + "alt-down": "collab_panel::MoveChannelDown", + "alt-enter": "collab_panel::OpenSelectedChannelNotes" } }, { diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs index 57d510d94c15e9c7c2d62873870019eda3e0d7d5..29eff951d973027a96bb5ac6f8fd28981d8ebc93 100644 --- a/crates/collab_ui/src/collab_panel.rs +++ b/crates/collab_ui/src/collab_panel.rs @@ -54,6 +54,10 @@ actions!( CollapseSelectedChannel, /// Expands the selected channel in the tree view. ExpandSelectedChannel, + /// Opens the meeting notes for the selected channel in the panel. + /// + /// Use `collab::OpenChannelNotes` to open the channel notes for the current call. + OpenSelectedChannelNotes, /// Starts moving a channel to a new location. StartMoveChannel, /// Moves the selected item to the current location. @@ -1856,6 +1860,17 @@ impl CollabPanel { } } + fn open_selected_channel_notes( + &mut self, + _: &OpenSelectedChannelNotes, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(channel) = self.selected_channel() { + self.open_channel_notes(channel.id, window, cx); + } + } + fn set_channel_visibility( &mut self, channel_id: ChannelId, @@ -2976,6 +2991,7 @@ impl Render for CollabPanel { .on_action(cx.listener(CollabPanel::remove_selected_channel)) .on_action(cx.listener(CollabPanel::show_inline_context_menu)) .on_action(cx.listener(CollabPanel::rename_selected_channel)) + .on_action(cx.listener(CollabPanel::open_selected_channel_notes)) .on_action(cx.listener(CollabPanel::collapse_selected_channel)) .on_action(cx.listener(CollabPanel::expand_selected_channel)) .on_action(cx.listener(CollabPanel::start_move_selected_channel)) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index a548a04aa7be55d44a0d30af5dbb49eeba54ade5..b1de240eb62bcca6967333641bf8234825730300 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -7089,6 +7089,9 @@ actions!( [ /// Opens the 
channel notes for the current call. /// + /// Use `collab_panel::OpenSelectedChannelNotes` to open the channel notes for the selected + /// channel in the collab panel. + /// /// If you want to open a specific channel, use `zed::OpenZedUrl` with a channel notes URL - /// can be copied via "Copy link to section" in the context menu of the channel notes /// buffer. These URLs look like `https://zed.dev/channel/channel-name-CHANNEL_ID/notes`. From 94ba24daddaa30976769b2465254e4f4af2de424 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 30 Oct 2025 14:40:31 +0100 Subject: [PATCH 14/82] terminal: Properly kill child process on terminal exit (#41562) Release Notes: - Fixed terminal processes occasionally leaking Co-authored by: Jakub --- crates/terminal/src/terminal.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 41c51c38f7b1163229bb6570679ac873bd36474b..49dd35528a330b5b0457fe0ca7357e95a2f0157a 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -2241,7 +2241,8 @@ unsafe fn append_text_to_term(term: &mut Term, text_lines: &[&str]) impl Drop for Terminal { fn drop(&mut self) { - if let TerminalType::Pty { pty_tx, .. 
} = &self.terminal_type { + if let TerminalType::Pty { pty_tx, info } = &mut self.terminal_type { + info.kill_current_process(); pty_tx.0.send(Msg::Shutdown).ok(); } } From c7aa805398624e390b434c2cbfcb3527db4a35e6 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 30 Oct 2025 10:53:31 -0300 Subject: [PATCH 15/82] docs: Improve the Inline Assistant content (#41566) Release Notes: - N/A --- docs/src/ai/agent-panel.md | 14 ++--- docs/src/ai/agent-settings.md | 36 +++++++++--- docs/src/ai/inline-assistant.md | 97 +++++++++++++++++++++++++++++++-- 3 files changed, 128 insertions(+), 19 deletions(-) diff --git a/docs/src/ai/agent-panel.md b/docs/src/ai/agent-panel.md index 0b9f4fe8fac40b1881b9087d4a079db055bc2539..2b4a8a4692f6539b67b6e3a097aecccb2002ddf8 100644 --- a/docs/src/ai/agent-panel.md +++ b/docs/src/ai/agent-panel.md @@ -1,10 +1,10 @@ # Agent Panel -The Agent Panel allows you to interact with many LLMs and coding agents that can help with in various types of tasks, such as generating code, codebase understanding, and other general inquiries like writing emails, documentation, and more. +The Agent Panel allows you to interact with many LLMs and coding agents that can help with various types of tasks, such as generating code, codebase understanding, and other general inquiries like writing emails, documentation, and more. To open it, use the `agent: new thread` action in [the Command Palette](../getting-started.md#command-palette) or click the ✨ (sparkles) icon in the status bar. -## Getting Started +## Getting Started {#getting-started} If you're using the Agent Panel for the first time, you need to have at least one LLM provider or external agent configured. You can do that by: @@ -28,7 +28,7 @@ From this point on, you can interact with the many supported features outlined b By default, the Agent Panel uses Zed's first-party agent. 
To change that, go to the plus button in the top-right of the Agent Panel and choose another option. -You choose to create a new [Text Thread](./text-threads.md) or, if you have [external agents](./external-agents.md) connected, you can create new threads with them. +You can choose to create a new [Text Thread](./text-threads.md) or, if you have [external agents](./external-agents.md) connected, you can create new threads with them. ### Editing Messages {#editing-messages} @@ -37,7 +37,7 @@ You can click on the card that contains your message and re-submit it with an ad ### Checkpoints {#checkpoints} -Every time the AI performs an edit, you should see a "Restore Checkpoint" button to the top of your message, allowing you to return your code base to the state it was in prior to that message. +Every time the AI performs an edit, you should see a "Restore Checkpoint" button at the top of your message, allowing you to return your code base to the state it was in prior to that message. The checkpoint button appears even if you interrupt the thread midway through an edit attempt, as this is likely a moment when you've identified that the agent is not heading in the right direction and you want to revert back. @@ -78,7 +78,7 @@ Edit diffs also appear in individual buffers. If your active tab had edits made ## Adding Context {#adding-context} -Although Zed's agent is very efficient at reading through your code base to autonomously pick up relevant context, manually adding whatever would be useful to fulfill your prompt is still very encouraged as a way to not only improve the AI's response quality but also to speed its response time up. +Although Zed's agent is very efficient at reading through your code base to autonomously pick up relevant context, manually adding whatever would be useful to fulfill your prompt is still very encouraged as a way to not only improve the AI's response quality but also to speed up its response time. 
In Zed's Agent Panel, all pieces of context are added as mentions in the panel's message editor. You can type `@` to mention files, directories, symbols, previous threads, and rules files. @@ -89,7 +89,7 @@ Copying images and pasting them in the panel's message editor is also supported. ### Token Usage {#token-usage} -Zed surfaces how many tokens you are consuming for your currently active thread nearby the profile selector in the panel's message editor. Depending on how many pieces of context you add, your token consumption can grow rapidly. +Zed surfaces how many tokens you are consuming for your currently active thread near the profile selector in the panel's message editor. Depending on how many pieces of context you add, your token consumption can grow rapidly. Once you approach the model's context window, a banner appears below the message editor suggesting to start a new thread with the current one summarized and added as context. You can also do this at any time with an ongoing thread via the "Agent Options" menu on the top right. @@ -147,7 +147,7 @@ All [Zed's hosted models](./models.md) support tool calling out-of-the-box. ### MCP Servers {#mcp-servers} -Similarly to the built-in tools, some models may not support all tools included in a given MCP Server. Zed's UI will inform about this via a warning icon that appears close to the model selector. +Similarly to the built-in tools, some models may not support all tools included in a given MCP Server. Zed's UI will inform you about this via a warning icon that appears close to the model selector. 
## Text Threads {#text-threads} diff --git a/docs/src/ai/agent-settings.md b/docs/src/ai/agent-settings.md index e2aba0fe4134d038b9aed3a2dd19a7359618c139..21607649ada8809fd7a92778a53c4a57cba92b6b 100644 --- a/docs/src/ai/agent-settings.md +++ b/docs/src/ai/agent-settings.md @@ -54,15 +54,33 @@ You can assign distinct and specific models for the following AI-powered feature ### Alternative Models for Inline Assists {#alternative-assists} -The Inline Assist feature in particular has the capacity to perform multiple generations in parallel using different models. -That is possible by assigning more than one model to it, taking the configuration shown above one step further. +With the Inline Assistant in particular, you can send the same prompt to multiple models at once. -When configured, the inline assist UI will surface controls to cycle between the outputs generated by each model. +Here's how you can customize your `settings.json` to add this functionality: + +```json [settings] +{ + "agent": { + "default_model": { + "provider": "zed.dev", + "model": "claude-sonnet-4" + }, + "inline_alternatives": [ + { + "provider": "zed.dev", + "model": "gpt-4-mini" + } + ] + } +} +``` + +When multiple models are configured, you'll see in the Inline Assistant UI buttons that allow you to cycle between outputs generated by each model. The models you specify here are always used in _addition_ to your [default model](#default-model). -For example, the following configuration will generate two outputs for every assist. -One with Claude Sonnet 4 (the default model), and one with GPT-5-mini. +For example, the following configuration will generate three outputs for every assist. +One with Claude Sonnet 4 (the default model), another with GPT-5-mini, and another one with Gemini 2.5 Flash. ```json [settings] { @@ -75,6 +93,10 @@ One with Claude Sonnet 4 (the default model), and one with GPT-5-mini. 
{ "provider": "zed.dev", "model": "gpt-4-mini" + }, + { + "provider": "zed.dev", + "model": "gemini-2.5-flash" } ] } @@ -179,7 +201,7 @@ The default value is `false`. ### Message Editor Size -Use the `message_editor_min_lines` setting to control minimum number of lines of height the agent message editor should have. +Use the `message_editor_min_lines` setting to control the minimum number of lines of height the agent message editor should have. It is set to `4` by default, and the max number of lines is always double of the minimum. ```json [settings] @@ -232,7 +254,7 @@ It is set to `true` by default, but if set to false, the card will be fully coll ### Feedback Controls -Control whether to display the thumbs up/down buttons at the bottom of each agent response, allowing to give Zed feedback about the agent's performance. +Control whether to display the thumbs up/down buttons at the bottom of each agent response, allowing you to give Zed feedback about the agent's performance. The default value is `true`. ```json [settings] diff --git a/docs/src/ai/inline-assistant.md b/docs/src/ai/inline-assistant.md index d3caff6f45903c549073b97105a3310236d64478..af232a837ec09e703cca165ea654b8f78613bd7d 100644 --- a/docs/src/ai/inline-assistant.md +++ b/docs/src/ai/inline-assistant.md @@ -2,17 +2,104 @@ ## Usage Overview -Use `ctrl-enter` to open the Inline Assistant nearly anywhere you can enter text: editors, text threads, the rules library, channel notes, and even within the terminal panel. +Use {#kb assistant::InlineAssist} to open the Inline Assistant nearly anywhere you can enter text: editors, text threads, the rules library, channel notes, and even within the terminal panel. The Inline Assistant allows you to send the current selection (or the current line) to a language model and modify the selection with the language model's response. 
-You can also perform multiple generation requests in parallel by pressing `ctrl-enter` with multiple cursors, or by pressing the same binding with a selection that spans multiple excerpts in a multibuffer. +## Getting Started -## Context +If you're using the Inline Assistant for the first time, you need to have at least one LLM provider or external agent configured. +You can do that by: -Give the Inline Assistant context the same way you can in [the Agent Panel](./agent-panel.md), allowing you to provide additional instructions or rules for code transformations with @-mentions. +1. [subscribing to our Pro plan](https://zed.dev/pricing), so you have access to our hosted models +2. [using your own API keys](./llm-providers.md#use-your-own-keys), either from model providers like Anthropic or model gateways like OpenRouter. -A useful pattern here is to create a thread in the Agent Panel, and then mention that thread with `@thread` in the Inline Assistant to include it as context. +If you have already set up an LLM provider to interact with [the Agent Panel](./agent-panel.md#getting-started), then that will also work for the Inline Assistant. + +> Unlike the Agent Panel, though, the only exception at the moment is [external agents](./external-agents.md). +> They currently can't be used for generating changes with the Inline Assistant. + +## Adding Context + +You can add context in the Inline Assistant the same way you can in [the Agent Panel](./agent-panel.md#adding-context): + +- @-mention files, directories, past threads, rules, and symbols +- paste images that are copied on your clipboard + +Additionally, a useful pattern is to create a thread in the Agent Panel, and then mention it with `@thread` in the Inline Assistant to include it as context. +That often serves as a way to more quickly iterate over a specific part of a change that happened in the context of a larger thread. 
+ +## Parallel Generations + +There are two ways in which you can generate multiple changes at once with the Inline Assistant: + +### Multiple Cursors + +If you have a multiple cursor selection and hit {#kb assistant::InlineAssist}, you can shoot the same prompt for all cursor positions and get a change in all of them. + +This is particularly useful when working on excerpts in [a multibuffer context](../multibuffers.md). + +### Multiple Models + +You can use the Inline Assistant to send the same prompt to multiple models at once. + +Here's how you can customize your `settings.json` to add this functionality: + +```json [settings] +{ + "agent": { + "default_model": { + "provider": "zed.dev", + "model": "claude-sonnet-4" + }, + "inline_alternatives": [ + { + "provider": "zed.dev", + "model": "gpt-4-mini" + } + ] + } +} +``` + +When multiple models are configured, you'll see in the Inline Assistant UI buttons that allow you to cycle between outputs generated by each model. + +The models you specify here are always used in _addition_ to your [default model](#default-model). + +For example, the following configuration will generate three outputs for every assist. +One with Claude Sonnet 4 (the default model), another with GPT-5-mini, and another one with Gemini 2.5 Flash. + +```json [settings] +{ + "agent": { + "default_model": { + "provider": "zed.dev", + "model": "claude-sonnet-4" + }, + "inline_alternatives": [ + { + "provider": "zed.dev", + "model": "gpt-4-mini" + }, + { + "provider": "zed.dev", + "model": "gemini-2.5-flash" + } + ] + } +} +``` + +## Inline Assistant vs. Edit Prediction + +Users often ask what's the difference between these two AI-powered features in Zed, particularly because both of them involve getting inline LLM code completions. + +Here's how they are different: + +- The Inline Assistant is more similar to the Agent Panel as in you're still writing a prompt yourself and crafting context. 
It works from within the buffer and is mostly centered around your selections. +- [Edit Predictions](./edit-prediction.md) is an AI-powered completion mechanism that intelligently suggests what you likely want to add next, based on context automatically gathered from your previous edits, recently visited files, and more. + +In summary, the key difference is that in the Inline Assistant, you're still manually prompting, whereas Edit Prediction will _automatically suggest_ edits to you. ## Prefilling Prompts From 0c73252c9d090433b088bf921e5f142d9babc0ac Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 30 Oct 2025 14:55:19 +0100 Subject: [PATCH 16/82] project: Spawn terminal process on background executor (#41216) Attempt 2 for https://github.com/zed-industries/zed/pull/40774 We were spawning the process on the foreground thread before which can block an arbitrary amount of time. Likewise we no longer block deserialization on the terminal loading. Release Notes: - Improved startup time on systems with slow process spawning capabilities --- crates/project/src/terminals.rs | 356 +++++++------- crates/terminal/src/terminal.rs | 541 +++++++++++----------- crates/terminal_view/src/persistence.rs | 102 ++-- crates/terminal_view/src/terminal_view.rs | 34 +- 4 files changed, 525 insertions(+), 508 deletions(-) diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 08de0eb138b6695eb65bc30acbfa9688bb38fe00..5ea9824916520cfb53673f82f17c1d0e5d31ede3 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -139,142 +139,145 @@ impl Project { .await .unwrap_or_default(); - project.update(cx, move |this, cx| { - let format_to_run = || { - if let Some(command) = &spawn_task.command { - let mut command: Option> = shell_kind.try_quote(command); - if let Some(command) = &mut command - && command.starts_with('"') - && let Some(prefix) = shell_kind.command_prefix() - { - *command = Cow::Owned(format!("{prefix}{command}")); - 
} + let builder = project + .update(cx, move |_, cx| { + let format_to_run = || { + if let Some(command) = &spawn_task.command { + let mut command: Option> = shell_kind.try_quote(command); + if let Some(command) = &mut command + && command.starts_with('"') + && let Some(prefix) = shell_kind.command_prefix() + { + *command = Cow::Owned(format!("{prefix}{command}")); + } - let args = spawn_task - .args - .iter() - .filter_map(|arg| shell_kind.try_quote(&arg)); + let args = spawn_task + .args + .iter() + .filter_map(|arg| shell_kind.try_quote(&arg)); - command.into_iter().chain(args).join(" ") - } else { - // todo: this breaks for remotes to windows - format!("exec {shell} -l") - } - }; - - let (shell, env) = { - env.extend(spawn_task.env); - match remote_client { - Some(remote_client) => match activation_script.clone() { - activation_script if !activation_script.is_empty() => { - let separator = shell_kind.sequential_commands_separator(); - let activation_script = - activation_script.join(&format!("{separator} ")); - let to_run = format_to_run(); - let shell = remote_client - .read(cx) - .shell() - .unwrap_or_else(get_default_system_shell); - let arg = format!("{activation_script}{separator} {to_run}"); - let args = shell_kind.args_for_shell(false, arg); - - create_remote_shell( - Some((&shell, &args)), + command.into_iter().chain(args).join(" ") + } else { + // todo: this breaks for remotes to windows + format!("exec {shell} -l") + } + }; + + let (shell, env) = { + env.extend(spawn_task.env); + match remote_client { + Some(remote_client) => match activation_script.clone() { + activation_script if !activation_script.is_empty() => { + let separator = shell_kind.sequential_commands_separator(); + let activation_script = + activation_script.join(&format!("{separator} ")); + let to_run = format_to_run(); + let shell = remote_client + .read(cx) + .shell() + .unwrap_or_else(get_default_system_shell); + let arg = format!("{activation_script}{separator} {to_run}"); + let 
args = shell_kind.args_for_shell(false, arg); + + create_remote_shell( + Some((&shell, &args)), + env, + path, + remote_client, + cx, + )? + } + _ => create_remote_shell( + spawn_task + .command + .as_ref() + .map(|command| (command, &spawn_task.args)), env, path, remote_client, cx, - )? - } - _ => create_remote_shell( - spawn_task - .command - .as_ref() - .map(|command| (command, &spawn_task.args)), - env, - path, - remote_client, - cx, - )?, - }, - None => match activation_script.clone() { - activation_script if !activation_script.is_empty() => { - let separator = shell_kind.sequential_commands_separator(); - let activation_script = - activation_script.join(&format!("{separator} ")); - let to_run = format_to_run(); - - let mut arg = format!("{activation_script}{separator} {to_run}"); - if shell_kind == ShellKind::Cmd { - // We need to put the entire command in quotes since otherwise CMD tries to execute them - // as separate commands rather than chaining one after another. - arg = format!("\"{arg}\""); - } + )?, + }, + None => match activation_script.clone() { + activation_script if !activation_script.is_empty() => { + let separator = shell_kind.sequential_commands_separator(); + let activation_script = + activation_script.join(&format!("{separator} ")); + let to_run = format_to_run(); + + let mut arg = + format!("{activation_script}{separator} {to_run}"); + if shell_kind == ShellKind::Cmd { + // We need to put the entire command in quotes since otherwise CMD tries to execute them + // as separate commands rather than chaining one after another. 
+ arg = format!("\"{arg}\""); + } - let args = shell_kind.args_for_shell(false, arg); + let args = shell_kind.args_for_shell(false, arg); - ( - Shell::WithArguments { - program: shell, - args, - title_override: None, + ( + Shell::WithArguments { + program: shell, + args, + title_override: None, + }, + env, + ) + } + _ => ( + if let Some(program) = spawn_task.command { + Shell::WithArguments { + program, + args: spawn_task.args, + title_override: None, + } + } else { + Shell::System }, env, - ) - } - _ => ( - if let Some(program) = spawn_task.command { - Shell::WithArguments { - program, - args: spawn_task.args, - title_override: None, - } - } else { - Shell::System - }, - env, - ), - }, - } - }; - TerminalBuilder::new( - local_path.map(|path| path.to_path_buf()), - task_state, - shell, - env, - settings.cursor_shape, - settings.alternate_scroll, - settings.max_scroll_history_lines, - is_via_remote, - cx.entity_id().as_u64(), - Some(completion_tx), - cx, - activation_script, - ) - .map(|builder| { - let terminal_handle = cx.new(|cx| builder.subscribe(cx)); - - this.terminals - .local_handles - .push(terminal_handle.downgrade()); - - let id = terminal_handle.entity_id(); - cx.observe_release(&terminal_handle, move |project, _terminal, cx| { - let handles = &mut project.terminals.local_handles; - - if let Some(index) = handles - .iter() - .position(|terminal| terminal.entity_id() == id) - { - handles.remove(index); - cx.notify(); + ), + }, } - }) - .detach(); + }; + anyhow::Ok(TerminalBuilder::new( + local_path.map(|path| path.to_path_buf()), + task_state, + shell, + env, + settings.cursor_shape, + settings.alternate_scroll, + settings.max_scroll_history_lines, + is_via_remote, + cx.entity_id().as_u64(), + Some(completion_tx), + cx, + activation_script, + )) + })?? 
+ .await?; + project.update(cx, move |this, cx| { + let terminal_handle = cx.new(|cx| builder.subscribe(cx)); + + this.terminals + .local_handles + .push(terminal_handle.downgrade()); + + let id = terminal_handle.entity_id(); + cx.observe_release(&terminal_handle, move |project, _terminal, cx| { + let handles = &mut project.terminals.local_handles; - terminal_handle + if let Some(index) = handles + .iter() + .position(|terminal| terminal.entity_id() == id) + { + handles.remove(index); + cx.notify(); + } }) - })? + .detach(); + + terminal_handle + }) }) } @@ -355,53 +358,55 @@ impl Project { }) .await .unwrap_or_default(); - project.update(cx, move |this, cx| { - let (shell, env) = { - match remote_client { - Some(remote_client) => { - create_remote_shell(None, env, path, remote_client, cx)? - } - None => (settings.shell, env), - } - }; - TerminalBuilder::new( - local_path.map(|path| path.to_path_buf()), - None, - shell, - env, - settings.cursor_shape, - settings.alternate_scroll, - settings.max_scroll_history_lines, - is_via_remote, - cx.entity_id().as_u64(), - None, - cx, - activation_script, - ) - .map(|builder| { - let terminal_handle = cx.new(|cx| builder.subscribe(cx)); - - this.terminals - .local_handles - .push(terminal_handle.downgrade()); - - let id = terminal_handle.entity_id(); - cx.observe_release(&terminal_handle, move |project, _terminal, cx| { - let handles = &mut project.terminals.local_handles; - - if let Some(index) = handles - .iter() - .position(|terminal| terminal.entity_id() == id) - { - handles.remove(index); - cx.notify(); + let builder = project + .update(cx, move |_, cx| { + let (shell, env) = { + match remote_client { + Some(remote_client) => { + create_remote_shell(None, env, path, remote_client, cx)? 
+ } + None => (settings.shell, env), } - }) - .detach(); + }; + anyhow::Ok(TerminalBuilder::new( + local_path.map(|path| path.to_path_buf()), + None, + shell, + env, + settings.cursor_shape, + settings.alternate_scroll, + settings.max_scroll_history_lines, + is_via_remote, + cx.entity_id().as_u64(), + None, + cx, + activation_script, + )) + })?? + .await?; + project.update(cx, move |this, cx| { + let terminal_handle = cx.new(|cx| builder.subscribe(cx)); + + this.terminals + .local_handles + .push(terminal_handle.downgrade()); + + let id = terminal_handle.entity_id(); + cx.observe_release(&terminal_handle, move |project, _terminal, cx| { + let handles = &mut project.terminals.local_handles; - terminal_handle + if let Some(index) = handles + .iter() + .position(|terminal| terminal.entity_id() == id) + { + handles.remove(index); + cx.notify(); + } }) - })? + .detach(); + + terminal_handle + }) }) } @@ -422,13 +427,14 @@ impl Project { cwd }; - let new_terminal = terminal - .read(cx) - .clone_builder(cx, local_path) - .map(|builder| { - let terminal_handle = cx.new(|cx| builder.subscribe(cx)); + let builder = terminal.read(cx).clone_builder(cx, local_path); + cx.spawn(async |project, cx| { + let terminal = builder.await?; + project.update(cx, |project, cx| { + let terminal_handle = cx.new(|cx| terminal.subscribe(cx)); - self.terminals + project + .terminals .local_handles .push(terminal_handle.downgrade()); @@ -447,8 +453,8 @@ impl Project { .detach(); terminal_handle - }); - Task::ready(new_terminal) + }) + }) } pub fn terminal_settings<'a>( diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 49dd35528a330b5b0457fe0ca7357e95a2f0157a..f91a47630614b4736ebe5bb642d6430574c8c8ed 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -402,6 +402,7 @@ impl TerminalBuilder { window_id, }, child_exited: None, + event_loop_task: Task::ready(Ok(())), }; Ok(TerminalBuilder { @@ -423,236 +424,236 @@ impl 
TerminalBuilder { completion_tx: Option>>, cx: &App, activation_script: Vec, - ) -> Result { - // If the parent environment doesn't have a locale set - // (As is the case when launched from a .app on MacOS), - // and the Project doesn't have a locale set, then - // set a fallback for our child environment to use. - if std::env::var("LANG").is_err() { - env.entry("LANG".to_string()) - .or_insert_with(|| "en_US.UTF-8".to_string()); - } - - env.insert("ZED_TERM".to_string(), "true".to_string()); - env.insert("TERM_PROGRAM".to_string(), "zed".to_string()); - env.insert("TERM".to_string(), "xterm-256color".to_string()); - env.insert("COLORTERM".to_string(), "truecolor".to_string()); - env.insert( - "TERM_PROGRAM_VERSION".to_string(), - release_channel::AppVersion::global(cx).to_string(), - ); - - #[derive(Default)] - struct ShellParams { - program: String, - args: Option>, - title_override: Option, - } - - impl ShellParams { - fn new( + ) -> Task> { + let version = release_channel::AppVersion::global(cx); + cx.background_spawn(async move { + // If the parent environment doesn't have a locale set + // (As is the case when launched from a .app on MacOS), + // and the Project doesn't have a locale set, then + // set a fallback for our child environment to use. 
+ if std::env::var("LANG").is_err() { + env.entry("LANG".to_string()) + .or_insert_with(|| "en_US.UTF-8".to_string()); + } + + env.insert("ZED_TERM".to_string(), "true".to_string()); + env.insert("TERM_PROGRAM".to_string(), "zed".to_string()); + env.insert("TERM".to_string(), "xterm-256color".to_string()); + env.insert("COLORTERM".to_string(), "truecolor".to_string()); + env.insert("TERM_PROGRAM_VERSION".to_string(), version.to_string()); + + #[derive(Default)] + struct ShellParams { program: String, args: Option>, title_override: Option, - ) -> Self { - log::debug!("Using {program} as shell"); - Self { - program, - args, - title_override, - } } - } - let shell_params = match shell.clone() { - Shell::System => { - if cfg!(windows) { - Some(ShellParams::new( - util::shell::get_windows_system_shell(), - None, - None, - )) - } else { - None + impl ShellParams { + fn new( + program: String, + args: Option>, + title_override: Option, + ) -> Self { + log::debug!("Using {program} as shell"); + Self { + program, + args, + title_override, + } } } - Shell::Program(program) => Some(ShellParams::new(program, None, None)), - Shell::WithArguments { - program, - args, - title_override, - } => Some(ShellParams::new(program, Some(args), title_override)), - }; - let terminal_title_override = shell_params.as_ref().and_then(|e| e.title_override.clone()); - #[cfg(windows)] - let shell_program = shell_params.as_ref().map(|params| { - use util::ResultExt; + let shell_params = match shell.clone() { + Shell::System => { + if cfg!(windows) { + Some(ShellParams::new( + util::shell::get_windows_system_shell(), + None, + None, + )) + } else { + None + } + } + Shell::Program(program) => Some(ShellParams::new(program, None, None)), + Shell::WithArguments { + program, + args, + title_override, + } => Some(ShellParams::new(program, Some(args), title_override)), + }; + let terminal_title_override = + shell_params.as_ref().and_then(|e| e.title_override.clone()); - Self::resolve_path(¶ms.program) - 
.log_err() - .unwrap_or(params.program.clone()) - }); + #[cfg(windows)] + let shell_program = shell_params.as_ref().map(|params| { + use util::ResultExt; - // Note: when remoting, this shell_kind will scrutinize `ssh` or - // `wsl.exe` as a shell and fall back to posix or powershell based on - // the compilation target. This is fine right now due to the restricted - // way we use the return value, but would become incorrect if we - // supported remoting into windows. - let shell_kind = shell.shell_kind(cfg!(windows)); - - let pty_options = { - let alac_shell = shell_params.as_ref().map(|params| { - alacritty_terminal::tty::Shell::new( - params.program.clone(), - params.args.clone().unwrap_or_default(), - ) + Self::resolve_path(¶ms.program) + .log_err() + .unwrap_or(params.program.clone()) }); - alacritty_terminal::tty::Options { - shell: alac_shell, - working_directory: working_directory.clone(), - drain_on_exit: true, - env: env.clone().into_iter().collect(), - #[cfg(windows)] - escape_args: shell_kind.tty_escape_args(), - } - }; + // Note: when remoting, this shell_kind will scrutinize `ssh` or + // `wsl.exe` as a shell and fall back to posix or powershell based on + // the compilation target. This is fine right now due to the restricted + // way we use the return value, but would become incorrect if we + // supported remoting into windows. + let shell_kind = shell.shell_kind(cfg!(windows)); + + let pty_options = { + let alac_shell = shell_params.as_ref().map(|params| { + alacritty_terminal::tty::Shell::new( + params.program.clone(), + params.args.clone().unwrap_or_default(), + ) + }); - let default_cursor_style = AlacCursorStyle::from(cursor_shape); - let scrolling_history = if task.is_some() { - // Tasks like `cargo build --all` may produce a lot of output, ergo allow maximum scrolling. - // After the task finishes, we do not allow appending to that terminal, so small tasks output should not - // cause excessive memory usage over time. 
- MAX_SCROLL_HISTORY_LINES - } else { - max_scroll_history_lines - .unwrap_or(DEFAULT_SCROLL_HISTORY_LINES) - .min(MAX_SCROLL_HISTORY_LINES) - }; - let config = Config { - scrolling_history, - default_cursor_style, - ..Config::default() - }; + alacritty_terminal::tty::Options { + shell: alac_shell, + working_directory: working_directory.clone(), + drain_on_exit: true, + env: env.clone().into_iter().collect(), + #[cfg(windows)] + escape_args: shell_kind.tty_escape_args(), + } + }; - //Spawn a task so the Alacritty EventLoop can communicate with us - //TODO: Remove with a bounded sender which can be dispatched on &self - let (events_tx, events_rx) = unbounded(); - //Set up the terminal... - let mut term = Term::new( - config.clone(), - &TerminalBounds::default(), - ZedListener(events_tx.clone()), - ); + let default_cursor_style = AlacCursorStyle::from(cursor_shape); + let scrolling_history = if task.is_some() { + // Tasks like `cargo build --all` may produce a lot of output, ergo allow maximum scrolling. + // After the task finishes, we do not allow appending to that terminal, so small tasks output should not + // cause excessive memory usage over time. + MAX_SCROLL_HISTORY_LINES + } else { + max_scroll_history_lines + .unwrap_or(DEFAULT_SCROLL_HISTORY_LINES) + .min(MAX_SCROLL_HISTORY_LINES) + }; + let config = Config { + scrolling_history, + default_cursor_style, + ..Config::default() + }; - //Alacritty defaults to alternate scrolling being on, so we just need to turn it off. - if let AlternateScroll::Off = alternate_scroll { - term.unset_private_mode(PrivateMode::Named(NamedPrivateMode::AlternateScroll)); - } + //Setup the pty... 
+ let pty = match tty::new(&pty_options, TerminalBounds::default().into(), window_id) { + Ok(pty) => pty, + Err(error) => { + bail!(TerminalError { + directory: working_directory, + program: shell_params.as_ref().map(|params| params.program.clone()), + args: shell_params.as_ref().and_then(|params| params.args.clone()), + title_override: terminal_title_override, + source: error, + }); + } + }; - let term = Arc::new(FairMutex::new(term)); + //Spawn a task so the Alacritty EventLoop can communicate with us + //TODO: Remove with a bounded sender which can be dispatched on &self + let (events_tx, events_rx) = unbounded(); + //Set up the terminal... + let mut term = Term::new( + config.clone(), + &TerminalBounds::default(), + ZedListener(events_tx.clone()), + ); - //Setup the pty... - let pty = match tty::new(&pty_options, TerminalBounds::default().into(), window_id) { - Ok(pty) => pty, - Err(error) => { - bail!(TerminalError { - directory: working_directory, - program: shell_params.as_ref().map(|params| params.program.clone()), - args: shell_params.as_ref().and_then(|params| params.args.clone()), - title_override: terminal_title_override, - source: error, - }); + //Alacritty defaults to alternate scrolling being on, so we just need to turn it off. 
+ if let AlternateScroll::Off = alternate_scroll { + term.unset_private_mode(PrivateMode::Named(NamedPrivateMode::AlternateScroll)); } - }; - let pty_info = PtyProcessInfo::new(&pty); + let term = Arc::new(FairMutex::new(term)); - //And connect them together - let event_loop = EventLoop::new( - term.clone(), - ZedListener(events_tx), - pty, - pty_options.drain_on_exit, - false, - ) - .context("failed to create event loop")?; + let pty_info = PtyProcessInfo::new(&pty); - //Kick things off - let pty_tx = event_loop.channel(); - let _io_thread = event_loop.spawn(); // DANGER + //And connect them together + let event_loop = EventLoop::new( + term.clone(), + ZedListener(events_tx), + pty, + pty_options.drain_on_exit, + false, + ) + .context("failed to create event loop")?; - let no_task = task.is_none(); + let pty_tx = event_loop.channel(); + let _io_thread = event_loop.spawn(); // DANGER - let terminal = Terminal { - task, - terminal_type: TerminalType::Pty { - pty_tx: Notifier(pty_tx), - info: pty_info, - }, - completion_tx, - term, - term_config: config, - title_override: terminal_title_override, - events: VecDeque::with_capacity(10), //Should never get this high. 
- last_content: Default::default(), - last_mouse: None, - matches: Vec::new(), - selection_head: None, - breadcrumb_text: String::new(), - scroll_px: px(0.), - next_link_id: 0, - selection_phase: SelectionPhase::Ended, - hyperlink_regex_searches: RegexSearches::new(), - vi_mode_enabled: false, - is_ssh_terminal, - last_mouse_move_time: Instant::now(), - last_hyperlink_search_position: None, - #[cfg(windows)] - shell_program, - activation_script: activation_script.clone(), - template: CopyTemplate { - shell, - env, - cursor_shape, - alternate_scroll, - max_scroll_history_lines, - window_id, - }, - child_exited: None, - }; + let no_task = task.is_none(); + let terminal = Terminal { + task, + terminal_type: TerminalType::Pty { + pty_tx: Notifier(pty_tx), + info: pty_info, + }, + completion_tx, + term, + term_config: config, + title_override: terminal_title_override, + events: VecDeque::with_capacity(10), //Should never get this high. + last_content: Default::default(), + last_mouse: None, + matches: Vec::new(), + selection_head: None, + breadcrumb_text: String::new(), + scroll_px: px(0.), + next_link_id: 0, + selection_phase: SelectionPhase::Ended, + hyperlink_regex_searches: RegexSearches::new(), + vi_mode_enabled: false, + is_ssh_terminal, + last_mouse_move_time: Instant::now(), + last_hyperlink_search_position: None, + #[cfg(windows)] + shell_program, + activation_script: activation_script.clone(), + template: CopyTemplate { + shell, + env, + cursor_shape, + alternate_scroll, + max_scroll_history_lines, + window_id, + }, + child_exited: None, + event_loop_task: Task::ready(Ok(())), + }; - if !activation_script.is_empty() && no_task { - for activation_script in activation_script { - terminal.write_to_pty(activation_script.into_bytes()); + if !activation_script.is_empty() && no_task { + for activation_script in activation_script { + terminal.write_to_pty(activation_script.into_bytes()); + // Simulate enter key press + // NOTE(PowerShell): using `\r\n` will put 
PowerShell in a continuation mode (infamous >> character) + // and generally mess up the rendering. + terminal.write_to_pty(b"\x0d"); + } + // In order to clear the screen at this point, we have two options: + // 1. We can send a shell-specific command such as "clear" or "cls" + // 2. We can "echo" a marker message that we will then catch when handling a Wakeup event + // and clear the screen using `terminal.clear()` method + // We cannot issue a `terminal.clear()` command at this point as alacritty is evented + // and while we have sent the activation script to the pty, it will be executed asynchronously. + // Therefore, we somehow need to wait for the activation script to finish executing before we + // can proceed with clearing the screen. + terminal.write_to_pty(shell_kind.clear_screen_command().as_bytes()); // Simulate enter key press - // NOTE(PowerShell): using `\r\n` will put PowerShell in a continuation mode (infamous >> character) - // and generally mess up the rendering. terminal.write_to_pty(b"\x0d"); } - // In order to clear the screen at this point, we have two options: - // 1. We can send a shell-specific command such as "clear" or "cls" - // 2. We can "echo" a marker message that we will then catch when handling a Wakeup event - // and clear the screen using `terminal.clear()` method - // We cannot issue a `terminal.clear()` command at this point as alacritty is evented - // and while we have sent the activation script to the pty, it will be executed asynchronously. - // Therefore, we somehow need to wait for the activation script to finish executing before we - // can proceed with clearing the screen. 
- terminal.write_to_pty(shell_kind.clear_screen_command().as_bytes()); - // Simulate enter key press - terminal.write_to_pty(b"\x0d"); - } - Ok(TerminalBuilder { - terminal, - events_rx, + Ok(TerminalBuilder { + terminal, + events_rx, + }) }) } pub fn subscribe(mut self, cx: &Context) -> Terminal { //Event loop - cx.spawn(async move |terminal, cx| { + self.terminal.event_loop_task = cx.spawn(async move |terminal, cx| { while let Some(event) = self.events_rx.next().await { terminal.update(cx, |terminal, cx| { //Process the first event immediately for lowered latency @@ -709,11 +710,8 @@ impl TerminalBuilder { smol::future::yield_now().await; } } - anyhow::Ok(()) - }) - .detach(); - + }); self.terminal } @@ -836,6 +834,7 @@ pub struct Terminal { template: CopyTemplate, activation_script: Vec, child_exited: Option, + event_loop_task: Task>, } struct CopyTemplate { @@ -1266,15 +1265,11 @@ impl Terminal { } pub fn total_lines(&self) -> usize { - let term = self.term.clone(); - let terminal = term.lock_unfair(); - terminal.total_lines() + self.term.lock_unfair().total_lines() } pub fn viewport_lines(&self) -> usize { - let term = self.term.clone(); - let terminal = term.lock_unfair(); - terminal.screen_lines() + self.term.lock_unfair().screen_lines() } //To test: @@ -2151,7 +2146,7 @@ impl Terminal { self.vi_mode_enabled } - pub fn clone_builder(&self, cx: &App, cwd: Option) -> Result { + pub fn clone_builder(&self, cx: &App, cwd: Option) -> Task> { let working_directory = self.working_directory().or_else(|| cwd); TerminalBuilder::new( working_directory, @@ -2388,28 +2383,30 @@ mod tests { let (completion_tx, completion_rx) = smol::channel::unbounded(); let (program, args) = ShellBuilder::new(&Shell::System, false) .build(Some("echo".to_owned()), &["hello".to_owned()]); - let terminal = cx.new(|cx| { - TerminalBuilder::new( - None, - None, - task::Shell::WithArguments { - program, - args, - title_override: None, - }, - HashMap::default(), - CursorShape::default(), - 
AlternateScroll::On, - None, - false, - 0, - Some(completion_tx), - cx, - vec![], - ) - .unwrap() - .subscribe(cx) - }); + let builder = cx + .update(|cx| { + TerminalBuilder::new( + None, + None, + task::Shell::WithArguments { + program, + args, + title_override: None, + }, + HashMap::default(), + CursorShape::default(), + AlternateScroll::On, + None, + false, + 0, + Some(completion_tx), + cx, + vec![], + ) + }) + .await + .unwrap(); + let terminal = cx.new(|cx| builder.subscribe(cx)); assert_eq!( completion_rx.recv().await.unwrap(), Some(ExitStatus::default()) @@ -2438,25 +2435,27 @@ mod tests { cx.executor().allow_parking(); let (completion_tx, completion_rx) = smol::channel::unbounded(); + let builder = cx + .update(|cx| { + TerminalBuilder::new( + None, + None, + task::Shell::System, + HashMap::default(), + CursorShape::default(), + AlternateScroll::On, + None, + false, + 0, + Some(completion_tx), + cx, + Vec::new(), + ) + }) + .await + .unwrap(); // Build an empty command, which will result in a tty shell spawned. 
- let terminal = cx.new(|cx| { - TerminalBuilder::new( - None, - None, - task::Shell::System, - HashMap::default(), - CursorShape::default(), - AlternateScroll::On, - None, - false, - 0, - Some(completion_tx), - cx, - Vec::new(), - ) - .unwrap() - .subscribe(cx) - }); + let terminal = cx.new(|cx| builder.subscribe(cx)); let (event_tx, event_rx) = smol::channel::unbounded::(); cx.update(|cx| { @@ -2507,28 +2506,30 @@ mod tests { let (completion_tx, completion_rx) = smol::channel::unbounded(); let (program, args) = ShellBuilder::new(&Shell::System, false) .build(Some("asdasdasdasd".to_owned()), &["@@@@@".to_owned()]); - let terminal = cx.new(|cx| { - TerminalBuilder::new( - None, - None, - task::Shell::WithArguments { - program, - args, - title_override: None, - }, - HashMap::default(), - CursorShape::default(), - AlternateScroll::On, - None, - false, - 0, - Some(completion_tx), - cx, - Vec::new(), - ) - .unwrap() - .subscribe(cx) - }); + let builder = cx + .update(|cx| { + TerminalBuilder::new( + None, + None, + task::Shell::WithArguments { + program, + args, + title_override: None, + }, + HashMap::default(), + CursorShape::default(), + AlternateScroll::On, + None, + false, + 0, + Some(completion_tx), + cx, + Vec::new(), + ) + }) + .await + .unwrap(); + let terminal = cx.new(|cx| builder.subscribe(cx)); let (event_tx, event_rx) = smol::channel::unbounded::(); cx.update(|cx| { diff --git a/crates/terminal_view/src/persistence.rs b/crates/terminal_view/src/persistence.rs index 14606d4ed58054cca70ca16d420e90083bcbcc14..8d6ef03fd714e6694aca12f3fe6a3a8bb166e84c 100644 --- a/crates/terminal_view/src/persistence.rs +++ b/crates/terminal_view/src/persistence.rs @@ -214,14 +214,6 @@ async fn deserialize_pane_group( } SerializedPaneGroup::Pane(serialized_pane) => { let active = serialized_pane.active; - let new_items = deserialize_terminal_views( - workspace_id, - project.clone(), - workspace.clone(), - serialized_pane.children.as_slice(), - cx, - ) - .await; let pane = panel 
.update_in(cx, |terminal_panel, window, cx| { @@ -236,56 +228,71 @@ async fn deserialize_pane_group( .log_err()?; let active_item = serialized_pane.active_item; let pinned_count = serialized_pane.pinned_count; - let terminal = pane - .update_in(cx, |pane, window, cx| { - populate_pane_items(pane, new_items, active_item, window, cx); - pane.set_pinned_count(pinned_count); + let new_items = deserialize_terminal_views( + workspace_id, + project.clone(), + workspace.clone(), + serialized_pane.children.as_slice(), + cx, + ); + cx.spawn({ + let pane = pane.downgrade(); + async move |cx| { + let new_items = new_items.await; + + let items = pane.update_in(cx, |pane, window, cx| { + populate_pane_items(pane, new_items, active_item, window, cx); + pane.set_pinned_count(pinned_count); + pane.items_len() + }); // Avoid blank panes in splits - if pane.items_len() == 0 { + if items.is_ok_and(|items| items == 0) { let working_directory = workspace .update(cx, |workspace, cx| default_working_directory(workspace, cx)) .ok() .flatten(); - let terminal = project.update(cx, |project, cx| { - project.create_terminal_shell(working_directory, cx) - }); - Some(Some(terminal)) - } else { - Some(None) + let Some(terminal) = project + .update(cx, |project, cx| { + project.create_terminal_shell(working_directory, cx) + }) + .log_err() + else { + return; + }; + + let terminal = terminal.await.log_err(); + pane.update_in(cx, |pane, window, cx| { + if let Some(terminal) = terminal { + let terminal_view = Box::new(cx.new(|cx| { + TerminalView::new( + terminal, + workspace.clone(), + Some(workspace_id), + project.downgrade(), + window, + cx, + ) + })); + pane.add_item(terminal_view, true, false, None, window, cx); + } + }) + .ok(); } - }) - .ok() - .flatten()?; - if let Some(terminal) = terminal { - let terminal = terminal.await.ok()?; - pane.update_in(cx, |pane, window, cx| { - let terminal_view = Box::new(cx.new(|cx| { - TerminalView::new( - terminal, - workspace.clone(), - Some(workspace_id), - 
project.downgrade(), - window, - cx, - ) - })); - pane.add_item(terminal_view, true, false, None, window, cx); - }) - .ok()?; - } + } + }) + .await; Some((Member::Pane(pane.clone()), active.then_some(pane))) } } } -async fn deserialize_terminal_views( +fn deserialize_terminal_views( workspace_id: WorkspaceId, project: Entity, workspace: WeakEntity, item_ids: &[u64], cx: &mut AsyncWindowContext, -) -> Vec> { - let mut items = Vec::with_capacity(item_ids.len()); +) -> impl Future>> + use<> { let mut deserialized_items = item_ids .iter() .map(|item_id| { @@ -302,12 +309,15 @@ async fn deserialize_terminal_views( .unwrap_or_else(|e| Task::ready(Err(e.context("no window present")))) }) .collect::>(); - while let Some(item) = deserialized_items.next().await { - if let Some(item) = item.log_err() { - items.push(item); + async move { + let mut items = Vec::with_capacity(deserialized_items.len()); + while let Some(item) = deserialized_items.next().await { + if let Some(item) = item.log_err() { + items.push(item); + } } + items } - items } #[derive(Debug, Serialize, Deserialize)] diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index ff169e48e53b01f29ca1ab1682927ea116f320fc..63d6c503b29d1eec6500bd4acb5c2f0f6ef36e33 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1223,26 +1223,26 @@ impl Item for TerminalView { window: &mut Window, cx: &mut Context, ) -> Task>> { - let Some(terminal_task) = self - .project - .update(cx, |project, cx| { - let cwd = project - .active_project_directory(cx) - .map(|it| it.to_path_buf()); - project.clone_terminal(self.terminal(), cx, cwd) - }) - .ok() - else { + let Ok(terminal) = self.project.update(cx, |project, cx| { + let cwd = project + .active_project_directory(cx) + .map(|it| it.to_path_buf()); + project.clone_terminal(self.terminal(), cx, cwd) + }) else { return Task::ready(None); }; - - let workspace = self.workspace.clone(); - let 
project = self.project.clone(); - cx.spawn_in(window, async move |_, cx| { - let terminal = terminal_task.await.log_err()?; - cx.update(|window, cx| { + cx.spawn_in(window, async move |this, cx| { + let terminal = terminal.await.log_err()?; + this.update_in(cx, |this, window, cx| { cx.new(|cx| { - TerminalView::new(terminal, workspace, workspace_id, project, window, cx) + TerminalView::new( + terminal, + this.workspace.clone(), + workspace_id, + this.project.clone(), + window, + cx, + ) }) }) .ok() From bdb2d6c8dee0ef3da8c4badfa5f6e7104333ad9f Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 30 Oct 2025 08:59:30 -0600 Subject: [PATCH 17/82] Don't skip tests in nightly release (#41573) Release Notes: - N/A --- .github/workflows/release_nightly.yml | 4 ++-- tooling/xtask/src/tasks/workflows/run_tests.rs | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index e341ab5ad0f56d77b23146f484e9033fe31cb03a..80e6534e70e8f7169514fb8cc569f7b11488cd88 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -33,7 +33,7 @@ jobs: shell: bash -euxo pipefail {0} timeout-minutes: 60 run_tests_mac: - if: 'false' + if: github.repository_owner == 'zed-industries' runs-on: self-mini-macos steps: - name: steps::checkout_repo @@ -68,7 +68,7 @@ jobs: shell: bash -euxo pipefail {0} timeout-minutes: 60 run_tests_windows: - if: 'false' + if: github.repository_owner == 'zed-industries' runs-on: self-32vcpu-windows-2022 steps: - name: steps::checkout_repo diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 82b40f23d5b4adfa2ae04eb2aa14d0b92ff66285..3328d857fb22e174a0e452626e0caf54b58065de 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -302,7 +302,9 @@ pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob { 
NamedJob { name: format!("run_tests_{platform}"), job: release_job(&[]) - .cond(Expression::new("false")) + .cond(Expression::new( + "github.repository_owner == 'zed-industries'", + )) .runs_on(runner) .add_step(steps::checkout_repo()) .add_step(steps::setup_cargo_config(platform)) From b4cf7e440ebb8e7c1fb2216ebe207b7e688ddb39 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 30 Oct 2025 11:47:59 -0400 Subject: [PATCH 18/82] debugger: Get rid of initialize_args in php debugger setup docs (#41579) Related to issue: #40887 Release Notes: - N/A Co-authored-by: Remco Smits --- docs/src/languages/php.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/src/languages/php.md b/docs/src/languages/php.md index b2b8dffcf1b973f769d2900c21385804fbb4394f..1d7de27c5480421e2bc4d1f150a0b6d04a5ee49c 100644 --- a/docs/src/languages/php.md +++ b/docs/src/languages/php.md @@ -71,9 +71,7 @@ Zed’s PHP extension provides a debug adapter for PHP and Xdebug. 
The adapter n "label": "PHP: Listen to Xdebug", "adapter": "Xdebug", "request": "launch", - "initialize_args": { - "port": 9003 - } + "port": 9003 }, { "label": "PHP: Debug this test", From 3825ce523e43a36e855c5fb8e45214d247dc67fc Mon Sep 17 00:00:00 2001 From: Bennet Fenner Date: Thu, 30 Oct 2025 17:03:12 +0100 Subject: [PATCH 19/82] agent_ui: Fix `agent: Chat with follow` not working (#41581) Release Notes: - Fixed an issue where `agent: Chat with follow` was not working anymore Co-authored-by: Ben Brandt --- crates/agent_ui/src/acp/message_editor.rs | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index 5fe591caca5b88b97351884593a8b1550d8a1d11..35bde2c84d5e9aa5d14ec92dc0579b9fcd849904 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -1,4 +1,5 @@ use crate::{ + ChatWithFollow, acp::completion_provider::{ContextPickerCompletionProvider, SlashCommandCompletion}, context_picker::{ContextPickerAction, fetch_context_picker::fetch_url_content}, }; @@ -49,7 +50,7 @@ use text::OffsetRangeExt; use theme::ThemeSettings; use ui::{ButtonLike, TintColor, Toggleable, prelude::*}; use util::{ResultExt, debug_panic, rel_path::RelPath}; -use workspace::{Workspace, notifications::NotifyResultExt as _}; +use workspace::{CollaboratorId, Workspace, notifications::NotifyResultExt as _}; use zed_actions::agent::Chat; pub struct MessageEditor { @@ -813,6 +814,21 @@ impl MessageEditor { self.send(cx); } + fn chat_with_follow( + &mut self, + _: &ChatWithFollow, + window: &mut Window, + cx: &mut Context, + ) { + self.workspace + .update(cx, |this, cx| { + this.follow(CollaboratorId::Agent, window, cx) + }) + .log_err(); + + self.send(cx); + } + fn cancel(&mut self, _: &editor::actions::Cancel, _: &mut Window, cx: &mut Context) { cx.emit(MessageEditorEvent::Cancel) } @@ -1276,6 +1292,7 @@ impl Render for 
MessageEditor { div() .key_context("MessageEditor") .on_action(cx.listener(Self::chat)) + .on_action(cx.listener(Self::chat_with_follow)) .on_action(cx.listener(Self::cancel)) .capture_action(cx.listener(Self::paste)) .flex_1() From 743180342a57892417dbbcfc6e6a0232dbe966e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Kondzior?= Date: Thu, 30 Oct 2025 17:19:32 +0100 Subject: [PATCH 20/82] agent_ui: Insert thread summary as proper mention URI (#40722) This ensures the thread summary is treated as a tracked mention with accessible context. Changes: - Fixed `MessageEditor::insert_thread_summary()` to use proper mention URI format - Added test coverage to verify the fix Release Notes: - Fixed an issue where "New From Summary" was not properly inserting thread summaries as contextual mentions when creating new threads. Thread summaries are now inserted as proper mention URIs. --- crates/agent_ui/src/acp/message_editor.rs | 100 +++++++++++++++++++--- 1 file changed, 87 insertions(+), 13 deletions(-) diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index 35bde2c84d5e9aa5d14ec92dc0579b9fcd849904..90991182dc77e00c07fb7c7330695f72da9a2f44 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -235,8 +235,16 @@ impl MessageEditor { window: &mut Window, cx: &mut Context, ) { + let uri = MentionUri::Thread { + id: thread.id.clone(), + name: thread.title.to_string(), + }; + let content = format!("{}\n", uri.as_link()); + + let content_len = content.len() - 1; + let start = self.editor.update(cx, |editor, cx| { - editor.set_text(format!("{}\n", thread.title), window, cx); + editor.set_text(content, window, cx); editor .buffer() .read(cx) @@ -245,18 +253,8 @@ impl MessageEditor { .text_anchor }); - self.confirm_mention_completion( - thread.title.clone(), - start, - thread.title.len(), - MentionUri::Thread { - id: thread.id.clone(), - name: thread.title.to_string(), - }, - 
window, - cx, - ) - .detach(); + self.confirm_mention_completion(thread.title, start, content_len, uri, window, cx) + .detach(); } #[cfg(test)] @@ -1601,6 +1599,7 @@ mod tests { use gpui::{ AppContext, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext, }; + use language_model::LanguageModelRegistry; use lsp::{CompletionContext, CompletionTriggerKind}; use project::{CompletionIntent, Project, ProjectPath}; use serde_json::json; @@ -2746,6 +2745,81 @@ mod tests { _ => panic!("Expected Text mention for small file"), } } + #[gpui::test] + async fn test_insert_thread_summary(cx: &mut TestAppContext) { + init_test(cx); + cx.update(LanguageModelRegistry::test); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({"file": ""})).await; + let project = Project::test(fs, [Path::new(path!("/project"))], cx).await; + + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); + + // Create a thread metadata to insert as summary + let thread_metadata = agent::DbThreadMetadata { + id: acp::SessionId("thread-123".into()), + title: "Previous Conversation".into(), + updated_at: chrono::Utc::now(), + }; + + let message_editor = cx.update(|window, cx| { + cx.new(|cx| { + let mut editor = MessageEditor::new( + workspace.downgrade(), + project.clone(), + history_store.clone(), + None, + Default::default(), + Default::default(), + "Test Agent".into(), + "Test", + EditorMode::AutoHeight { + min_lines: 1, + max_lines: None, + }, + window, + cx, + ); + editor.insert_thread_summary(thread_metadata.clone(), window, cx); + editor + }) + }); + + // Construct expected values for verification + let expected_uri = MentionUri::Thread { + id: thread_metadata.id.clone(), + name: thread_metadata.title.to_string(), + }; + let expected_link = 
format!("[@{}]({})", thread_metadata.title, expected_uri.to_uri()); + + message_editor.read_with(cx, |editor, cx| { + let text = editor.text(cx); + + assert!( + text.contains(&expected_link), + "Expected editor text to contain thread mention link.\nExpected substring: {}\nActual text: {}", + expected_link, + text + ); + + let mentions = editor.mentions(); + assert_eq!( + mentions.len(), + 1, + "Expected exactly one mention after inserting thread summary" + ); + + assert!( + mentions.contains(&expected_uri), + "Expected mentions to contain the thread URI" + ); + }); + } #[gpui::test] async fn test_whitespace_trimming(cx: &mut TestAppContext) { From ac3b232dda334199c800a382a809badef52d7f19 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 30 Oct 2025 18:40:56 +0100 Subject: [PATCH 21/82] Reduce amount of foreground tasks spawned on multibuffer/editor updates (#41479) When doing a project wide search in zed on windows for `hang`, zed starts to freeze for a couple seconds ultimately starting to error with `Not enough quota is available to process this command.` when dispatching windows messages. The cause for this is that we simply overload the windows message pump due to the sheer amount of foreground tasks we spawn when we populate the project search. This PR is an attempt at reducing this. 
Release Notes: - Reduced hangs and stutters in large project file searches --- crates/editor/src/display_map/wrap_map.rs | 7 +--- crates/editor/src/git/blame.rs | 1 + crates/go_to_line/src/cursor_position.rs | 32 ++++++++-------- crates/gpui/src/app/async_context.rs | 2 +- crates/gpui/src/executor.rs | 1 - crates/language/src/buffer.rs | 25 ++++++------ crates/multi_buffer/src/path_key.rs | 16 ++++---- crates/project/src/buffer_store.rs | 46 +++++++++++------------ crates/project/src/git_store.rs | 1 + crates/project/src/project_tests.rs | 4 +- crates/search/src/project_search.rs | 30 +++++++++------ crates/worktree/src/worktree.rs | 4 +- crates/zed/src/zed.rs | 18 +++++---- 13 files changed, 101 insertions(+), 86 deletions(-) diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index e79e5555a61d0ddb8a93a1708c676554f191c3f6..7371eb678538dbc12abe43bde4073ffd9d2bdb21 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -568,18 +568,15 @@ impl WrapSnapshot { let mut old_start = old_cursor.start().output.lines; old_start += tab_edit.old.start.0 - old_cursor.start().input.lines; - // todo(lw): Should these be seek_forward? - old_cursor.seek(&tab_edit.old.end, Bias::Right); + old_cursor.seek_forward(&tab_edit.old.end, Bias::Right); let mut old_end = old_cursor.start().output.lines; old_end += tab_edit.old.end.0 - old_cursor.start().input.lines; - // todo(lw): Should these be seek_forward? new_cursor.seek(&tab_edit.new.start, Bias::Right); let mut new_start = new_cursor.start().output.lines; new_start += tab_edit.new.start.0 - new_cursor.start().input.lines; - // todo(lw): Should these be seek_forward? 
- new_cursor.seek(&tab_edit.new.end, Bias::Right); + new_cursor.seek_forward(&tab_edit.new.end, Bias::Right); let mut new_end = new_cursor.start().output.lines; new_end += tab_edit.new.end.0 - new_cursor.start().input.lines; diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index 4f210cc9db8913eb7c46c6150d1ecd5d4f9020bb..b36a57a7e47bf148fff4201ec87ac7c868658a04 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -602,6 +602,7 @@ impl GitBlame { } fn regenerate_on_edit(&mut self, cx: &mut Context) { + // todo(lw): hot foreground spawn self.regenerate_on_edit_task = cx.spawn(async move |this, cx| { cx.background_executor() .timer(REGENERATE_ON_EDIT_DEBOUNCE_INTERVAL) diff --git a/crates/go_to_line/src/cursor_position.rs b/crates/go_to_line/src/cursor_position.rs index 2638a49eba5d1c69a41a759efedfe4814ed6dc2c..2a67ff67479021353d7231939726a13b948bf4b7 100644 --- a/crates/go_to_line/src/cursor_position.rs +++ b/crates/go_to_line/src/cursor_position.rs @@ -1,4 +1,4 @@ -use editor::{Editor, MultiBufferSnapshot}; +use editor::{Editor, EditorEvent, MultiBufferSnapshot}; use gpui::{App, Entity, FocusHandle, Focusable, Styled, Subscription, Task, WeakEntity}; use settings::Settings; use std::{fmt::Write, num::NonZeroU32, time::Duration}; @@ -81,7 +81,7 @@ impl CursorPosition { fn update_position( &mut self, - editor: Entity, + editor: &Entity, debounce: Option, window: &mut Window, cx: &mut Context, @@ -269,19 +269,21 @@ impl StatusItemView for CursorPosition { cx: &mut Context, ) { if let Some(editor) = active_pane_item.and_then(|item| item.act_as::(cx)) { - self._observe_active_editor = - Some( - cx.observe_in(&editor, window, |cursor_position, editor, window, cx| { - Self::update_position( - cursor_position, - editor, - Some(UPDATE_DEBOUNCE), - window, - cx, - ) - }), - ); - self.update_position(editor, None, window, cx); + self._observe_active_editor = Some(cx.subscribe_in( + &editor, + window, + |cursor_position, 
editor, event, window, cx| match event { + EditorEvent::SelectionsChanged { .. } => Self::update_position( + cursor_position, + editor, + Some(UPDATE_DEBOUNCE), + window, + cx, + ), + _ => {} + }, + )); + self.update_position(&editor, None, window, cx); } else { self.position = None; self._observe_active_editor = None; diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index cfe7a5a75c258d09194c7d77a117208161713c6f..381541d4b11377b988dd30e03155855c7ba25aed 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -176,7 +176,7 @@ impl AsyncApp { lock.open_window(options, build_root_view) } - /// Schedule a future to be polled in the background. + /// Schedule a future to be polled in the foreground. #[track_caller] pub fn spawn(&self, f: AsyncFn) -> Task where diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index b820e120dd738df8a39d3a40379414984942f158..b6d3a407f5dbbab07e0273e668e9b5710824edda 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -479,7 +479,6 @@ impl ForegroundExecutor { } /// Enqueues the given Task to run on the main thread at some point in the future. 
- #[track_caller] pub fn spawn(&self, future: impl Future + 'static) -> Task where R: 'static, diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index c2da93aa7399267f6300625da58aba9bf6dccc4f..c72350f38561e7aea62b7d3402eaa24bbdb08044 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -1573,21 +1573,24 @@ impl Buffer { self.reparse = None; } Err(parse_task) => { + // todo(lw): hot foreground spawn self.reparse = Some(cx.spawn(async move |this, cx| { - let new_syntax_map = parse_task.await; + let new_syntax_map = cx.background_spawn(parse_task).await; this.update(cx, move |this, cx| { - let grammar_changed = + let grammar_changed = || { this.language.as_ref().is_none_or(|current_language| { !Arc::ptr_eq(&language, current_language) - }); - let language_registry_changed = new_syntax_map - .contains_unknown_injections() - && language_registry.is_some_and(|registry| { - registry.version() != new_syntax_map.language_registry_version() - }); - let parse_again = language_registry_changed - || grammar_changed - || this.version.changed_since(&parsed_version); + }) + }; + let language_registry_changed = || { + new_syntax_map.contains_unknown_injections() + && language_registry.is_some_and(|registry| { + registry.version() != new_syntax_map.language_registry_version() + }) + }; + let parse_again = this.version.changed_since(&parsed_version) + || language_registry_changed() + || grammar_changed(); this.did_finish_parsing(new_syntax_map, cx); this.reparse = None; if parse_again { diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs index b6175b7aaab4f631728bcfaf8094120068032994..568d1ac8671fc3e10fb7656dfdffa7211accd1cd 100644 --- a/crates/multi_buffer/src/path_key.rs +++ b/crates/multi_buffer/src/path_key.rs @@ -1,7 +1,7 @@ use std::{mem, ops::Range, sync::Arc}; use collections::HashSet; -use gpui::{App, AppContext, Context, Entity, Task}; +use gpui::{App, AppContext, Context, Entity}; use 
itertools::Itertools; use language::{Buffer, BufferSnapshot}; use rope::Point; @@ -117,12 +117,14 @@ impl MultiBuffer { buffer: Entity, ranges: Vec>, context_line_count: u32, - cx: &mut Context, - ) -> Task>> { + cx: &Context, + ) -> impl Future>> + use<> { let buffer_snapshot = buffer.read(cx).snapshot(); - cx.spawn(async move |multi_buffer, cx| { + let multi_buffer = cx.weak_entity(); + let mut app = cx.to_async(); + async move { let snapshot = buffer_snapshot.clone(); - let (excerpt_ranges, new, counts) = cx + let (excerpt_ranges, new, counts) = app .background_spawn(async move { let ranges = ranges.into_iter().map(|range| range.to_point(&snapshot)); let excerpt_ranges = @@ -133,7 +135,7 @@ impl MultiBuffer { .await; multi_buffer - .update(cx, move |multi_buffer, cx| { + .update(&mut app, move |multi_buffer, cx| { let (ranges, _) = multi_buffer.set_merged_excerpt_ranges_for_path( path_key, buffer, @@ -147,7 +149,7 @@ impl MultiBuffer { }) .ok() .unwrap_or_default() - }) + } } pub(super) fn expand_excerpts_with_paths( diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index b9249d36e2ca8da6b17f342a8db9f3dcca113515..39e302a2d9b1ae92cce9691c957cb9fcfbf26d7d 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -619,29 +619,24 @@ impl LocalBufferStore { worktree: Entity, cx: &mut Context, ) -> Task>> { - let load_buffer = worktree.update(cx, |worktree, cx| { - let load_file = worktree.load_file(path.as_ref(), cx); - let reservation = cx.reserve_entity(); - let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); - let path = path.clone(); - cx.spawn(async move |_, cx| { - let loaded = load_file.await.with_context(|| { - format!("Could not open path: {}", path.display(PathStyle::local())) - })?; - let text_buffer = cx - .background_spawn(async move { - text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text) - }) - .await; - cx.insert_entity(reservation, |_| { - 
Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite) - }) - }) - }); - + let load_file = worktree.update(cx, |worktree, cx| worktree.load_file(path.as_ref(), cx)); cx.spawn(async move |this, cx| { - let buffer = match load_buffer.await { - Ok(buffer) => Ok(buffer), + let path = path.clone(); + let buffer = match load_file.await.with_context(|| { + format!("Could not open path: {}", path.display(PathStyle::local())) + }) { + Ok(loaded) => { + let reservation = cx.reserve_entity::()?; + let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); + let text_buffer = cx + .background_spawn(async move { + text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text) + }) + .await; + cx.insert_entity(reservation, |_| { + Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite) + })? + } Err(error) if is_not_found_error(&error) => cx.new(|cx| { let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); let text_buffer = text::Buffer::new(ReplicaId::LOCAL, buffer_id, ""); @@ -657,9 +652,9 @@ impl LocalBufferStore { })), Capability::ReadWrite, ) - }), - Err(e) => Err(e), - }?; + })?, + Err(e) => return Err(e), + }; this.update(cx, |this, cx| { this.add_buffer(buffer.clone(), cx)?; let buffer_id = buffer.read(cx).remote_id(); @@ -840,6 +835,7 @@ impl BufferStore { entry .insert( + // todo(lw): hot foreground spawn cx.spawn(async move |this, cx| { let load_result = load_buffer.await; this.update(cx, |this, cx| { diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 736c96f34e171c4fde83c2db032484456144ae5a..03642df3b4f395e190d03feb04203f7595aaf3cf 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -709,6 +709,7 @@ impl GitStore { repo.load_committed_text(buffer_id, repo_path, cx) }); + // todo(lw): hot foreground spawn cx.spawn(async move |this, cx| { Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx) .await diff --git 
a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 1d4dbc6c86be9ba80e62c29ef32ce1161a6d1a25..891ad2420c6f8a79659a1f05afd0821b995b5b1a 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -9171,7 +9171,9 @@ async fn test_odd_events_for_ignored_dirs( repository_updates.lock().drain(..).collect::>(), vec![ RepositoryEvent::MergeHeadsChanged, - RepositoryEvent::BranchChanged + RepositoryEvent::BranchChanged, + RepositoryEvent::StatusesChanged { full_scan: false }, + RepositoryEvent::StatusesChanged { full_scan: false }, ], "Initial worktree scan should produce a repo update event" ); diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index f407a0a4dbfd00b6515a392f18572c373499d2cc..a8be82d5d5a3fcb20b8ea964af19e3f60fea0573 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -322,18 +322,25 @@ impl ProjectSearch { let mut limit_reached = false; while let Some(results) = matches.next().await { - let mut buffers_with_ranges = Vec::with_capacity(results.len()); - for result in results { - match result { - project::search::SearchResult::Buffer { buffer, ranges } => { - buffers_with_ranges.push((buffer, ranges)); - } - project::search::SearchResult::LimitReached => { - limit_reached = true; + let (buffers_with_ranges, has_reached_limit) = cx + .background_executor() + .spawn(async move { + let mut limit_reached = false; + let mut buffers_with_ranges = Vec::with_capacity(results.len()); + for result in results { + match result { + project::search::SearchResult::Buffer { buffer, ranges } => { + buffers_with_ranges.push((buffer, ranges)); + } + project::search::SearchResult::LimitReached => { + limit_reached = true; + } + } } - } - } - + (buffers_with_ranges, limit_reached) + }) + .await; + limit_reached |= has_reached_limit; let mut new_ranges = project_search .update(cx, |project_search, cx| { project_search.excerpts.update(cx, 
|excerpts, cx| { @@ -352,7 +359,6 @@ impl ProjectSearch { }) }) .ok()?; - while let Some(new_ranges) = new_ranges.next().await { project_search .update(cx, |project_search, cx| { diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 5f8253e2dfb48fa6882dabf49c64073023a2a298..a4d3f61141c8b05a7ff2ccf2ef0df5896833f199 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -1318,7 +1318,8 @@ impl LocalWorktree { let entry = self.refresh_entry(path.clone(), None, cx); let is_private = self.is_path_private(path.as_ref()); - cx.spawn(async move |this, _cx| { + let this = cx.weak_entity(); + cx.background_spawn(async move { // WARN: Temporary workaround for #27283. // We are not efficient with our memory usage per file, and use in excess of 64GB for a 10GB file // Therefore, as a temporary workaround to prevent system freezes, we just bail before opening a file @@ -1702,6 +1703,7 @@ impl LocalWorktree { }; let t0 = Instant::now(); let mut refresh = self.refresh_entries_for_paths(paths); + // todo(lw): Hot foreground spawn cx.spawn(async move |this, cx| { refresh.recv().await; log::trace!("refreshed entry {path:?} in {:?}", t0.elapsed()); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index d712f782ca78745a94ce22c9a57900a8b8e42863..2d7d47e968e93eef3d455cec9c324a4d4e0cff42 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -2860,16 +2860,20 @@ mod tests { }); // Split the pane with the first entry, then open the second entry again. 
- let (task1, task2) = window + window .update(cx, |w, window, cx| { - ( - w.split_and_clone(w.active_pane().clone(), SplitDirection::Right, window, cx), - w.open_path(file2.clone(), None, true, window, cx), - ) + w.split_and_clone(w.active_pane().clone(), SplitDirection::Right, window, cx) + }) + .unwrap() + .await + .unwrap(); + window + .update(cx, |w, window, cx| { + w.open_path(file2.clone(), None, true, window, cx) }) + .unwrap() + .await .unwrap(); - task1.await.unwrap(); - task2.await.unwrap(); window .read_with(cx, |w, cx| { From 3944234babc45e9730f082b9e1501b3d094c4948 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 30 Oct 2025 20:09:32 +0100 Subject: [PATCH 22/82] windows: Don't flood windows message queue with gpui messages (#41595) Release Notes: - N/A Co-authored by: Max Brunsfeld --- .../gpui/src/platform/windows/dispatcher.rs | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/crates/gpui/src/platform/windows/dispatcher.rs b/crates/gpui/src/platform/windows/dispatcher.rs index 8d3e6305f6b4bb60f6c282280bafa7f76f59eecb..6759a573e6c04ecf943f6cc17616743bcab4ef28 100644 --- a/crates/gpui/src/platform/windows/dispatcher.rs +++ b/crates/gpui/src/platform/windows/dispatcher.rs @@ -80,15 +80,27 @@ impl PlatformDispatcher for WindowsDispatcher { } fn dispatch_on_main_thread(&self, runnable: Runnable) { + let was_empty = self.main_sender.is_empty(); match self.main_sender.send(runnable) { Ok(_) => unsafe { - PostMessageW( - Some(self.platform_window_handle.as_raw()), - WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, - WPARAM(self.validation_number), - LPARAM(0), - ) - .log_err(); + // Only send a `WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD` to the + // queue if we have no runnables queued up yet, otherwise we + // risk filling the message queue with gpui messages causing us + // to starve the message loop of system messages, resulting in a + // process hang. 
+ // + // When the message loop receives a + // `WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD` message we drain the + // runnable queue entirely. + if was_empty { + PostMessageW( + Some(self.platform_window_handle.as_raw()), + WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, + WPARAM(self.validation_number), + LPARAM(0), + ) + .log_err(); + } }, Err(runnable) => { // NOTE: Runnable may wrap a Future that is !Send. From 44e5a962e66e669d84edc7f51433c000460d2fb9 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 30 Oct 2025 15:43:32 -0400 Subject: [PATCH 23/82] debugger: Add horizontal scroll bars to variable list, memory view, and breakpoint list (#41594) Closes #40360 This PR added heuristics to determine what variable/breakpoint list entry has the longest width when rendered. I added this in so the uniform list would correctly determine which item has the longest width and use that to calculate the scrollbar size. The heuristic can be off if a non-mono space font is used in the UI; in most cases, it's more than accurate enough though. 
Release Notes: - debugger: Add horizontal scroll bars to variable list, memory view, and breakpoint list --------- Co-authored-by: MrSubidubi --- .../src/session/running/breakpoint_list.rs | 36 ++++++++++++++- .../src/session/running/memory_view.rs | 18 ++++++-- .../src/session/running/variable_list.rs | 45 ++++++++++++++++--- crates/gpui/src/elements/uniform_list.rs | 2 + crates/ui/src/components/scrollbar.rs | 2 +- crates/ui/src/styles/typography.rs | 13 ++++++ 6 files changed, 105 insertions(+), 11 deletions(-) diff --git a/crates/debugger_ui/src/session/running/breakpoint_list.rs b/crates/debugger_ui/src/session/running/breakpoint_list.rs index c9f2a58dae28c2e41e49aecc847857ca6191c0eb..36e627a3ebac677e0420bf4f5dd93f3d1cd62a5b 100644 --- a/crates/debugger_ui/src/session/running/breakpoint_list.rs +++ b/crates/debugger_ui/src/session/running/breakpoint_list.rs @@ -12,6 +12,7 @@ use gpui::{ Action, AppContext, ClickEvent, Entity, FocusHandle, Focusable, MouseButton, ScrollStrategy, Task, UniformListScrollHandle, WeakEntity, actions, uniform_list, }; +use itertools::Itertools; use language::Point; use project::{ Project, @@ -24,7 +25,7 @@ use project::{ }; use ui::{ Divider, DividerColor, FluentBuilder as _, Indicator, IntoElement, ListItem, Render, - StatefulInteractiveElement, Tooltip, WithScrollbar, prelude::*, + ScrollAxes, StatefulInteractiveElement, Tooltip, WithScrollbar, prelude::*, }; use util::rel_path::RelPath; use workspace::Workspace; @@ -55,6 +56,7 @@ pub(crate) struct BreakpointList { focus_handle: FocusHandle, scroll_handle: UniformListScrollHandle, selected_ix: Option, + max_width_index: Option, input: Entity, strip_mode: Option, serialize_exception_breakpoints_task: Option>>, @@ -95,6 +97,7 @@ impl BreakpointList { dap_store, worktree_store, breakpoints: Default::default(), + max_width_index: None, workspace, session, focus_handle, @@ -570,6 +573,8 @@ impl BreakpointList { .collect() }), ) + 
.with_horizontal_sizing_behavior(gpui::ListHorizontalSizingBehavior::Unconstrained) + .with_width_from_item(self.max_width_index) .track_scroll(self.scroll_handle.clone()) .flex_1() } @@ -732,6 +737,26 @@ impl Render for BreakpointList { .chain(exception_breakpoints), ); + let text_pixels = ui::TextSize::Default.pixels(cx).to_f64() as f32; + + self.max_width_index = self + .breakpoints + .iter() + .map(|entry| match &entry.kind { + BreakpointEntryKind::LineBreakpoint(line_bp) => { + let name_and_line = format!("{}:{}", line_bp.name, line_bp.line); + let dir_len = line_bp.dir.as_ref().map(|d| d.len()).unwrap_or(0); + (name_and_line.len() + dir_len) as f32 * text_pixels + } + BreakpointEntryKind::ExceptionBreakpoint(exc_bp) => { + exc_bp.data.label.len() as f32 * text_pixels + } + BreakpointEntryKind::DataBreakpoint(data_bp) => { + data_bp.0.context.human_readable_label().len() as f32 * text_pixels + } + }) + .position_max_by(|left, right| left.total_cmp(right)); + v_flex() .id("breakpoint-list") .key_context("BreakpointList") @@ -749,7 +774,14 @@ impl Render for BreakpointList { .size_full() .pt_1() .child(self.render_list(cx)) - .vertical_scrollbar_for(self.scroll_handle.clone(), window, cx) + .custom_scrollbars( + ui::Scrollbars::new(ScrollAxes::Both) + .tracked_scroll_handle(self.scroll_handle.clone()) + .with_track_along(ScrollAxes::Both, cx.theme().colors().panel_background) + .tracked_entity(cx.entity_id()), + window, + cx, + ) .when_some(self.strip_mode, |this, _| { this.child(Divider::horizontal().color(DividerColor::Border)) .child( diff --git a/crates/debugger_ui/src/session/running/memory_view.rs b/crates/debugger_ui/src/session/running/memory_view.rs index bc6e90ed09a9c6ac519cca8345a0ffbb6459f249..8670beb0f5f93f68a6052b868a866e22b82c92fd 100644 --- a/crates/debugger_ui/src/session/running/memory_view.rs +++ b/crates/debugger_ui/src/session/running/memory_view.rs @@ -10,8 +10,9 @@ use std::{ use editor::{Editor, EditorElement, EditorStyle}; use gpui::{ 
Action, Along, AppContext, Axis, DismissEvent, DragMoveEvent, Empty, Entity, FocusHandle, - Focusable, MouseButton, Point, ScrollStrategy, ScrollWheelEvent, Subscription, Task, TextStyle, - UniformList, UniformListScrollHandle, WeakEntity, actions, anchored, deferred, uniform_list, + Focusable, ListHorizontalSizingBehavior, MouseButton, Point, ScrollStrategy, ScrollWheelEvent, + Subscription, Task, TextStyle, UniformList, UniformListScrollHandle, WeakEntity, actions, + anchored, deferred, uniform_list, }; use notifications::status_toast::{StatusToast, ToastIcon}; use project::debugger::{MemoryCell, dap_command::DataBreakpointContext, session::Session}; @@ -229,6 +230,7 @@ impl MemoryView { }, ) .track_scroll(view_state.scroll_handle) + .with_horizontal_sizing_behavior(ListHorizontalSizingBehavior::Unconstrained) .on_scroll_wheel(cx.listener(|this, evt: &ScrollWheelEvent, window, _| { let mut view_state = this.view_state(); let delta = evt.delta.pixel_delta(window.line_height()); @@ -917,7 +919,17 @@ impl Render for MemoryView { ) .with_priority(1) })) - .vertical_scrollbar_for(self.view_state_handle.clone(), window, cx), + .custom_scrollbars( + ui::Scrollbars::new(ui::ScrollAxes::Both) + .tracked_scroll_handle(self.view_state_handle.clone()) + .with_track_along( + ui::ScrollAxes::Both, + cx.theme().colors().panel_background, + ) + .tracked_entity(cx.entity_id()), + window, + cx, + ), ) } } diff --git a/crates/debugger_ui/src/session/running/variable_list.rs b/crates/debugger_ui/src/session/running/variable_list.rs index c69bdfbe7ca8712284dd971d2e86f31f99cd696d..3da1bd33c4a6de3d161a78b5ff5188f655d019c7 100644 --- a/crates/debugger_ui/src/session/running/variable_list.rs +++ b/crates/debugger_ui/src/session/running/variable_list.rs @@ -11,15 +11,18 @@ use gpui::{ FocusHandle, Focusable, Hsla, MouseDownEvent, Point, Subscription, TextStyleRefinement, UniformListScrollHandle, WeakEntity, actions, anchored, deferred, uniform_list, }; +use itertools::Itertools; use 
menu::{SelectFirst, SelectLast, SelectNext, SelectPrevious}; use project::debugger::{ dap_command::DataBreakpointContext, session::{Session, SessionEvent, Watcher}, }; use std::{collections::HashMap, ops::Range, sync::Arc}; -use ui::{ContextMenu, ListItem, ScrollableHandle, Tooltip, WithScrollbar, prelude::*}; +use ui::{ContextMenu, ListItem, ScrollAxes, ScrollableHandle, Tooltip, WithScrollbar, prelude::*}; use util::{debug_panic, maybe}; +static INDENT_STEP_SIZE: Pixels = px(10.0); + actions!( variable_list, [ @@ -185,6 +188,7 @@ struct VariableColor { pub struct VariableList { entries: Vec, + max_width_index: Option, entry_states: HashMap, selected_stack_frame_id: Option, list_handle: UniformListScrollHandle, @@ -243,6 +247,7 @@ impl VariableList { disabled: false, edited_path: None, entries: Default::default(), + max_width_index: None, entry_states: Default::default(), weak_running, memory_view, @@ -368,6 +373,26 @@ impl VariableList { } self.entries = entries; + + let text_pixels = ui::TextSize::Default.pixels(cx).to_f64() as f32; + let indent_size = INDENT_STEP_SIZE.to_f64() as f32; + + self.max_width_index = self + .entries + .iter() + .map(|entry| match &entry.entry { + DapEntry::Scope(scope) => scope.name.len() as f32 * text_pixels, + DapEntry::Variable(variable) => { + (variable.value.len() + variable.name.len()) as f32 * text_pixels + + (entry.path.indices.len() as f32 * indent_size) + } + DapEntry::Watcher(watcher) => { + (watcher.value.len() + watcher.expression.len()) as f32 * text_pixels + + (entry.path.indices.len() as f32 * indent_size) + } + }) + .position_max_by(|left, right| left.total_cmp(right)); + cx.notify(); } @@ -1244,7 +1269,7 @@ impl VariableList { .disabled(self.disabled) .selectable(false) .indent_level(state.depth) - .indent_step_size(px(10.)) + .indent_step_size(INDENT_STEP_SIZE) .always_show_disclosure_icon(true) .when(var_ref > 0, |list_item| { list_item.toggle(state.is_expanded).on_toggle(cx.listener({ @@ -1445,7 +1470,7 @@ impl 
VariableList { .disabled(self.disabled) .selectable(false) .indent_level(state.depth) - .indent_step_size(px(10.)) + .indent_step_size(INDENT_STEP_SIZE) .always_show_disclosure_icon(true) .when(var_ref > 0, |list_item| { list_item.toggle(state.is_expanded).on_toggle(cx.listener({ @@ -1507,7 +1532,6 @@ impl Render for VariableList { .key_context("VariableList") .id("variable-list") .group("variable-list") - .overflow_y_scroll() .size_full() .on_action(cx.listener(Self::select_first)) .on_action(cx.listener(Self::select_last)) @@ -1533,6 +1557,9 @@ impl Render for VariableList { }), ) .track_scroll(self.list_handle.clone()) + .with_width_from_item(self.max_width_index) + .with_sizing_behavior(gpui::ListSizingBehavior::Auto) + .with_horizontal_sizing_behavior(gpui::ListHorizontalSizingBehavior::Unconstrained) .gap_1_5() .size_full() .flex_grow(), @@ -1546,7 +1573,15 @@ impl Render for VariableList { ) .with_priority(1) })) - .vertical_scrollbar_for(self.list_handle.clone(), window, cx) + // .vertical_scrollbar_for(self.list_handle.clone(), window, cx) + .custom_scrollbars( + ui::Scrollbars::new(ScrollAxes::Both) + .tracked_scroll_handle(self.list_handle.clone()) + .with_track_along(ScrollAxes::Both, cx.theme().colors().panel_background) + .tracked_entity(cx.entity_id()), + window, + cx, + ) } } diff --git a/crates/gpui/src/elements/uniform_list.rs b/crates/gpui/src/elements/uniform_list.rs index 93082563c02f4168b1d73e2929a6bf9dbd153237..739fa1c5e25eb62378fbe57eea1b62c833780d9d 100644 --- a/crates/gpui/src/elements/uniform_list.rs +++ b/crates/gpui/src/elements/uniform_list.rs @@ -251,6 +251,8 @@ impl Element for UniformList { None } + // self.max_found_width = 0.0 + // fn request_layout( &mut self, global_id: Option<&GlobalElementId>, diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index d3d33a296bbd65edb24371d8f5f1e6462e77e3fe..b7548218371d0772b422adb04f1e326de040241f 100644 --- a/crates/ui/src/components/scrollbar.rs +++ 
b/crates/ui/src/components/scrollbar.rs @@ -392,7 +392,7 @@ pub struct Scrollbars { impl Scrollbars { pub fn new(show_along: ScrollAxes) -> Self { - Self::new_with_setting(show_along, |_| ShowScrollbar::default()) + Self::new_with_setting(show_along, |_| ShowScrollbar::Always) } pub fn for_settings() -> Scrollbars { diff --git a/crates/ui/src/styles/typography.rs b/crates/ui/src/styles/typography.rs index 0d7d5af9e74f11f7d77c9d03362f6be41dc9b2ec..2bb0b35720be715251bc7c11a139a1fccfaf6035 100644 --- a/crates/ui/src/styles/typography.rs +++ b/crates/ui/src/styles/typography.rs @@ -144,6 +144,19 @@ impl TextSize { Self::Editor => rems_from_px(theme_settings.buffer_font_size(cx)), } } + + pub fn pixels(self, cx: &App) -> Pixels { + let theme_settings = ThemeSettings::get_global(cx); + + match self { + Self::Large => px(16.), + Self::Default => px(14.), + Self::Small => px(12.), + Self::XSmall => px(10.), + Self::Ui => theme_settings.ui_font_size(cx), + Self::Editor => theme_settings.buffer_font_size(cx), + } + } } /// The size of a [`Headline`] element From 5ae0768ce43ca6cf15d0dde7e4fc32c1458ba803 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Thu, 30 Oct 2025 16:15:21 -0400 Subject: [PATCH 24/82] debugger: Polish breakpoint list UI (#41598) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR fixes breakpoint icon alignment to also be at the end of a rendered entry and enables editing breakpoint qualities when there's no active session. The alignment issue was caused by some icons being invisible, so the layout phase always accounted for the space they would take up. Only laying out the icons when they are visible fixed the issue. 
#### Before image #### After [ Screenshot 2025-10-30 at 3 21 17 PM ](url) Release Notes: - Breakpoint list: Allow adding conditions, logs, and hit conditions to breakpoints when there's no active session --- .../src/session/running/breakpoint_list.rs | 89 ++++++++++--------- 1 file changed, 45 insertions(+), 44 deletions(-) diff --git a/crates/debugger_ui/src/session/running/breakpoint_list.rs b/crates/debugger_ui/src/session/running/breakpoint_list.rs index 36e627a3ebac677e0420bf4f5dd93f3d1cd62a5b..0a02a5a8e4197bf6b959a592b6e3d3da92c00846 100644 --- a/crates/debugger_ui/src/session/running/breakpoint_list.rs +++ b/crates/debugger_ui/src/session/running/breakpoint_list.rs @@ -549,7 +549,7 @@ impl BreakpointList { .session .as_ref() .map(|session| SupportedBreakpointProperties::from(session.read(cx).capabilities())) - .unwrap_or_else(SupportedBreakpointProperties::empty); + .unwrap_or_else(SupportedBreakpointProperties::all); let strip_mode = self.strip_mode; uniform_list( @@ -1408,8 +1408,10 @@ impl RenderOnce for BreakpointOptionsStrip { h_flex() .gap_px() .mr_3() // Space to avoid overlapping with the scrollbar - .child( - div() + .justify_end() + .when(has_logs || self.is_selected, |this| { + this.child( + div() .map(self.add_focus_styles( ActiveBreakpointStripMode::Log, supports_logs, @@ -1438,45 +1440,46 @@ impl RenderOnce for BreakpointOptionsStrip { ) }), ) - .when(!has_logs && !self.is_selected, |this| this.invisible()), - ) - .child( - div() - .map(self.add_focus_styles( - ActiveBreakpointStripMode::Condition, - supports_condition, - window, - cx, - )) - .child( - IconButton::new( - SharedString::from(format!("{id}-condition-toggle")), - IconName::SplitAlt, - ) - .shape(ui::IconButtonShape::Square) - .style(style_for_toggle( + ) + }) + .when(has_condition || self.is_selected, |this| { + this.child( + div() + .map(self.add_focus_styles( ActiveBreakpointStripMode::Condition, - has_condition, + supports_condition, + window, + cx, )) - 
.icon_size(IconSize::Small) - .icon_color(color_for_toggle(has_condition)) - .when(has_condition, |this| this.indicator(Indicator::dot().color(Color::Info))) - .disabled(!supports_condition) - .toggle_state(self.is_toggled(ActiveBreakpointStripMode::Condition)) - .on_click(self.on_click_callback(ActiveBreakpointStripMode::Condition)) - .tooltip(|_window, cx| { - Tooltip::with_meta( - "Set Condition", - None, - "Set condition to evaluate when a breakpoint is hit. Program execution will stop only when the condition is met.", - cx, + .child( + IconButton::new( + SharedString::from(format!("{id}-condition-toggle")), + IconName::SplitAlt, ) - }), - ) - .when(!has_condition && !self.is_selected, |this| this.invisible()), - ) - .child( - div() + .shape(ui::IconButtonShape::Square) + .style(style_for_toggle( + ActiveBreakpointStripMode::Condition, + has_condition, + )) + .icon_size(IconSize::Small) + .icon_color(color_for_toggle(has_condition)) + .when(has_condition, |this| this.indicator(Indicator::dot().color(Color::Info))) + .disabled(!supports_condition) + .toggle_state(self.is_toggled(ActiveBreakpointStripMode::Condition)) + .on_click(self.on_click_callback(ActiveBreakpointStripMode::Condition)) + .tooltip(|_window, cx| { + Tooltip::with_meta( + "Set Condition", + None, + "Set condition to evaluate when a breakpoint is hit. 
Program execution will stop only when the condition is met.", + cx, + ) + }), + ) + ) + }) + .when(has_hit_condition || self.is_selected, |this| { + this.child(div() .map(self.add_focus_styles( ActiveBreakpointStripMode::HitCondition, supports_hit_condition, @@ -1507,10 +1510,8 @@ impl RenderOnce for BreakpointOptionsStrip { cx, ) }), - ) - .when(!has_hit_condition && !self.is_selected, |this| { - this.invisible() - }), - ) + )) + + }) } } From 8aa2158418ea30ae391ac6624002bee2125d904b Mon Sep 17 00:00:00 2001 From: Dino Date: Thu, 30 Oct 2025 20:33:03 +0000 Subject: [PATCH 25/82] vim: Improve pasting while in replace mode (#41549) - Update `vim::normal::Vim.normal_replace` to work with more than one character - Add `vim::replace::Vim.paste_replace` to handle pasting the clipboard's contents while in replace mode - Update vim's handling of the `editor::actions::Paste` action so that the `paste_replace` method is called when vim is in replace mode, otherwise it'll just call the regular `editor::Editor.paste` method Closes #41378 Release Notes: - Improved pasting while in Vim's Replace mode, ensuring that the Zed replaces the same number of characters as the length of the contents being pasted --- crates/vim/src/normal.rs | 13 +++++++++++-- crates/vim/src/replace.rs | 39 +++++++++++++++++++++++++++++++++++++-- crates/vim/src/vim.rs | 12 ++++++++++++ 3 files changed, 60 insertions(+), 4 deletions(-) diff --git a/crates/vim/src/normal.rs b/crates/vim/src/normal.rs index 739b40124181044326144c85897cf7e1d7536d5c..8b4aefcaac371383dd3114c2b12abd166ef9aa72 100644 --- a/crates/vim/src/normal.rs +++ b/crates/vim/src/normal.rs @@ -965,8 +965,17 @@ impl Vim { window: &mut Window, cx: &mut Context, ) { + // We need to use `text.chars().count()` instead of `text.len()` here as + // `len()` counts bytes, not characters. 
+ let char_count = text.chars().count(); + let count = Vim::take_count(cx).unwrap_or(char_count); let is_return_char = text == "\n".into() || text == "\r".into(); - let count = Vim::take_count(cx).unwrap_or(1); + let repeat_count = match (is_return_char, char_count) { + (true, _) => 0, + (_, 1) => count, + (_, _) => 1, + }; + Vim::take_forced_motion(cx); self.stop_recording(cx); self.update_editor(cx, |_, editor, cx| { @@ -989,7 +998,7 @@ impl Vim { edits.push(( range.start.to_offset(&display_map, Bias::Left) ..range.end.to_offset(&display_map, Bias::Left), - text.repeat(if is_return_char { 0 } else { count }), + text.repeat(repeat_count), )); } diff --git a/crates/vim/src/replace.rs b/crates/vim/src/replace.rs index c9a9fbdb9ee3428ce80c934a686a73a63ddee714..93c30141daeac21805e8ea1aab610988a09a9635 100644 --- a/crates/vim/src/replace.rs +++ b/crates/vim/src/replace.rs @@ -1,5 +1,5 @@ use crate::{ - Vim, + Operator, Vim, motion::{self, Motion}, object::Object, state::Mode, @@ -8,7 +8,7 @@ use editor::{ Anchor, Bias, Editor, EditorSnapshot, SelectionEffects, ToOffset, ToPoint, display_map::ToDisplayPoint, }; -use gpui::{Context, Window, actions}; +use gpui::{ClipboardEntry, Context, Window, actions}; use language::{Point, SelectionGoal}; use std::ops::Range; use std::sync::Arc; @@ -278,10 +278,27 @@ impl Vim { ); } } + + /// Pastes the clipboard contents, replacing the same number of characters + /// as the clipboard's contents. 
+ pub fn paste_replace(&mut self, window: &mut Window, cx: &mut Context) { + let clipboard_text = + cx.read_from_clipboard() + .and_then(|item| match item.entries().first() { + Some(ClipboardEntry::String(text)) => Some(text.text().to_string()), + _ => None, + }); + + if let Some(text) = clipboard_text { + self.push_operator(Operator::Replace, window, cx); + self.normal_replace(Arc::from(text), window, cx); + } + } } #[cfg(test)] mod test { + use gpui::ClipboardItem; use indoc::indoc; use crate::{ @@ -521,4 +538,22 @@ mod test { assert_eq!(0, highlights.len()); }); } + + #[gpui::test] + async fn test_paste_replace(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state(indoc! {"ˇ123"}, Mode::Replace); + cx.write_to_clipboard(ClipboardItem::new_string("456".to_string())); + cx.dispatch_action(editor::actions::Paste); + cx.assert_state(indoc! {"45ˇ6"}, Mode::Replace); + + // If the clipboard's contents length is greater than the remaining text + length, nothing should be replaced and the cursor should remain in the same + position. + cx.set_state(indoc! {"ˇ123"}, Mode::Replace); + cx.write_to_clipboard(ClipboardItem::new_string("4567".to_string())); + cx.dispatch_action(editor::actions::Paste); + cx.assert_state(indoc!
{"ˇ123"}, Mode::Replace); + } } diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index e0b9bfd6e06c3528bd81b81e98d5cb65abb35aa3..cb553b64e91eadbb5e529d56bb1e1a5a7da2c7be 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -23,6 +23,7 @@ use collections::HashMap; use editor::{ Anchor, Bias, Editor, EditorEvent, EditorSettings, HideMouseCursorOrigin, SelectionEffects, ToPoint, + actions::Paste, movement::{self, FindRange}, }; use gpui::{ @@ -919,6 +920,17 @@ impl Vim { ); }); + Vim::action( + editor, + cx, + |vim, _: &editor::actions::Paste, window, cx| match vim.mode { + Mode::Replace => vim.paste_replace(window, cx), + _ => { + vim.update_editor(cx, |_, editor, cx| editor.paste(&Paste, window, cx)); + } + }, + ); + normal::register(editor, cx); insert::register(editor, cx); helix::register(editor, cx); From 60c546196a37d684a03bc19e830121c089a5f858 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Thu, 30 Oct 2025 18:41:09 -0300 Subject: [PATCH 26/82] zeta2: Expose llm-based context retrieval via zeta_cli (#41584) Release Notes: - N/A --------- Co-authored-by: Max Brunsfeld Co-authored-by: Oleksiy Syvokon --- .../src/cloud_zeta2_prompt.rs | 27 +- crates/zeta2/src/merge_excerpts.rs | 26 +- crates/zeta2/src/related_excerpts.rs | 66 +- crates/zeta2/src/zeta2.rs | 34 +- crates/zeta2_tools/src/zeta2_context_view.rs | 7 +- crates/zeta_cli/src/main.rs | 605 ++++++++++++------ ...val_stats.rs => syntax_retrieval_stats.rs} | 0 7 files changed, 509 insertions(+), 256 deletions(-) rename crates/zeta_cli/src/{retrieval_stats.rs => syntax_retrieval_stats.rs} (100%) diff --git a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs index 1c8b1caf80db28ef936aa9a747b4a163e183134f..a0df39b50eb6753397f5afd37aa30b71b853b9c5 100644 --- a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs +++ b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs @@ -182,8 +182,8 @@ pub fn build_prompt( } for related_file in 
&request.included_files { - writeln!(&mut prompt, "`````filename={}", related_file.path.display()).unwrap(); - write_excerpts( + write_codeblock( + &related_file.path, &related_file.excerpts, if related_file.path == request.excerpt_path { &insertions @@ -194,7 +194,6 @@ pub fn build_prompt( request.prompt_format == PromptFormat::NumLinesUniDiff, &mut prompt, ); - write!(&mut prompt, "`````\n\n").unwrap(); } } @@ -205,6 +204,25 @@ pub fn build_prompt( Ok((prompt, section_labels)) } +pub fn write_codeblock<'a>( + path: &Path, + excerpts: impl IntoIterator, + sorted_insertions: &[(Point, &str)], + file_line_count: Line, + include_line_numbers: bool, + output: &'a mut String, +) { + writeln!(output, "`````path={}", path.display()).unwrap(); + write_excerpts( + excerpts, + sorted_insertions, + file_line_count, + include_line_numbers, + output, + ); + write!(output, "`````\n\n").unwrap(); +} + pub fn write_excerpts<'a>( excerpts: impl IntoIterator, sorted_insertions: &[(Point, &str)], @@ -597,8 +615,7 @@ impl<'a> SyntaxBasedPrompt<'a> { disjoint_snippets.push(current_snippet); } - // TODO: remove filename=? 
- writeln!(output, "`````filename={}", file_path.display()).ok(); + writeln!(output, "`````path={}", file_path.display()).ok(); let mut skipped_last_snippet = false; for (snippet, range) in disjoint_snippets { let section_index = section_ranges.len(); diff --git a/crates/zeta2/src/merge_excerpts.rs b/crates/zeta2/src/merge_excerpts.rs index 4cb7ab6cf4d3b63e641087f0c22cf0f900f56adc..846d8034a8c2e88b8552dc8c9d48af6ccdc5efcf 100644 --- a/crates/zeta2/src/merge_excerpts.rs +++ b/crates/zeta2/src/merge_excerpts.rs @@ -1,4 +1,4 @@ -use cloud_llm_client::predict_edits_v3::{self, Excerpt}; +use cloud_llm_client::predict_edits_v3::Excerpt; use edit_prediction_context::Line; use language::{BufferSnapshot, Point}; use std::ops::Range; @@ -58,26 +58,12 @@ pub fn merge_excerpts( output } -pub fn write_merged_excerpts( - buffer: &BufferSnapshot, - sorted_line_ranges: impl IntoIterator>, - sorted_insertions: &[(predict_edits_v3::Point, &str)], - output: &mut String, -) { - cloud_zeta2_prompt::write_excerpts( - merge_excerpts(buffer, sorted_line_ranges).iter(), - sorted_insertions, - Line(buffer.max_point().row), - true, - output, - ); -} - #[cfg(test)] mod tests { use std::sync::Arc; use super::*; + use cloud_llm_client::predict_edits_v3; use gpui::{TestAppContext, prelude::*}; use indoc::indoc; use language::{Buffer, Language, LanguageConfig, LanguageMatcher, OffsetRangeExt}; @@ -168,7 +154,13 @@ mod tests { .collect(); let mut output = String::new(); - write_merged_excerpts(&buffer.snapshot(), ranges, &insertions, &mut output); + cloud_zeta2_prompt::write_excerpts( + merge_excerpts(&buffer.snapshot(), ranges).iter(), + &insertions, + Line(buffer.max_point().row), + true, + &mut output, + ); assert_eq!(output, expected_output); }); } diff --git a/crates/zeta2/src/related_excerpts.rs b/crates/zeta2/src/related_excerpts.rs index d8fff7e0201716be45451c302c4f83b667727bc2..dd27992274ae2b25ec07e2a47dc8a60b46f5f3f2 100644 --- a/crates/zeta2/src/related_excerpts.rs +++ 
b/crates/zeta2/src/related_excerpts.rs @@ -1,13 +1,13 @@ use std::{ - cmp::Reverse, collections::hash_map::Entry, fmt::Write, ops::Range, path::PathBuf, sync::Arc, - time::Instant, + cmp::Reverse, collections::hash_map::Entry, ops::Range, path::PathBuf, sync::Arc, time::Instant, }; use crate::{ - ZetaContextRetrievalDebugInfo, ZetaDebugInfo, ZetaSearchQueryDebugInfo, - merge_excerpts::write_merged_excerpts, + ZetaContextRetrievalDebugInfo, ZetaContextRetrievalStartedDebugInfo, ZetaDebugInfo, + ZetaSearchQueryDebugInfo, merge_excerpts::merge_excerpts, }; use anyhow::{Result, anyhow}; +use cloud_zeta2_prompt::write_codeblock; use collections::HashMap; use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions, Line}; use futures::{ @@ -22,8 +22,9 @@ use language::{ }; use language_model::{ LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId, - LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, - LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse, MessageContent, Role, + LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest, + LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult, + LanguageModelToolUse, MessageContent, Role, }; use project::{ Project, WorktreeSettings, @@ -63,7 +64,7 @@ const SEARCH_PROMPT: &str = indoc! 
{r#" ## Current cursor context - `````filename={current_file_path} + `````path={current_file_path} {cursor_excerpt} ````` @@ -130,11 +131,13 @@ pub struct LlmContextOptions { pub excerpt: EditPredictionExcerptOptions, } -pub fn find_related_excerpts<'a>( +pub const MODEL_PROVIDER_ID: LanguageModelProviderId = language_model::ANTHROPIC_PROVIDER_ID; + +pub fn find_related_excerpts( buffer: Entity, cursor_position: Anchor, project: &Entity, - events: impl Iterator, + mut edit_history_unified_diff: String, options: &LlmContextOptions, debug_tx: Option>, cx: &App, @@ -144,23 +147,15 @@ pub fn find_related_excerpts<'a>( .read(cx) .available_models(cx) .find(|model| { - model.provider_id() == language_model::ANTHROPIC_PROVIDER_ID + model.provider_id() == MODEL_PROVIDER_ID && model.id() == LanguageModelId("claude-haiku-4-5-latest".into()) }) else { - return Task::ready(Err(anyhow!("could not find claude model"))); + return Task::ready(Err(anyhow!("could not find context model"))); }; - let mut edits_string = String::new(); - - for event in events { - if let Some(event) = event.to_request_event(cx) { - writeln!(&mut edits_string, "{event}").ok(); - } - } - - if edits_string.is_empty() { - edits_string.push_str("(No user edits yet)"); + if edit_history_unified_diff.is_empty() { + edit_history_unified_diff.push_str("(No user edits yet)"); } // TODO [zeta2] include breadcrumbs? 
@@ -178,10 +173,22 @@ pub fn find_related_excerpts<'a>( .unwrap_or_else(|| "untitled".to_string()); let prompt = SEARCH_PROMPT - .replace("{edits}", &edits_string) + .replace("{edits}", &edit_history_unified_diff) .replace("{current_file_path}", &current_file_path) .replace("{cursor_excerpt}", &cursor_excerpt.text(&snapshot).body); + if let Some(debug_tx) = &debug_tx { + debug_tx + .unbounded_send(ZetaDebugInfo::ContextRetrievalStarted( + ZetaContextRetrievalStartedDebugInfo { + project: project.clone(), + timestamp: Instant::now(), + search_prompt: prompt.clone(), + }, + )) + .ok(); + } + let path_style = project.read(cx).path_style(cx); let exclude_matcher = { @@ -428,19 +435,14 @@ pub fn find_related_excerpts<'a>( .line_ranges .sort_unstable_by_key(|range| (range.start, Reverse(range.end))); - writeln!( - &mut merged_result, - "`````filename={}", - matched.full_path.display() - ) - .unwrap(); - write_merged_excerpts( - &matched.snapshot, - matched.line_ranges, + write_codeblock( + &matched.full_path, + merge_excerpts(&matched.snapshot, matched.line_ranges).iter(), &[], + Line(matched.snapshot.max_point().row), + true, &mut merged_result, ); - merged_result.push_str("`````\n\n"); result_buffers_by_path.insert( matched.full_path, diff --git a/crates/zeta2/src/zeta2.rs b/crates/zeta2/src/zeta2.rs index b6311f9d25dfc91c078f6614b344eb91cabd51eb..bff091b6f0cd5a37c19ee015f8a0383c8b138b40 100644 --- a/crates/zeta2/src/zeta2.rs +++ b/crates/zeta2/src/zeta2.rs @@ -28,6 +28,7 @@ use project::Project; use release_channel::AppVersion; use serde::de::DeserializeOwned; use std::collections::{VecDeque, hash_map}; +use std::fmt::Write; use std::ops::Range; use std::path::Path; use std::str::FromStr as _; @@ -38,10 +39,10 @@ use util::ResultExt as _; use util::rel_path::RelPathBuf; use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_notification}; -mod merge_excerpts; +pub mod merge_excerpts; mod prediction; mod provider; -mod related_excerpts; +pub mod 
related_excerpts; use crate::merge_excerpts::merge_excerpts; use crate::prediction::EditPrediction; @@ -135,7 +136,7 @@ impl ContextMode { } pub enum ZetaDebugInfo { - ContextRetrievalStarted(ZetaContextRetrievalDebugInfo), + ContextRetrievalStarted(ZetaContextRetrievalStartedDebugInfo), SearchQueriesGenerated(ZetaSearchQueryDebugInfo), SearchQueriesExecuted(ZetaContextRetrievalDebugInfo), SearchResultsFiltered(ZetaContextRetrievalDebugInfo), @@ -143,6 +144,12 @@ pub enum ZetaDebugInfo { EditPredicted(ZetaEditPredictionDebugInfo), } +pub struct ZetaContextRetrievalStartedDebugInfo { + pub project: Entity, + pub timestamp: Instant, + pub search_prompt: String, +} + pub struct ZetaContextRetrievalDebugInfo { pub project: Entity, pub timestamp: Instant, @@ -1086,17 +1093,6 @@ impl Zeta { zeta_project .refresh_context_task .get_or_insert(cx.spawn(async move |this, cx| { - if let Some(debug_tx) = &debug_tx { - debug_tx - .unbounded_send(ZetaDebugInfo::ContextRetrievalStarted( - ZetaContextRetrievalDebugInfo { - project: project.clone(), - timestamp: Instant::now(), - }, - )) - .ok(); - } - let related_excerpts = this .update(cx, |this, cx| { let Some(zeta_project) = this.projects.get(&project.entity_id()) else { @@ -1107,11 +1103,19 @@ impl Zeta { return Task::ready(anyhow::Ok(HashMap::default())); }; + let mut edit_history_unified_diff = String::new(); + + for event in zeta_project.events.iter() { + if let Some(event) = event.to_request_event(cx) { + writeln!(&mut edit_history_unified_diff, "{event}").ok(); + } + } + find_related_excerpts( buffer.clone(), cursor_position, &project, - zeta_project.events.iter(), + edit_history_unified_diff, options, debug_tx, cx, diff --git a/crates/zeta2_tools/src/zeta2_context_view.rs b/crates/zeta2_tools/src/zeta2_context_view.rs index 0abca0fbf451955c285fe3a9df482c507dc4ff10..9532d77622645f80696d69ed92b0190e48f838c7 100644 --- a/crates/zeta2_tools/src/zeta2_context_view.rs +++ b/crates/zeta2_tools/src/zeta2_context_view.rs @@ -24,7 
+24,10 @@ use ui::{ v_flex, }; use workspace::{Item, ItemHandle as _}; -use zeta2::{Zeta, ZetaContextRetrievalDebugInfo, ZetaDebugInfo, ZetaSearchQueryDebugInfo}; +use zeta2::{ + Zeta, ZetaContextRetrievalDebugInfo, ZetaContextRetrievalStartedDebugInfo, ZetaDebugInfo, + ZetaSearchQueryDebugInfo, +}; pub struct Zeta2ContextView { empty_focus_handle: FocusHandle, @@ -130,7 +133,7 @@ impl Zeta2ContextView { fn handle_context_retrieval_started( &mut self, - info: ZetaContextRetrievalDebugInfo, + info: ZetaContextRetrievalStartedDebugInfo, window: &mut Window, cx: &mut Context, ) { diff --git a/crates/zeta_cli/src/main.rs b/crates/zeta_cli/src/main.rs index eea80898870d68a8ad361de43d4556438ed25444..7a6d4b26dc87cd9db7d40fe2745520ee5f574ea6 100644 --- a/crates/zeta_cli/src/main.rs +++ b/crates/zeta_cli/src/main.rs @@ -1,26 +1,29 @@ mod headless; -mod retrieval_stats; mod source_location; +mod syntax_retrieval_stats; mod util; -use crate::retrieval_stats::retrieval_stats; +use crate::syntax_retrieval_stats::retrieval_stats; +use ::serde::Serialize; use ::util::paths::PathStyle; -use anyhow::{Result, anyhow}; +use anyhow::{Context as _, Result, anyhow}; use clap::{Args, Parser, Subcommand}; -use cloud_llm_client::predict_edits_v3::{self}; +use cloud_llm_client::predict_edits_v3::{self, Excerpt}; +use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock}; use edit_prediction_context::{ - EditPredictionContextOptions, EditPredictionExcerptOptions, EditPredictionScoreOptions, + EditPredictionContextOptions, EditPredictionExcerpt, EditPredictionExcerptOptions, + EditPredictionScoreOptions, Line, }; -use gpui::{Application, AsyncApp, prelude::*}; -use language::Bias; -use language_model::LlmApiToken; -use project::Project; -use release_channel::AppVersion; +use futures::StreamExt as _; +use futures::channel::mpsc; +use gpui::{Application, AsyncApp, Entity, prelude::*}; +use language::{Bias, Buffer, BufferSnapshot, OffsetRangeExt, Point}; +use 
language_model::LanguageModelRegistry; +use project::{Project, Worktree}; use reqwest_client::ReqwestClient; use serde_json::json; use std::{collections::HashSet, path::PathBuf, process::exit, str::FromStr, sync::Arc}; -use zeta::{PerformPredictEditsParams, Zeta}; -use zeta2::ContextMode; +use zeta2::{ContextMode, LlmContextOptions, SearchToolQuery}; use crate::headless::ZetaCliAppState; use crate::source_location::SourceLocation; @@ -30,27 +33,52 @@ use crate::util::{open_buffer, open_buffer_with_language_server}; #[command(name = "zeta")] struct ZetaCliArgs { #[command(subcommand)] - command: Commands, + command: Command, } #[derive(Subcommand, Debug)] -enum Commands { - Context(ContextArgs), - Zeta2Context { +enum Command { + Zeta1 { + #[command(subcommand)] + command: Zeta1Command, + }, + Zeta2 { #[clap(flatten)] - zeta2_args: Zeta2Args, + args: Zeta2Args, + #[command(subcommand)] + command: Zeta2Command, + }, +} + +#[derive(Subcommand, Debug)] +enum Zeta1Command { + Context { #[clap(flatten)] context_args: ContextArgs, }, - Predict { - #[arg(long)] - predict_edits_body: Option, +} + +#[derive(Subcommand, Debug)] +enum Zeta2Command { + Syntax { #[clap(flatten)] - context_args: Option, + syntax_args: Zeta2SyntaxArgs, + #[command(subcommand)] + command: Zeta2SyntaxCommand, + }, + Llm { + #[command(subcommand)] + command: Zeta2LlmCommand, }, - RetrievalStats { +} + +#[derive(Subcommand, Debug)] +enum Zeta2SyntaxCommand { + Context { #[clap(flatten)] - zeta2_args: Zeta2Args, + context_args: ContextArgs, + }, + Stats { #[arg(long)] worktree: PathBuf, #[arg(long)] @@ -62,6 +90,14 @@ enum Commands { }, } +#[derive(Subcommand, Debug)] +enum Zeta2LlmCommand { + Context { + #[clap(flatten)] + context_args: ContextArgs, + }, +} + #[derive(Debug, Args)] #[group(requires = "worktree")] struct ContextArgs { @@ -72,7 +108,7 @@ struct ContextArgs { #[arg(long)] use_language_server: bool, #[arg(long)] - events: Option, + edit_history: Option, } #[derive(Debug, Args)] @@ -93,12 
+129,42 @@ struct Zeta2Args { output_format: OutputFormat, #[arg(long, default_value_t = 42)] file_indexing_parallelism: usize, +} + +#[derive(Debug, Args)] +struct Zeta2SyntaxArgs { #[arg(long, default_value_t = false)] disable_imports_gathering: bool, #[arg(long, default_value_t = u8::MAX)] max_retrieved_definitions: u8, } +fn syntax_args_to_options( + zeta2_args: &Zeta2Args, + syntax_args: &Zeta2SyntaxArgs, + omit_excerpt_overlaps: bool, +) -> zeta2::ZetaOptions { + zeta2::ZetaOptions { + context: ContextMode::Syntax(EditPredictionContextOptions { + max_retrieved_declarations: syntax_args.max_retrieved_definitions, + use_imports: !syntax_args.disable_imports_gathering, + excerpt: EditPredictionExcerptOptions { + max_bytes: zeta2_args.max_excerpt_bytes, + min_bytes: zeta2_args.min_excerpt_bytes, + target_before_cursor_over_total_bytes: zeta2_args + .target_before_cursor_over_total_bytes, + }, + score: EditPredictionScoreOptions { + omit_excerpt_overlaps, + }, + }), + max_diagnostic_bytes: zeta2_args.max_diagnostic_bytes, + max_prompt_bytes: zeta2_args.max_prompt_bytes, + prompt_format: zeta2_args.prompt_format.clone().into(), + file_indexing_parallelism: zeta2_args.file_indexing_parallelism, + } +} + #[derive(clap::ValueEnum, Default, Debug, Clone)] enum PromptFormat { MarkedExcerpt, @@ -153,22 +219,25 @@ impl FromStr for FileOrStdin { } } -enum GetContextOutput { - Zeta1(zeta::GatherContextOutput), - Zeta2(String), +struct LoadedContext { + full_path_str: String, + snapshot: BufferSnapshot, + clipped_cursor: Point, + worktree: Entity, + project: Entity, + buffer: Entity, } -async fn get_context( - zeta2_args: Option, - args: ContextArgs, +async fn load_context( + args: &ContextArgs, app_state: &Arc, cx: &mut AsyncApp, -) -> Result { +) -> Result { let ContextArgs { worktree: worktree_path, cursor, use_language_server, - events, + .. 
} = args; let worktree_path = worktree_path.canonicalize()?; @@ -192,7 +261,7 @@ async fn get_context( .await?; let mut ready_languages = HashSet::default(); - let (_lsp_open_handle, buffer) = if use_language_server { + let (_lsp_open_handle, buffer) = if *use_language_server { let (lsp_open_handle, _, buffer) = open_buffer_with_language_server( project.clone(), worktree.clone(), @@ -232,95 +301,294 @@ async fn get_context( } } - let events = match events { + Ok(LoadedContext { + full_path_str, + snapshot, + clipped_cursor, + worktree, + project, + buffer, + }) +} + +async fn zeta2_syntax_context( + zeta2_args: Zeta2Args, + syntax_args: Zeta2SyntaxArgs, + args: ContextArgs, + app_state: &Arc, + cx: &mut AsyncApp, +) -> Result { + let LoadedContext { + worktree, + project, + buffer, + clipped_cursor, + .. + } = load_context(&args, app_state, cx).await?; + + // wait for worktree scan before starting zeta2 so that wait_for_initial_indexing waits for + // the whole worktree. + worktree + .read_with(cx, |worktree, _cx| { + worktree.as_local().unwrap().scan_complete() + })? + .await; + let output = cx + .update(|cx| { + let zeta = cx.new(|cx| { + zeta2::Zeta::new(app_state.client.clone(), app_state.user_store.clone(), cx) + }); + let indexing_done_task = zeta.update(cx, |zeta, cx| { + zeta.set_options(syntax_args_to_options(&zeta2_args, &syntax_args, true)); + zeta.register_buffer(&buffer, &project, cx); + zeta.wait_for_initial_indexing(&project, cx) + }); + cx.spawn(async move |cx| { + indexing_done_task.await?; + let request = zeta + .update(cx, |zeta, cx| { + let cursor = buffer.read(cx).snapshot().anchor_before(clipped_cursor); + zeta.cloud_request_for_zeta_cli(&project, &buffer, cursor, cx) + })? 
+ .await?; + + let (prompt_string, section_labels) = cloud_zeta2_prompt::build_prompt(&request)?; + + match zeta2_args.output_format { + OutputFormat::Prompt => anyhow::Ok(prompt_string), + OutputFormat::Request => anyhow::Ok(serde_json::to_string_pretty(&request)?), + OutputFormat::Full => anyhow::Ok(serde_json::to_string_pretty(&json!({ + "request": request, + "prompt": prompt_string, + "section_labels": section_labels, + }))?), + } + }) + })? + .await?; + + Ok(output) +} + +async fn zeta2_llm_context( + zeta2_args: Zeta2Args, + context_args: ContextArgs, + app_state: &Arc, + cx: &mut AsyncApp, +) -> Result { + let LoadedContext { + buffer, + clipped_cursor, + snapshot: cursor_snapshot, + project, + .. + } = load_context(&context_args, app_state, cx).await?; + + let cursor_position = cursor_snapshot.anchor_after(clipped_cursor); + + cx.update(|cx| { + LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry + .provider(&zeta2::related_excerpts::MODEL_PROVIDER_ID) + .unwrap() + .authenticate(cx) + }) + })? + .await?; + + let edit_history_unified_diff = match context_args.edit_history { Some(events) => events.read_to_string().await?, None => String::new(), }; - if let Some(zeta2_args) = zeta2_args { - // wait for worktree scan before starting zeta2 so that wait_for_initial_indexing waits for - // the whole worktree. - worktree - .read_with(cx, |worktree, _cx| { - worktree.as_local().unwrap().scan_complete() - })? 
- .await; - let output = cx - .update(|cx| { - let zeta = cx.new(|cx| { - zeta2::Zeta::new(app_state.client.clone(), app_state.user_store.clone(), cx) - }); - let indexing_done_task = zeta.update(cx, |zeta, cx| { - zeta.set_options(zeta2_args.to_options(true)); - zeta.register_buffer(&buffer, &project, cx); - zeta.wait_for_initial_indexing(&project, cx) - }); - cx.spawn(async move |cx| { - indexing_done_task.await?; - let request = zeta - .update(cx, |zeta, cx| { - let cursor = buffer.read(cx).snapshot().anchor_before(clipped_cursor); - zeta.cloud_request_for_zeta_cli(&project, &buffer, cursor, cx) - })? - .await?; - - let (prompt_string, section_labels) = - cloud_zeta2_prompt::build_prompt(&request)?; - - match zeta2_args.output_format { - OutputFormat::Prompt => anyhow::Ok(prompt_string), - OutputFormat::Request => { - anyhow::Ok(serde_json::to_string_pretty(&request)?) - } - OutputFormat::Full => anyhow::Ok(serde_json::to_string_pretty(&json!({ - "request": request, - "prompt": prompt_string, - "section_labels": section_labels, - }))?), - } - }) - })? - .await?; - Ok(GetContextOutput::Zeta2(output)) - } else { - let prompt_for_events = move || (events, 0); - Ok(GetContextOutput::Zeta1( - cx.update(|cx| { - zeta::gather_context( - full_path_str, - &snapshot, - clipped_cursor, - prompt_for_events, - cx, - ) - })? 
- .await?, - )) - } -} + let (debug_tx, mut debug_rx) = mpsc::unbounded(); -impl Zeta2Args { - fn to_options(&self, omit_excerpt_overlaps: bool) -> zeta2::ZetaOptions { - zeta2::ZetaOptions { - context: ContextMode::Syntax(EditPredictionContextOptions { - max_retrieved_declarations: self.max_retrieved_definitions, - use_imports: !self.disable_imports_gathering, - excerpt: EditPredictionExcerptOptions { - max_bytes: self.max_excerpt_bytes, - min_bytes: self.min_excerpt_bytes, - target_before_cursor_over_total_bytes: self - .target_before_cursor_over_total_bytes, - }, - score: EditPredictionScoreOptions { - omit_excerpt_overlaps, + let excerpt_options = EditPredictionExcerptOptions { + max_bytes: zeta2_args.max_excerpt_bytes, + min_bytes: zeta2_args.min_excerpt_bytes, + target_before_cursor_over_total_bytes: zeta2_args.target_before_cursor_over_total_bytes, + }; + + let related_excerpts = cx + .update(|cx| { + zeta2::related_excerpts::find_related_excerpts( + buffer, + cursor_position, + &project, + edit_history_unified_diff, + &LlmContextOptions { + excerpt: excerpt_options.clone(), }, - }), - max_diagnostic_bytes: self.max_diagnostic_bytes, - max_prompt_bytes: self.max_prompt_bytes, - prompt_format: self.prompt_format.clone().into(), - file_indexing_parallelism: self.file_indexing_parallelism, + Some(debug_tx), + cx, + ) + })? 
+ .await?; + + let cursor_excerpt = EditPredictionExcerpt::select_from_buffer( + clipped_cursor, + &cursor_snapshot, + &excerpt_options, + None, + ) + .context("line didn't fit")?; + + #[derive(Serialize)] + struct Output { + excerpts: Vec, + formatted_excerpts: String, + meta: OutputMeta, + } + + #[derive(Default, Serialize)] + struct OutputMeta { + search_prompt: String, + search_queries: Vec, + } + + #[derive(Serialize)] + struct OutputExcerpt { + path: PathBuf, + #[serde(flatten)] + excerpt: Excerpt, + } + + let mut meta = OutputMeta::default(); + + while let Some(debug_info) = debug_rx.next().await { + match debug_info { + zeta2::ZetaDebugInfo::ContextRetrievalStarted(info) => { + meta.search_prompt = info.search_prompt; + } + zeta2::ZetaDebugInfo::SearchQueriesGenerated(info) => { + meta.search_queries = info.queries + } + _ => {} } } + + cx.update(|cx| { + let mut excerpts = Vec::new(); + let mut formatted_excerpts = String::new(); + + let cursor_insertions = [( + predict_edits_v3::Point { + line: Line(clipped_cursor.row), + column: clipped_cursor.column, + }, + CURSOR_MARKER, + )]; + + let mut cursor_excerpt_added = false; + + for (buffer, ranges) in related_excerpts { + let excerpt_snapshot = buffer.read(cx).snapshot(); + + let mut line_ranges = ranges + .into_iter() + .map(|range| { + let point_range = range.to_point(&excerpt_snapshot); + Line(point_range.start.row)..Line(point_range.end.row) + }) + .collect::>(); + + let Some(file) = excerpt_snapshot.file() else { + continue; + }; + let path = file.full_path(cx); + + let is_cursor_file = path == cursor_snapshot.file().unwrap().full_path(cx); + if is_cursor_file { + let insertion_ix = line_ranges + .binary_search_by(|probe| { + probe + .start + .cmp(&cursor_excerpt.line_range.start) + .then(cursor_excerpt.line_range.end.cmp(&probe.end)) + }) + .unwrap_or_else(|ix| ix); + line_ranges.insert(insertion_ix, cursor_excerpt.line_range.clone()); + cursor_excerpt_added = true; + } + + let merged_excerpts = + 
zeta2::merge_excerpts::merge_excerpts(&excerpt_snapshot, line_ranges) + .into_iter() + .map(|excerpt| OutputExcerpt { + path: path.clone(), + excerpt, + }); + + let excerpt_start_ix = excerpts.len(); + excerpts.extend(merged_excerpts); + + write_codeblock( + &path, + excerpts[excerpt_start_ix..].iter().map(|e| &e.excerpt), + if is_cursor_file { + &cursor_insertions + } else { + &[] + }, + Line(excerpt_snapshot.max_point().row), + true, + &mut formatted_excerpts, + ); + } + + if !cursor_excerpt_added { + write_codeblock( + &cursor_snapshot.file().unwrap().full_path(cx), + &[Excerpt { + start_line: cursor_excerpt.line_range.start, + text: cursor_excerpt.text(&cursor_snapshot).body.into(), + }], + &cursor_insertions, + Line(cursor_snapshot.max_point().row), + true, + &mut formatted_excerpts, + ); + } + + let output = Output { + excerpts, + formatted_excerpts, + meta, + }; + + Ok(serde_json::to_string_pretty(&output)?) + }) + .unwrap() +} + +async fn zeta1_context( + args: ContextArgs, + app_state: &Arc, + cx: &mut AsyncApp, +) -> Result { + let LoadedContext { + full_path_str, + snapshot, + clipped_cursor, + .. + } = load_context(&args, app_state, cx).await?; + + let events = match args.edit_history { + Some(events) => events.read_to_string().await?, + None => String::new(), + }; + + let prompt_for_events = move || (events, 0); + cx.update(|cx| { + zeta::gather_context( + full_path_str, + &snapshot, + clipped_cursor, + prompt_for_events, + cx, + ) + })? + .await } fn main() { @@ -334,80 +602,47 @@ fn main() { let app_state = Arc::new(headless::init(cx)); cx.spawn(async move |cx| { let result = match args.command { - Commands::Zeta2Context { - zeta2_args, - context_args, - } => match get_context(Some(zeta2_args), context_args, &app_state, cx).await { - Ok(GetContextOutput::Zeta1 { .. 
}) => unreachable!(), - Ok(GetContextOutput::Zeta2(output)) => Ok(output), - Err(err) => Err(err), - }, - Commands::Context(context_args) => { - match get_context(None, context_args, &app_state, cx).await { - Ok(GetContextOutput::Zeta1(output)) => { - Ok(serde_json::to_string_pretty(&output.body).unwrap()) - } - Ok(GetContextOutput::Zeta2 { .. }) => unreachable!(), - Err(err) => Err(err), - } - } - Commands::Predict { - predict_edits_body, - context_args, - } => { - cx.spawn(async move |cx| { - let app_version = cx.update(|cx| AppVersion::global(cx))?; - app_state.client.sign_in(true, cx).await?; - let llm_token = LlmApiToken::default(); - llm_token.refresh(&app_state.client).await?; - - let predict_edits_body = - if let Some(predict_edits_body) = predict_edits_body { - serde_json::from_str(&predict_edits_body.read_to_string().await?)? - } else if let Some(context_args) = context_args { - match get_context(None, context_args, &app_state, cx).await? { - GetContextOutput::Zeta1(output) => output.body, - GetContextOutput::Zeta2 { .. } => unreachable!(), - } - } else { - return Err(anyhow!( - "Expected either --predict-edits-body-file \ - or the required args of the `context` command." 
- )); - }; - - let (response, _usage) = - Zeta::perform_predict_edits(PerformPredictEditsParams { - client: app_state.client.clone(), - llm_token, - app_version, - body: predict_edits_body, - }) - .await?; - - Ok(response.output_excerpt) - }) - .await - } - Commands::RetrievalStats { - zeta2_args, - worktree, - extension, - limit, - skip, + Command::Zeta1 { + command: Zeta1Command::Context { context_args }, } => { - retrieval_stats( - worktree, - app_state, - extension, - limit, - skip, - (&zeta2_args).to_options(false), - cx, - ) - .await + let context = zeta1_context(context_args, &app_state, cx).await.unwrap(); + serde_json::to_string_pretty(&context.body).map_err(|err| anyhow::anyhow!(err)) } + Command::Zeta2 { args, command } => match command { + Zeta2Command::Syntax { + syntax_args, + command, + } => match command { + Zeta2SyntaxCommand::Context { context_args } => { + zeta2_syntax_context(args, syntax_args, context_args, &app_state, cx) + .await + } + Zeta2SyntaxCommand::Stats { + worktree, + extension, + limit, + skip, + } => { + retrieval_stats( + worktree, + app_state, + extension, + limit, + skip, + syntax_args_to_options(&args, &syntax_args, false), + cx, + ) + .await + } + }, + Zeta2Command::Llm { command } => match command { + Zeta2LlmCommand::Context { context_args } => { + zeta2_llm_context(args, context_args, &app_state, cx).await + } + }, + }, }; + match result { Ok(output) => { println!("{}", output); diff --git a/crates/zeta_cli/src/retrieval_stats.rs b/crates/zeta_cli/src/syntax_retrieval_stats.rs similarity index 100% rename from crates/zeta_cli/src/retrieval_stats.rs rename to crates/zeta_cli/src/syntax_retrieval_stats.rs From 03c6d6285c6d5988c1242c129ac246dc634e2c71 Mon Sep 17 00:00:00 2001 From: Chris <80088549+zeld-a@users.noreply.github.com> Date: Thu, 30 Oct 2025 17:48:37 -0400 Subject: [PATCH 27/82] outline_panel: Fix collapse/expand all entries (#41342) Closes #39937 Release Notes: - Fixed expand/collapse all entries not working in 
singleton buffer mode --- crates/outline_panel/src/outline_panel.rs | 465 ++++++++++++++++++---- 1 file changed, 391 insertions(+), 74 deletions(-) diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 112aa3d21ebda9ef57d3bedda20e3f90735a0173..f9b1afe34e5ebf51576b07164f5ccfa23428ca56 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -1635,56 +1635,79 @@ impl OutlinePanel { let Some(active_editor) = self.active_editor() else { return; }; - let mut buffers_to_unfold = HashSet::default(); - let expanded_entries = - self.fs_entries - .iter() - .fold(HashSet::default(), |mut entries, fs_entry| { - match fs_entry { - FsEntry::ExternalFile(external_file) => { - buffers_to_unfold.insert(external_file.buffer_id); - entries.insert(CollapsedEntry::ExternalFile(external_file.buffer_id)); - entries.extend( - self.excerpts - .get(&external_file.buffer_id) - .into_iter() - .flat_map(|excerpts| { - excerpts.keys().map(|excerpt_id| { - CollapsedEntry::Excerpt( - external_file.buffer_id, - *excerpt_id, - ) - }) - }), - ); - } - FsEntry::Directory(directory) => { - entries.insert(CollapsedEntry::Dir( - directory.worktree_id, - directory.entry.id, + + let mut to_uncollapse: HashSet = HashSet::default(); + let mut buffers_to_unfold: HashSet = HashSet::default(); + + for fs_entry in &self.fs_entries { + match fs_entry { + FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + }) => { + to_uncollapse.insert(CollapsedEntry::File(*worktree_id, *buffer_id)); + buffers_to_unfold.insert(*buffer_id); + } + FsEntry::ExternalFile(FsEntryExternalFile { buffer_id, .. }) => { + to_uncollapse.insert(CollapsedEntry::ExternalFile(*buffer_id)); + buffers_to_unfold.insert(*buffer_id); + } + FsEntry::Directory(FsEntryDirectory { + worktree_id, entry, .. 
+ }) => { + to_uncollapse.insert(CollapsedEntry::Dir(*worktree_id, entry.id)); + } + } + } + + for (&buffer_id, excerpts) in &self.excerpts { + for (&excerpt_id, excerpt) in excerpts { + match &excerpt.outlines { + ExcerptOutlines::Outlines(outlines) => { + for outline in outlines { + to_uncollapse.insert(CollapsedEntry::Outline( + buffer_id, + excerpt_id, + outline.range.clone(), )); } - FsEntry::File(file) => { - buffers_to_unfold.insert(file.buffer_id); - entries.insert(CollapsedEntry::File(file.worktree_id, file.buffer_id)); - entries.extend( - self.excerpts.get(&file.buffer_id).into_iter().flat_map( - |excerpts| { - excerpts.keys().map(|excerpt_id| { - CollapsedEntry::Excerpt(file.buffer_id, *excerpt_id) - }) - }, - ), - ); + } + ExcerptOutlines::Invalidated(outlines) => { + for outline in outlines { + to_uncollapse.insert(CollapsedEntry::Outline( + buffer_id, + excerpt_id, + outline.range.clone(), + )); } - }; - entries - }); + } + ExcerptOutlines::NotFetched => {} + } + to_uncollapse.insert(CollapsedEntry::Excerpt(buffer_id, excerpt_id)); + } + } + + for cached in &self.cached_entries { + if let PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. + }) = &cached.entry + { + if let Some(last) = entries.last() { + to_uncollapse.insert(CollapsedEntry::Dir(*worktree_id, last.id)); + } + } + } + self.collapsed_entries - .retain(|entry| !expanded_entries.contains(entry)); + .retain(|entry| !to_uncollapse.contains(entry)); + active_editor.update(cx, |editor, cx| { buffers_to_unfold.retain(|buffer_id| editor.is_buffer_folded(*buffer_id, cx)); }); + if buffers_to_unfold.is_empty() { self.update_cached_entries(None, window, cx); } else { @@ -1703,37 +1726,44 @@ impl OutlinePanel { return; }; let mut buffers_to_fold = HashSet::default(); - let new_entries = self - .cached_entries - .iter() - .flat_map(|cached_entry| match &cached_entry.entry { - PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { - worktree_id, entry, .. 
- })) => Some(CollapsedEntry::Dir(*worktree_id, entry.id)), - PanelEntry::Fs(FsEntry::File(FsEntryFile { - worktree_id, - buffer_id, - .. - })) => { - buffers_to_fold.insert(*buffer_id); - Some(CollapsedEntry::File(*worktree_id, *buffer_id)) - } - PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { - buffers_to_fold.insert(external_file.buffer_id); - Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) - } - PanelEntry::FoldedDirs(FoldedDirsEntry { - worktree_id, - entries, - .. - }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)), - PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { - Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) - } - PanelEntry::Search(_) | PanelEntry::Outline(..) => None, - }) - .collect::>(); - self.collapsed_entries.extend(new_entries); + self.collapsed_entries + .extend(self.cached_entries.iter().filter_map( + |cached_entry| match &cached_entry.entry { + PanelEntry::Fs(FsEntry::Directory(FsEntryDirectory { + worktree_id, + entry, + .. + })) => Some(CollapsedEntry::Dir(*worktree_id, entry.id)), + PanelEntry::Fs(FsEntry::File(FsEntryFile { + worktree_id, + buffer_id, + .. + })) => { + buffers_to_fold.insert(*buffer_id); + Some(CollapsedEntry::File(*worktree_id, *buffer_id)) + } + PanelEntry::Fs(FsEntry::ExternalFile(external_file)) => { + buffers_to_fold.insert(external_file.buffer_id); + Some(CollapsedEntry::ExternalFile(external_file.buffer_id)) + } + PanelEntry::FoldedDirs(FoldedDirsEntry { + worktree_id, + entries, + .. 
+ }) => Some(CollapsedEntry::Dir(*worktree_id, entries.last()?.id)), + PanelEntry::Outline(OutlineEntry::Excerpt(excerpt)) => { + Some(CollapsedEntry::Excerpt(excerpt.buffer_id, excerpt.id)) + } + PanelEntry::Outline(OutlineEntry::Outline(outline)) => { + Some(CollapsedEntry::Outline( + outline.buffer_id, + outline.excerpt_id, + outline.outline.range.clone(), + )) + } + PanelEntry::Search(_) => None, + }, + )); active_editor.update(cx, |editor, cx| { buffers_to_fold.retain(|buffer_id| !editor.is_buffer_folded(*buffer_id, cx)); @@ -6592,6 +6622,60 @@ outline: struct OutlineEntryExcerpt search: {{ "something": "static" }} src/ app/(site)/ + components/ + ErrorBoundary.tsx <==== selected + search: static"# + ) + ); + }); + + outline_panel.update_in(cx, |outline_panel, window, cx| { + outline_panel.collapse_all_entries(&CollapseAllEntries, window, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &project, + &snapshot(outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry(), + cx, + ), + format!(r#"frontend-project/"#) + ); + }); + + outline_panel.update_in(cx, |outline_panel, window, cx| { + outline_panel.expand_all_entries(&ExpandAllEntries, window, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &project, + &snapshot(outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry(), + cx, + ), + format!( + r#"frontend-project/ + public/lottie/ + syntax-tree.json + search: {{ "something": "static" }} + src/ + app/(site)/ + (about)/jobs/[slug]/ + page.tsx + search: static + (blog)/post/[slug]/ + page.tsx + search: static components/ ErrorBoundary.tsx <==== selected search: static"# @@ -7510,4 +7594,237 @@ outline: fn 
main()" ); }); } + + #[gpui::test] + async fn test_outline_expand_collapse_all(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + "/test", + json!({ + "src": { + "lib.rs": indoc!(" + mod outer { + pub struct OuterStruct { + field: String, + } + impl OuterStruct { + pub fn new() -> Self { + Self { field: String::new() } + } + pub fn method(&self) { + println!(\"{}\", self.field); + } + } + mod inner { + pub fn inner_function() { + let x = 42; + println!(\"{}\", x); + } + pub struct InnerStruct { + value: i32, + } + } + } + fn main() { + let s = outer::OuterStruct::new(); + s.method(); + } + "), + } + }), + ) + .await; + + let project = Project::test(fs.clone(), ["/test".as_ref()], cx).await; + project.read_with(cx, |project, _| { + project.languages().add(Arc::new( + rust_lang() + .with_outline_query( + r#" + (struct_item + (visibility_modifier)? @context + "struct" @context + name: (_) @name) @item + (impl_item + "impl" @context + trait: (_)? @context + "for"? @context + type: (_) @context + body: (_)) @item + (function_item + (visibility_modifier)? @context + "fn" @context + name: (_) @name + parameters: (_) @context) @item + (mod_item + (visibility_modifier)? @context + "mod" @context + name: (_) @name) @item + (enum_item + (visibility_modifier)? @context + "enum" @context + name: (_) @name) @item + (field_declaration + (visibility_modifier)? 
@context + name: (_) @name + ":" @context + type: (_) @context) @item + "#, + ) + .unwrap(), + )) + }); + let workspace = add_outline_panel(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + let outline_panel = outline_panel(&workspace, cx); + + outline_panel.update_in(cx, |outline_panel, window, cx| { + outline_panel.set_active(true, window, cx) + }); + + workspace + .update(cx, |workspace, window, cx| { + workspace.open_abs_path( + PathBuf::from("/test/src/lib.rs"), + OpenOptions { + visible: Some(OpenVisible::All), + ..Default::default() + }, + window, + cx, + ) + }) + .unwrap() + .await + .unwrap(); + + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(500)); + cx.run_until_parked(); + + // Force another update cycle to ensure outlines are fetched + outline_panel.update_in(cx, |panel, window, cx| { + panel.update_non_fs_items(window, cx); + panel.update_cached_entries(Some(UPDATE_DEBOUNCE), window, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(500)); + cx.run_until_parked(); + + outline_panel.update(cx, |outline_panel, cx| { + assert_eq!( + display_entries( + &project, + &snapshot(outline_panel, cx), + &outline_panel.cached_entries, + outline_panel.selected_entry(), + cx, + ), + indoc!( + " +outline: mod outer <==== selected + outline: pub struct OuterStruct + outline: field: String + outline: impl OuterStruct + outline: pub fn new() + outline: pub fn method(&self) + outline: mod inner + outline: pub fn inner_function() + outline: pub struct InnerStruct + outline: value: i32 +outline: fn main()" + ) + ); + }); + + let _parent_outline = outline_panel + .read_with(cx, |panel, _cx| { + panel + .cached_entries + .iter() + .find_map(|entry| match &entry.entry { + PanelEntry::Outline(OutlineEntry::Outline(outline)) + if panel + .outline_children_cache + .get(&outline.buffer_id) + .and_then(|children_map| { + let key = + (outline.outline.range.clone(), 
outline.outline.depth); + children_map.get(&key) + }) + .copied() + .unwrap_or(false) => + { + Some(entry.entry.clone()) + } + _ => None, + }) + }) + .expect("Should find an outline with children"); + + // Collapse all entries + outline_panel.update_in(cx, |panel, window, cx| { + panel.collapse_all_entries(&CollapseAllEntries, window, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + + let expected_collapsed_output = indoc!( + " + outline: mod outer <==== selected + outline: fn main()" + ); + + outline_panel.update(cx, |panel, cx| { + assert_eq! { + display_entries( + &project, + &snapshot(panel, cx), + &panel.cached_entries, + panel.selected_entry(), + cx, + ), + expected_collapsed_output + }; + }); + + // Expand all entries + outline_panel.update_in(cx, |panel, window, cx| { + panel.expand_all_entries(&ExpandAllEntries, window, cx); + }); + cx.executor() + .advance_clock(UPDATE_DEBOUNCE + Duration::from_millis(100)); + cx.run_until_parked(); + + let expected_expanded_output = indoc!( + " + outline: mod outer <==== selected + outline: pub struct OuterStruct + outline: field: String + outline: impl OuterStruct + outline: pub fn new() + outline: pub fn method(&self) + outline: mod inner + outline: pub fn inner_function() + outline: pub struct InnerStruct + outline: value: i32 + outline: fn main()" + ); + + outline_panel.update(cx, |panel, cx| { + assert_eq! 
{ + display_entries( + &project, + &snapshot(panel, cx), + &panel.cached_entries, + panel.selected_entry(), + cx, + ), + expected_expanded_output + }; + }); + } } From b059c1fce7714b1b23fa5b5f1971a23be251100e Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 30 Oct 2025 23:14:41 +0100 Subject: [PATCH 28/82] agent_servers: Expand `~` in path from settings (#41602) Closes #40796 Release Notes: - Fixed an issue where `~` would not be expanded when specifiying the path of an ACP server --- crates/project/src/agent_server_store.rs | 42 ++++++++++++++++++++++-- 1 file changed, 40 insertions(+), 2 deletions(-) diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index a1897a89d1f0fe52fedf8902e8c631a367627b20..a6efa1ef75786d3f0dc77ed2e57ec0edec42fc8c 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -1638,7 +1638,9 @@ impl BuiltinAgentServerSettings { impl From for BuiltinAgentServerSettings { fn from(value: settings::BuiltinAgentServerSettings) -> Self { BuiltinAgentServerSettings { - path: value.path, + path: value + .path + .map(|p| PathBuf::from(shellexpand::tilde(&p.to_string_lossy()).as_ref())), args: value.args, env: value.env, ignore_system_version: value.ignore_system_version, @@ -1673,7 +1675,7 @@ impl From for CustomAgentServerSettings { fn from(value: settings::CustomAgentServerSettings) -> Self { CustomAgentServerSettings { command: AgentServerCommand { - path: value.path, + path: PathBuf::from(shellexpand::tilde(&value.path.to_string_lossy()).as_ref()), args: value.args, env: value.env, }, @@ -1893,4 +1895,40 @@ mod extension_agent_tests { let target = manifest_entry.targets.get("linux-x86_64").unwrap(); assert_eq!(target.cmd, "./release-agent"); } + + #[test] + fn test_tilde_expansion_in_settings() { + let settings = settings::BuiltinAgentServerSettings { + path: Some(PathBuf::from("~/bin/agent")), + args: Some(vec!["--flag".into()]), + env: None, + 
ignore_system_version: None, + default_mode: None, + }; + + let BuiltinAgentServerSettings { path, .. } = settings.into(); + + let path = path.unwrap(); + assert!( + !path.to_string_lossy().starts_with("~"), + "Tilde should be expanded for builtin agent path" + ); + + let settings = settings::CustomAgentServerSettings { + path: PathBuf::from("~/custom/agent"), + args: vec!["serve".into()], + env: None, + default_mode: None, + }; + + let CustomAgentServerSettings { + command: AgentServerCommand { path, .. }, + .. + } = settings.into(); + + assert!( + !path.to_string_lossy().starts_with("~"), + "Tilde should be expanded for custom agent path" + ); + } } From 7070038c92f7fff325b5ed887a054eb010cd68b4 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 30 Oct 2025 15:17:45 -0700 Subject: [PATCH 29/82] gpui: Remove type bound (#41603) Release Notes: - N/A --- crates/gpui/src/window.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index c44b0d642a2970dfb803109591d8dc0e2c6cacc6..66ab327a19e2c326f8ba2cdc8710b3d772b45a59 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -4326,10 +4326,10 @@ impl Window { } /// Returns a generic event listener that invokes the given listener with the view and context associated with the given view handle. 
- pub fn listener_for( + pub fn listener_for( &self, - view: &Entity, - f: impl Fn(&mut V, &E, &mut Window, &mut Context) + 'static, + view: &Entity, + f: impl Fn(&mut T, &E, &mut Window, &mut Context) + 'static, ) -> impl Fn(&E, &mut Window, &mut App) + 'static { let view = view.downgrade(); move |e: &E, window: &mut Window, cx: &mut App| { From 977856407e085a4e2608193fbf1bd548217d476a Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 30 Oct 2025 23:25:47 +0100 Subject: [PATCH 30/82] Add bennetbo to REVIEWERS.conl (#41604) Release Notes: - N/A --- REVIEWERS.conl | 1 + 1 file changed, 1 insertion(+) diff --git a/REVIEWERS.conl b/REVIEWERS.conl index 78563fe466f38c644cd6a19c76ffe231a086fd56..45c73ffe2abefae08789fe6235ba8fd5d7244187 100644 --- a/REVIEWERS.conl +++ b/REVIEWERS.conl @@ -80,6 +80,7 @@ ai = @rtfeldman = @danilo-leal = @benbrandt + = @bennetbo design = @danilo-leal From c2537fad436221650509a95c5961cfdddf1c1b73 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 30 Oct 2025 17:28:03 -0600 Subject: [PATCH 31/82] Add a no-op compare_perf workflow (#41605) Testing PR for @zed-zippy Release Notes: - N/A --- .github/workflows/compare_perf.yml | 13 +++++++++++ tooling/xtask/src/tasks/workflows.rs | 2 ++ .../xtask/src/tasks/workflows/compare_perf.rs | 22 +++++++++++++++++++ 3 files changed, 37 insertions(+) create mode 100644 .github/workflows/compare_perf.yml create mode 100644 tooling/xtask/src/tasks/workflows/compare_perf.rs diff --git a/.github/workflows/compare_perf.yml b/.github/workflows/compare_perf.yml new file mode 100644 index 0000000000000000000000000000000000000000..3f30ff1ec54afb88bc27557c89d189a8e1e21dff --- /dev/null +++ b/.github/workflows/compare_perf.yml @@ -0,0 +1,13 @@ +# Generated from xtask::workflows::compare_perf +# Rebuild with `cargo xtask workflows`. 
+name: compare_perf +on: + workflow_dispatch: {} +jobs: + run_perf: + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index 0fd17088c14d87812e49809461ea97d4f2456960..f29a590ca19d8be4781aaba9d7fd23d90933f34c 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -3,6 +3,7 @@ use clap::Parser; use std::fs; use std::path::Path; +mod compare_perf; mod danger; mod nix_build; mod release_nightly; @@ -24,6 +25,7 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { ("run_bundling.yml", run_bundling::run_bundling()), ("release_nightly.yml", release_nightly::release_nightly()), ("run_tests.yml", run_tests::run_tests()), + ("compare_perf.yml", compare_perf::compare_perf()), ]; fs::create_dir_all(dir) .with_context(|| format!("Failed to create directory: {}", dir.display()))?; diff --git a/tooling/xtask/src/tasks/workflows/compare_perf.rs b/tooling/xtask/src/tasks/workflows/compare_perf.rs new file mode 100644 index 0000000000000000000000000000000000000000..b46a3bb0ca1329906a8f0445e55c4edf5059cb95 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/compare_perf.rs @@ -0,0 +1,22 @@ +use gh_workflow::*; + +use crate::tasks::workflows::{ + runners, + steps::{self, NamedJob, named}, +}; + +/// Generates the danger.yml workflow +pub fn compare_perf() -> Workflow { + let run_perf = run_perf(); + named::workflow() + .on(Event::default().workflow_dispatch(WorkflowDispatch::default())) + .add_job(run_perf.name, run_perf.job) +} + +pub fn run_perf() -> NamedJob { + named::job( + Job::default() + .runs_on(runners::LINUX_SMALL) + .add_step(steps::checkout_repo()), + ) +} From eab06eb1d990adc5cfcd4a09fdfa5283642ef3c4 Mon Sep 17 00:00:00 2001 From: Andrew Farkas <6060305+HactarCE@users.noreply.github.com> Date: Thu, 30 Oct 2025 
21:53:46 -0400 Subject: [PATCH 32/82] Keep selection in `SwitchToHelixNormalMode` (#41583) Closes #41125 Release Notes: - Fixed `SwitchToHelixNormalMode` to keep selection - Added default keybinds for `SwitchToHelixNormalMode` when in Helix mode --- assets/keymaps/vim.json | 6 +++ crates/agent_ui/src/text_thread_editor.rs | 3 +- crates/debugger_tools/src/dap_log.rs | 6 ++- crates/editor/src/editor.rs | 41 ++++++++++----- crates/editor/src/items.rs | 3 +- crates/language_tools/src/lsp_log_view.rs | 6 ++- crates/search/src/buffer_search.rs | 21 +++++--- crates/search/src/project_search.rs | 10 ++-- crates/terminal_view/src/terminal_view.rs | 1 + crates/vim/src/helix.rs | 64 +++++++++++++++++++++-- crates/vim/src/motion.rs | 45 +++++++++------- crates/vim/src/normal/search.rs | 19 ++++--- crates/vim/src/state.rs | 8 ++- crates/vim/src/vim.rs | 4 +- crates/vim/src/visual.rs | 8 +-- crates/workspace/src/searchable.rs | 5 +- 16 files changed, 179 insertions(+), 71 deletions(-) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index d6bdff1cd02fcd0bfb31fb48d2c47a321c54de2c..9bde6ca7575b958d456d46a002a14e4289fe10fd 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -421,6 +421,12 @@ "ctrl-[": "editor::Cancel" } }, + { + "context": "vim_mode == helix_select && !menu", + "bindings": { + "escape": "vim::SwitchToHelixNormalMode" + } + }, { "context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu", "bindings": { diff --git a/crates/agent_ui/src/text_thread_editor.rs b/crates/agent_ui/src/text_thread_editor.rs index 667ccb8938b892dcf59232d5cd7ea8dda04bc4b2..44c80a2258d1146fb7a5f2fb6124d08d61d8cb57 100644 --- a/crates/agent_ui/src/text_thread_editor.rs +++ b/crates/agent_ui/src/text_thread_editor.rs @@ -2591,11 +2591,12 @@ impl SearchableItem for TextThreadEditor { &mut self, index: usize, matches: &[Self::Match], + collapse: bool, window: &mut Window, cx: &mut Context, ) { self.editor.update(cx, |editor, cx| { - 
editor.activate_match(index, matches, window, cx); + editor.activate_match(index, matches, collapse, window, cx); }); } diff --git a/crates/debugger_tools/src/dap_log.rs b/crates/debugger_tools/src/dap_log.rs index 4c994ad7eb749dcb5828daa83bad34a579f9f14c..738c60870f2200e11e710f9c94d02682b94677f7 100644 --- a/crates/debugger_tools/src/dap_log.rs +++ b/crates/debugger_tools/src/dap_log.rs @@ -1029,11 +1029,13 @@ impl SearchableItem for DapLogView { &mut self, index: usize, matches: &[Self::Match], + collapse: bool, window: &mut Window, cx: &mut Context, ) { - self.editor - .update(cx, |e, cx| e.activate_match(index, matches, window, cx)) + self.editor.update(cx, |e, cx| { + e.activate_match(index, matches, collapse, window, cx) + }) } fn select_matches( diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 3839da917078ae2340ead97f9cf4fa624b5c588a..9bdebcd24a20697fb041ae14fa7dc2f034d00d92 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1069,7 +1069,6 @@ pub struct Editor { searchable: bool, cursor_shape: CursorShape, current_line_highlight: Option, - collapse_matches: bool, autoindent_mode: Option, workspace: Option<(WeakEntity, Option)>, input_enabled: bool, @@ -2119,7 +2118,7 @@ impl Editor { .unwrap_or_default(), current_line_highlight: None, autoindent_mode: Some(AutoindentMode::EachLine), - collapse_matches: false, + workspace: None, input_enabled: !is_minimap, use_modal_editing: full_mode, @@ -2272,7 +2271,7 @@ impl Editor { } } EditorEvent::Edited { .. 
} => { - if !vim_enabled(cx) { + if vim_flavor(cx).is_none() { let display_map = editor.display_snapshot(cx); let selections = editor.selections.all_adjusted_display(&display_map); let pop_state = editor @@ -2881,12 +2880,12 @@ impl Editor { self.current_line_highlight = current_line_highlight; } - pub fn set_collapse_matches(&mut self, collapse_matches: bool) { - self.collapse_matches = collapse_matches; - } - - pub fn range_for_match(&self, range: &Range) -> Range { - if self.collapse_matches { + pub fn range_for_match( + &self, + range: &Range, + collapse: bool, + ) -> Range { + if collapse { return range.start..range.start; } range.clone() @@ -16654,7 +16653,7 @@ impl Editor { editor.update_in(cx, |editor, window, cx| { let range = target_range.to_point(target_buffer.read(cx)); - let range = editor.range_for_match(&range); + let range = editor.range_for_match(&range, false); let range = collapse_multiline_range(range); if !split @@ -21457,7 +21456,7 @@ impl Editor { .and_then(|e| e.to_str()) .map(|a| a.to_string())); - let vim_mode = vim_enabled(cx); + let vim_mode = vim_flavor(cx).is_some(); let edit_predictions_provider = all_language_settings(file, cx).edit_predictions.provider; let copilot_enabled = edit_predictions_provider @@ -22088,10 +22087,26 @@ fn edit_for_markdown_paste<'a>( (range, new_text) } -fn vim_enabled(cx: &App) -> bool { - vim_mode_setting::VimModeSetting::try_get(cx) +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum VimFlavor { + Vim, + Helix, +} + +pub fn vim_flavor(cx: &App) -> Option { + if vim_mode_setting::HelixModeSetting::try_get(cx) + .map(|helix_mode| helix_mode.0) + .unwrap_or(false) + { + Some(VimFlavor::Helix) + } else if vim_mode_setting::VimModeSetting::try_get(cx) .map(|vim_mode| vim_mode.0) .unwrap_or(false) + { + Some(VimFlavor::Vim) + } else { + None // neither vim nor helix mode + } } fn process_completion_for_edit( diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 
346574eba440622a40139a52be6977e55e909980..c064e3dbaf2873fef03d65dbd5794e6453599cec 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1587,11 +1587,12 @@ impl SearchableItem for Editor { &mut self, index: usize, matches: &[Range], + collapse: bool, window: &mut Window, cx: &mut Context, ) { self.unfold_ranges(&[matches[index].clone()], false, true, cx); - let range = self.range_for_match(&matches[index]); + let range = self.range_for_match(&matches[index], collapse); self.change_selections(Default::default(), window, cx, |s| { s.select_ranges([range]); }) diff --git a/crates/language_tools/src/lsp_log_view.rs b/crates/language_tools/src/lsp_log_view.rs index d480eadc73b9546e5a59b204b036a3ff88a018c7..ef9cc1ef3af88310d5870aa4d2da3d1a077139f1 100644 --- a/crates/language_tools/src/lsp_log_view.rs +++ b/crates/language_tools/src/lsp_log_view.rs @@ -812,11 +812,13 @@ impl SearchableItem for LspLogView { &mut self, index: usize, matches: &[Self::Match], + collapse: bool, window: &mut Window, cx: &mut Context, ) { - self.editor - .update(cx, |e, cx| e.activate_match(index, matches, window, cx)) + self.editor.update(cx, |e, cx| { + e.activate_match(index, matches, collapse, window, cx) + }) } fn select_matches( diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 25697bb45ac5f617b586d7a4346ee8761b7a4ed3..f01073b6228ed3d314990187e63262a111f365c5 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -10,8 +10,9 @@ use any_vec::AnyVec; use anyhow::Context as _; use collections::HashMap; use editor::{ - DisplayPoint, Editor, EditorSettings, + DisplayPoint, Editor, EditorSettings, VimFlavor, actions::{Backtab, Tab}, + vim_flavor, }; use futures::channel::oneshot; use gpui::{ @@ -825,7 +826,8 @@ impl BufferSearchBar { .searchable_items_with_matches .get(&active_searchable_item.downgrade()) { - active_searchable_item.activate_match(match_ix, matches, window, cx) + let collapse = 
editor::vim_flavor(cx) == Some(VimFlavor::Vim); + active_searchable_item.activate_match(match_ix, matches, collapse, window, cx) } } @@ -970,7 +972,8 @@ impl BufferSearchBar { window: &mut Window, cx: &mut Context, ) { - self.select_match(Direction::Next, 1, window, cx); + let collapse = vim_flavor(cx) == Some(VimFlavor::Vim); + self.select_match(Direction::Next, 1, collapse, window, cx); } fn select_prev_match( @@ -979,7 +982,8 @@ impl BufferSearchBar { window: &mut Window, cx: &mut Context, ) { - self.select_match(Direction::Prev, 1, window, cx); + let collapse = vim_flavor(cx) == Some(VimFlavor::Vim); + self.select_match(Direction::Prev, 1, collapse, window, cx); } pub fn select_all_matches( @@ -1004,6 +1008,7 @@ impl BufferSearchBar { &mut self, direction: Direction, count: usize, + collapse: bool, window: &mut Window, cx: &mut Context, ) { @@ -1026,7 +1031,7 @@ impl BufferSearchBar { .match_index_for_direction(matches, index, direction, count, window, cx); searchable_item.update_matches(matches, window, cx); - searchable_item.activate_match(new_match_index, matches, window, cx); + searchable_item.activate_match(new_match_index, matches, collapse, window, cx); } } @@ -1040,7 +1045,8 @@ impl BufferSearchBar { return; } searchable_item.update_matches(matches, window, cx); - searchable_item.activate_match(0, matches, window, cx); + let collapse = vim_flavor(cx) == Some(VimFlavor::Vim); + searchable_item.activate_match(0, matches, collapse, window, cx); } } @@ -1055,7 +1061,8 @@ impl BufferSearchBar { } let new_match_index = matches.len() - 1; searchable_item.update_matches(matches, window, cx); - searchable_item.activate_match(new_match_index, matches, window, cx); + let collapse = vim_flavor(cx) == Some(VimFlavor::Vim); + searchable_item.activate_match(new_match_index, matches, collapse, window, cx); } } diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 
a8be82d5d5a3fcb20b8ea964af19e3f60fea0573..0bb05ecb93cd5cc6c9730307792c1737531a39a5 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -9,10 +9,10 @@ use anyhow::Context as _; use collections::HashMap; use editor::{ Anchor, Editor, EditorEvent, EditorSettings, MAX_TAB_TITLE_LEN, MultiBuffer, PathKey, - SelectionEffects, + SelectionEffects, VimFlavor, actions::{Backtab, SelectAll, Tab}, items::active_match_index, - multibuffer_context_lines, + multibuffer_context_lines, vim_flavor, }; use futures::{StreamExt, stream::FuturesOrdered}; use gpui::{ @@ -1344,7 +1344,8 @@ impl ProjectSearchView { let range_to_select = match_ranges[new_index].clone(); self.results_editor.update(cx, |editor, cx| { - let range_to_select = editor.range_for_match(&range_to_select); + let collapse = vim_flavor(cx) == Some(VimFlavor::Vim); + let range_to_select = editor.range_for_match(&range_to_select, collapse); editor.unfold_ranges(std::slice::from_ref(&range_to_select), false, true, cx); editor.change_selections(Default::default(), window, cx, |s| { s.select_ranges([range_to_select]) @@ -1415,9 +1416,10 @@ impl ProjectSearchView { let is_new_search = self.search_id != prev_search_id; self.results_editor.update(cx, |editor, cx| { if is_new_search { + let collapse = vim_flavor(cx) == Some(VimFlavor::Vim); let range_to_select = match_ranges .first() - .map(|range| editor.range_for_match(range)); + .map(|range| editor.range_for_match(range, collapse)); editor.change_selections(Default::default(), window, cx, |s| { s.select_ranges(range_to_select) }); diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 63d6c503b29d1eec6500bd4acb5c2f0f6ef36e33..2a9720357b27b91f3a5ff7689f4cb0f16787031b 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1447,6 +1447,7 @@ impl SearchableItem for TerminalView { &mut self, index: usize, _: &[Self::Match], + _collapse: 
bool, _window: &mut Window, cx: &mut Context, ) { diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index 6788a186fb45222f7b09fe756862e6cb337c6d90..ee7c0a14fb721116c3fc1f2c3d1bf7b716b43f18 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -450,7 +450,7 @@ impl Vim { prior_selections, prior_operator: self.operator_stack.last().cloned(), prior_mode: self.mode, - helix_select: true, + is_helix_regex_search: true, } }); } @@ -1278,6 +1278,24 @@ mod test { cx.assert_state("«one ˇ»two", Mode::HelixSelect); } + #[gpui::test] + async fn test_exit_visual_mode(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state("ˇone two", Mode::Normal); + cx.simulate_keystrokes("v w"); + cx.assert_state("«one tˇ»wo", Mode::Visual); + cx.simulate_keystrokes("escape"); + cx.assert_state("one ˇtwo", Mode::Normal); + + cx.enable_helix(); + cx.set_state("ˇone two", Mode::HelixNormal); + cx.simulate_keystrokes("v w"); + cx.assert_state("«one ˇ»two", Mode::HelixSelect); + cx.simulate_keystrokes("escape"); + cx.assert_state("«one ˇ»two", Mode::HelixNormal); + } + #[gpui::test] async fn test_helix_select_regex(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; @@ -1297,9 +1315,47 @@ mod test { cx.simulate_keystrokes("enter"); cx.assert_state("«oneˇ» two «oneˇ»", Mode::HelixNormal); - cx.set_state("ˇone two one", Mode::HelixNormal); - cx.simulate_keystrokes("s o n e enter"); - cx.assert_state("ˇone two one", Mode::HelixNormal); + // TODO: change "search_in_selection" to not perform any search when in helix select mode with no selection + // cx.set_state("ˇstuff one two one", Mode::HelixNormal); + // cx.simulate_keystrokes("s o n e enter"); + // cx.assert_state("ˇstuff one two one", Mode::HelixNormal); + } + + #[gpui::test] + async fn test_helix_select_next_match(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state("ˇhello two one two one 
two one", Mode::Visual); + cx.simulate_keystrokes("/ o n e"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes("n n"); + cx.assert_state("«hello two one two one two oˇ»ne", Mode::Visual); + + cx.set_state("ˇhello two one two one two one", Mode::Normal); + cx.simulate_keystrokes("/ o n e"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes("n n"); + cx.assert_state("hello two one two one two ˇone", Mode::Normal); + + cx.set_state("ˇhello two one two one two one", Mode::Normal); + cx.simulate_keystrokes("/ o n e"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes("n g n g n"); + cx.assert_state("hello two one two «one two oneˇ»", Mode::Visual); + + cx.enable_helix(); + + cx.set_state("ˇhello two one two one two one", Mode::HelixNormal); + cx.simulate_keystrokes("/ o n e"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes("n n"); + cx.assert_state("hello two one two one two «oneˇ»", Mode::HelixNormal); + + cx.set_state("ˇhello two one two one two one", Mode::HelixSelect); + cx.simulate_keystrokes("/ o n e"); + cx.simulate_keystrokes("enter"); + cx.simulate_keystrokes("n n"); + cx.assert_state("ˇhello two «oneˇ» two «oneˇ» two «oneˇ»", Mode::HelixSelect); } #[gpui::test] diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index 1a617e36c18ffa52906cac06d4b9eddb11a91f8e..2da1083ee6623cc8a463ef31be7e90dca0063b34 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -672,31 +672,40 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { impl Vim { pub(crate) fn search_motion(&mut self, m: Motion, window: &mut Window, cx: &mut Context) { - if let Motion::ZedSearchResult { - prior_selections, .. 
+ let Motion::ZedSearchResult { + prior_selections, + new_selections, } = &m - { - match self.mode { - Mode::Visual | Mode::VisualLine | Mode::VisualBlock => { - if !prior_selections.is_empty() { - self.update_editor(cx, |_, editor, cx| { - editor.change_selections(Default::default(), window, cx, |s| { - s.select_ranges(prior_selections.iter().cloned()) - }) + else { + return; + }; + + match self.mode { + Mode::Visual | Mode::VisualLine | Mode::VisualBlock => { + if !prior_selections.is_empty() { + self.update_editor(cx, |_, editor, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges(prior_selections.iter().cloned()); }); - } + }); } - Mode::Normal | Mode::Replace | Mode::Insert => { - if self.active_operator().is_none() { - return; - } + self.motion(m, window, cx); + } + Mode::Normal | Mode::Replace | Mode::Insert => { + if self.active_operator().is_some() { + self.motion(m, window, cx); } + } - Mode::HelixNormal | Mode::HelixSelect => {} + Mode::HelixNormal => {} + Mode::HelixSelect => { + self.update_editor(cx, |_, editor, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges(prior_selections.iter().chain(new_selections).cloned()); + }); + }); } } - - self.motion(m, window, cx) } pub(crate) fn motion(&mut self, motion: Motion, window: &mut Window, cx: &mut Context) { diff --git a/crates/vim/src/normal/search.rs b/crates/vim/src/normal/search.rs index 6c4294a474dad13c9d00e58ab117a4a6a74c28d3..2e80a08eb824b93783bf1249970e5e7ad7378ff2 100644 --- a/crates/vim/src/normal/search.rs +++ b/crates/vim/src/normal/search.rs @@ -1,5 +1,6 @@ -use editor::{Editor, EditorSettings}; +use editor::{Editor, EditorSettings, VimFlavor}; use gpui::{Action, Context, Window, actions}; + use language::Point; use schemars::JsonSchema; use search::{BufferSearchBar, SearchOptions, buffer_search}; @@ -195,7 +196,7 @@ impl Vim { prior_selections, prior_operator: self.operator_stack.last().cloned(), prior_mode, - 
helix_select: false, + is_helix_regex_search: false, } }); } @@ -219,7 +220,7 @@ impl Vim { let new_selections = self.editor_selections(window, cx); let result = pane.update(cx, |pane, cx| { let search_bar = pane.toolbar().read(cx).item_of_type::()?; - if self.search.helix_select { + if self.search.is_helix_regex_search { search_bar.update(cx, |search_bar, cx| { search_bar.select_all_matches(&Default::default(), window, cx) }); @@ -240,7 +241,8 @@ impl Vim { count = count.saturating_sub(1) } self.search.count = 1; - search_bar.select_match(direction, count, window, cx); + let collapse = !self.mode.is_helix(); + search_bar.select_match(direction, count, collapse, window, cx); search_bar.focus_editor(&Default::default(), window, cx); let prior_selections: Vec<_> = self.search.prior_selections.drain(..).collect(); @@ -307,7 +309,8 @@ impl Vim { if !search_bar.has_active_match() || !search_bar.show(window, cx) { return false; } - search_bar.select_match(direction, count, window, cx); + let collapse = !self.mode.is_helix(); + search_bar.select_match(direction, count, collapse, window, cx); true }) }); @@ -316,6 +319,7 @@ impl Vim { } let new_selections = self.editor_selections(window, cx); + self.search_motion( Motion::ZedSearchResult { prior_selections, @@ -381,7 +385,8 @@ impl Vim { cx.spawn_in(window, async move |_, cx| { search.await?; search_bar.update_in(cx, |search_bar, window, cx| { - search_bar.select_match(direction, count, window, cx); + let collapse = editor::vim_flavor(cx) == Some(VimFlavor::Vim); + search_bar.select_match(direction, count, collapse, window, cx); vim.update(cx, |vim, cx| { let new_selections = vim.editor_selections(window, cx); @@ -444,7 +449,7 @@ impl Vim { cx.spawn_in(window, async move |_, cx| { search.await?; search_bar.update_in(cx, |search_bar, window, cx| { - search_bar.select_match(direction, 1, window, cx) + search_bar.select_match(direction, 1, true, window, cx) })?; anyhow::Ok(()) }) diff --git a/crates/vim/src/state.rs 
b/crates/vim/src/state.rs index 959edff63dd50fa549edcbae1bea213224b923af..dc9ac7104c00a5f49758dbab219ec72d46023b27 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -66,12 +66,16 @@ impl Display for Mode { } impl Mode { - pub fn is_visual(&self) -> bool { + pub fn is_visual(self) -> bool { match self { Self::Visual | Self::VisualLine | Self::VisualBlock | Self::HelixSelect => true, Self::Normal | Self::Insert | Self::Replace | Self::HelixNormal => false, } } + + pub fn is_helix(self) -> bool { + matches!(self, Mode::HelixNormal | Mode::HelixSelect) + } } impl Default for Mode { @@ -990,7 +994,7 @@ pub struct SearchState { pub prior_selections: Vec>, pub prior_operator: Option, pub prior_mode: Mode, - pub helix_select: bool, + pub is_helix_regex_search: bool, } impl Operator { diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index cb553b64e91eadbb5e529d56bb1e1a5a7da2c7be..91ce66d43e76f3a40a5e074f01527953def1b188 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -669,7 +669,7 @@ impl Vim { editor, cx, |vim, _: &SwitchToHelixNormalMode, window, cx| { - vim.switch_mode(Mode::HelixNormal, false, window, cx) + vim.switch_mode(Mode::HelixNormal, true, window, cx) }, ); Vim::action(editor, cx, |_, _: &PushForcedMotion, _, cx| { @@ -953,7 +953,6 @@ impl Vim { fn deactivate(editor: &mut Editor, cx: &mut Context) { editor.set_cursor_shape(CursorShape::Bar, cx); editor.set_clip_at_line_ends(false, cx); - editor.set_collapse_matches(false); editor.set_input_enabled(true); editor.set_autoindent(true); editor.selections.set_line_mode(false); @@ -1929,7 +1928,6 @@ impl Vim { self.update_editor(cx, |vim, editor, cx| { editor.set_cursor_shape(vim.cursor_shape(cx), cx); editor.set_clip_at_line_ends(vim.clip_at_line_ends(), cx); - editor.set_collapse_matches(true); editor.set_input_enabled(vim.editor_input_enabled()); editor.set_autoindent(vim.should_autoindent()); editor diff --git a/crates/vim/src/visual.rs b/crates/vim/src/visual.rs 
index 59555205d9862e51c2778eec1f321338fd5e7569..17423f32dc9c235effe53d5a47edca0573bcda6f 100644 --- a/crates/vim/src/visual.rs +++ b/crates/vim/src/visual.rs @@ -847,9 +847,6 @@ impl Vim { let mut start_selection = 0usize; let mut end_selection = 0usize; - self.update_editor(cx, |_, editor, _| { - editor.set_collapse_matches(false); - }); if vim_is_normal { pane.update(cx, |pane, cx| { if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::() @@ -860,7 +857,7 @@ impl Vim { } // without update_match_index there is a bug when the cursor is before the first match search_bar.update_match_index(window, cx); - search_bar.select_match(direction.opposite(), 1, window, cx); + search_bar.select_match(direction.opposite(), 1, false, window, cx); }); } }); @@ -878,7 +875,7 @@ impl Vim { if let Some(search_bar) = pane.toolbar().read(cx).item_of_type::() { search_bar.update(cx, |search_bar, cx| { search_bar.update_match_index(window, cx); - search_bar.select_match(direction, count, window, cx); + search_bar.select_match(direction, count, false, window, cx); match_exists = search_bar.match_exists(window, cx); }); } @@ -905,7 +902,6 @@ impl Vim { editor.change_selections(Default::default(), window, cx, |s| { s.select_ranges([start_selection..end_selection]); }); - editor.set_collapse_matches(true); }); match self.maybe_pop_operator() { diff --git a/crates/workspace/src/searchable.rs b/crates/workspace/src/searchable.rs index 310fae908dbd6864c1636ebd393e4920d0f9ad02..18da3f16f2e7a1e57dd42287059c0041d9309a78 100644 --- a/crates/workspace/src/searchable.rs +++ b/crates/workspace/src/searchable.rs @@ -104,6 +104,7 @@ pub trait SearchableItem: Item + EventEmitter { &mut self, index: usize, matches: &[Self::Match], + collapse: bool, window: &mut Window, cx: &mut Context, ); @@ -184,6 +185,7 @@ pub trait SearchableItemHandle: ItemHandle { &self, index: usize, matches: &AnyVec, + collapse: bool, window: &mut Window, cx: &mut App, ); @@ -274,12 +276,13 @@ impl SearchableItemHandle 
for Entity { &self, index: usize, matches: &AnyVec, + collapse: bool, window: &mut Window, cx: &mut App, ) { let matches = matches.downcast_ref().unwrap(); self.update(cx, |this, cx| { - this.activate_match(index, matches.as_slice(), window, cx) + this.activate_match(index, matches.as_slice(), collapse, window, cx) }); } From 7c29c6d7a6ba012511ba6565bcce720e372363c3 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 30 Oct 2025 21:26:36 -0700 Subject: [PATCH 33/82] Increased the max height of pickers (#41617) Release Notes: - Increased the max size of picker based UI --- crates/picker/src/picker.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/picker/src/picker.rs b/crates/picker/src/picker.rs index d9a23ec93b80287dd1b7b483c8b6315b2119bfd5..6027ae5cd5e77db938116568ac7001548e97bde9 100644 --- a/crates/picker/src/picker.rs +++ b/crates/picker/src/picker.rs @@ -314,7 +314,7 @@ impl Picker { confirm_on_update: None, width: None, widest_item: None, - max_height: Some(rems(18.).into()), + max_height: Some(rems(24.).into()), show_scrollbar: false, is_modal: true, }; From f2ce06c7b00a79a4e2932ffbbbc880c1a3fb0a15 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 31 Oct 2025 11:39:01 +0100 Subject: [PATCH 34/82] sum_tree: Replace rayon with futures (#41586) Release Notes: - N/A *or* Added/Fixed/Improved ... 
Co-authored-by: Kate --- Cargo.lock | 14 +- crates/acp_thread/src/diff.rs | 4 +- crates/action_log/src/action_log.rs | 18 +- .../src/edit_agent/streaming_fuzzy_matcher.rs | 69 +++-- crates/agent/src/tools/edit_file_tool.rs | 5 +- crates/agent_ui/src/agent_configuration.rs | 8 +- crates/agent_ui/src/buffer_codegen.rs | 7 +- .../assistant_text_thread/src/text_thread.rs | 3 +- crates/buffer_diff/src/buffer_diff.rs | 58 +++- crates/channel/src/channel_buffer.rs | 1 + crates/collab/src/db/queries/buffers.rs | 4 +- crates/collab/src/db/tests/buffer_tests.rs | 56 ++-- crates/collab/src/tests/integration_tests.rs | 4 +- .../random_project_collaboration_tests.rs | 7 +- crates/diagnostics/src/diagnostics_tests.rs | 4 +- .../src/edit_prediction_button.rs | 9 +- crates/editor/src/display_map.rs | 3 +- crates/editor/src/display_map/inlay_map.rs | 36 ++- crates/editor/src/display_map/tab_map.rs | 2 +- crates/editor/src/display_map/wrap_map.rs | 9 +- crates/editor/src/editor.rs | 2 +- crates/editor/src/git/blame.rs | 5 +- crates/editor/src/inlays.rs | 18 +- crates/editor/src/movement.rs | 9 +- crates/editor/src/signature_help.rs | 2 +- crates/extension_host/src/extension_host.rs | 15 +- crates/git_ui/src/commit_view.rs | 9 +- crates/git_ui/src/file_diff_view.rs | 11 +- crates/gpui/src/app/async_context.rs | 13 + crates/gpui/src/app/test_context.rs | 5 + crates/gpui/src/executor.rs | 2 +- crates/keymap_editor/src/keymap_editor.rs | 4 +- crates/language/src/buffer.rs | 21 +- crates/language/src/buffer_tests.rs | 39 ++- .../src/syntax_map/syntax_map_tests.rs | 45 ++- .../src/extension_lsp_adapter.rs | 7 +- crates/languages/src/c.rs | 10 +- crates/languages/src/go.rs | 24 +- crates/languages/src/python.rs | 7 +- crates/languages/src/rust.rs | 15 +- crates/markdown/src/markdown.rs | 4 +- .../markdown_preview/src/markdown_parser.rs | 2 +- crates/multi_buffer/src/multi_buffer_tests.rs | 33 +- crates/project/src/buffer_store.rs | 18 +- crates/project/src/git_store/conflict_set.rs | 44
++- crates/project/src/prettier_store.rs | 139 ++++++--- crates/project/src/project.rs | 13 +- crates/project/src/project_tests.rs | 18 +- .../remote_server/src/remote_editing_tests.rs | 8 +- crates/rich_text/src/rich_text.rs | 17 +- crates/rope/Cargo.toml | 2 +- crates/rope/benches/rope_benchmark.rs | 37 ++- crates/rope/src/rope.rs | 276 +++++++++++------ crates/rules_library/src/rules_library.rs | 10 +- crates/streaming_diff/Cargo.toml | 1 + crates/streaming_diff/src/streaming_diff.rs | 99 +++--- crates/sum_tree/Cargo.toml | 4 +- crates/sum_tree/src/sum_tree.rs | 131 +++++--- crates/text/Cargo.toml | 1 + crates/text/src/tests.rs | 281 ++++++++++++------ crates/text/src/text.rs | 80 +++-- crates/vim/src/motion.rs | 10 +- crates/workspace/src/workspace.rs | 4 +- crates/worktree/src/worktree_tests.rs | 34 ++- crates/zed/src/zed.rs | 54 ++-- crates/zed/src/zed/open_listener.rs | 4 +- crates/zeta/src/zeta.rs | 3 +- 67 files changed, 1271 insertions(+), 640 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 78c972865a4e01ba66357142ff8737b634639b27..25a22e64c6db0632ca1357cebe02f0bbe04fa0a8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -12711,6 +12711,12 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7" +[[package]] +name = "pollster" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f3a9f18d041e6d0e102a0a46750538147e5e8992d3b4873aaafee2520b00ce3" + [[package]] name = "portable-atomic" version = "1.11.1" @@ -12759,7 +12765,7 @@ dependencies = [ "log", "parking_lot", "pin-project", - "pollster", + "pollster 0.2.5", "static_assertions", "thiserror 1.0.69", ] @@ -14311,7 +14317,6 @@ dependencies = [ "gpui", "log", "rand 0.9.2", - "rayon", "sum_tree", "unicode-segmentation", "util", @@ -16237,6 +16242,7 @@ checksum = "2b2231b7c3057d5e4ad0156fb3dc807d900806020c5ffa3ee6ff2c8c76fb8520" name = "streaming_diff" 
version = "0.1.0" dependencies = [ + "gpui", "ordered-float 2.10.1", "rand 0.9.2", "rope", @@ -16355,9 +16361,11 @@ version = "0.1.0" dependencies = [ "arrayvec", "ctor", + "futures 0.3.31", + "itertools 0.14.0", "log", + "pollster 0.4.0", "rand 0.9.2", - "rayon", "zlog", ] diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 055b2f7fb86ffe9d7f12459b6b16405ce77815a0..39cd8ad38e5bf223987dc8efe771614b3ed2172b 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -361,10 +361,12 @@ async fn build_buffer_diff( ) -> Result> { let buffer = cx.update(|cx| buffer.read(cx).snapshot())?; + let executor = cx.background_executor().clone(); let old_text_rope = cx .background_spawn({ let old_text = old_text.clone(); - async move { Rope::from(old_text.as_str()) } + let executor = executor.clone(); + async move { Rope::from_str(old_text.as_str(), &executor) } }) .await; let base_buffer = cx diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index b7722f211afda3a77bc96292a50acf869e7424d6..1730163a4ce7b53aa051a6af87da8ab10ad4320f 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -3,7 +3,9 @@ use buffer_diff::BufferDiff; use clock; use collections::BTreeMap; use futures::{FutureExt, StreamExt, channel::mpsc}; -use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity}; +use gpui::{ + App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, Subscription, Task, WeakEntity, +}; use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint}; use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle}; use std::{cmp, ops::Range, sync::Arc}; @@ -321,6 +323,7 @@ impl ActionLog { let unreviewed_edits = tracked_buffer.unreviewed_edits.clone(); let edits = diff_snapshots(&old_snapshot, &new_snapshot); let mut has_user_changes = false; + let executor = cx.background_executor().clone(); async move { if let 
ChangeAuthor::User = author { has_user_changes = apply_non_conflicting_edits( @@ -328,6 +331,7 @@ impl ActionLog { edits, &mut base_text, new_snapshot.as_rope(), + &executor, ); } @@ -382,6 +386,7 @@ impl ActionLog { let agent_diff_base = tracked_buffer.diff_base.clone(); let git_diff_base = git_diff.read(cx).base_text().as_rope().clone(); let buffer_text = tracked_buffer.snapshot.as_rope().clone(); + let executor = cx.background_executor().clone(); anyhow::Ok(cx.background_spawn(async move { let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable(); let committed_edits = language::line_diff( @@ -416,8 +421,11 @@ impl ActionLog { ), new_agent_diff_base.max_point(), )); - new_agent_diff_base - .replace(old_byte_start..old_byte_end, &unreviewed_new); + new_agent_diff_base.replace( + old_byte_start..old_byte_end, + &unreviewed_new, + &executor, + ); row_delta += unreviewed.new_len() as i32 - unreviewed.old_len() as i32; } @@ -611,6 +619,7 @@ impl ActionLog { .snapshot .text_for_range(new_range) .collect::(), + cx.background_executor(), ); delta += edit.new_len() as i32 - edit.old_len() as i32; false @@ -824,6 +833,7 @@ fn apply_non_conflicting_edits( edits: Vec>, old_text: &mut Rope, new_text: &Rope, + executor: &BackgroundExecutor, ) -> bool { let mut old_edits = patch.edits().iter().cloned().peekable(); let mut new_edits = edits.into_iter().peekable(); @@ -877,6 +887,7 @@ fn apply_non_conflicting_edits( old_text.replace( old_bytes, &new_text.chunks_in_range(new_bytes).collect::(), + executor, ); applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32; has_made_changes = true; @@ -2282,6 +2293,7 @@ mod tests { old_text.replace( old_start..old_end, &new_text.slice_rows(edit.new.clone()).to_string(), + cx.background_executor(), ); } pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string()); diff --git a/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs b/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs index 
904ec05a8c7565d5052cd546fc0bf6d723ffa375..021892e738eed229568c909f72f327d93199cdc0 100644 --- a/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs +++ b/crates/agent/src/edit_agent/streaming_fuzzy_matcher.rs @@ -305,18 +305,20 @@ impl SearchMatrix { #[cfg(test)] mod tests { use super::*; + use gpui::TestAppContext; use indoc::indoc; use language::{BufferId, TextBuffer}; use rand::prelude::*; use text::ReplicaId; use util::test::{generate_marked_text, marked_text_ranges}; - #[test] - fn test_empty_query() { + #[gpui::test] + fn test_empty_query(cx: &mut gpui::TestAppContext) { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), "Hello world\nThis is a test\nFoo bar baz", + cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -325,12 +327,13 @@ mod tests { assert_eq!(finish(finder), None); } - #[test] - fn test_streaming_exact_match() { + #[gpui::test] + fn test_streaming_exact_match(cx: &mut gpui::TestAppContext) { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), "Hello world\nThis is a test\nFoo bar baz", + cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -349,8 +352,8 @@ mod tests { assert_eq!(finish(finder), Some("This is a test".to_string())); } - #[test] - fn test_streaming_fuzzy_match() { + #[gpui::test] + fn test_streaming_fuzzy_match(cx: &mut gpui::TestAppContext) { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), @@ -363,6 +366,7 @@ mod tests { return x * y; } "}, + cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -383,12 +387,13 @@ mod tests { ); } - #[test] - fn test_incremental_improvement() { + #[gpui::test] + fn test_incremental_improvement(cx: &mut gpui::TestAppContext) { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), "Line 1\nLine 2\nLine 3\nLine 4\nLine 5", + cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -408,8 +413,8 @@ mod tests { assert_eq!(finish(finder), Some("Line 3\nLine 
4".to_string())); } - #[test] - fn test_incomplete_lines_buffering() { + #[gpui::test] + fn test_incomplete_lines_buffering(cx: &mut gpui::TestAppContext) { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), @@ -418,6 +423,7 @@ mod tests { jumps over the lazy dog Pack my box with five dozen liquor jugs "}, + cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -435,8 +441,8 @@ mod tests { ); } - #[test] - fn test_multiline_fuzzy_match() { + #[gpui::test] + fn test_multiline_fuzzy_match(cx: &mut gpui::TestAppContext) { let buffer = TextBuffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), @@ -456,6 +462,7 @@ mod tests { } } "#}, + cx.background_executor(), ); let snapshot = buffer.snapshot(); @@ -509,7 +516,7 @@ mod tests { } #[gpui::test(iterations = 100)] - fn test_resolve_location_single_line(mut rng: StdRng) { + fn test_resolve_location_single_line(mut rng: StdRng, cx: &mut TestAppContext) { assert_location_resolution( concat!( " Lorem\n", @@ -519,11 +526,12 @@ mod tests { ), "ipsum", &mut rng, + cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_multiline(mut rng: StdRng) { + fn test_resolve_location_multiline(mut rng: StdRng, cx: &mut TestAppContext) { assert_location_resolution( concat!( " Lorem\n", @@ -533,11 +541,12 @@ mod tests { ), "ipsum\ndolor sit amet", &mut rng, + cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_function_with_typo(mut rng: StdRng) { + fn test_resolve_location_function_with_typo(mut rng: StdRng, cx: &mut TestAppContext) { assert_location_resolution( indoc! {" «fn foo1(a: usize) -> usize { @@ -550,11 +559,12 @@ mod tests { "}, "fn foo1(a: usize) -> u32 {\n40\n}", &mut rng, + cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_class_methods(mut rng: StdRng) { + fn test_resolve_location_class_methods(mut rng: StdRng, cx: &mut TestAppContext) { assert_location_resolution( indoc! 
{" class Something { @@ -575,11 +585,12 @@ mod tests { six() { return 6666; } "}, &mut rng, + cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_imports_no_match(mut rng: StdRng) { + fn test_resolve_location_imports_no_match(mut rng: StdRng, cx: &mut TestAppContext) { assert_location_resolution( indoc! {" use std::ops::Range; @@ -609,11 +620,12 @@ mod tests { use std::sync::Arc; "}, &mut rng, + cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_nested_closure(mut rng: StdRng) { + fn test_resolve_location_nested_closure(mut rng: StdRng, cx: &mut TestAppContext) { assert_location_resolution( indoc! {" impl Foo { @@ -641,11 +653,12 @@ mod tests { " });", ), &mut rng, + cx, ); } #[gpui::test(iterations = 100)] - fn test_resolve_location_tool_invocation(mut rng: StdRng) { + fn test_resolve_location_tool_invocation(mut rng: StdRng, cx: &mut TestAppContext) { assert_location_resolution( indoc! {r#" let tool = cx @@ -673,11 +686,12 @@ mod tests { " .output;", ), &mut rng, + cx, ); } #[gpui::test] - fn test_line_hint_selection() { + fn test_line_hint_selection(cx: &mut TestAppContext) { let text = indoc! 
{r#" fn first_function() { return 42; @@ -696,6 +710,7 @@ mod tests { ReplicaId::LOCAL, BufferId::new(1).unwrap(), text.to_string(), + cx.background_executor(), ); let snapshot = buffer.snapshot(); let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone()); @@ -727,9 +742,19 @@ mod tests { } #[track_caller] - fn assert_location_resolution(text_with_expected_range: &str, query: &str, rng: &mut StdRng) { + fn assert_location_resolution( + text_with_expected_range: &str, + query: &str, + rng: &mut StdRng, + cx: &mut TestAppContext, + ) { let (text, expected_ranges) = marked_text_ranges(text_with_expected_range, false); - let buffer = TextBuffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text.clone()); + let buffer = TextBuffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + text.clone(), + cx.background_executor(), + ); let snapshot = buffer.snapshot(); let mut matcher = StreamingFuzzyMatcher::new(snapshot); diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 0adff2dee3571f09b40ee69896c05e50c56b51b9..078273dbb8a4399e1770ca08daeb1f7f44491e2a 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -569,6 +569,7 @@ mod tests { use prompt_store::ProjectContext; use serde_json::json; use settings::SettingsStore; + use text::Rope; use util::{path, rel_path::rel_path}; #[gpui::test] @@ -741,7 +742,7 @@ mod tests { // Create the file fs.save( path!("/root/src/main.rs").as_ref(), - &"initial content".into(), + &Rope::from_str_small("initial content"), language::LineEnding::Unix, ) .await @@ -908,7 +909,7 @@ mod tests { // Create a simple file with trailing whitespace fs.save( path!("/root/src/main.rs").as_ref(), - &"initial content".into(), + &Rope::from_str_small("initial content"), language::LineEnding::Unix, ) .await diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 
ef0d4735d2d7690111ee2549cdee8ab31e32196e..61f8ee60a794cbd6622759a89efb6f40c8f1503d 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -28,6 +28,7 @@ use project::{ agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME}, context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore}, }; +use rope::Rope; use settings::{SettingsStore, update_settings_file}; use ui::{ Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, @@ -1114,8 +1115,11 @@ async fn open_new_agent_servers_entry_in_settings_editor( ) -> Result<()> { let settings_editor = workspace .update_in(cx, |_, window, cx| { - create_and_open_local_file(paths::settings_file(), window, cx, || { - settings::initial_user_settings_content().as_ref().into() + create_and_open_local_file(paths::settings_file(), window, cx, |cx| { + Rope::from_str( + &settings::initial_user_settings_content(), + cx.background_executor(), + ) }) })? .await? 
diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index 215e2a74d7be9cbcb18442dcefa1581d08eec7b2..f9269e0bb62160633dc991b147d1d779a517e2e8 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -487,9 +487,10 @@ impl CodegenAlternative { ) { let start_time = Instant::now(); let snapshot = self.snapshot.clone(); - let selected_text = snapshot - .text_for_range(self.range.start..self.range.end) - .collect::(); + let selected_text = Rope::from_iter( + snapshot.text_for_range(self.range.start..self.range.end), + cx.background_executor(), + ); let selection_start = self.range.start.to_point(&snapshot); diff --git a/crates/assistant_text_thread/src/text_thread.rs b/crates/assistant_text_thread/src/text_thread.rs index 9ad383cdfd43eed236268349e2ff97c34a0178c0..ddc8912aef5c08ecb9406cc27fbcdf5418ec48e2 100644 --- a/crates/assistant_text_thread/src/text_thread.rs +++ b/crates/assistant_text_thread/src/text_thread.rs @@ -744,12 +744,13 @@ impl TextThread { telemetry: Option>, cx: &mut Context, ) -> Self { - let buffer = cx.new(|_cx| { + let buffer = cx.new(|cx| { let buffer = Buffer::remote( language::BufferId::new(1).unwrap(), replica_id, capability, "", + cx.background_executor(), ); buffer.set_language_registry(language_registry.clone()); buffer diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index d6ae5545200bb47976554814e346be3039fa276e..b8ce85b6db25fdcad21245b41e4979ef61220485 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -1,6 +1,9 @@ use futures::channel::oneshot; use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch}; -use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, TaskLabel}; +use gpui::{ + App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task, + TaskLabel, +}; use language::{Language, 
LanguageRegistry}; use rope::Rope; use std::{ @@ -191,7 +194,7 @@ impl BufferDiffSnapshot { let base_text_exists; let base_text_snapshot; if let Some(text) = &base_text { - let base_text_rope = Rope::from(text.as_str()); + let base_text_rope = Rope::from_str(text.as_str(), cx.background_executor()); base_text_pair = Some((text.clone(), base_text_rope.clone())); let snapshot = language::Buffer::build_snapshot(base_text_rope, language, language_registry, cx); @@ -311,6 +314,7 @@ impl BufferDiffInner { hunks: &[DiffHunk], buffer: &text::BufferSnapshot, file_exists: bool, + cx: &BackgroundExecutor, ) -> Option { let head_text = self .base_text_exists @@ -505,7 +509,7 @@ impl BufferDiffInner { for (old_range, replacement_text) in edits { new_index_text.append(index_cursor.slice(old_range.start)); index_cursor.seek_forward(old_range.end); - new_index_text.push(&replacement_text); + new_index_text.push(&replacement_text, cx); } new_index_text.append(index_cursor.suffix()); Some(new_index_text) @@ -962,6 +966,7 @@ impl BufferDiff { hunks, buffer, file_exists, + cx.background_executor(), ); cx.emit(BufferDiffEvent::HunksStagedOrUnstaged( @@ -1385,7 +1390,12 @@ mod tests { " .unindent(); - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + buffer_text, + cx.background_executor(), + ); let mut diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx); assert_hunks( diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer), @@ -1394,7 +1404,7 @@ mod tests { &[(1..2, "two\n", "HELLO\n", DiffHunkStatus::modified_none())], ); - buffer.edit([(0..0, "point five\n")]); + buffer.edit([(0..0, "point five\n")], cx.background_executor()); diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx); assert_hunks( diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer), @@ -1459,7 +1469,12 @@ mod tests { " .unindent(); - 
let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); + let buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + buffer_text, + cx.background_executor(), + ); let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx); let mut uncommitted_diff = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx); @@ -1528,7 +1543,12 @@ mod tests { " .unindent(); - let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); + let buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + buffer_text, + cx.background_executor(), + ); let diff = cx .update(|cx| { BufferDiffSnapshot::new_with_base_text( @@ -1791,7 +1811,12 @@ mod tests { for example in table { let (buffer_text, ranges) = marked_text_ranges(&example.buffer_marked_text, false); - let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text); + let buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + buffer_text, + cx.background_executor(), + ); let hunk_range = buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end); @@ -1868,6 +1893,7 @@ mod tests { ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text.clone(), + cx.background_executor(), ); let unstaged = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx); let uncommitted = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx); @@ -1941,7 +1967,12 @@ mod tests { " .unindent(); - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text_1); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + buffer_text_1, + cx.background_executor(), + ); let empty_diff = cx.update(|cx| BufferDiffSnapshot::empty(&buffer, cx)); let diff_1 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); @@ -1961,6 +1992,7 @@ mod tests { NINE " .unindent(), + cx.background_executor(), ); let diff_2 = 
BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); assert_eq!(None, diff_2.inner.compare(&diff_1.inner, &buffer)); @@ -1978,6 +2010,7 @@ mod tests { NINE " .unindent(), + cx.background_executor(), ); let diff_3 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); let range = diff_3.inner.compare(&diff_2.inner, &buffer).unwrap(); @@ -1995,6 +2028,7 @@ mod tests { NINE " .unindent(), + cx.background_executor(), ); let diff_4 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx); let range = diff_4.inner.compare(&diff_3.inner, &buffer).unwrap(); @@ -2013,6 +2047,7 @@ mod tests { NINE " .unindent(), + cx.background_executor(), ); let diff_5 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text.clone(), cx); let range = diff_5.inner.compare(&diff_4.inner, &buffer).unwrap(); @@ -2031,6 +2066,7 @@ mod tests { «nine» " .unindent(), + cx.background_executor(), ); let diff_6 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text, cx); let range = diff_6.inner.compare(&diff_5.inner, &buffer).unwrap(); @@ -2140,14 +2176,14 @@ mod tests { let working_copy = gen_working_copy(rng, &head_text); let working_copy = cx.new(|cx| { language::Buffer::local_normalized( - Rope::from(working_copy.as_str()), + Rope::from_str(working_copy.as_str(), cx.background_executor()), text::LineEnding::default(), cx, ) }); let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot()); let mut index_text = if rng.random() { - Rope::from(head_text.as_str()) + Rope::from_str(head_text.as_str(), cx.background_executor()) } else { working_copy.as_rope().clone() }; diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index efa0850753887c2116ee7916727a870a3528b627..0e59ccedf5e8e0767eb9be56608eb433d63d1bf4 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -70,6 +70,7 @@ impl ChannelBuffer { ReplicaId::new(response.replica_id as u16), 
capability, base_text, + cx.background_executor(), ) })?; buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?; diff --git a/crates/collab/src/db/queries/buffers.rs b/crates/collab/src/db/queries/buffers.rs index 6c4cd58d132bdeaaa791f4da8406e0e6d9052981..fb457abcd46cf32b4a34d87637011b307bbacf9d 100644 --- a/crates/collab/src/db/queries/buffers.rs +++ b/crates/collab/src/db/queries/buffers.rs @@ -701,12 +701,12 @@ impl Database { return Ok(()); } - let mut text_buffer = text::Buffer::new( + let mut text_buffer = text::Buffer::new_slow( clock::ReplicaId::LOCAL, text::BufferId::new(1).unwrap(), base_text, ); - text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire)); + text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire), None); let base_text = text_buffer.text(); let epoch = buffer.epoch + 1; diff --git a/crates/collab/src/db/tests/buffer_tests.rs b/crates/collab/src/db/tests/buffer_tests.rs index 4eae7a54cba4a906351f05e5945cff5691fd1126..82310331ffc864d4bba942f3924dcc644427891b 100644 --- a/crates/collab/src/db/tests/buffer_tests.rs +++ b/crates/collab/src/db/tests/buffer_tests.rs @@ -74,11 +74,21 @@ async fn test_channel_buffers(db: &Arc) { ReplicaId::new(0), text::BufferId::new(1).unwrap(), "".to_string(), + &db.test_options.as_ref().unwrap().executor, ); let operations = vec![ - buffer_a.edit([(0..0, "hello world")]), - buffer_a.edit([(5..5, ", cruel")]), - buffer_a.edit([(0..5, "goodbye")]), + buffer_a.edit( + [(0..0, "hello world")], + &db.test_options.as_ref().unwrap().executor, + ), + buffer_a.edit( + [(5..5, ", cruel")], + &db.test_options.as_ref().unwrap().executor, + ), + buffer_a.edit( + [(0..5, "goodbye")], + &db.test_options.as_ref().unwrap().executor, + ), buffer_a.undo().unwrap().1, ]; assert_eq!(buffer_a.text(), "hello, cruel world"); @@ -102,15 +112,19 @@ async fn test_channel_buffers(db: &Arc) { ReplicaId::new(0), text::BufferId::new(1).unwrap(), buffer_response_b.base_text, + 
&db.test_options.as_ref().unwrap().executor, + ); + buffer_b.apply_ops( + buffer_response_b.operations.into_iter().map(|operation| { + let operation = proto::deserialize_operation(operation).unwrap(); + if let language::Operation::Buffer(operation) = operation { + operation + } else { + unreachable!() + } + }), + None, ); - buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| { - let operation = proto::deserialize_operation(operation).unwrap(); - if let language::Operation::Buffer(operation) = operation { - operation - } else { - unreachable!() - } - })); assert_eq!(buffer_b.text(), "hello, cruel world"); @@ -247,6 +261,7 @@ async fn test_channel_buffers_last_operations(db: &Database) { ReplicaId::new(res.replica_id as u16), text::BufferId::new(1).unwrap(), "".to_string(), + &db.test_options.as_ref().unwrap().executor, )); } @@ -255,9 +270,9 @@ async fn test_channel_buffers_last_operations(db: &Database) { user_id, db, vec![ - text_buffers[0].edit([(0..0, "a")]), - text_buffers[0].edit([(0..0, "b")]), - text_buffers[0].edit([(0..0, "c")]), + text_buffers[0].edit([(0..0, "a")], &db.test_options.as_ref().unwrap().executor), + text_buffers[0].edit([(0..0, "b")], &db.test_options.as_ref().unwrap().executor), + text_buffers[0].edit([(0..0, "c")], &db.test_options.as_ref().unwrap().executor), ], ) .await; @@ -267,9 +282,9 @@ async fn test_channel_buffers_last_operations(db: &Database) { user_id, db, vec![ - text_buffers[1].edit([(0..0, "d")]), - text_buffers[1].edit([(1..1, "e")]), - text_buffers[1].edit([(2..2, "f")]), + text_buffers[1].edit([(0..0, "d")], &db.test_options.as_ref().unwrap().executor), + text_buffers[1].edit([(1..1, "e")], &db.test_options.as_ref().unwrap().executor), + text_buffers[1].edit([(2..2, "f")], &db.test_options.as_ref().unwrap().executor), ], ) .await; @@ -286,14 +301,15 @@ async fn test_channel_buffers_last_operations(db: &Database) { replica_id, text::BufferId::new(1).unwrap(), "def".to_string(), + 
&db.test_options.as_ref().unwrap().executor, ); update_buffer( buffers[1].channel_id, user_id, db, vec![ - text_buffers[1].edit([(0..0, "g")]), - text_buffers[1].edit([(0..0, "h")]), + text_buffers[1].edit([(0..0, "g")], &db.test_options.as_ref().unwrap().executor), + text_buffers[1].edit([(0..0, "h")], &db.test_options.as_ref().unwrap().executor), ], ) .await; @@ -302,7 +318,7 @@ async fn test_channel_buffers_last_operations(db: &Database) { buffers[2].channel_id, user_id, db, - vec![text_buffers[2].edit([(0..0, "i")])], + vec![text_buffers[2].edit([(0..0, "i")], &db.test_options.as_ref().unwrap().executor)], ) .await; diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 4fa32b6c9ba55e6962547510f52251f16fc9be81..37e6622b0343bca9ae6b9179c830071999bf51df 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -3694,7 +3694,7 @@ async fn test_buffer_reloading( assert_eq!(buf.line_ending(), LineEnding::Unix); }); - let new_contents = Rope::from("d\ne\nf"); + let new_contents = Rope::from_str_small("d\ne\nf"); client_a .fs() .save( @@ -4479,7 +4479,7 @@ async fn test_reloading_buffer_manually( .fs() .save( path!("/a/a.rs").as_ref(), - &Rope::from("let seven = 7;"), + &Rope::from_str_small("let seven = 7;"), LineEnding::Unix, ) .await diff --git a/crates/collab/src/tests/random_project_collaboration_tests.rs b/crates/collab/src/tests/random_project_collaboration_tests.rs index 7e9b84c0571ed6dff19702ce3532c45d56f6413f..399f1a663fe72798a4269804955dcfd3678c5cca 100644 --- a/crates/collab/src/tests/random_project_collaboration_tests.rs +++ b/crates/collab/src/tests/random_project_collaboration_tests.rs @@ -27,6 +27,7 @@ use std::{ rc::Rc, sync::Arc, }; +use text::Rope; use util::{ ResultExt, path, paths::PathStyle, @@ -938,7 +939,11 @@ impl RandomizedTest for ProjectCollaborationTest { client .fs() - .save(&path, &content.as_str().into(), text::LineEnding::Unix) 
+ .save( + &path, + &Rope::from_str_small(content.as_str()), + text::LineEnding::Unix, + ) .await .unwrap(); } diff --git a/crates/diagnostics/src/diagnostics_tests.rs b/crates/diagnostics/src/diagnostics_tests.rs index d97a5ab65aab4bb238182040821ecf9fdf828bc3..824d4db6a58c06db5df4c04ac79ee1e509d55d4d 100644 --- a/crates/diagnostics/src/diagnostics_tests.rs +++ b/crates/diagnostics/src/diagnostics_tests.rs @@ -877,7 +877,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S vec![Inlay::edit_prediction( post_inc(&mut next_inlay_id), snapshot.buffer_snapshot().anchor_before(position), - Rope::from_iter(["Test inlay ", "next_inlay_id"]), + Rope::from_iter_small(["Test inlay ", "next_inlay_id"]), )], cx, ); @@ -2070,7 +2070,7 @@ fn random_lsp_diagnostic( const ERROR_MARGIN: usize = 10; let file_content = fs.read_file_sync(path).unwrap(); - let file_text = Rope::from(String::from_utf8_lossy(&file_content).as_ref()); + let file_text = Rope::from_str_small(String::from_utf8_lossy(&file_content).as_ref()); let start = rng.random_range(0..file_text.len().saturating_add(ERROR_MARGIN)); let end = rng.random_range(start..file_text.len().saturating_add(ERROR_MARGIN)); diff --git a/crates/edit_prediction_button/src/edit_prediction_button.rs b/crates/edit_prediction_button/src/edit_prediction_button.rs index 70c861ab1112630c2e3293cb54a4e96c6754b3bd..594c290730d5c734430e747ac6d09d6cbbbd4d0e 100644 --- a/crates/edit_prediction_button/src/edit_prediction_button.rs +++ b/crates/edit_prediction_button/src/edit_prediction_button.rs @@ -13,7 +13,7 @@ use gpui::{ }; use indoc::indoc; use language::{ - EditPredictionsMode, File, Language, + EditPredictionsMode, File, Language, Rope, language_settings::{self, AllLanguageSettings, EditPredictionProvider, all_language_settings}, }; use project::DisableAiSettings; @@ -1056,8 +1056,11 @@ async fn open_disabled_globs_setting_in_editor( ) -> Result<()> { let settings_editor = workspace .update_in(cx, |_, window, 
cx| { - create_and_open_local_file(paths::settings_file(), window, cx, || { - settings::initial_user_settings_content().as_ref().into() + create_and_open_local_file(paths::settings_file(), window, cx, |cx| { + Rope::from_str( + settings::initial_user_settings_content().as_ref(), + cx.background_executor(), + ) }) })? .await? diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 7a225d6019edf8f09b1758d62e8181917649cc2b..a269d22d71a95eef1ca1485437863091e3505439 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1569,6 +1569,7 @@ pub mod tests { use lsp::LanguageServerId; use project::Project; use rand::{Rng, prelude::*}; + use rope::Rope; use settings::{SettingsContent, SettingsStore}; use smol::stream::StreamExt; use std::{env, sync::Arc}; @@ -2074,7 +2075,7 @@ pub mod tests { vec![Inlay::edit_prediction( 0, buffer_snapshot.anchor_after(0), - "\n", + Rope::from_str_small("\n"), )], cx, ); diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 486676f1120bc2e9d85effd4c328a2b7a547e06b..3c7cedb6574d02bcf6b06075b8db79cc3a6080db 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -700,16 +700,20 @@ impl InlayMap { .collect::(); let next_inlay = if i % 2 == 0 { + use rope::Rope; + Inlay::mock_hint( post_inc(next_inlay_id), snapshot.buffer.anchor_at(position, bias), - &text, + Rope::from_str_small(&text), ) } else { + use rope::Rope; + Inlay::edit_prediction( post_inc(next_inlay_id), snapshot.buffer.anchor_at(position, bias), - &text, + Rope::from_str_small(&text), ) }; let inlay_id = next_inlay.id; @@ -1301,7 +1305,7 @@ mod tests { vec![Inlay::mock_hint( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_after(3), - "|123|", + Rope::from_str_small("|123|"), )], ); assert_eq!(inlay_snapshot.text(), "abc|123|defghi"); @@ -1378,12 +1382,12 @@ mod tests { Inlay::mock_hint( 
post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_before(3), - "|123|", + Rope::from_str_small("|123|"), ), Inlay::edit_prediction( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_after(3), - "|456|", + Rope::from_str_small("|456|"), ), ], ); @@ -1593,17 +1597,17 @@ mod tests { Inlay::mock_hint( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_before(0), - "|123|\n", + Rope::from_str_small("|123|\n"), ), Inlay::mock_hint( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_before(4), - "|456|", + Rope::from_str_small("|456|"), ), Inlay::edit_prediction( post_inc(&mut next_inlay_id), buffer.read(cx).snapshot(cx).anchor_before(7), - "\n|567|\n", + Rope::from_str_small("\n|567|\n"), ), ], ); @@ -1677,9 +1681,14 @@ mod tests { (offset, inlay.clone()) }) .collect::>(); - let mut expected_text = Rope::from(&buffer_snapshot.text()); + let mut expected_text = + Rope::from_str(&buffer_snapshot.text(), cx.background_executor()); for (offset, inlay) in inlays.iter().rev() { - expected_text.replace(*offset..*offset, &inlay.text().to_string()); + expected_text.replace( + *offset..*offset, + &inlay.text().to_string(), + cx.background_executor(), + ); } assert_eq!(inlay_snapshot.text(), expected_text.to_string()); @@ -2067,7 +2076,7 @@ mod tests { let inlay = Inlay { id: InlayId::Hint(0), position, - content: InlayContent::Text(text::Rope::from(inlay_text)), + content: InlayContent::Text(text::Rope::from_str(inlay_text, cx.background_executor())), }; let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]); @@ -2181,7 +2190,10 @@ mod tests { let inlay = Inlay { id: InlayId::Hint(0), position, - content: InlayContent::Text(text::Rope::from(test_case.inlay_text)), + content: InlayContent::Text(text::Rope::from_str( + test_case.inlay_text, + cx.background_executor(), + )), }; let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]); diff --git a/crates/editor/src/display_map/tab_map.rs 
b/crates/editor/src/display_map/tab_map.rs index 7a63723f53a49483eaa728373a5ae8530aa6f4d6..084ced82b3aa311f90f905077e2d18dd831e0bd6 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -1042,7 +1042,7 @@ mod tests { let (mut tab_map, _) = TabMap::new(fold_snapshot, tab_size); let tabs_snapshot = tab_map.set_max_expansion_column(32); - let text = text::Rope::from(tabs_snapshot.text().as_str()); + let text = text::Rope::from_str(tabs_snapshot.text().as_str(), cx.background_executor()); log::info!( "TabMap text (tab size: {}): {:?}", tab_size, diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 7371eb678538dbc12abe43bde4073ffd9d2bdb21..1f50ff28daff51e9e16da683053104ea4800977b 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -863,7 +863,7 @@ impl WrapSnapshot { } } - let text = language::Rope::from(self.text().as_str()); + let text = language::Rope::from_str_small(self.text().as_str()); let mut input_buffer_rows = self.tab_snapshot.rows(0); let mut expected_buffer_rows = Vec::new(); let mut prev_tab_row = 0; @@ -1413,9 +1413,10 @@ mod tests { } } - let mut initial_text = Rope::from(initial_snapshot.text().as_str()); + let mut initial_text = + Rope::from_str(initial_snapshot.text().as_str(), cx.background_executor()); for (snapshot, patch) in edits { - let snapshot_text = Rope::from(snapshot.text().as_str()); + let snapshot_text = Rope::from_str(snapshot.text().as_str(), cx.background_executor()); for edit in &patch { let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0)); let old_end = initial_text.point_to_offset(cmp::min( @@ -1431,7 +1432,7 @@ mod tests { .chunks_in_range(new_start..new_end) .collect::(); - initial_text.replace(old_start..old_end, &new_text); + initial_text.replace(old_start..old_end, &new_text, cx.background_executor()); } assert_eq!(initial_text.to_string(), 
snapshot_text.to_string()); } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 9bdebcd24a20697fb041ae14fa7dc2f034d00d92..ed6b8ec2eca4dcb558bc832ac56b92af8791712c 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -7852,7 +7852,7 @@ impl Editor { let inlay = Inlay::edit_prediction( post_inc(&mut self.next_inlay_id), range.start, - new_text.as_str(), + Rope::from_str_small(new_text.as_str()), ); inlay_ids.push(inlay.id); inlays.push(inlay); diff --git a/crates/editor/src/git/blame.rs b/crates/editor/src/git/blame.rs index b36a57a7e47bf148fff4201ec87ac7c868658a04..94bc67e684cd512942d42527d0adb802500ed49f 100644 --- a/crates/editor/src/git/blame.rs +++ b/crates/editor/src/git/blame.rs @@ -1115,18 +1115,19 @@ mod tests { let fs = FakeFs::new(cx.executor()); let buffer_initial_text_len = rng.random_range(5..15); - let mut buffer_initial_text = Rope::from( + let mut buffer_initial_text = Rope::from_str( RandomCharIter::new(&mut rng) .take(buffer_initial_text_len) .collect::() .as_str(), + cx.background_executor(), ); let mut newline_ixs = (0..buffer_initial_text_len).choose_multiple(&mut rng, 5); newline_ixs.sort_unstable(); for newline_ix in newline_ixs.into_iter().rev() { let newline_ix = buffer_initial_text.clip_offset(newline_ix, Bias::Right); - buffer_initial_text.replace(newline_ix..newline_ix, "\n"); + buffer_initial_text.replace(newline_ix..newline_ix, "\n", cx.background_executor()); } log::info!("initial buffer text: {:?}", buffer_initial_text); diff --git a/crates/editor/src/inlays.rs b/crates/editor/src/inlays.rs index f07bf0b315161f0ce9cdf3ef7e2f6db6d60abfb5..1d411fef5617c00ef4d34b521f2321ac9baac934 100644 --- a/crates/editor/src/inlays.rs +++ b/crates/editor/src/inlays.rs @@ -59,10 +59,10 @@ impl Inlay { pub fn hint(id: InlayId, position: Anchor, hint: &InlayHint) -> Self { let mut text = hint.text(); if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') { - text.push(" "); + 
text.push_small(" "); } if hint.padding_left && text.chars_at(0).next() != Some(' ') { - text.push_front(" "); + text.push_front_small(" "); } Self { id, @@ -72,11 +72,11 @@ impl Inlay { } #[cfg(any(test, feature = "test-support"))] - pub fn mock_hint(id: usize, position: Anchor, text: impl Into) -> Self { + pub fn mock_hint(id: usize, position: Anchor, text: Rope) -> Self { Self { id: InlayId::Hint(id), position, - content: InlayContent::Text(text.into()), + content: InlayContent::Text(text), } } @@ -88,19 +88,19 @@ impl Inlay { } } - pub fn edit_prediction>(id: usize, position: Anchor, text: T) -> Self { + pub fn edit_prediction(id: usize, position: Anchor, text: Rope) -> Self { Self { id: InlayId::EditPrediction(id), position, - content: InlayContent::Text(text.into()), + content: InlayContent::Text(text), } } - pub fn debugger>(id: usize, position: Anchor, text: T) -> Self { + pub fn debugger(id: usize, position: Anchor, text: Rope) -> Self { Self { id: InlayId::DebuggerValue(id), position, - content: InlayContent::Text(text.into()), + content: InlayContent::Text(text), } } @@ -108,7 +108,7 @@ impl Inlay { static COLOR_TEXT: OnceLock = OnceLock::new(); match &self.content { InlayContent::Text(text) => text, - InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from("◼")), + InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from_str_small("◼")), } } diff --git a/crates/editor/src/movement.rs b/crates/editor/src/movement.rs index 418fa4fcb442b1de133972457497c0e592e77d15..1c15990b13ea99db269d21bcdcd591e50ebf4d69 100644 --- a/crates/editor/src/movement.rs +++ b/crates/editor/src/movement.rs @@ -878,6 +878,7 @@ mod tests { use gpui::{AppContext as _, font, px}; use language::Capability; use project::{Project, project_settings::DiagnosticSeverity}; + use rope::Rope; use settings::SettingsStore; use util::post_inc; @@ -1024,22 +1025,22 @@ mod tests { Inlay::edit_prediction( post_inc(&mut id), buffer_snapshot.anchor_before(offset), - "test", + 
Rope::from_str_small("test"), ), Inlay::edit_prediction( post_inc(&mut id), buffer_snapshot.anchor_after(offset), - "test", + Rope::from_str_small("test"), ), Inlay::mock_hint( post_inc(&mut id), buffer_snapshot.anchor_before(offset), - "test", + Rope::from_str_small("test"), ), Inlay::mock_hint( post_inc(&mut id), buffer_snapshot.anchor_after(offset), - "test", + Rope::from_str_small("test"), ), ] }) diff --git a/crates/editor/src/signature_help.rs b/crates/editor/src/signature_help.rs index 8d74638e4c2aaf356ffabdeef717b9b105487ee3..3ef8ca09ab0af2714c353b1ad3c31556b0783c3d 100644 --- a/crates/editor/src/signature_help.rs +++ b/crates/editor/src/signature_help.rs @@ -193,7 +193,7 @@ impl Editor { if let Some(language) = language { for signature in &mut signature_help.signatures { - let text = Rope::from(signature.label.as_ref()); + let text = Rope::from_str_small(signature.label.as_ref()); let highlights = language .highlight_text(&text, 0..signature.label.len()) .into_iter() diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 04b03352d83fd3323770a00a13c4377dc111535a..50b5169f7ad1196a3628c59d4fda6162126b2190 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -1468,6 +1468,7 @@ impl ExtensionStore { let extensions_dir = self.installed_dir.clone(); let index_path = self.index_path.clone(); let proxy = self.proxy.clone(); + let executor = cx.background_executor().clone(); cx.background_spawn(async move { let start_time = Instant::now(); let mut index = ExtensionIndex::default(); @@ -1501,10 +1502,14 @@ impl ExtensionStore { } if let Ok(index_json) = serde_json::to_string_pretty(&index) { - fs.save(&index_path, &index_json.as_str().into(), Default::default()) - .await - .context("failed to save extension index") - .log_err(); + fs.save( + &index_path, + &Rope::from_str(&index_json, &executor), + Default::default(), + ) + .await + .context("failed to save 
extension index") + .log_err(); } log::info!("rebuilt extension index in {:?}", start_time.elapsed()); @@ -1671,7 +1676,7 @@ impl ExtensionStore { let manifest_toml = toml::to_string(&loaded_extension.manifest)?; fs.save( &tmp_dir.join(EXTENSION_TOML), - &Rope::from(manifest_toml), + &Rope::from_str_small(&manifest_toml), language::LineEnding::Unix, ) .await?; diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index 0a0c4c18e1f528a9ebaad9a8d9862982632dd04f..b302d551ddccd17a757b81452f0ed597dde88c57 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -170,7 +170,10 @@ impl CommitView { ReplicaId::LOCAL, cx.entity_id().as_non_zero_u64().into(), LineEnding::default(), - format_commit(&commit, stash.is_some()).into(), + Rope::from_str( + &format_commit(&commit, stash.is_some()), + cx.background_executor(), + ), ); metadata_buffer_id = Some(buffer.remote_id()); Buffer::build(buffer, Some(file.clone()), Capability::ReadWrite) @@ -336,7 +339,7 @@ async fn build_buffer( ) -> Result> { let line_ending = LineEnding::detect(&text); LineEnding::normalize(&mut text); - let text = Rope::from(text); + let text = Rope::from_str(&text, cx.background_executor()); let language = cx.update(|cx| language_registry.language_for_file(&blob, Some(&text), cx))?; let language = if let Some(language) = language { language_registry @@ -376,7 +379,7 @@ async fn build_buffer_diff( let base_buffer = cx .update(|cx| { Buffer::build_snapshot( - old_text.as_deref().unwrap_or("").into(), + Rope::from_str(old_text.as_deref().unwrap_or(""), cx.background_executor()), buffer.language().cloned(), Some(language_registry.clone()), cx, diff --git a/crates/git_ui/src/file_diff_view.rs b/crates/git_ui/src/file_diff_view.rs index 387bda808708cf38beded2fe17edd92466885672..a99b7f8e2428ca0bcf726f2ac7661df171bef34a 100644 --- a/crates/git_ui/src/file_diff_view.rs +++ b/crates/git_ui/src/file_diff_view.rs @@ -359,6 +359,7 @@ mod tests { use super::*; 
use editor::test::editor_test_context::assert_state_with_diff; use gpui::TestAppContext; + use language::Rope; use project::{FakeFs, Fs, Project}; use settings::SettingsStore; use std::path::PathBuf; @@ -429,7 +430,7 @@ mod tests { // Modify the new file on disk fs.save( path!("/test/new_file.txt").as_ref(), - &unindent( + &Rope::from_str_small(&unindent( " new line 1 line 2 @@ -437,8 +438,7 @@ mod tests { line 4 new line 5 ", - ) - .into(), + )), Default::default(), ) .await @@ -465,15 +465,14 @@ mod tests { // Modify the old file on disk fs.save( path!("/test/old_file.txt").as_ref(), - &unindent( + &Rope::from_str_small(&unindent( " new line 1 line 2 old line 3 line 4 ", - ) - .into(), + )), Default::default(), ) .await diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index 381541d4b11377b988dd30e03155855c7ba25aed..260a07cc3ba6805b91207e000b02d23e57f2be4e 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -260,6 +260,19 @@ impl AsyncApp { } } +impl sum_tree::BackgroundSpawn for BackgroundExecutor { + type Task + = Task + where + R: Send + Sync; + fn background_spawn(&self, future: impl Future + Send + 'static) -> Self::Task + where + R: Send + Sync + 'static, + { + self.spawn(future) + } +} + /// A cloneable, owned handle to the application context, /// composed with the window associated with the current task. #[derive(Clone, Deref, DerefMut)] diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index d974823396d9f0d546a6b035f47b569145eb021b..40397f9d9d359d2ac914b6006b0ae883fa151fc2 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -393,6 +393,11 @@ impl TestAppContext { } } + /// Returns the background executor for this context. + pub fn background_executor(&self) -> &BackgroundExecutor { + &self.background_executor + } + /// Wait until there are no more pending tasks. 
pub fn run_until_parked(&mut self) { self.background_executor.run_until_parked() diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index b6d3a407f5dbbab07e0273e668e9b5710824edda..c1e5c066b43604f5e7d47588ef3c2ebc33cd524e 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -342,7 +342,7 @@ impl BackgroundExecutor { /// for all of them to complete before returning. pub async fn scoped<'scope, F>(&self, scheduler: F) where - F: FnOnce(&mut Scope<'scope>), + F: for<'a> FnOnce(&'a mut Scope<'scope>), { let mut scope = Scope::new(self.clone()); (scheduler)(&mut scope); diff --git a/crates/keymap_editor/src/keymap_editor.rs b/crates/keymap_editor/src/keymap_editor.rs index e3fb30d46eb57059afc53682c57be392ec8254ed..70e58de3d14403440a0cd291754e0a4593290d01 100644 --- a/crates/keymap_editor/src/keymap_editor.rs +++ b/crates/keymap_editor/src/keymap_editor.rs @@ -22,7 +22,7 @@ use gpui::{ ScrollWheelEvent, Stateful, StyledText, Subscription, Task, TextStyleRefinement, WeakEntity, actions, anchored, deferred, div, }; -use language::{Language, LanguageConfig, ToOffset as _}; +use language::{Language, LanguageConfig, Rope, ToOffset as _}; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{CompletionDisplayOptions, Project}; use settings::{ @@ -2119,7 +2119,7 @@ impl RenderOnce for SyntaxHighlightedText { let highlights = self .language - .highlight_text(&text.as_ref().into(), 0..text.len()); + .highlight_text(&Rope::from_str_small(text.as_ref()), 0..text.len()); let mut runs = Vec::with_capacity(highlights.len()); let mut offset = 0; diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index c72350f38561e7aea62b7d3402eaa24bbdb08044..d67434741032ae7f42dc5e95ec34a57b7c84ebb4 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -24,8 +24,8 @@ use collections::HashMap; use fs::MTime; use futures::channel::oneshot; use gpui::{ - App, AppContext as _, Context, Entity, 
EventEmitter, HighlightStyle, SharedString, StyledText, - Task, TaskLabel, TextStyle, + App, AppContext as _, BackgroundExecutor, Context, Entity, EventEmitter, HighlightStyle, + SharedString, StyledText, Task, TaskLabel, TextStyle, }; use lsp::{LanguageServerId, NumberOrString}; @@ -832,6 +832,7 @@ impl Buffer { ReplicaId::LOCAL, cx.entity_id().as_non_zero_u64().into(), base_text.into(), + &cx.background_executor(), ), None, Capability::ReadWrite, @@ -862,9 +863,10 @@ impl Buffer { replica_id: ReplicaId, capability: Capability, base_text: impl Into, + cx: &BackgroundExecutor, ) -> Self { Self::build( - TextBuffer::new(replica_id, remote_id, base_text.into()), + TextBuffer::new(replica_id, remote_id, base_text.into(), cx), None, capability, ) @@ -877,9 +879,10 @@ impl Buffer { capability: Capability, message: proto::BufferState, file: Option>, + cx: &BackgroundExecutor, ) -> Result { let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?; - let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text); + let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text, cx); let mut this = Self::build(buffer, file, capability); this.text.set_line_ending(proto::deserialize_line_ending( rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?, @@ -1138,13 +1141,14 @@ impl Buffer { let old_snapshot = self.text.snapshot(); let mut branch_buffer = self.text.branch(); let mut syntax_snapshot = self.syntax_map.lock().snapshot(); + let executor = cx.background_executor().clone(); cx.background_spawn(async move { if !edits.is_empty() { if let Some(language) = language.clone() { syntax_snapshot.reparse(&old_snapshot, registry.clone(), language); } - branch_buffer.edit(edits.iter().cloned()); + branch_buffer.edit(edits.iter().cloned(), &executor); let snapshot = branch_buffer.snapshot(); syntax_snapshot.interpolate(&snapshot); @@ -2361,7 +2365,9 @@ impl Buffer { let autoindent_request = autoindent_mode 
.and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode))); - let edit_operation = self.text.edit(edits.iter().cloned()); + let edit_operation = self + .text + .edit(edits.iter().cloned(), cx.background_executor()); let edit_id = edit_operation.timestamp(); if let Some((before_edit, mode)) = autoindent_request { @@ -2592,7 +2598,8 @@ impl Buffer { for operation in buffer_ops.iter() { self.send_operation(Operation::Buffer(operation.clone()), false, cx); } - self.text.apply_ops(buffer_ops); + self.text + .apply_ops(buffer_ops, Some(cx.background_executor())); self.deferred_ops.insert(deferred_ops); self.flush_deferred_ops(cx); self.did_edit(&old_version, was_dirty, cx); diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs index f824639ad762191f4168586551af51fb4e37c8dc..6b6d85c3790123acd6e95dd1d196f6c5845f5ede 100644 --- a/crates/language/src/buffer_tests.rs +++ b/crates/language/src/buffer_tests.rs @@ -75,6 +75,7 @@ fn test_set_line_ending(cx: &mut TestAppContext) { Capability::ReadWrite, base.read(cx).to_proto(cx), None, + cx.background_executor(), ) .unwrap() }); @@ -255,14 +256,18 @@ async fn test_first_line_pattern(cx: &mut TestAppContext) { .is_none() ); assert!( - cx.read(|cx| languages.language_for_file(&file("the/script"), Some(&"nothing".into()), cx)) - .is_none() + cx.read(|cx| languages.language_for_file( + &file("the/script"), + Some(&Rope::from_str("nothing", cx.background_executor())), + cx + )) + .is_none() ); assert_eq!( cx.read(|cx| languages.language_for_file( &file("the/script"), - Some(&"#!/bin/env node".into()), + Some(&Rope::from_str("#!/bin/env node", cx.background_executor())), cx )) .unwrap() @@ -406,6 +411,7 @@ fn test_edit_events(cx: &mut gpui::App) { ReplicaId::new(1), Capability::ReadWrite, "abcdef", + cx.background_executor(), ) }); let buffer1_ops = Arc::new(Mutex::new(Vec::new())); @@ -2781,8 +2787,14 @@ fn test_serialization(cx: &mut gpui::App) { .background_executor() 
.block(buffer1.read(cx).serialize_ops(None, cx)); let buffer2 = cx.new(|cx| { - let mut buffer = - Buffer::from_proto(ReplicaId::new(1), Capability::ReadWrite, state, None).unwrap(); + let mut buffer = Buffer::from_proto( + ReplicaId::new(1), + Capability::ReadWrite, + state, + None, + cx.background_executor(), + ) + .unwrap(); buffer.apply_ops( ops.into_iter() .map(|op| proto::deserialize_operation(op).unwrap()), @@ -2806,6 +2818,7 @@ fn test_branch_and_merge(cx: &mut TestAppContext) { Capability::ReadWrite, base.read(cx).to_proto(cx), None, + cx.background_executor(), ) .unwrap() }); @@ -3120,9 +3133,14 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { let ops = cx .background_executor() .block(base_buffer.read(cx).serialize_ops(None, cx)); - let mut buffer = - Buffer::from_proto(ReplicaId::new(i as u16), Capability::ReadWrite, state, None) - .unwrap(); + let mut buffer = Buffer::from_proto( + ReplicaId::new(i as u16), + Capability::ReadWrite, + state, + None, + cx.background_executor(), + ) + .unwrap(); buffer.apply_ops( ops.into_iter() .map(|op| proto::deserialize_operation(op).unwrap()), @@ -3251,6 +3269,7 @@ fn test_random_collaboration(cx: &mut App, mut rng: StdRng) { Capability::ReadWrite, old_buffer_state, None, + cx.background_executor(), ) .unwrap(); new_buffer.apply_ops( @@ -3414,7 +3433,7 @@ fn test_contiguous_ranges() { } #[gpui::test(iterations = 500)] -fn test_trailing_whitespace_ranges(mut rng: StdRng) { +fn test_trailing_whitespace_ranges(mut rng: StdRng, cx: &mut TestAppContext) { // Generate a random multi-line string containing // some lines with trailing whitespace. 
let mut text = String::new(); @@ -3438,7 +3457,7 @@ fn test_trailing_whitespace_ranges(mut rng: StdRng) { _ => {} } - let rope = Rope::from(text.as_str()); + let rope = Rope::from_str(text.as_str(), cx.background_executor()); let actual_ranges = trailing_whitespace_ranges(&rope); let expected_ranges = TRAILING_WHITESPACE_REGEX .find_iter(&text) diff --git a/crates/language/src/syntax_map/syntax_map_tests.rs b/crates/language/src/syntax_map/syntax_map_tests.rs index 9c4eecad363de386cddc6e943e20e5762634d713..99fd365b50f5c93b965b7193365b49b2bc636a2e 100644 --- a/crates/language/src/syntax_map/syntax_map_tests.rs +++ b/crates/language/src/syntax_map/syntax_map_tests.rs @@ -100,6 +100,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) { } "# .unindent(), + cx.background_executor(), ); let mut syntax_map = SyntaxMap::new(&buffer); @@ -147,7 +148,7 @@ fn test_syntax_map_layers_for_range(cx: &mut App) { // Replace a vec! macro invocation with a plain slice, removing a syntactic layer. let macro_name_range = range_for_text(&buffer, "vec!"); - buffer.edit([(macro_name_range, "&")]); + buffer.edit([(macro_name_range, "&")], cx.background_executor()); syntax_map.interpolate(&buffer); syntax_map.reparse(language.clone(), &buffer); @@ -199,6 +200,7 @@ fn test_dynamic_language_injection(cx: &mut App) { ``` "# .unindent(), + cx.background_executor(), ); let mut syntax_map = SyntaxMap::new(&buffer); @@ -218,7 +220,10 @@ fn test_dynamic_language_injection(cx: &mut App) { // Replace `rs` with a path to ending in `.rb` in code block. 
let macro_name_range = range_for_text(&buffer, "rs"); - buffer.edit([(macro_name_range, "foo/bar/baz.rb")]); + buffer.edit( + [(macro_name_range, "foo/bar/baz.rb")], + cx.background_executor(), + ); syntax_map.interpolate(&buffer); syntax_map.reparse(markdown.clone(), &buffer); syntax_map.reparse(markdown_inline.clone(), &buffer); @@ -235,7 +240,7 @@ fn test_dynamic_language_injection(cx: &mut App) { // Replace Ruby with a language that hasn't been loaded yet. let macro_name_range = range_for_text(&buffer, "foo/bar/baz.rb"); - buffer.edit([(macro_name_range, "html")]); + buffer.edit([(macro_name_range, "html")], cx.background_executor()); syntax_map.interpolate(&buffer); syntax_map.reparse(markdown.clone(), &buffer); syntax_map.reparse(markdown_inline.clone(), &buffer); @@ -811,7 +816,12 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) { .unindent(); let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone())); - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + text, + cx.background_executor(), + ); let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); @@ -859,7 +869,7 @@ fn test_syntax_map_languages_loading_with_erb(cx: &mut App) { .unindent(); log::info!("editing"); - buffer.edit_via_marked_text(&text); + buffer.edit_via_marked_text(&text, cx.background_executor()); syntax_map.interpolate(&buffer); syntax_map.reparse(language, &buffer); @@ -903,7 +913,7 @@ fn test_random_syntax_map_edits_rust_macros(rng: StdRng, cx: &mut App) { let language = Arc::new(rust_lang()); registry.add(language.clone()); - test_random_edits(text, registry, language, rng); + test_random_edits(text, registry, language, rng, cx); } #[gpui::test(iterations = 50)] @@ -932,7 +942,7 @@ fn test_random_syntax_map_edits_with_erb(rng: StdRng, cx: &mut App) { registry.add(Arc::new(ruby_lang())); 
registry.add(Arc::new(html_lang())); - test_random_edits(text, registry, language, rng); + test_random_edits(text, registry, language, rng, cx); } #[gpui::test(iterations = 50)] @@ -965,7 +975,7 @@ fn test_random_syntax_map_edits_with_heex(rng: StdRng, cx: &mut App) { registry.add(Arc::new(heex_lang())); registry.add(Arc::new(html_lang())); - test_random_edits(text, registry, language, rng); + test_random_edits(text, registry, language, rng, cx); } fn test_random_edits( @@ -973,12 +983,18 @@ fn test_random_edits( registry: Arc, language: Arc, mut rng: StdRng, + cx: &mut App, ) { let operations = env::var("OPERATIONS") .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + text, + cx.background_executor(), + ); let mut syntax_map = SyntaxMap::new(&buffer); syntax_map.set_language_registry(registry.clone()); @@ -993,7 +1009,7 @@ fn test_random_edits( let prev_buffer = buffer.snapshot(); let prev_syntax_map = syntax_map.snapshot(); - buffer.randomly_edit(&mut rng, 3); + buffer.randomly_edit(&mut rng, 3, cx.background_executor()); log::info!("text:\n{}", buffer.text()); syntax_map.interpolate(&buffer); @@ -1159,7 +1175,12 @@ fn test_edit_sequence(language_name: &str, steps: &[&str], cx: &mut App) -> (Buf .now_or_never() .unwrap() .unwrap(); - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "", + cx.background_executor(), + ); let mut mutated_syntax_map = SyntaxMap::new(&buffer); mutated_syntax_map.set_language_registry(registry.clone()); @@ -1168,7 +1189,7 @@ fn test_edit_sequence(language_name: &str, steps: &[&str], cx: &mut App) -> (Buf for (i, marked_string) in steps.iter().enumerate() { let marked_string = marked_string.unindent(); log::info!("incremental parse {i}: 
{marked_string:?}"); - buffer.edit_via_marked_text(&marked_string); + buffer.edit_via_marked_text(&marked_string, cx.background_executor()); // Reparse the syntax map mutated_syntax_map.interpolate(&buffer); diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index 01b726748649e29b4fe69ce26df5564819894985..cc9fb639f228ea7af42238296ae88c95ae439881 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -11,7 +11,7 @@ use futures::{Future, FutureExt, future::join_all}; use gpui::{App, AppContext, AsyncApp, Task}; use language::{ BinaryStatus, CodeLabel, DynLspInstaller, HighlightId, Language, LanguageName, LspAdapter, - LspAdapterDelegate, Toolchain, + LspAdapterDelegate, Rope, Toolchain, }; use lsp::{ CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName, @@ -403,7 +403,10 @@ fn labels_from_extension( let runs = if label.code.is_empty() { Vec::new() } else { - language.highlight_text(&label.code.as_str().into(), 0..label.code.len()) + language.highlight_text( + &Rope::from_str_small(label.code.as_str()), + 0..label.code.len(), + ) }; build_code_label(&label, &runs, language) }) diff --git a/crates/languages/src/c.rs b/crates/languages/src/c.rs index 8e90cf821368c0c88781b2d10e82ad9eaa05989c..bbf4cc3240f0f33ee73fed10d96edc36467e51f4 100644 --- a/crates/languages/src/c.rs +++ b/crates/languages/src/c.rs @@ -189,7 +189,7 @@ impl super::LspAdapter for CLspAdapter { Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => { let detail = completion.detail.as_ref().unwrap(); let text = format!("{} {}", detail, label); - let source = Rope::from(format!("struct S {{ {} }}", text).as_str()); + let source = Rope::from_str_small(format!("struct S {{ {} }}", text).as_str()); let runs = language.highlight_text(&source, 11..11 + text.len()); let filter_range = completion .filter_text @@ 
-206,7 +206,8 @@ impl super::LspAdapter for CLspAdapter { { let detail = completion.detail.as_ref().unwrap(); let text = format!("{} {}", detail, label); - let runs = language.highlight_text(&Rope::from(text.as_str()), 0..text.len()); + let runs = + language.highlight_text(&Rope::from_str_small(text.as_str()), 0..text.len()); let filter_range = completion .filter_text .as_deref() @@ -222,7 +223,8 @@ impl super::LspAdapter for CLspAdapter { { let detail = completion.detail.as_ref().unwrap(); let text = format!("{} {}", detail, label); - let runs = language.highlight_text(&Rope::from(text.as_str()), 0..text.len()); + let runs = + language.highlight_text(&Rope::from_str_small(text.as_str()), 0..text.len()); let filter_range = completion .filter_text .as_deref() @@ -326,7 +328,7 @@ impl super::LspAdapter for CLspAdapter { Some(CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&text.as_str().into(), display_range), + language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), )) } diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index 6c75abf123af62b3f4ab43a6e94d3b040e2f010a..55acc64f3e1b5592a55c551aa6c0b255cae3834a 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -221,7 +221,7 @@ impl LspAdapter for GoLspAdapter { match completion.kind.zip(completion.detail.as_ref()) { Some((lsp::CompletionItemKind::MODULE, detail)) => { let text = format!("{label} {detail}"); - let source = Rope::from(format!("import {text}").as_str()); + let source = Rope::from_str_small(format!("import {text}").as_str()); let runs = language.highlight_text(&source, 7..7 + text[name_offset..].len()); let filter_range = completion .filter_text @@ -238,8 +238,9 @@ impl LspAdapter for GoLspAdapter { detail, )) => { let text = format!("{label} {detail}"); - let source = - Rope::from(format!("var {} {}", &text[name_offset..], detail).as_str()); + let source = Rope::from_str_small( + format!("var 
{} {}", &text[name_offset..], detail).as_str(), + ); let runs = adjust_runs( name_offset, language.highlight_text(&source, 4..4 + text[name_offset..].len()), @@ -256,7 +257,8 @@ impl LspAdapter for GoLspAdapter { } Some((lsp::CompletionItemKind::STRUCT, _)) => { let text = format!("{label} struct {{}}"); - let source = Rope::from(format!("type {}", &text[name_offset..]).as_str()); + let source = + Rope::from_str_small(format!("type {}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, language.highlight_text(&source, 5..5 + text[name_offset..].len()), @@ -273,7 +275,8 @@ impl LspAdapter for GoLspAdapter { } Some((lsp::CompletionItemKind::INTERFACE, _)) => { let text = format!("{label} interface {{}}"); - let source = Rope::from(format!("type {}", &text[name_offset..]).as_str()); + let source = + Rope::from_str_small(format!("type {}", &text[name_offset..]).as_str()); let runs = adjust_runs( name_offset, language.highlight_text(&source, 5..5 + text[name_offset..].len()), @@ -290,8 +293,9 @@ impl LspAdapter for GoLspAdapter { } Some((lsp::CompletionItemKind::FIELD, detail)) => { let text = format!("{label} {detail}"); - let source = - Rope::from(format!("type T struct {{ {} }}", &text[name_offset..]).as_str()); + let source = Rope::from_str_small( + format!("type T struct {{ {} }}", &text[name_offset..]).as_str(), + ); let runs = adjust_runs( name_offset, language.highlight_text(&source, 16..16 + text[name_offset..].len()), @@ -309,7 +313,9 @@ impl LspAdapter for GoLspAdapter { Some((lsp::CompletionItemKind::FUNCTION | lsp::CompletionItemKind::METHOD, detail)) => { if let Some(signature) = detail.strip_prefix("func") { let text = format!("{label}{signature}"); - let source = Rope::from(format!("func {} {{}}", &text[name_offset..]).as_str()); + let source = Rope::from_str_small( + format!("func {} {{}}", &text[name_offset..]).as_str(), + ); let runs = adjust_runs( name_offset, language.highlight_text(&source, 5..5 + text[name_offset..].len()), @@ 
-385,7 +391,7 @@ impl LspAdapter for GoLspAdapter { Some(CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&text.as_str().into(), display_range), + language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), )) } diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index f676f5a7a6f028c095d52273fb8c616472a35ee5..a87f17795f5b6a1d69368d826688a6ed48309d23 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -19,6 +19,7 @@ use pet_core::python_environment::{PythonEnvironment, PythonEnvironmentKind}; use pet_virtualenv::is_virtualenv_dir; use project::Fs; use project::lsp_store::language_server_settings; +use rope::Rope; use serde::{Deserialize, Serialize}; use serde_json::{Value, json}; use smol::lock::OnceCell; @@ -466,7 +467,7 @@ impl LspAdapter for PyrightLspAdapter { Some(language::CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&text.as_str().into(), display_range), + language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), )) } @@ -1511,7 +1512,7 @@ impl LspAdapter for PyLspAdapter { Some(language::CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&text.as_str().into(), display_range), + language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), )) } @@ -1800,7 +1801,7 @@ impl LspAdapter for BasedPyrightLspAdapter { Some(language::CodeLabel::new( text[display_range.clone()].to_string(), filter_range, - language.highlight_text(&text.as_str().into(), display_range), + language.highlight_text(&Rope::from_str_small(text.as_str()), display_range), )) } diff --git a/crates/languages/src/rust.rs b/crates/languages/src/rust.rs index 4b56a617735ab1a5932a56a4f6e51397721d8a86..b6f7b10da69f7f3f8d8551a88fa8409f05c2fed8 100644 --- a/crates/languages/src/rust.rs +++ b/crates/languages/src/rust.rs @@ -252,7 +252,7 @@ impl 
LspAdapter for RustLspAdapter { let name = &completion.label; let text = format!("{name}: {signature}"); let prefix = "struct S { "; - let source = Rope::from_iter([prefix, &text, " }"]); + let source = Rope::from_iter_small([prefix, &text, " }"]); let runs = language.highlight_text(&source, prefix.len()..prefix.len() + text.len()); mk_label(text, &|| 0..completion.label.len(), runs) @@ -264,7 +264,7 @@ impl LspAdapter for RustLspAdapter { let name = &completion.label; let text = format!("{name}: {signature}",); let prefix = "let "; - let source = Rope::from_iter([prefix, &text, " = ();"]); + let source = Rope::from_iter_small([prefix, &text, " = ();"]); let runs = language.highlight_text(&source, prefix.len()..prefix.len() + text.len()); mk_label(text, &|| 0..completion.label.len(), runs) @@ -302,7 +302,7 @@ impl LspAdapter for RustLspAdapter { .filter(|it| it.contains(&label)) .and_then(|it| Some((it, FULL_SIGNATURE_REGEX.find(it)?))) { - let source = Rope::from(function_signature); + let source = Rope::from_str_small(function_signature); let runs = language.highlight_text(&source, 0..function_signature.len()); mk_label( function_signature.to_owned(), @@ -311,7 +311,7 @@ impl LspAdapter for RustLspAdapter { ) } else if let Some((prefix, suffix)) = fn_prefixed { let text = format!("{label}{suffix}"); - let source = Rope::from_iter([prefix, " ", &text, " {}"]); + let source = Rope::from_iter_small([prefix, " ", &text, " {}"]); let run_start = prefix.len() + 1; let runs = language.highlight_text(&source, run_start..run_start + text.len()); mk_label(text, &|| 0..label.len(), runs) @@ -322,7 +322,7 @@ impl LspAdapter for RustLspAdapter { { let text = completion.label.clone(); let len = text.len(); - let source = Rope::from(text.as_str()); + let source = Rope::from_str_small(text.as_str()); let runs = language.highlight_text(&source, 0..len); mk_label(text, &|| 0..completion.label.len(), runs) } else if detail_left.is_none() { @@ -399,7 +399,10 @@ impl LspAdapter for 
RustLspAdapter { Some(CodeLabel::new( format!("{prefix}{name}"), filter_range, - language.highlight_text(&Rope::from_iter([prefix, name, suffix]), display_range), + language.highlight_text( + &Rope::from_iter_small([prefix, name, suffix]), + display_range, + ), )) } diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index c34ed69288e39c26d105877d76ee76c01c864c72..eb239fd46fe8c0a6cfcfb6ea4a7610ddb6dabf47 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -1558,7 +1558,9 @@ impl MarkdownElementBuilder { if let Some(Some(language)) = self.code_block_stack.last() { let mut offset = 0; - for (range, highlight_id) in language.highlight_text(&Rope::from(text), 0..text.len()) { + for (range, highlight_id) in + language.highlight_text(&Rope::from_str_small(text), 0..text.len()) + { if range.start > offset { self.pending_line .runs diff --git a/crates/markdown_preview/src/markdown_parser.rs b/crates/markdown_preview/src/markdown_parser.rs index 8f2203c25b9a7193759668a35016c2d3203310b6..d46224a736dfd7e2a57c88d9512774562e10dab8 100644 --- a/crates/markdown_preview/src/markdown_parser.rs +++ b/crates/markdown_preview/src/markdown_parser.rs @@ -779,7 +779,7 @@ impl<'a> MarkdownParser<'a> { let highlights = if let Some(language) = &language { if let Some(registry) = &self.language_registry { - let rope: language::Rope = code.as_str().into(); + let rope = language::Rope::from_str_small(code.as_str()); registry .language_for_name_or_extension(language) .await diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs index a9121b9104400d88d5f22801db1bfebaeeb060d6..947d6be1199ca73be910c5cc606147ef75bd9376 100644 --- a/crates/multi_buffer/src/multi_buffer_tests.rs +++ b/crates/multi_buffer/src/multi_buffer_tests.rs @@ -1,6 +1,6 @@ use super::*; use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind}; -use gpui::{App, TestAppContext}; +use gpui::{App, BackgroundExecutor, 
TestAppContext}; use indoc::indoc; use language::{Buffer, Rope}; use parking_lot::RwLock; @@ -79,9 +79,14 @@ fn test_remote(cx: &mut App) { let ops = cx .background_executor() .block(host_buffer.read(cx).serialize_ops(None, cx)); - let mut buffer = - Buffer::from_proto(ReplicaId::REMOTE_SERVER, Capability::ReadWrite, state, None) - .unwrap(); + let mut buffer = Buffer::from_proto( + ReplicaId::REMOTE_SERVER, + Capability::ReadWrite, + state, + None, + cx.background_executor(), + ) + .unwrap(); buffer.apply_ops( ops.into_iter() .map(|op| language::proto::deserialize_operation(op).unwrap()), @@ -1224,7 +1229,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_chunks_in_ranges(&snapshot); assert_consistent_line_numbers(&snapshot); assert_position_translation(&snapshot); - assert_line_indents(&snapshot); + assert_line_indents(&snapshot, cx.background_executor()); multibuffer.update(cx, |multibuffer, cx| { multibuffer.collapse_diff_hunks(vec![Anchor::min()..Anchor::max()], cx) @@ -1248,7 +1253,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_chunks_in_ranges(&snapshot); assert_consistent_line_numbers(&snapshot); assert_position_translation(&snapshot); - assert_line_indents(&snapshot); + assert_line_indents(&snapshot, cx.background_executor()); // Expand the first diff hunk multibuffer.update(cx, |multibuffer, cx| { @@ -1300,7 +1305,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_chunks_in_ranges(&snapshot); assert_consistent_line_numbers(&snapshot); assert_position_translation(&snapshot); - assert_line_indents(&snapshot); + assert_line_indents(&snapshot, cx.background_executor()); // Edit the buffer before the first hunk buffer.update(cx, |buffer, cx| { @@ -1342,7 +1347,7 @@ fn test_basic_diff_hunks(cx: &mut TestAppContext) { assert_chunks_in_ranges(&snapshot); assert_consistent_line_numbers(&snapshot); assert_position_translation(&snapshot); - assert_line_indents(&snapshot); + assert_line_indents(&snapshot, 
cx.background_executor()); // Recalculate the diff, changing the first diff hunk. diff.update(cx, |diff, cx| { @@ -2067,7 +2072,7 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { } assert_position_translation(&snapshot); - assert_line_indents(&snapshot); + assert_line_indents(&snapshot, cx.background_executor()); assert_eq!( snapshot @@ -2118,7 +2123,7 @@ fn test_diff_hunks_with_multiple_excerpts(cx: &mut TestAppContext) { ), ); - assert_line_indents(&snapshot); + assert_line_indents(&snapshot, cx.background_executor()); } /// A naive implementation of a multi-buffer that does not maintain @@ -2888,7 +2893,7 @@ async fn test_random_multibuffer(cx: &mut TestAppContext, mut rng: StdRng) { ); } - let text_rope = Rope::from(expected_text.as_str()); + let text_rope = Rope::from_str(expected_text.as_str(), cx.background_executor()); for _ in 0..10 { let end_ix = text_rope.clip_offset(rng.random_range(0..=text_rope.len()), Bias::Right); let start_ix = text_rope.clip_offset(rng.random_range(0..=end_ix), Bias::Left); @@ -3512,7 +3517,7 @@ fn assert_consistent_line_numbers(snapshot: &MultiBufferSnapshot) { #[track_caller] fn assert_position_translation(snapshot: &MultiBufferSnapshot) { - let text = Rope::from(snapshot.text()); + let text = Rope::from_str_small(&snapshot.text()); let mut left_anchors = Vec::new(); let mut right_anchors = Vec::new(); @@ -3636,10 +3641,10 @@ fn assert_position_translation(snapshot: &MultiBufferSnapshot) { } } -fn assert_line_indents(snapshot: &MultiBufferSnapshot) { +fn assert_line_indents(snapshot: &MultiBufferSnapshot, executor: &BackgroundExecutor) { let max_row = snapshot.max_point().row; let buffer_id = snapshot.excerpts().next().unwrap().1.remote_id(); - let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text()); + let text = text::Buffer::new(ReplicaId::LOCAL, buffer_id, snapshot.text(), executor); let mut line_indents = text .line_indents_in_row_range(0..max_row + 1) .collect::>(); diff --git 
a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 39e302a2d9b1ae92cce9691c957cb9fcfbf26d7d..3fb702518690585d3237324c04802c9deec0892e 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -180,7 +180,13 @@ impl RemoteBufferStore { buffer_file = Some(Arc::new(File::from_proto(file, worktree, cx)?) as Arc); } - Buffer::from_proto(replica_id, capability, state, buffer_file) + Buffer::from_proto( + replica_id, + capability, + state, + buffer_file, + cx.background_executor(), + ) }); match buffer_result { @@ -628,9 +634,10 @@ impl LocalBufferStore { Ok(loaded) => { let reservation = cx.reserve_entity::()?; let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64()); + let executor = cx.background_executor().clone(); let text_buffer = cx .background_spawn(async move { - text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text) + text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text, &executor) }) .await; cx.insert_entity(reservation, |_| { @@ -639,7 +646,12 @@ impl LocalBufferStore { } Err(error) if is_not_found_error(&error) => cx.new(|cx| { let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64()); - let text_buffer = text::Buffer::new(ReplicaId::LOCAL, buffer_id, ""); + let text_buffer = text::Buffer::new( + ReplicaId::LOCAL, + buffer_id, + "", + cx.background_executor(), + ); Buffer::build( text_buffer, Some(Arc::new(File { diff --git a/crates/project/src/git_store/conflict_set.rs b/crates/project/src/git_store/conflict_set.rs index 160a384a4a0ff4481c97b6eda75faded28f01624..46c2e1f92415044ce1d9e8bdf9053a3d3768f372 100644 --- a/crates/project/src/git_store/conflict_set.rs +++ b/crates/project/src/git_store/conflict_set.rs @@ -276,8 +276,8 @@ mod tests { use util::{path, rel_path::rel_path}; use worktree::WorktreeSettings; - #[test] - fn test_parse_conflicts_in_buffer() { + #[gpui::test] + fn test_parse_conflicts_in_buffer(cx: &mut TestAppContext) { // Create a buffer with 
conflict markers let test_content = r#" This is some text before the conflict. @@ -299,7 +299,12 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content); + let buffer = Buffer::new( + ReplicaId::LOCAL, + buffer_id, + test_content, + cx.background_executor(), + ); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); @@ -355,8 +360,8 @@ mod tests { assert_eq!(conflicts_in_range.len(), 0); } - #[test] - fn test_nested_conflict_markers() { + #[gpui::test] + fn test_nested_conflict_markers(cx: &mut TestAppContext) { // Create a buffer with nested conflict markers let test_content = r#" This is some text before the conflict. @@ -374,7 +379,12 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content); + let buffer = Buffer::new( + ReplicaId::LOCAL, + buffer_id, + test_content, + cx.background_executor(), + ); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); @@ -396,8 +406,8 @@ mod tests { assert_eq!(their_text, "This is their version in a nested conflict\n"); } - #[test] - fn test_conflict_markers_at_eof() { + #[gpui::test] + fn test_conflict_markers_at_eof(cx: &mut TestAppContext) { let test_content = r#" <<<<<<< ours ======= @@ -405,15 +415,20 @@ mod tests { >>>>>>> "# .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content); + let buffer = Buffer::new( + ReplicaId::LOCAL, + buffer_id, + test_content, + cx.background_executor(), + ); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); assert_eq!(conflict_snapshot.conflicts.len(), 1); } - #[test] - fn test_conflicts_in_range() { + #[gpui::test] + fn test_conflicts_in_range(cx: &mut TestAppContext) { // Create a buffer with conflict markers let test_content = r#" one @@ -447,7 
+462,12 @@ mod tests { .unindent(); let buffer_id = BufferId::new(1).unwrap(); - let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content.clone()); + let buffer = Buffer::new( + ReplicaId::LOCAL, + buffer_id, + test_content.clone(), + cx.background_executor(), + ); let snapshot = buffer.snapshot(); let conflict_snapshot = ConflictSet::parse(&snapshot); diff --git a/crates/project/src/prettier_store.rs b/crates/project/src/prettier_store.rs index 40deac76404ddb4378fe08cae931d0f0e3583487..3743f9769eaaff7f3acd1cc5bad16e31f6e80987 100644 --- a/crates/project/src/prettier_store.rs +++ b/crates/project/src/prettier_store.rs @@ -13,7 +13,9 @@ use futures::{ future::{self, Shared}, stream::FuturesUnordered, }; -use gpui::{AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, WeakEntity}; +use gpui::{ + AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task, WeakEntity, +}; use language::{ Buffer, LanguageRegistry, LocalFile, language_settings::{Formatter, LanguageSettings}, @@ -558,99 +560,137 @@ impl PrettierStore { let plugins_to_install = new_plugins.clone(); let fs = Arc::clone(&self.fs); let new_installation_task = cx - .spawn(async move |prettier_store, cx| { - cx.background_executor().timer(Duration::from_millis(30)).await; + .spawn(async move |prettier_store, cx| { + cx.background_executor() + .timer(Duration::from_millis(30)) + .await; let location_data = prettier_store.update(cx, |prettier_store, cx| { - worktree.and_then(|worktree_id| { - prettier_store.worktree_store - .read(cx) - .worktree_for_id(worktree_id, cx) - .map(|worktree| worktree.read(cx).abs_path()) - }).map(|locate_from| { - let installed_prettiers = prettier_store.prettier_instances.keys().cloned().collect(); - (locate_from, installed_prettiers) - }) + worktree + .and_then(|worktree_id| { + prettier_store + .worktree_store + .read(cx) + .worktree_for_id(worktree_id, cx) + .map(|worktree| worktree.read(cx).abs_path()) + }) + .map(|locate_from| { + let 
installed_prettiers = + prettier_store.prettier_instances.keys().cloned().collect(); + (locate_from, installed_prettiers) + }) })?; let locate_prettier_installation = match location_data { - Some((locate_from, installed_prettiers)) => Prettier::locate_prettier_installation( - fs.as_ref(), - &installed_prettiers, - locate_from.as_ref(), - ) - .await - .context("locate prettier installation").map_err(Arc::new)?, + Some((locate_from, installed_prettiers)) => { + Prettier::locate_prettier_installation( + fs.as_ref(), + &installed_prettiers, + locate_from.as_ref(), + ) + .await + .context("locate prettier installation") + .map_err(Arc::new)? + } None => ControlFlow::Continue(None), }; - match locate_prettier_installation - { + match locate_prettier_installation { ControlFlow::Break(()) => return Ok(()), ControlFlow::Continue(prettier_path) => { if prettier_path.is_some() { new_plugins.clear(); } - let mut needs_install = should_write_prettier_server_file(fs.as_ref()).await; + let mut needs_install = + should_write_prettier_server_file(fs.as_ref()).await; if let Some(previous_installation_task) = previous_installation_task - && let Err(e) = previous_installation_task.await { - log::error!("Failed to install default prettier: {e:#}"); - prettier_store.update(cx, |prettier_store, _| { - if let PrettierInstallation::NotInstalled { attempts, not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier { - *attempts += 1; - new_plugins.extend(not_installed_plugins.iter().cloned()); - installation_attempt = *attempts; - needs_install = true; - }; - })?; - }; + && let Err(e) = previous_installation_task.await + { + log::error!("Failed to install default prettier: {e:#}"); + prettier_store.update(cx, |prettier_store, _| { + if let PrettierInstallation::NotInstalled { + attempts, + not_installed_plugins, + .. 
+ } = &mut prettier_store.default_prettier.prettier + { + *attempts += 1; + new_plugins.extend(not_installed_plugins.iter().cloned()); + installation_attempt = *attempts; + needs_install = true; + }; + })?; + }; if installation_attempt > prettier::FAIL_THRESHOLD { prettier_store.update(cx, |prettier_store, _| { - if let PrettierInstallation::NotInstalled { installation_task, .. } = &mut prettier_store.default_prettier.prettier { + if let PrettierInstallation::NotInstalled { + installation_task, + .. + } = &mut prettier_store.default_prettier.prettier + { *installation_task = None; }; })?; log::warn!( - "Default prettier installation had failed {installation_attempt} times, not attempting again", + "Default prettier installation had failed {installation_attempt} \ + times, not attempting again", ); return Ok(()); } prettier_store.update(cx, |prettier_store, _| { new_plugins.retain(|plugin| { - !prettier_store.default_prettier.installed_plugins.contains(plugin) + !prettier_store + .default_prettier + .installed_plugins + .contains(plugin) }); - if let PrettierInstallation::NotInstalled { not_installed_plugins, .. } = &mut prettier_store.default_prettier.prettier { + if let PrettierInstallation::NotInstalled { + not_installed_plugins, + .. 
+ } = &mut prettier_store.default_prettier.prettier + { not_installed_plugins.retain(|plugin| { - !prettier_store.default_prettier.installed_plugins.contains(plugin) + !prettier_store + .default_prettier + .installed_plugins + .contains(plugin) }); not_installed_plugins.extend(new_plugins.iter().cloned()); } needs_install |= !new_plugins.is_empty(); })?; if needs_install { - log::info!("Initializing default prettier with plugins {new_plugins:?}"); + log::info!( + "Initializing default prettier with plugins {new_plugins:?}" + ); let installed_plugins = new_plugins.clone(); + let executor = cx.background_executor().clone(); cx.background_spawn(async move { install_prettier_packages(fs.as_ref(), new_plugins, node).await?; // Save the server file last, so the reinstall need could be determined by the absence of the file. - save_prettier_server_file(fs.as_ref()).await?; + save_prettier_server_file(fs.as_ref(), &executor).await?; anyhow::Ok(()) }) - .await - .context("prettier & plugins install") - .map_err(Arc::new)?; - log::info!("Initialized default prettier with plugins: {installed_plugins:?}"); + .await + .context("prettier & plugins install") + .map_err(Arc::new)?; + log::info!( + "Initialized default prettier with plugins: {installed_plugins:?}" + ); prettier_store.update(cx, |prettier_store, _| { prettier_store.default_prettier.prettier = PrettierInstallation::Installed(PrettierInstance { attempt: 0, prettier: None, }); - prettier_store.default_prettier + prettier_store + .default_prettier .installed_plugins .extend(installed_plugins); })?; } else { prettier_store.update(cx, |prettier_store, _| { - if let PrettierInstallation::NotInstalled { .. } = &mut prettier_store.default_prettier.prettier { + if let PrettierInstallation::NotInstalled { .. 
} = + &mut prettier_store.default_prettier.prettier + { prettier_store.default_prettier.prettier = PrettierInstallation::Installed(PrettierInstance { attempt: 0, @@ -936,11 +976,14 @@ async fn install_prettier_packages( anyhow::Ok(()) } -async fn save_prettier_server_file(fs: &dyn Fs) -> anyhow::Result<()> { +async fn save_prettier_server_file( + fs: &dyn Fs, + executor: &BackgroundExecutor, +) -> anyhow::Result<()> { let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE); fs.save( &prettier_wrapper_path, - &text::Rope::from(prettier::PRETTIER_SERVER_JS), + &text::Rope::from_str(prettier::PRETTIER_SERVER_JS, executor), text::LineEnding::Unix, ) .await diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index e188ebd5e32947777f987ff43df52f09d006d58f..7c7fe9a43091611a53dbde0ecbaf6691b7d768d0 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -712,8 +712,10 @@ pub enum ResolveState { impl InlayHint { pub fn text(&self) -> Rope { match &self.label { - InlayHintLabel::String(s) => Rope::from(s), - InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &*part.value).collect(), + InlayHintLabel::String(s) => Rope::from_str_small(s), + InlayHintLabel::LabelParts(parts) => { + Rope::from_iter_small(parts.iter().map(|part| &*part.value)) + } } } } @@ -5402,7 +5404,12 @@ impl Project { worktree .update(cx, |worktree, cx| { let line_ending = text::LineEnding::detect(&new_text); - worktree.write_file(rel_path.clone(), new_text.into(), line_ending, cx) + worktree.write_file( + rel_path.clone(), + Rope::from_str(&new_text, cx.background_executor()), + line_ending, + cx, + ) })? 
.await .context("Failed to write settings file")?; diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 891ad2420c6f8a79659a1f05afd0821b995b5b1a..3dc918d5a757af56038471e1a601d6f2cf7dbbe1 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1461,21 +1461,21 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon .unwrap(); fs.save( path!("/the-root/Cargo.lock").as_ref(), - &"".into(), + &Rope::default(), Default::default(), ) .await .unwrap(); fs.save( path!("/the-stdlib/LICENSE").as_ref(), - &"".into(), + &Rope::default(), Default::default(), ) .await .unwrap(); fs.save( path!("/the/stdlib/src/string.rs").as_ref(), - &"".into(), + &Rope::default(), Default::default(), ) .await @@ -4072,7 +4072,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) // to be detected by the worktree, so that the buffer starts reloading. fs.save( path!("/dir/file1").as_ref(), - &"the first contents".into(), + &Rope::from_str("the first contents", cx.background_executor()), Default::default(), ) .await @@ -4083,7 +4083,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) // previous file change may still be in progress. fs.save( path!("/dir/file1").as_ref(), - &"the second contents".into(), + &Rope::from_str("the second contents", cx.background_executor()), Default::default(), ) .await @@ -4127,7 +4127,7 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) { // to be detected by the worktree, so that the buffer starts reloading. 
fs.save( path!("/dir/file1").as_ref(), - &"the first contents".into(), + &Rope::from_str("the first contents", cx.background_executor()), Default::default(), ) .await @@ -4805,7 +4805,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) { marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n"); fs.save( path!("/dir/the-file").as_ref(), - &new_contents.as_str().into(), + &Rope::from_str(new_contents.as_str(), cx.background_executor()), LineEnding::Unix, ) .await @@ -4837,7 +4837,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) { // Change the file on disk again, adding blank lines to the beginning. fs.save( path!("/dir/the-file").as_ref(), - &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(), + &Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()), LineEnding::Unix, ) .await @@ -4889,7 +4889,7 @@ async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) { // state updates correctly. fs.save( path!("/dir/file1").as_ref(), - &"aaa\nb\nc\n".into(), + &Rope::from_str("aaa\nb\nc\n", cx.background_executor()), LineEnding::Windows, ) .await diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 969363fb2bd02e7bc514cd68d488ca57aef9f0b9..c7e09e3f681d770959709893561cf7a1ba377b37 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -13,7 +13,7 @@ use fs::{FakeFs, Fs}; use gpui::{AppContext as _, Entity, SemanticVersion, SharedString, TestAppContext}; use http_client::{BlockedHttpClient, FakeHttpClient}; use language::{ - Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding, + Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding, Rope, language_settings::{AllLanguageSettings, language_settings}, }; use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind, LanguageServerName}; @@ -120,7 +120,7 @@ 
async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test // sees the new file. fs.save( path!("/code/project1/src/main.rs").as_ref(), - &"fn main() {}".into(), + &Rope::from_str_small("fn main() {}"), Default::default(), ) .await @@ -766,7 +766,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont fs.save( &PathBuf::from(path!("/code/project1/src/lib.rs")), - &("bangles".to_string().into()), + &Rope::from_str_small("bangles"), LineEnding::Unix, ) .await @@ -781,7 +781,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont fs.save( &PathBuf::from(path!("/code/project1/src/lib.rs")), - &("bloop".to_string().into()), + &Rope::from_str_small("bloop"), LineEnding::Unix, ) .await diff --git a/crates/rich_text/src/rich_text.rs b/crates/rich_text/src/rich_text.rs index 2af9988f032c5dc9651e1da6e8c3b52c6c668866..4e30c22e7f4da2f2656861b792ada5ef6fa9311b 100644 --- a/crates/rich_text/src/rich_text.rs +++ b/crates/rich_text/src/rich_text.rs @@ -1,9 +1,10 @@ use futures::FutureExt; use gpui::{ - AnyElement, AnyView, App, ElementId, FontStyle, FontWeight, HighlightStyle, InteractiveText, - IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle, Window, + AnyElement, AnyView, App, BackgroundExecutor, ElementId, FontStyle, FontWeight, HighlightStyle, + InteractiveText, IntoElement, SharedString, StrikethroughStyle, StyledText, UnderlineStyle, + Window, }; -use language::{HighlightId, Language, LanguageRegistry}; +use language::{HighlightId, Language, LanguageRegistry, Rope}; use std::{ops::Range, sync::Arc}; use theme::ActiveTheme; use ui::LinkPreview; @@ -56,6 +57,7 @@ impl RichText { block: String, mentions: &[Mention], language_registry: &Arc, + executor: &BackgroundExecutor, ) -> Self { let mut text = String::new(); let mut highlights = Vec::new(); @@ -70,6 +72,7 @@ impl RichText { &mut highlights, &mut link_ranges, &mut link_urls, + executor, ); 
text.truncate(text.trim_end().len()); @@ -184,6 +187,7 @@ pub fn render_markdown_mut( highlights: &mut Vec<(Range, Highlight)>, link_ranges: &mut Vec>, link_urls: &mut Vec, + executor: &BackgroundExecutor, ) { use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag, TagEnd}; @@ -202,7 +206,7 @@ pub fn render_markdown_mut( match event { Event::Text(t) => { if let Some(language) = ¤t_language { - render_code(text, highlights, t.as_ref(), language); + render_code(text, highlights, t.as_ref(), language, executor); } else { while let Some(mention) = mentions.first() { if !source_range.contains_inclusive(&mention.range) { @@ -373,11 +377,14 @@ pub fn render_code( highlights: &mut Vec<(Range, Highlight)>, content: &str, language: &Arc, + executor: &BackgroundExecutor, ) { let prev_len = text.len(); text.push_str(content); let mut offset = 0; - for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) { + for (range, highlight_id) in + language.highlight_text(&Rope::from_str(content, executor), 0..content.len()) + { if range.start > offset { highlights.push((prev_len + offset..prev_len + range.start, Highlight::Code)); } diff --git a/crates/rope/Cargo.toml b/crates/rope/Cargo.toml index 4107c2e012debc13b0cc44003250f4da63e5039f..30f702292bf1e04524fe0c2489b1c4a8783e9ca4 100644 --- a/crates/rope/Cargo.toml +++ b/crates/rope/Cargo.toml @@ -14,10 +14,10 @@ path = "src/rope.rs" [dependencies] arrayvec = "0.7.1" log.workspace = true -rayon.workspace = true sum_tree.workspace = true unicode-segmentation.workspace = true util.workspace = true +gpui.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/rope/benches/rope_benchmark.rs b/crates/rope/benches/rope_benchmark.rs index 030bec01df4d223cd5288842ba0f9c1386dac31b..5075dff788dfadd49783e89937e19986d9234580 100644 --- a/crates/rope/benches/rope_benchmark.rs +++ b/crates/rope/benches/rope_benchmark.rs @@ -3,6 +3,7 @@ use std::ops::Range; use criterion::{ BatchSize, 
BenchmarkId, Criterion, Throughput, black_box, criterion_group, criterion_main, }; +use gpui::{AsyncApp, TestAppContext}; use rand::prelude::*; use rand::rngs::StdRng; use rope::{Point, Rope}; @@ -26,10 +27,10 @@ fn generate_random_text(rng: &mut StdRng, len: usize) -> String { str } -fn generate_random_rope(rng: &mut StdRng, text_len: usize) -> Rope { +fn generate_random_rope(rng: &mut StdRng, text_len: usize, cx: &AsyncApp) -> Rope { let text = generate_random_text(rng, text_len); let mut rope = Rope::new(); - rope.push(&text); + rope.push(&text, cx.background_executor()); rope } @@ -82,11 +83,13 @@ fn rope_benchmarks(c: &mut Criterion) { group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); let text = generate_random_text(&mut rng, *size); + let cx = TestAppContext::single(); + let cx = cx.to_async(); b.iter(|| { let mut rope = Rope::new(); for _ in 0..10 { - rope.push(&text); + rope.push(&text, cx.background_executor()); } }); }); @@ -99,8 +102,10 @@ fn rope_benchmarks(c: &mut Criterion) { group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); let mut random_ropes = Vec::new(); + let cx = TestAppContext::single(); + let cx = cx.to_async(); for _ in 0..5 { - let rope = generate_random_rope(&mut rng, *size); + let rope = generate_random_rope(&mut rng, *size, &cx); random_ropes.push(rope); } @@ -119,7 +124,9 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let rope = generate_random_rope(&mut rng, *size); + let cx = TestAppContext::single(); + let cx = cx.to_async(); + let rope = generate_random_rope(&mut rng, *size, &cx); b.iter_batched( || generate_random_rope_ranges(&mut rng, &rope), @@ -139,7 +146,9 @@ fn rope_benchmarks(c: &mut Criterion) { 
group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let rope = generate_random_rope(&mut rng, *size); + let cx = TestAppContext::single(); + let cx = cx.to_async(); + let rope = generate_random_rope(&mut rng, *size, &cx); b.iter_batched( || generate_random_rope_ranges(&mut rng, &rope), @@ -160,7 +169,9 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let rope = generate_random_rope(&mut rng, *size); + let cx = TestAppContext::single(); + let cx = cx.to_async(); + let rope = generate_random_rope(&mut rng, *size, &cx); b.iter(|| { let chars = rope.chars().count(); @@ -175,7 +186,9 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let rope = generate_random_rope(&mut rng, *size); + let cx = TestAppContext::single(); + let cx = cx.to_async(); + let rope = generate_random_rope(&mut rng, *size, &cx); b.iter_batched( || generate_random_rope_points(&mut rng, &rope), @@ -196,7 +209,9 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = StdRng::seed_from_u64(SEED); - let rope = generate_random_rope(&mut rng, *size); + let cx = TestAppContext::single(); + let cx = cx.to_async(); + let rope = generate_random_rope(&mut rng, *size, &cx); b.iter_batched( || generate_random_rope_points(&mut rng, &rope), @@ -216,7 +231,9 @@ fn rope_benchmarks(c: &mut Criterion) { group.throughput(Throughput::Bytes(*size as u64)); group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| { let mut rng = 
StdRng::seed_from_u64(SEED); - let rope = generate_random_rope(&mut rng, *size); + let cx = TestAppContext::single(); + let cx = cx.to_async(); + let rope = generate_random_rope(&mut rng, *size, &cx); b.iter_batched( || { diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 394e6ef0ca589d19ffcf7cf07a92bcd15c8e4a18..b515f46ea89ddd5f8f29ca7d462b48fe8fff1d38 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -5,7 +5,7 @@ mod point_utf16; mod unclipped; use arrayvec::ArrayVec; -use rayon::iter::{IntoParallelIterator, ParallelIterator as _}; +use gpui::BackgroundExecutor; use std::{ cmp, fmt, io, mem, ops::{self, AddAssign, Range}, @@ -31,6 +31,41 @@ impl Rope { Self::default() } + /// Create a new rope from a string without trying to parallelize the construction for large strings. + pub fn from_str_small(text: &str) -> Self { + let mut rope = Self::new(); + rope.push_small(text); + rope + } + + /// Create a new rope from a string. + pub fn from_str(text: &str, executor: &BackgroundExecutor) -> Self { + let mut rope = Self::new(); + rope.push(text, executor); + rope + } + + /// Create a new rope from a string without trying to parallelize the construction for large strings. + pub fn from_iter_small<'a, T: IntoIterator>(iter: T) -> Self { + let mut rope = Rope::new(); + for chunk in iter { + rope.push_small(chunk); + } + rope + } + + /// Create a new rope from a string. + pub fn from_iter<'a, T: IntoIterator>( + iter: T, + executor: &BackgroundExecutor, + ) -> Self { + let mut rope = Rope::new(); + for chunk in iter { + rope.push(chunk, executor); + } + rope + } + /// Checks that `index`-th byte is the first byte in a UTF-8 code point /// sequence or the end of the string. 
/// @@ -145,12 +180,12 @@ impl Rope { self.check_invariants(); } - pub fn replace(&mut self, range: Range, text: &str) { + pub fn replace(&mut self, range: Range, text: &str, executor: &BackgroundExecutor) { let mut new_rope = Rope::new(); let mut cursor = self.cursor(0); new_rope.append(cursor.slice(range.start)); cursor.seek_forward(range.end); - new_rope.push(text); + new_rope.push(text, executor); new_rope.append(cursor.suffix()); *self = new_rope; } @@ -168,28 +203,12 @@ impl Rope { self.slice(start..end) } - pub fn push(&mut self, mut text: &str) { - self.chunks.update_last( - |last_chunk| { - let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE { - text.len() - } else { - let mut split_ix = cmp::min( - chunk::MIN_BASE.saturating_sub(last_chunk.text.len()), - text.len(), - ); - while !text.is_char_boundary(split_ix) { - split_ix += 1; - } - split_ix - }; + pub fn push(&mut self, mut text: &str, executor: &BackgroundExecutor) { + self.fill_last_chunk(&mut text); - let (suffix, remainder) = text.split_at(split_ix); - last_chunk.push_str(suffix); - text = remainder; - }, - (), - ); + if text.is_empty() { + return; + } #[cfg(all(test, not(rust_analyzer)))] const NUM_CHUNKS: usize = 16; @@ -200,7 +219,8 @@ impl Rope { // but given the chunk boundary can land within a character // we need to accommodate for the worst case where every chunk gets cut short by up to 4 bytes if text.len() > NUM_CHUNKS * chunk::MAX_BASE - NUM_CHUNKS * 4 { - return self.push_large(text); + let future = self.push_large(text, executor.clone()); + return executor.block(future); } // 16 is enough as otherwise we will hit the branch above let mut new_chunks = ArrayVec::<_, NUM_CHUNKS>::new(); @@ -220,8 +240,57 @@ impl Rope { self.check_invariants(); } + /// Pushes a string into the rope. Unlike [`push`], this method does not parallelize the construction on large strings. 
+ pub fn push_small(&mut self, mut text: &str) { + self.fill_last_chunk(&mut text); + if text.is_empty() { + return; + } + + // 16 is enough as otherwise we will hit the branch above + let mut new_chunks = Vec::new(); + + while !text.is_empty() { + let mut split_ix = cmp::min(chunk::MAX_BASE, text.len()); + while !text.is_char_boundary(split_ix) { + split_ix -= 1; + } + let (chunk, remainder) = text.split_at(split_ix); + new_chunks.push(chunk); + text = remainder; + } + self.chunks + .extend(new_chunks.into_iter().map(Chunk::new), ()); + + self.check_invariants(); + } + + fn fill_last_chunk(&mut self, text: &mut &str) { + self.chunks.update_last( + |last_chunk| { + let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE { + text.len() + } else { + let mut split_ix = cmp::min( + chunk::MIN_BASE.saturating_sub(last_chunk.text.len()), + text.len(), + ); + while !text.is_char_boundary(split_ix) { + split_ix += 1; + } + split_ix + }; + + let (suffix, remainder) = text.split_at(split_ix); + last_chunk.push_str(suffix); + *text = remainder; + }, + (), + ); + } + /// A copy of `push` specialized for working with large quantities of text. - fn push_large(&mut self, mut text: &str) { + async fn push_large(&mut self, mut text: &str, executor: BackgroundExecutor) { // To avoid frequent reallocs when loading large swaths of file contents, // we estimate worst-case `new_chunks` capacity; // Chunk is a fixed-capacity buffer. 
If a character falls on @@ -254,8 +323,22 @@ impl Rope { const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE); if new_chunks.len() >= PARALLEL_THRESHOLD { - self.chunks - .par_extend(new_chunks.into_par_iter().map(Chunk::new), ()); + let cx2 = executor.clone(); + executor + .scoped(|scope| { + // SAFETY: transmuting to 'static is safe because the future is scoped + // and the underlying string data cannot go out of scope because dropping the scope + // will wait for the task to finish + let new_chunks = + unsafe { std::mem::transmute::, Vec<&'static str>>(new_chunks) }; + + let async_extend = self + .chunks + .async_extend(new_chunks.into_iter().map(Chunk::new), cx2); + + scope.spawn(async_extend); + }) + .await; } else { self.chunks .extend(new_chunks.into_iter().map(Chunk::new), ()); @@ -292,8 +375,13 @@ impl Rope { } } - pub fn push_front(&mut self, text: &str) { - let suffix = mem::replace(self, Rope::from(text)); + pub fn push_front(&mut self, text: &str, cx: &BackgroundExecutor) { + let suffix = mem::replace(self, Rope::from_str(text, cx)); + self.append(suffix); + } + + pub fn push_front_small(&mut self, text: &str) { + let suffix = mem::replace(self, Rope::from_str_small(text)); self.append(suffix); } @@ -577,37 +665,19 @@ impl Rope { } } -impl<'a> From<&'a str> for Rope { - fn from(text: &'a str) -> Self { - let mut rope = Self::new(); - rope.push(text); - rope - } -} +// impl From for Rope { +// #[inline(always)] +// fn from(text: String) -> Self { +// Rope::from(text.as_str()) +// } +// } -impl<'a> FromIterator<&'a str> for Rope { - fn from_iter>(iter: T) -> Self { - let mut rope = Rope::new(); - for chunk in iter { - rope.push(chunk); - } - rope - } -} - -impl From for Rope { - #[inline(always)] - fn from(text: String) -> Self { - Rope::from(text.as_str()) - } -} - -impl From<&String> for Rope { - #[inline(always)] - fn from(text: &String) -> Self { - Rope::from(text.as_str()) - } -} +// impl From<&String> for Rope { +// #[inline(always)] +// 
fn from(text: &String) -> Self { +// Rope::from(text.as_str()) +// } +// } impl fmt::Display for Rope { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { @@ -1639,6 +1709,7 @@ where mod tests { use super::*; use Bias::{Left, Right}; + use gpui::TestAppContext; use rand::prelude::*; use std::{cmp::Ordering, env, io::Read}; use util::RandomCharIter; @@ -1648,17 +1719,17 @@ mod tests { zlog::init_test(); } - #[test] - fn test_all_4_byte_chars() { + #[gpui::test] + async fn test_all_4_byte_chars(cx: &mut TestAppContext) { let mut rope = Rope::new(); let text = "🏀".repeat(256); - rope.push(&text); + rope.push(&text, cx.background_executor()); assert_eq!(rope.text(), text); } - #[test] - fn test_clip() { - let rope = Rope::from("🧘"); + #[gpui::test] + fn test_clip(cx: &mut TestAppContext) { + let rope = Rope::from_str("🧘", cx.background_executor()); assert_eq!(rope.clip_offset(1, Bias::Left), 0); assert_eq!(rope.clip_offset(1, Bias::Right), 4); @@ -1704,9 +1775,9 @@ mod tests { ); } - #[test] - fn test_prev_next_line() { - let rope = Rope::from("abc\ndef\nghi\njkl"); + #[gpui::test] + fn test_prev_next_line(cx: &mut TestAppContext) { + let rope = Rope::from_str("abc\ndef\nghi\njkl", cx.background_executor()); let mut chunks = rope.chunks(); assert_eq!(chunks.peek().unwrap().chars().next().unwrap(), 'a'); @@ -1748,16 +1819,16 @@ mod tests { assert_eq!(chunks.peek(), None); } - #[test] - fn test_lines() { - let rope = Rope::from("abc\ndefg\nhi"); + #[gpui::test] + fn test_lines(cx: &mut TestAppContext) { + let rope = Rope::from_str("abc\ndefg\nhi", cx.background_executor()); let mut lines = rope.chunks().lines(); assert_eq!(lines.next(), Some("abc")); assert_eq!(lines.next(), Some("defg")); assert_eq!(lines.next(), Some("hi")); assert_eq!(lines.next(), None); - let rope = Rope::from("abc\ndefg\nhi\n"); + let rope = Rope::from_str("abc\ndefg\nhi\n", cx.background_executor()); let mut lines = rope.chunks().lines(); assert_eq!(lines.next(), Some("abc")); 
assert_eq!(lines.next(), Some("defg")); @@ -1765,14 +1836,14 @@ mod tests { assert_eq!(lines.next(), Some("")); assert_eq!(lines.next(), None); - let rope = Rope::from("abc\ndefg\nhi"); + let rope = Rope::from_str("abc\ndefg\nhi", cx.background_executor()); let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines(); assert_eq!(lines.next(), Some("hi")); assert_eq!(lines.next(), Some("defg")); assert_eq!(lines.next(), Some("abc")); assert_eq!(lines.next(), None); - let rope = Rope::from("abc\ndefg\nhi\n"); + let rope = Rope::from_str("abc\ndefg\nhi\n", cx.background_executor()); let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines(); assert_eq!(lines.next(), Some("")); assert_eq!(lines.next(), Some("hi")); @@ -1780,14 +1851,14 @@ mod tests { assert_eq!(lines.next(), Some("abc")); assert_eq!(lines.next(), None); - let rope = Rope::from("abc\nlonger line test\nhi"); + let rope = Rope::from_str("abc\nlonger line test\nhi", cx.background_executor()); let mut lines = rope.chunks().lines(); assert_eq!(lines.next(), Some("abc")); assert_eq!(lines.next(), Some("longer line test")); assert_eq!(lines.next(), Some("hi")); assert_eq!(lines.next(), None); - let rope = Rope::from("abc\nlonger line test\nhi"); + let rope = Rope::from_str("abc\nlonger line test\nhi", cx.background_executor()); let mut lines = rope.reversed_chunks_in_range(0..rope.len()).lines(); assert_eq!(lines.next(), Some("hi")); assert_eq!(lines.next(), Some("longer line test")); @@ -1796,7 +1867,7 @@ mod tests { } #[gpui::test(iterations = 100)] - fn test_random_rope(mut rng: StdRng) { + async fn test_random_rope(cx: &mut TestAppContext, mut rng: StdRng) { let operations = env::var("OPERATIONS") .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); @@ -1812,7 +1883,7 @@ mod tests { let mut new_actual = Rope::new(); let mut cursor = actual.cursor(0); new_actual.append(cursor.slice(start_ix)); - new_actual.push(&new_text); + new_actual.push(&new_text, 
cx.background_executor()); cursor.seek_forward(end_ix); new_actual.append(cursor.suffix()); actual = new_actual; @@ -2112,10 +2183,10 @@ mod tests { } } - #[test] - fn test_chunks_equals_str() { + #[gpui::test] + fn test_chunks_equals_str(cx: &mut TestAppContext) { let text = "This is a multi-chunk\n& multi-line test string!"; - let rope = Rope::from(text); + let rope = Rope::from_str(text, cx.background_executor()); for start in 0..text.len() { for end in start..text.len() { let range = start..end; @@ -2158,34 +2229,37 @@ mod tests { } } - let rope = Rope::from(""); + let rope = Rope::from_str("", cx.background_executor()); assert!(rope.chunks_in_range(0..0).equals_str("")); assert!(rope.reversed_chunks_in_range(0..0).equals_str("")); assert!(!rope.chunks_in_range(0..0).equals_str("foo")); assert!(!rope.reversed_chunks_in_range(0..0).equals_str("foo")); } - #[test] - fn test_is_char_boundary() { + #[gpui::test] + fn test_is_char_boundary(cx: &mut TestAppContext) { let fixture = "地"; - let rope = Rope::from("地"); + let rope = Rope::from_str("地", cx.background_executor()); for b in 0..=fixture.len() { assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b)); } let fixture = ""; - let rope = Rope::from(""); + let rope = Rope::from_str("", cx.background_executor()); for b in 0..=fixture.len() { assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b)); } let fixture = "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"; - let rope = Rope::from("🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"); + let rope = Rope::from_str( + "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩", + cx.background_executor(), + ); for b in 0..=fixture.len() { assert_eq!(rope.is_char_boundary(b), fixture.is_char_boundary(b)); } } - #[test] - fn test_floor_char_boundary() { + #[gpui::test] + fn test_floor_char_boundary(cx: &mut TestAppContext) { // polyfill of str::floor_char_boundary fn floor_char_boundary(str: &str, index: usize) -> usize { if index >= str.len() { @@ -2201,7 +2275,7 @@ mod tests { } let fixture 
= "地"; - let rope = Rope::from("地"); + let rope = Rope::from_str("地", cx.background_executor()); for b in 0..=fixture.len() { assert_eq!( rope.floor_char_boundary(b), @@ -2210,7 +2284,7 @@ mod tests { } let fixture = ""; - let rope = Rope::from(""); + let rope = Rope::from_str("", cx.background_executor()); for b in 0..=fixture.len() { assert_eq!( rope.floor_char_boundary(b), @@ -2219,7 +2293,10 @@ mod tests { } let fixture = "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"; - let rope = Rope::from("🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"); + let rope = Rope::from_str( + "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩", + cx.background_executor(), + ); for b in 0..=fixture.len() { assert_eq!( rope.floor_char_boundary(b), @@ -2228,8 +2305,8 @@ mod tests { } } - #[test] - fn test_ceil_char_boundary() { + #[gpui::test] + fn test_ceil_char_boundary(cx: &mut TestAppContext) { // polyfill of str::ceil_char_boundary fn ceil_char_boundary(str: &str, index: usize) -> usize { if index > str.len() { @@ -2244,19 +2321,22 @@ mod tests { } let fixture = "地"; - let rope = Rope::from("地"); + let rope = Rope::from_str("地", cx.background_executor()); for b in 0..=fixture.len() { assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b)); } let fixture = ""; - let rope = Rope::from(""); + let rope = Rope::from_str("", cx.background_executor()); for b in 0..=fixture.len() { assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b)); } let fixture = "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"; - let rope = Rope::from("🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩"); + let rope = Rope::from_str( + "🔴🟠🟡🟢🔵🟣⚫️⚪️🟤\n🏳️‍⚧️🏁🏳️‍🌈🏴‍☠️⛳️📬📭🏴🏳️🚩", + cx.background_executor(), + ); for b in 0..=fixture.len() { assert_eq!(rope.ceil_char_boundary(b), ceil_char_boundary(&fixture, b)); } diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index 207a9841e41bf35e1f63bb00b0c62073c1cf0224..3cc05fd2d26fa52282030ad1eb564e3cfd8cb609 100644 --- 
a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -554,7 +554,7 @@ impl RulesLibrary { let prompt_id = PromptId::new(); let save = self.store.update(cx, |store, cx| { - store.save(prompt_id, None, false, "".into(), cx) + store.save(prompt_id, None, false, Default::default(), cx) }); self.picker .update(cx, |picker, cx| picker.refresh(window, cx)); @@ -888,7 +888,13 @@ impl RulesLibrary { let new_id = PromptId::new(); let body = rule.body_editor.read(cx).text(cx); let save = self.store.update(cx, |store, cx| { - store.save(new_id, Some(title.into()), false, body.into(), cx) + store.save( + new_id, + Some(title.into()), + false, + Rope::from_str(&body, cx.background_executor()), + cx, + ) }); self.picker .update(cx, |picker, cx| picker.refresh(window, cx)); diff --git a/crates/streaming_diff/Cargo.toml b/crates/streaming_diff/Cargo.toml index b3645a182c3abf52c6ee2f2c23feaedeacf8574a..8825914baa8d08734e66485b4bea418840d72228 100644 --- a/crates/streaming_diff/Cargo.toml +++ b/crates/streaming_diff/Cargo.toml @@ -14,6 +14,7 @@ path = "src/streaming_diff.rs" [dependencies] ordered-float.workspace = true rope.workspace = true +gpui.workspace = true [dev-dependencies] rand.workspace = true diff --git a/crates/streaming_diff/src/streaming_diff.rs b/crates/streaming_diff/src/streaming_diff.rs index 5677981b0dc9878963e01d09e7281749d6603c8f..34a74afa84431079b4d9d0815c96e0114248ca98 100644 --- a/crates/streaming_diff/src/streaming_diff.rs +++ b/crates/streaming_diff/src/streaming_diff.rs @@ -503,11 +503,12 @@ fn is_line_end(point: Point, text: &Rope) -> bool { #[cfg(test)] mod tests { use super::*; + use gpui::BackgroundExecutor; use rand::prelude::*; use std::env; - #[test] - fn test_delete_first_of_two_lines() { + #[gpui::test] + fn test_delete_first_of_two_lines(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Delete { bytes: 5 }, @@ -523,18 +524,18 @@ mod tests { 
apply_line_operations(old_text, &new_text, &expected_line_ops) ); - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!(line_ops, expected_line_ops); } - #[test] - fn test_delete_second_of_two_lines() { + #[gpui::test] + fn test_delete_second_of_two_lines(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, CharOperation::Delete { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -550,8 +551,8 @@ mod tests { ); } - #[test] - fn test_add_new_line() { + #[gpui::test] + fn test_add_new_line(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 9 }, @@ -559,7 +560,7 @@ mod tests { text: "\ncccc".into(), }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -574,15 +575,15 @@ mod tests { ); } - #[test] - fn test_delete_line_in_middle() { + #[gpui::test] + fn test_delete_line_in_middle(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb\ncccc"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, CharOperation::Delete { bytes: 5 }, CharOperation::Keep { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -598,8 +599,8 @@ mod tests { ); } - #[test] - fn test_replace_line() { + #[gpui::test] + fn test_replace_line(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb\ncccc"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, @@ -609,7 +610,7 @@ mod tests { }, CharOperation::Keep { bytes: 5 }, ]; - let line_ops = 
char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -626,8 +627,8 @@ mod tests { ); } - #[test] - fn test_multiple_edits_on_different_lines() { + #[gpui::test] + fn test_multiple_edits_on_different_lines(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb\ncccc\ndddd"; let char_ops = vec![ CharOperation::Insert { text: "A".into() }, @@ -638,7 +639,7 @@ mod tests { text: "\nEEEE".into(), }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -656,15 +657,15 @@ mod tests { ); } - #[test] - fn test_edit_at_end_of_line() { + #[gpui::test] + fn test_edit_at_end_of_line(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb\ncccc"; let char_ops = vec![ CharOperation::Keep { bytes: 4 }, CharOperation::Insert { text: "A".into() }, CharOperation::Keep { bytes: 10 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -680,8 +681,8 @@ mod tests { ); } - #[test] - fn test_insert_newline_character() { + #[gpui::test] + fn test_insert_newline_character(cx: &mut gpui::TestAppContext) { let old_text = "aaaabbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 4 }, @@ -689,7 +690,7 @@ mod tests { CharOperation::Keep { bytes: 4 }, ]; let new_text = apply_char_operations(old_text, &char_ops); - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -703,14 +704,14 @@ mod tests { ); } - #[test] - fn test_insert_newline_at_beginning() { + #[gpui::test] + fn test_insert_newline_at_beginning(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Insert { 
text: "\n".into() }, CharOperation::Keep { bytes: 9 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -725,15 +726,15 @@ mod tests { ); } - #[test] - fn test_delete_newline() { + #[gpui::test] + fn test_delete_newline(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 4 }, CharOperation::Delete { bytes: 1 }, CharOperation::Keep { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -749,8 +750,8 @@ mod tests { ); } - #[test] - fn test_insert_multiple_newlines() { + #[gpui::test] + fn test_insert_multiple_newlines(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, @@ -759,7 +760,7 @@ mod tests { }, CharOperation::Keep { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -775,15 +776,15 @@ mod tests { ); } - #[test] - fn test_delete_multiple_newlines() { + #[gpui::test] + fn test_delete_multiple_newlines(cx: &mut gpui::TestAppContext) { let old_text = "aaaa\n\n\nbbbb"; let char_ops = vec![ CharOperation::Keep { bytes: 5 }, CharOperation::Delete { bytes: 2 }, CharOperation::Keep { bytes: 4 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -799,8 +800,8 @@ mod tests { ); } - #[test] - fn test_complex_scenario() { + #[gpui::test] + fn test_complex_scenario(cx: &mut gpui::TestAppContext) { let old_text = "line1\nline2\nline3\nline4"; let char_ops = vec![ CharOperation::Keep { bytes: 6 }, @@ -814,7 +815,7 @@ mod 
tests { }, CharOperation::Keep { bytes: 6 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -834,8 +835,8 @@ mod tests { ); } - #[test] - fn test_cleaning_up_common_suffix() { + #[gpui::test] + fn test_cleaning_up_common_suffix(cx: &mut gpui::TestAppContext) { let old_text = concat!( " for y in 0..size.y() {\n", " let a = 10;\n", @@ -883,7 +884,7 @@ mod tests { }, CharOperation::Keep { bytes: 1 }, ]; - let line_ops = char_ops_to_line_ops(old_text, &char_ops); + let line_ops = char_ops_to_line_ops(old_text, &char_ops, cx.background_executor()); assert_eq!( line_ops, vec![ @@ -901,8 +902,8 @@ mod tests { ); } - #[test] - fn test_random_diffs() { + #[gpui::test] + fn test_random_diffs(cx: &mut gpui::TestAppContext) { random_test(|mut rng| { let old_text_len = env::var("OLD_TEXT_LEN") .map(|i| i.parse().expect("invalid `OLD_TEXT_LEN` variable")) @@ -922,15 +923,19 @@ mod tests { assert_eq!(patched, new); // Test char_ops_to_line_ops - let line_ops = char_ops_to_line_ops(&old, &char_operations); + let line_ops = char_ops_to_line_ops(&old, &char_operations, cx.background_executor()); println!("line operations: {:?}", line_ops); let patched = apply_line_operations(&old, &new, &line_ops); assert_eq!(patched, new); }); } - fn char_ops_to_line_ops(old_text: &str, char_ops: &[CharOperation]) -> Vec { - let old_rope = Rope::from(old_text); + fn char_ops_to_line_ops( + old_text: &str, + char_ops: &[CharOperation], + executor: &BackgroundExecutor, + ) -> Vec { + let old_rope = Rope::from_str(old_text, executor); let mut diff = LineDiff::default(); for op in char_ops { diff.push_char_operation(op, &old_rope); diff --git a/crates/sum_tree/Cargo.toml b/crates/sum_tree/Cargo.toml index 81916c842225085ceec4721dbd8d212608f6bcb9..fd39bd4d83c65501b4731f31d3f357a3ff7f6fa3 100644 --- a/crates/sum_tree/Cargo.toml +++ b/crates/sum_tree/Cargo.toml @@ -15,10 
+15,12 @@ doctest = false [dependencies] arrayvec = "0.7.1" -rayon.workspace = true log.workspace = true +futures.workspace = true +itertools.workspace = true [dev-dependencies] ctor.workspace = true rand.workspace = true zlog.workspace = true +pollster = "0.4.0" diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 95fbd5ed0d5f5700d0c894cda68ed15ce6590ced..8562766b1b49ac8eb1e3c816f210d1a60cae2aed 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -3,7 +3,8 @@ mod tree_map; use arrayvec::ArrayVec; pub use cursor::{Cursor, FilterCursor, Iter}; -use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator as _}; +use futures::{StreamExt, stream}; +use itertools::Itertools as _; use std::marker::PhantomData; use std::mem; use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc}; @@ -14,6 +15,18 @@ pub const TREE_BASE: usize = 2; #[cfg(not(test))] pub const TREE_BASE: usize = 6; +pub trait BackgroundSpawn { + type Task: Future + Send + Sync + where + R: Send + Sync; + fn background_spawn( + &self, + future: impl Future + Send + Sync + 'static, + ) -> Self::Task + where + R: Send + Sync + 'static; +} + /// An item that can be stored in a [`SumTree`] /// /// Must be summarized by a type that implements [`Summary`] @@ -298,62 +311,71 @@ impl SumTree { } } - pub fn from_par_iter(iter: I, cx: ::Context<'_>) -> Self + pub async fn from_iter_async(iter: I, spawn: S) -> Self where - I: IntoParallelIterator, - Iter: IndexedParallelIterator, - T: Send + Sync, - T::Summary: Send + Sync, - for<'a> ::Context<'a>: Sync, + T: 'static + Send + Sync, + for<'a> T::Summary: Summary = ()> + Send + Sync, + S: BackgroundSpawn, + I: IntoIterator, { - let mut nodes = iter - .into_par_iter() - .chunks(2 * TREE_BASE) - .map(|items| { - let items: ArrayVec = items.into_iter().collect(); + let mut futures = vec![]; + let chunks = iter.into_iter().chunks(2 * TREE_BASE); + for chunk in chunks.into_iter() { + 
let items: ArrayVec = chunk.into_iter().collect(); + futures.push(async move { let item_summaries: ArrayVec = - items.iter().map(|item| item.summary(cx)).collect(); + items.iter().map(|item| item.summary(())).collect(); let mut summary = item_summaries[0].clone(); for item_summary in &item_summaries[1..] { - ::add_summary(&mut summary, item_summary, cx); + ::add_summary(&mut summary, item_summary, ()); } SumTree(Arc::new(Node::Leaf { summary, items, item_summaries, })) - }) - .collect::>(); + }); + } + + let mut nodes = futures::stream::iter(futures) + .map(|future| spawn.background_spawn(future)) + .buffered(4) + .collect::>() + .await; let mut height = 0; while nodes.len() > 1 { height += 1; - nodes = nodes - .into_par_iter() + let current_nodes = mem::take(&mut nodes); + nodes = stream::iter(current_nodes) .chunks(2 * TREE_BASE) - .map(|child_nodes| { - let child_trees: ArrayVec, { 2 * TREE_BASE }> = - child_nodes.into_iter().collect(); - let child_summaries: ArrayVec = child_trees - .iter() - .map(|child_tree| child_tree.summary().clone()) - .collect(); - let mut summary = child_summaries[0].clone(); - for child_summary in &child_summaries[1..] { - ::add_summary(&mut summary, child_summary, cx); - } - SumTree(Arc::new(Node::Internal { - height, - summary, - child_summaries, - child_trees, - })) + .map(|chunk| { + spawn.background_spawn(async move { + let child_trees: ArrayVec, { 2 * TREE_BASE }> = + chunk.into_iter().collect(); + let child_summaries: ArrayVec = child_trees + .iter() + .map(|child_tree| child_tree.summary().clone()) + .collect(); + let mut summary = child_summaries[0].clone(); + for child_summary in &child_summaries[1..] 
{ + ::add_summary(&mut summary, child_summary, ()); + } + SumTree(Arc::new(Node::Internal { + height, + summary, + child_summaries, + child_trees, + })) + }) }) - .collect::>(); + .buffered(4) + .collect::>() + .await; } if nodes.is_empty() { - Self::new(cx) + Self::new(()) } else { debug_assert_eq!(nodes.len(), 1); nodes.pop().unwrap() @@ -597,15 +619,15 @@ impl SumTree { self.append(Self::from_iter(iter, cx), cx); } - pub fn par_extend(&mut self, iter: I, cx: ::Context<'_>) + pub async fn async_extend(&mut self, iter: I, spawn: S) where - I: IntoParallelIterator, - Iter: IndexedParallelIterator, - T: Send + Sync, - T::Summary: Send + Sync, - for<'a> ::Context<'a>: Sync, + S: BackgroundSpawn, + I: IntoIterator + 'static, + T: 'static + Send + Sync, + for<'b> T::Summary: Summary = ()> + Send + Sync, { - self.append(Self::from_par_iter(iter, cx), cx); + let other = Self::from_iter_async(iter, spawn); + self.append(other.await, ()); } pub fn push(&mut self, item: T, cx: ::Context<'_>) { @@ -1070,6 +1092,23 @@ mod tests { #[test] fn test_random() { + struct NoSpawn; + impl BackgroundSpawn for NoSpawn { + type Task + = std::pin::Pin + Sync + Send>> + where + R: Send + Sync; + fn background_spawn( + &self, + future: impl Future + Send + Sync + 'static, + ) -> Self::Task + where + R: Send + Sync + 'static, + { + Box::pin(future) + } + } + let mut starting_seed = 0; if let Ok(value) = std::env::var("SEED") { starting_seed = value.parse().expect("invalid SEED variable"); @@ -1095,7 +1134,7 @@ mod tests { .sample_iter(StandardUniform) .take(count) .collect::>(); - tree.par_extend(items, ()); + pollster::block_on(tree.async_extend(items, NoSpawn)); } for _ in 0..num_operations { @@ -1117,7 +1156,7 @@ mod tests { if rng.random() { new_tree.extend(new_items, ()); } else { - new_tree.par_extend(new_items, ()); + pollster::block_on(new_tree.async_extend(new_items, NoSpawn)); } cursor.seek(&Count(splice_end), Bias::Right); new_tree.append(cursor.slice(&tree_end, Bias::Right), 
()); diff --git a/crates/text/Cargo.toml b/crates/text/Cargo.toml index ed02381eb83db5daececd159171a90072244a340..e9f9279f0d0b41f651c2ac218adf58bd76af2021 100644 --- a/crates/text/Cargo.toml +++ b/crates/text/Cargo.toml @@ -28,6 +28,7 @@ rope.workspace = true smallvec.workspace = true sum_tree.workspace = true util.workspace = true +gpui.workspace = true [dev-dependencies] collections = { workspace = true, features = ["test-support"] } diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index c9e04e407ffdb8ffde6b139e01d78822e54e1a4b..6281c2f0e2ef21cb3756cfe5da814d294b49b108 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -14,24 +14,29 @@ fn init_logger() { zlog::init_test(); } -#[test] -fn test_edit() { - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abc"); +#[gpui::test] +fn test_edit(cx: &mut gpui::TestAppContext) { + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "abc", + cx.background_executor(), + ); assert_eq!(buffer.text(), "abc"); - buffer.edit([(3..3, "def")]); + buffer.edit([(3..3, "def")], cx.background_executor()); assert_eq!(buffer.text(), "abcdef"); - buffer.edit([(0..0, "ghi")]); + buffer.edit([(0..0, "ghi")], cx.background_executor()); assert_eq!(buffer.text(), "ghiabcdef"); - buffer.edit([(5..5, "jkl")]); + buffer.edit([(5..5, "jkl")], cx.background_executor()); assert_eq!(buffer.text(), "ghiabjklcdef"); - buffer.edit([(6..7, "")]); + buffer.edit([(6..7, "")], cx.background_executor()); assert_eq!(buffer.text(), "ghiabjlcdef"); - buffer.edit([(4..9, "mno")]); + buffer.edit([(4..9, "mno")], cx.background_executor()); assert_eq!(buffer.text(), "ghiamnoef"); } #[gpui::test(iterations = 100)] -fn test_random_edits(mut rng: StdRng) { +fn test_random_edits(cx: &mut gpui::TestAppContext, mut rng: StdRng) { let operations = env::var("OPERATIONS") .map(|i| i.parse().expect("invalid `OPERATIONS` variable")) .unwrap_or(10); @@ -44,6 +49,7 @@ fn 
test_random_edits(mut rng: StdRng) { ReplicaId::LOCAL, BufferId::new(1).unwrap(), reference_string.clone(), + cx.background_executor(), ); LineEnding::normalize(&mut reference_string); @@ -56,7 +62,7 @@ fn test_random_edits(mut rng: StdRng) { ); for _i in 0..operations { - let (edits, _) = buffer.randomly_edit(&mut rng, 5); + let (edits, _) = buffer.randomly_edit(&mut rng, 5, cx.background_executor()); for (old_range, new_text) in edits.iter().rev() { reference_string.replace_range(old_range.clone(), new_text); } @@ -106,7 +112,11 @@ fn test_random_edits(mut rng: StdRng) { let mut text = old_buffer.visible_text.clone(); for edit in edits { let new_text: String = buffer.text_for_range(edit.new.clone()).collect(); - text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text); + text.replace( + edit.new.start..edit.new.start + edit.old.len(), + &new_text, + cx.background_executor(), + ); } assert_eq!(text.to_string(), buffer.text()); @@ -161,14 +171,18 @@ fn test_random_edits(mut rng: StdRng) { let mut text = old_buffer.visible_text.clone(); for edit in subscription_edits.into_inner() { let new_text: String = buffer.text_for_range(edit.new.clone()).collect(); - text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text); + text.replace( + edit.new.start..edit.new.start + edit.old.len(), + &new_text, + cx.background_executor(), + ); } assert_eq!(text.to_string(), buffer.text()); } } -#[test] -fn test_line_endings() { +#[gpui::test] +fn test_line_endings(cx: &mut gpui::TestAppContext) { assert_eq!(LineEnding::detect(&"🍐✅\n".repeat(1000)), LineEnding::Unix); assert_eq!(LineEnding::detect(&"abcd\n".repeat(1000)), LineEnding::Unix); assert_eq!( @@ -184,25 +198,34 @@ fn test_line_endings() { ReplicaId::LOCAL, BufferId::new(1).unwrap(), "one\r\ntwo\rthree", + cx.background_executor(), ); assert_eq!(buffer.text(), "one\ntwo\nthree"); assert_eq!(buffer.line_ending(), LineEnding::Windows); buffer.check_invariants(); - 
buffer.edit([(buffer.len()..buffer.len(), "\r\nfour")]); - buffer.edit([(0..0, "zero\r\n")]); + buffer.edit( + [(buffer.len()..buffer.len(), "\r\nfour")], + cx.background_executor(), + ); + buffer.edit([(0..0, "zero\r\n")], cx.background_executor()); assert_eq!(buffer.text(), "zero\none\ntwo\nthree\nfour"); assert_eq!(buffer.line_ending(), LineEnding::Windows); buffer.check_invariants(); } -#[test] -fn test_line_len() { - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); - buffer.edit([(0..0, "abcd\nefg\nhij")]); - buffer.edit([(12..12, "kl\nmno")]); - buffer.edit([(18..18, "\npqrs\n")]); - buffer.edit([(18..21, "\nPQ")]); +#[gpui::test] +fn test_line_len(cx: &mut gpui::TestAppContext) { + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "", + cx.background_executor(), + ); + buffer.edit([(0..0, "abcd\nefg\nhij")], cx.background_executor()); + buffer.edit([(12..12, "kl\nmno")], cx.background_executor()); + buffer.edit([(18..18, "\npqrs\n")], cx.background_executor()); + buffer.edit([(18..21, "\nPQ")], cx.background_executor()); assert_eq!(buffer.line_len(0), 4); assert_eq!(buffer.line_len(1), 3); @@ -212,10 +235,15 @@ fn test_line_len() { assert_eq!(buffer.line_len(5), 0); } -#[test] -fn test_common_prefix_at_position() { +#[gpui::test] +fn test_common_prefix_at_position(cx: &mut gpui::TestAppContext) { let text = "a = str; b = δα"; - let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text); + let buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + text, + cx.background_executor(), + ); let offset1 = offset_after(text, "str"); let offset2 = offset_after(text, "δα"); @@ -261,12 +289,13 @@ fn test_common_prefix_at_position() { } } -#[test] -fn test_text_summary_for_range() { +#[gpui::test] +fn test_text_summary_for_range(cx: &mut gpui::TestAppContext) { let buffer = Buffer::new( ReplicaId::LOCAL, BufferId::new(1).unwrap(), "ab\nefg\nhklm\nnopqrs\ntuvwxyz", + 
cx.background_executor(), ); assert_eq!( buffer.text_summary_for_range::(0..2), @@ -354,13 +383,18 @@ fn test_text_summary_for_range() { ); } -#[test] -fn test_chars_at() { - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); - buffer.edit([(0..0, "abcd\nefgh\nij")]); - buffer.edit([(12..12, "kl\nmno")]); - buffer.edit([(18..18, "\npqrs")]); - buffer.edit([(18..21, "\nPQ")]); +#[gpui::test] +fn test_chars_at(cx: &mut gpui::TestAppContext) { + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "", + cx.background_executor(), + ); + buffer.edit([(0..0, "abcd\nefgh\nij")], cx.background_executor()); + buffer.edit([(12..12, "kl\nmno")], cx.background_executor()); + buffer.edit([(18..18, "\npqrs")], cx.background_executor()); + buffer.edit([(18..21, "\nPQ")], cx.background_executor()); let chars = buffer.chars_at(Point::new(0, 0)); assert_eq!(chars.collect::(), "abcd\nefgh\nijkl\nmno\nPQrs"); @@ -378,43 +412,53 @@ fn test_chars_at() { assert_eq!(chars.collect::(), "PQrs"); // Regression test: - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); - buffer.edit([(0..0, "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n")]); - buffer.edit([(60..60, "\n")]); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "", + cx.background_executor(), + ); + buffer.edit([(0..0, "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n")], cx.background_executor()); + buffer.edit([(60..60, "\n")], cx.background_executor()); let chars = buffer.chars_at(Point::new(6, 0)); assert_eq!(chars.collect::(), " \"xray_wasm\",\n]\n"); } -#[test] -fn test_anchors() { - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); - buffer.edit([(0..0, "abc")]); +#[gpui::test] +fn test_anchors(cx: &mut gpui::TestAppContext) { + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + 
BufferId::new(1).unwrap(), + "", + cx.background_executor(), + ); + buffer.edit([(0..0, "abc")], cx.background_executor()); let left_anchor = buffer.anchor_before(2); let right_anchor = buffer.anchor_after(2); - buffer.edit([(1..1, "def\n")]); + buffer.edit([(1..1, "def\n")], cx.background_executor()); assert_eq!(buffer.text(), "adef\nbc"); assert_eq!(left_anchor.to_offset(&buffer), 6); assert_eq!(right_anchor.to_offset(&buffer), 6); assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - buffer.edit([(2..3, "")]); + buffer.edit([(2..3, "")], cx.background_executor()); assert_eq!(buffer.text(), "adf\nbc"); assert_eq!(left_anchor.to_offset(&buffer), 5); assert_eq!(right_anchor.to_offset(&buffer), 5); assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 }); - buffer.edit([(5..5, "ghi\n")]); + buffer.edit([(5..5, "ghi\n")], cx.background_executor()); assert_eq!(buffer.text(), "adf\nbghi\nc"); assert_eq!(left_anchor.to_offset(&buffer), 5); assert_eq!(right_anchor.to_offset(&buffer), 9); assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 }); assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 }); - buffer.edit([(7..9, "")]); + buffer.edit([(7..9, "")], cx.background_executor()); assert_eq!(buffer.text(), "adf\nbghc"); assert_eq!(left_anchor.to_offset(&buffer), 5); assert_eq!(right_anchor.to_offset(&buffer), 7); @@ -504,13 +548,18 @@ fn test_anchors() { ); } -#[test] -fn test_anchors_at_start_and_end() { - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), ""); +#[gpui::test] +fn test_anchors_at_start_and_end(cx: &mut gpui::TestAppContext) { + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "", + cx.background_executor(), + ); let before_start_anchor = buffer.anchor_before(0); let after_end_anchor = 
buffer.anchor_after(0); - buffer.edit([(0..0, "abc")]); + buffer.edit([(0..0, "abc")], cx.background_executor()); assert_eq!(buffer.text(), "abc"); assert_eq!(before_start_anchor.to_offset(&buffer), 0); assert_eq!(after_end_anchor.to_offset(&buffer), 3); @@ -518,8 +567,8 @@ fn test_anchors_at_start_and_end() { let after_start_anchor = buffer.anchor_after(0); let before_end_anchor = buffer.anchor_before(3); - buffer.edit([(3..3, "def")]); - buffer.edit([(0..0, "ghi")]); + buffer.edit([(3..3, "def")], cx.background_executor()); + buffer.edit([(0..0, "ghi")], cx.background_executor()); assert_eq!(buffer.text(), "ghiabcdef"); assert_eq!(before_start_anchor.to_offset(&buffer), 0); assert_eq!(after_start_anchor.to_offset(&buffer), 3); @@ -527,15 +576,20 @@ fn test_anchors_at_start_and_end() { assert_eq!(after_end_anchor.to_offset(&buffer), 9); } -#[test] -fn test_undo_redo() { - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234"); +#[gpui::test] +fn test_undo_redo(cx: &mut gpui::TestAppContext) { + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "1234", + cx.background_executor(), + ); // Set group interval to zero so as to not group edits in the undo stack. 
buffer.set_group_interval(Duration::from_secs(0)); - buffer.edit([(1..1, "abx")]); - buffer.edit([(3..4, "yzef")]); - buffer.edit([(3..5, "cd")]); + buffer.edit([(1..1, "abx")], cx.background_executor()); + buffer.edit([(3..4, "yzef")], cx.background_executor()); + buffer.edit([(3..5, "cd")], cx.background_executor()); assert_eq!(buffer.text(), "1abcdef234"); let entries = buffer.history.undo_stack.clone(); @@ -563,26 +617,31 @@ fn test_undo_redo() { assert_eq!(buffer.text(), "1234"); } -#[test] -fn test_history() { +#[gpui::test] +fn test_history(cx: &mut gpui::TestAppContext) { let mut now = Instant::now(); - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456"); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "123456", + cx.background_executor(), + ); buffer.set_group_interval(Duration::from_millis(300)); let transaction_1 = buffer.start_transaction_at(now).unwrap(); - buffer.edit([(2..4, "cd")]); + buffer.edit([(2..4, "cd")], cx.background_executor()); buffer.end_transaction_at(now); assert_eq!(buffer.text(), "12cd56"); buffer.start_transaction_at(now); - buffer.edit([(4..5, "e")]); + buffer.edit([(4..5, "e")], cx.background_executor()); buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "12cde6"); now += buffer.transaction_group_interval() + Duration::from_millis(1); buffer.start_transaction_at(now); - buffer.edit([(0..1, "a")]); - buffer.edit([(1..1, "b")]); + buffer.edit([(0..1, "a")], cx.background_executor()); + buffer.edit([(1..1, "b")], cx.background_executor()); buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "ab2cde6"); @@ -609,7 +668,7 @@ fn test_history() { // Redo stack gets cleared after performing an edit. 
buffer.start_transaction_at(now); - buffer.edit([(0..0, "X")]); + buffer.edit([(0..0, "X")], cx.background_executor()); buffer.end_transaction_at(now); assert_eq!(buffer.text(), "X12cde6"); buffer.redo(); @@ -630,26 +689,31 @@ fn test_history() { assert_eq!(buffer.text(), "X12cde6"); } -#[test] -fn test_finalize_last_transaction() { +#[gpui::test] +fn test_finalize_last_transaction(cx: &mut gpui::TestAppContext) { let now = Instant::now(); - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "123456"); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "123456", + cx.background_executor(), + ); buffer.history.group_interval = Duration::from_millis(1); buffer.start_transaction_at(now); - buffer.edit([(2..4, "cd")]); + buffer.edit([(2..4, "cd")], cx.background_executor()); buffer.end_transaction_at(now); assert_eq!(buffer.text(), "12cd56"); buffer.finalize_last_transaction(); buffer.start_transaction_at(now); - buffer.edit([(4..5, "e")]); + buffer.edit([(4..5, "e")], cx.background_executor()); buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "12cde6"); buffer.start_transaction_at(now); - buffer.edit([(0..1, "a")]); - buffer.edit([(1..1, "b")]); + buffer.edit([(0..1, "a")], cx.background_executor()); + buffer.edit([(1..1, "b")], cx.background_executor()); buffer.end_transaction_at(now).unwrap(); assert_eq!(buffer.text(), "ab2cde6"); @@ -666,14 +730,19 @@ fn test_finalize_last_transaction() { assert_eq!(buffer.text(), "ab2cde6"); } -#[test] -fn test_edited_ranges_for_transaction() { +#[gpui::test] +fn test_edited_ranges_for_transaction(cx: &mut gpui::TestAppContext) { let now = Instant::now(); - let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "1234567"); + let mut buffer = Buffer::new( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + "1234567", + cx.background_executor(), + ); buffer.start_transaction_at(now); - buffer.edit([(2..4, "cd")]); - buffer.edit([(6..6, 
"efg")]); + buffer.edit([(2..4, "cd")], cx.background_executor()); + buffer.edit([(6..6, "efg")], cx.background_executor()); buffer.end_transaction_at(now); assert_eq!(buffer.text(), "12cd56efg7"); @@ -685,7 +754,7 @@ fn test_edited_ranges_for_transaction() { [2..4, 6..9] ); - buffer.edit([(5..5, "hijk")]); + buffer.edit([(5..5, "hijk")], cx.background_executor()); assert_eq!(buffer.text(), "12cd5hijk6efg7"); assert_eq!( buffer @@ -694,7 +763,7 @@ fn test_edited_ranges_for_transaction() { [2..4, 10..13] ); - buffer.edit([(4..4, "l")]); + buffer.edit([(4..4, "l")], cx.background_executor()); assert_eq!(buffer.text(), "12cdl5hijk6efg7"); assert_eq!( buffer @@ -704,27 +773,42 @@ fn test_edited_ranges_for_transaction() { ); } -#[test] -fn test_concurrent_edits() { +#[gpui::test] +fn test_concurrent_edits(cx: &mut gpui::TestAppContext) { let text = "abcdef"; - let mut buffer1 = Buffer::new(ReplicaId::new(1), BufferId::new(1).unwrap(), text); - let mut buffer2 = Buffer::new(ReplicaId::new(2), BufferId::new(1).unwrap(), text); - let mut buffer3 = Buffer::new(ReplicaId::new(3), BufferId::new(1).unwrap(), text); + let mut buffer1 = Buffer::new( + ReplicaId::new(1), + BufferId::new(1).unwrap(), + text, + cx.background_executor(), + ); + let mut buffer2 = Buffer::new( + ReplicaId::new(2), + BufferId::new(1).unwrap(), + text, + cx.background_executor(), + ); + let mut buffer3 = Buffer::new( + ReplicaId::new(3), + BufferId::new(1).unwrap(), + text, + cx.background_executor(), + ); - let buf1_op = buffer1.edit([(1..2, "12")]); + let buf1_op = buffer1.edit([(1..2, "12")], cx.background_executor()); assert_eq!(buffer1.text(), "a12cdef"); - let buf2_op = buffer2.edit([(3..4, "34")]); + let buf2_op = buffer2.edit([(3..4, "34")], cx.background_executor()); assert_eq!(buffer2.text(), "abc34ef"); - let buf3_op = buffer3.edit([(5..6, "56")]); + let buf3_op = buffer3.edit([(5..6, "56")], cx.background_executor()); assert_eq!(buffer3.text(), "abcde56"); - 
buffer1.apply_op(buf2_op.clone()); - buffer1.apply_op(buf3_op.clone()); - buffer2.apply_op(buf1_op.clone()); - buffer2.apply_op(buf3_op); - buffer3.apply_op(buf1_op); - buffer3.apply_op(buf2_op); + buffer1.apply_op(buf2_op.clone(), Some(cx.background_executor())); + buffer1.apply_op(buf3_op.clone(), Some(cx.background_executor())); + buffer2.apply_op(buf1_op.clone(), Some(cx.background_executor())); + buffer2.apply_op(buf3_op, Some(cx.background_executor())); + buffer3.apply_op(buf1_op, Some(cx.background_executor())); + buffer3.apply_op(buf2_op, Some(cx.background_executor())); assert_eq!(buffer1.text(), "a12c34e56"); assert_eq!(buffer2.text(), "a12c34e56"); @@ -732,7 +816,7 @@ fn test_concurrent_edits() { } #[gpui::test(iterations = 100)] -fn test_random_concurrent_edits(mut rng: StdRng) { +fn test_random_concurrent_edits(mut rng: StdRng, cx: &mut gpui::TestAppContext) { let peers = env::var("PEERS") .map(|i| i.parse().expect("invalid `PEERS` variable")) .unwrap_or(5); @@ -753,6 +837,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { ReplicaId::new(i as u16), BufferId::new(1).unwrap(), base_text.clone(), + cx.background_executor(), ); buffer.history.group_interval = Duration::from_millis(rng.random_range(0..=200)); buffers.push(buffer); @@ -769,7 +854,9 @@ fn test_random_concurrent_edits(mut rng: StdRng) { let buffer = &mut buffers[replica_index]; match rng.random_range(0..=100) { 0..=50 if mutation_count != 0 => { - let op = buffer.randomly_edit(&mut rng, 5).1; + let op = buffer + .randomly_edit(&mut rng, 5, cx.background_executor()) + .1; network.broadcast(buffer.replica_id, vec![op]); log::info!("buffer {:?} text: {:?}", buffer.replica_id, buffer.text()); mutation_count -= 1; @@ -787,7 +874,7 @@ fn test_random_concurrent_edits(mut rng: StdRng) { replica_id, ops.len() ); - buffer.apply_ops(ops); + buffer.apply_ops(ops, Some(cx.background_executor())); } } _ => {} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 
6403c66106dca88cdac85e09888012d890158a23..9d3034c0e9603ee37dca802e545af9a593b930a1 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -15,6 +15,7 @@ use anyhow::{Context as _, Result}; use clock::Lamport; pub use clock::ReplicaId; use collections::{HashMap, HashSet}; +use gpui::BackgroundExecutor; use locator::Locator; use operation_queue::OperationQueue; pub use patch::Patch; @@ -709,11 +710,41 @@ impl FromIterator for LineIndent { } impl Buffer { - pub fn new(replica_id: ReplicaId, remote_id: BufferId, base_text: impl Into) -> Buffer { + /// Create a new buffer from a string. + pub fn new( + replica_id: ReplicaId, + remote_id: BufferId, + base_text: impl Into, + executor: &BackgroundExecutor, + ) -> Buffer { let mut base_text = base_text.into(); let line_ending = LineEnding::detect(&base_text); LineEnding::normalize(&mut base_text); - Self::new_normalized(replica_id, remote_id, line_ending, Rope::from(&*base_text)) + Self::new_normalized( + replica_id, + remote_id, + line_ending, + Rope::from_str(&base_text, executor), + ) + } + + /// Create a new buffer from a string. + /// + /// Unlike [`Buffer::new`], this does not construct the backing rope in parallel if it is large enough. 
+ pub fn new_slow( + replica_id: ReplicaId, + remote_id: BufferId, + base_text: impl Into, + ) -> Buffer { + let mut base_text = base_text.into(); + let line_ending = LineEnding::detect(&base_text); + LineEnding::normalize(&mut base_text); + Self::new_normalized( + replica_id, + remote_id, + line_ending, + Rope::from_str_small(&base_text), + ) } pub fn new_normalized( @@ -808,7 +839,7 @@ impl Buffer { self.history.group_interval } - pub fn edit(&mut self, edits: R) -> Operation + pub fn edit(&mut self, edits: R, cx: &BackgroundExecutor) -> Operation where R: IntoIterator, I: ExactSizeIterator, T)>, @@ -821,7 +852,7 @@ impl Buffer { self.start_transaction(); let timestamp = self.lamport_clock.tick(); - let operation = Operation::Edit(self.apply_local_edit(edits, timestamp)); + let operation = Operation::Edit(self.apply_local_edit(edits, timestamp, cx)); self.history.push(operation.clone()); self.history.push_undo(operation.timestamp()); @@ -834,6 +865,7 @@ impl Buffer { &mut self, edits: impl ExactSizeIterator, T)>, timestamp: clock::Lamport, + executor: &BackgroundExecutor, ) -> EditOperation { let mut edits_patch = Patch::default(); let mut edit_op = EditOperation { @@ -922,7 +954,7 @@ impl Buffer { }); insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment)); new_insertions.push(InsertionFragment::insert_new(&fragment)); - new_ropes.push_str(new_text.as_ref()); + new_ropes.push_str(new_text.as_ref(), executor); new_fragments.push(fragment, &None); insertion_offset += new_text.len(); } @@ -1001,22 +1033,26 @@ impl Buffer { self.snapshot.line_ending = line_ending; } - pub fn apply_ops>(&mut self, ops: I) { + pub fn apply_ops>( + &mut self, + ops: I, + executor: Option<&BackgroundExecutor>, + ) { let mut deferred_ops = Vec::new(); for op in ops { self.history.push(op.clone()); if self.can_apply_op(&op) { - self.apply_op(op); + self.apply_op(op, executor); } else { self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); } } 
self.deferred_ops.insert(deferred_ops); - self.flush_deferred_ops(); + self.flush_deferred_ops(executor); } - fn apply_op(&mut self, op: Operation) { + fn apply_op(&mut self, op: Operation, executor: Option<&BackgroundExecutor>) { match op { Operation::Edit(edit) => { if !self.version.observed(edit.timestamp) { @@ -1025,6 +1061,7 @@ impl Buffer { &edit.ranges, &edit.new_text, edit.timestamp, + executor, ); self.snapshot.version.observe(edit.timestamp); self.lamport_clock.observe(edit.timestamp); @@ -1055,6 +1092,7 @@ impl Buffer { ranges: &[Range], new_text: &[Arc], timestamp: clock::Lamport, + executor: Option<&BackgroundExecutor>, ) { if ranges.is_empty() { return; @@ -1170,7 +1208,10 @@ impl Buffer { }); insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment)); new_insertions.push(InsertionFragment::insert_new(&fragment)); - new_ropes.push_str(new_text); + match executor { + Some(executor) => new_ropes.push_str(new_text, executor), + None => new_ropes.push_str_small(new_text), + } new_fragments.push(fragment, &None); insertion_offset += new_text.len(); } @@ -1348,12 +1389,12 @@ impl Buffer { self.subscriptions.publish_mut(&edits); } - fn flush_deferred_ops(&mut self) { + fn flush_deferred_ops(&mut self, executor: Option<&BackgroundExecutor>) { self.deferred_replicas.clear(); let mut deferred_ops = Vec::new(); for op in self.deferred_ops.drain().iter().cloned() { if self.can_apply_op(&op) { - self.apply_op(op); + self.apply_op(op, executor); } else { self.deferred_replicas.insert(op.replica_id()); deferred_ops.push(op); @@ -1711,9 +1752,9 @@ impl Buffer { #[cfg(any(test, feature = "test-support"))] impl Buffer { #[track_caller] - pub fn edit_via_marked_text(&mut self, marked_string: &str) { + pub fn edit_via_marked_text(&mut self, marked_string: &str, cx: &BackgroundExecutor) { let edits = self.edits_for_marked_text(marked_string); - self.edit(edits); + self.edit(edits, cx); } #[track_caller] @@ -1850,6 +1891,7 @@ impl Buffer { &mut self, rng: 
&mut T, edit_count: usize, + executor: &BackgroundExecutor, ) -> (Vec<(Range, Arc)>, Operation) where T: rand::Rng, @@ -1857,7 +1899,7 @@ impl Buffer { let mut edits = self.get_random_edits(rng, edit_count); log::info!("mutating buffer {:?} with {:?}", self.replica_id, edits); - let op = self.edit(edits.iter().cloned()); + let op = self.edit(edits.iter().cloned(), executor); if let Operation::Edit(edit) = &op { assert_eq!(edits.len(), edit.new_text.len()); for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) { @@ -2692,8 +2734,12 @@ impl<'a> RopeBuilder<'a> { } } - fn push_str(&mut self, text: &str) { - self.new_visible.push(text); + fn push_str(&mut self, text: &str, cx: &BackgroundExecutor) { + self.new_visible.push(text, cx); + } + + fn push_str_small(&mut self, text: &str) { + self.new_visible.push_small(text); } fn finish(mut self) -> (Rope, Rope) { diff --git a/crates/vim/src/motion.rs b/crates/vim/src/motion.rs index 2da1083ee6623cc8a463ef31be7e90dca0063b34..a885538e13e11b9720c3d7ffed5f7e6461943598 100644 --- a/crates/vim/src/motion.rs +++ b/crates/vim/src/motion.rs @@ -3096,6 +3096,7 @@ mod test { use indoc::indoc; use language::Point; use multi_buffer::MultiBufferRow; + use text::Rope; #[gpui::test] async fn test_start_end_of_paragraph(cx: &mut gpui::TestAppContext) { @@ -3822,7 +3823,7 @@ mod test { cx.update_editor(|editor, _window, cx| { let range = editor.selections.newest_anchor().range(); let inlay_text = " field: int,\n field2: string\n field3: float"; - let inlay = Inlay::edit_prediction(1, range.start, inlay_text); + let inlay = Inlay::edit_prediction(1, range.start, Rope::from_str_small(inlay_text)); editor.splice_inlays(&[], vec![inlay], cx); }); @@ -3854,7 +3855,7 @@ mod test { let end_of_line = snapshot.anchor_after(Point::new(0, snapshot.line_len(MultiBufferRow(0)))); let inlay_text = " hint"; - let inlay = Inlay::edit_prediction(1, end_of_line, inlay_text); + let inlay = Inlay::edit_prediction(1, end_of_line, 
Rope::from_str_small(inlay_text)); editor.splice_inlays(&[], vec![inlay], cx); }); cx.simulate_keystrokes("$"); @@ -3893,7 +3894,7 @@ mod test { // The empty line is at line 3 (0-indexed) let line_start = snapshot.anchor_after(Point::new(3, 0)); let inlay_text = ": Vec"; - let inlay = Inlay::edit_prediction(1, line_start, inlay_text); + let inlay = Inlay::edit_prediction(1, line_start, Rope::from_str_small(inlay_text)); editor.splice_inlays(&[], vec![inlay], cx); }); @@ -3937,7 +3938,8 @@ mod test { let snapshot = editor.buffer().read(cx).snapshot(cx); let empty_line_start = snapshot.anchor_after(Point::new(2, 0)); let inlay_text = ": i32"; - let inlay = Inlay::edit_prediction(2, empty_line_start, inlay_text); + let inlay = + Inlay::edit_prediction(2, empty_line_start, Rope::from_str_small(inlay_text)); editor.splice_inlays(&[], vec![inlay], cx); }); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b1de240eb62bcca6967333641bf8234825730300..62e29f215146c03060afe81ee67b78e3b3ea8a59 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -7580,13 +7580,13 @@ pub fn create_and_open_local_file( path: &'static Path, window: &mut Window, cx: &mut Context, - default_content: impl 'static + Send + FnOnce() -> Rope, + default_content: impl 'static + Send + FnOnce(&mut AsyncApp) -> Rope, ) -> Task>> { cx.spawn_in(window, async move |workspace, cx| { let fs = workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?; if !fs.is_file(path).await { fs.create_file(path, Default::default()).await?; - fs.save(path, &default_content(), Default::default()) + fs.save(path, &default_content(cx), Default::default()) .await?; } diff --git a/crates/worktree/src/worktree_tests.rs b/crates/worktree/src/worktree_tests.rs index d89e1ef4e4df7dbef3cf51789c1f1fc8a5309eb1..1cce23712ae88f0e42faf240099ebecd9000fc4e 100644 --- a/crates/worktree/src/worktree_tests.rs +++ b/crates/worktree/src/worktree_tests.rs @@ 
-20,6 +20,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; +use text::Rope; use util::{ ResultExt, path, rel_path::{RelPath, rel_path}, @@ -646,9 +647,13 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) { // Update the gitignore so that node_modules is no longer ignored, // but a subdirectory is ignored - fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default()) - .await - .unwrap(); + fs.save( + "/root/.gitignore".as_ref(), + &Rope::from_str("e", cx.background_executor()), + Default::default(), + ) + .await + .unwrap(); cx.executor().run_until_parked(); // All of the directories that are no longer ignored are now loaded. @@ -716,7 +721,7 @@ async fn test_write_file(cx: &mut TestAppContext) { .update(cx, |tree, cx| { tree.write_file( rel_path("tracked-dir/file.txt").into(), - "hello".into(), + Rope::from_str("hello", cx.background_executor()), Default::default(), cx, ) @@ -727,7 +732,7 @@ async fn test_write_file(cx: &mut TestAppContext) { .update(cx, |tree, cx| { tree.write_file( rel_path("ignored-dir/file.txt").into(), - "world".into(), + Rope::from_str("world", cx.background_executor()), Default::default(), cx, ) @@ -1465,7 +1470,7 @@ async fn test_random_worktree_operations_during_initial_scan( let fs = FakeFs::new(cx.background_executor.clone()) as Arc; fs.as_fake().insert_tree(root_dir, json!({})).await; for _ in 0..initial_entries { - randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await; } log::info!("generated initial tree"); @@ -1555,7 +1560,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) let fs = FakeFs::new(cx.background_executor.clone()) as Arc; fs.as_fake().insert_tree(root_dir, json!({})).await; for _ in 0..initial_entries { - randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await; } log::info!("generated initial 
tree"); @@ -1598,7 +1603,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) .await .log_err(); } else { - randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await; + randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng, cx.background_executor()).await; } let buffered_event_count = fs.as_fake().buffered_event_count(); @@ -1607,7 +1612,7 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) log::info!("flushing {} events", len); fs.as_fake().flush_events(len); } else { - randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await; + randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng, cx.background_executor()).await; mutations_len -= 1; } @@ -1759,8 +1764,12 @@ fn randomly_mutate_worktree( }) } else { log::info!("overwriting file {:?} ({})", &entry.path, entry.id.0); - let task = - worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx); + let task = worktree.write_file( + entry.path.clone(), + Rope::default(), + Default::default(), + cx, + ); cx.background_spawn(async move { task.await?; Ok(()) @@ -1775,6 +1784,7 @@ async fn randomly_mutate_fs( root_path: &Path, insertion_probability: f64, rng: &mut impl Rng, + executor: &BackgroundExecutor, ) { log::info!("mutating fs"); let mut files = Vec::new(); @@ -1849,7 +1859,7 @@ async fn randomly_mutate_fs( ); fs.save( &ignore_path, - &ignore_contents.as_str().into(), + &Rope::from_str(ignore_contents.as_str(), executor), Default::default(), ) .await diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 2d7d47e968e93eef3d455cec9c324a4d4e0cff42..bd0a600ce52a265f9785b1e26e7a123f270ce263 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -28,10 +28,10 @@ use git_ui::commit_view::CommitViewToolbar; use git_ui::git_panel::GitPanel; use git_ui::project_diff::ProjectDiffToolbar; use gpui::{ - Action, App, AppContext as _, Context, DismissEvent, Element, Entity, Focusable, KeyBinding, - ParentElement, PathPromptOptions, PromptLevel, 
ReadGlobal, SharedString, Styled, Task, - TitlebarOptions, UpdateGlobal, Window, WindowKind, WindowOptions, actions, image_cache, point, - px, retain_all, + Action, App, AppContext as _, AsyncApp, Context, DismissEvent, Element, Entity, Focusable, + KeyBinding, ParentElement, PathPromptOptions, PromptLevel, ReadGlobal, SharedString, Styled, + Task, TitlebarOptions, UpdateGlobal, Window, WindowKind, WindowOptions, actions, image_cache, + point, px, retain_all, }; use image_viewer::ImageInfo; use language::Capability; @@ -201,7 +201,12 @@ pub fn init(cx: &mut App) { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::keymap_file(), - || settings::initial_keymap_content().as_ref().into(), + |cx| { + Rope::from_str( + settings::initial_keymap_content().as_ref(), + cx.background_executor(), + ) + }, window, cx, ); @@ -211,7 +216,12 @@ pub fn init(cx: &mut App) { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::settings_file(), - || settings::initial_user_settings_content().as_ref().into(), + |cx| { + Rope::from_str( + settings::initial_user_settings_content().as_ref(), + cx.background_executor(), + ) + }, window, cx, ); @@ -226,7 +236,12 @@ pub fn init(cx: &mut App) { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::tasks_file(), - || settings::initial_tasks_content().as_ref().into(), + |cx| { + Rope::from_str( + settings::initial_tasks_content().as_ref(), + cx.background_executor(), + ) + }, window, cx, ); @@ -236,7 +251,12 @@ pub fn init(cx: &mut App) { with_active_or_new_workspace(cx, |_, window, cx| { open_settings_file( paths::debug_scenarios_file(), - || settings::initial_debug_tasks_content().as_ref().into(), + |cx| { + Rope::from_str( + settings::initial_debug_tasks_content().as_ref(), + cx.background_executor(), + ) + }, window, cx, ); @@ -1939,7 +1959,7 @@ fn open_bundled_file( fn open_settings_file( abs_path: &'static Path, - default_content: impl FnOnce() -> Rope + Send + 
'static, + default_content: impl FnOnce(&mut AsyncApp) -> Rope + Send + 'static, window: &mut Window, cx: &mut Context, ) { @@ -4355,7 +4375,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &r#"{"base_keymap": "Atom"}"#.into(), + &Rope::from_str_small(r#"{"base_keymap": "Atom"}"#), Default::default(), ) .await @@ -4365,7 +4385,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#.into(), + &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#), Default::default(), ) .await @@ -4413,7 +4433,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &r#"[{"bindings": {"backspace": "test_only::ActionB"}}]"#.into(), + &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionB"}}]"#), Default::default(), ) .await @@ -4433,7 +4453,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &r#"{"base_keymap": "JetBrains"}"#.into(), + &Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#), Default::default(), ) .await @@ -4473,7 +4493,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &r#"{"base_keymap": "Atom"}"#.into(), + &Rope::from_str_small(r#"{"base_keymap": "Atom"}"#), Default::default(), ) .await @@ -4482,7 +4502,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#.into(), + &Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#), Default::default(), ) .await @@ -4525,7 +4545,7 @@ mod tests { .fs .save( "/keymap.json".as_ref(), - &r#"[{"bindings": {"backspace": null}}]"#.into(), + &Rope::from_str_small(r#"[{"bindings": {"backspace": null}}]"#), Default::default(), ) .await @@ -4545,7 +4565,7 @@ mod tests { .fs .save( "/settings.json".as_ref(), - &r#"{"base_keymap": "JetBrains"}"#.into(), + &Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#), Default::default(), ) .await diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 
3abb76715d67e3d288cf812fc6a4bff58ac3ddfe..bc6c25105e69eb85e8db3714c48dc30791683109 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -861,7 +861,7 @@ mod tests { .fs .save( Path::new(file1_path), - &Rope::from("content1"), + &Rope::from_str("content1", cx.background_executor()), LineEnding::Unix, ) .await @@ -875,7 +875,7 @@ mod tests { .fs .save( Path::new(file2_path), - &Rope::from("content2"), + &Rope::from_str("content2", cx.background_executor()), LineEnding::Unix, ) .await diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 454a1526a9e8c6a75d47bda875feb6843b454a0d..ca2edd0682e181c8db7b8f1973386d3190eab12d 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -1836,12 +1836,13 @@ mod tests { let fs = project::FakeFs::new(cx.executor()); let project = Project::test(fs.clone(), [], cx).await; - let buffer = cx.new(|_cx| { + let buffer = cx.new(|cx| { Buffer::remote( language::BufferId::new(1).unwrap(), ReplicaId::new(1), language::Capability::ReadWrite, "fn main() {\n println!(\"Hello\");\n}", + cx.background_executor(), ) }); From 1f938c08d26c79184059aa5382324613557466c5 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 31 Oct 2025 10:55:08 -0300 Subject: [PATCH 35/82] project panel: Remove extra separator when "Rename" is hidden (#41639) Closes https://github.com/zed-industries/zed/issues/41633 Release Notes: - N/A --- crates/project_panel/src/project_panel.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 8794b625e2b63384041264d67b7d8bf729707735..2dbdfdd22e80cf61de48784470fe3bf8375b624c 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -1038,9 +1038,8 @@ impl ProjectPanel { "Copy Relative Path", Box::new(zed_actions::workspace::CopyRelativePath), ) - .separator() 
.when(!should_hide_rename, |menu| { - menu.action("Rename", Box::new(Rename)) + menu.separator().action("Rename", Box::new(Rename)) }) .when(!is_root && !is_remote, |menu| { menu.action("Trash", Box::new(Trash { skip_prompt: false })) From c6d61870e2a685d3f402d7c2dd39b3d9740e4292 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 31 Oct 2025 15:24:23 +0100 Subject: [PATCH 36/82] editor: Fix incorrect hover popup row clamping (#41645) Fixes ZED-2TR Fixes ZED-2TQ Fixes ZED-2TB Fixes ZED-2SW Fixes ZED-2SQ Release Notes: - Fixed panic in repainting hover popups Co-authored by: David --- crates/editor/src/element.rs | 12 +++++++----- crates/editor/src/hover_popover.rs | 18 +++++++++++------- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 17b9ea9ced8d34396426e0a2640904b6e8df97a4..7579441595c5d774e8d96439d0e03a21f3e624b8 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -5114,19 +5114,21 @@ impl EditorElement { cx, ) }); - let Some((position, hover_popovers)) = hover_popovers else { + let Some((popover_position, hover_popovers)) = hover_popovers else { return; }; // This is safe because we check on layout whether the required row is available - let hovered_row_layout = - &line_layouts[position.row().minus(visible_display_row_range.start) as usize]; + let hovered_row_layout = &line_layouts[popover_position + .row() + .minus(visible_display_row_range.start) + as usize]; // Compute Hovered Point - let x = hovered_row_layout.x_for_index(position.column() as usize) + let x = hovered_row_layout.x_for_index(popover_position.column() as usize) - Pixels::from(scroll_pixel_position.x); let y = Pixels::from( - position.row().as_f64() * ScrollPixelOffset::from(line_height) + popover_position.row().as_f64() * ScrollPixelOffset::from(line_height) - scroll_pixel_position.y, ); let hovered_point = content_origin + point(x, y); diff --git a/crates/editor/src/hover_popover.rs 
b/crates/editor/src/hover_popover.rs index 6227d90e9be7a5fbbe98b9dd8900860c219d07d2..7446b21b9cca5158c3df1c9c13fcb4f7d65b3445 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -797,23 +797,22 @@ impl HoverState { }) })?; let mut point = anchor.to_display_point(&snapshot.display_snapshot); - // Clamp the point within the visible rows in case the popup source spans multiple lines - if point.row() < visible_rows.start { - point = crate::movement::down_by_rows( + if visible_rows.end <= point.row() { + point = crate::movement::up_by_rows( &snapshot.display_snapshot, point, - (visible_rows.start - point.row()).0, + 1 + (point.row() - visible_rows.end).0, text::SelectionGoal::None, true, text_layout_details, ) .0; - } else if visible_rows.end <= point.row() { - point = crate::movement::up_by_rows( + } else if point.row() < visible_rows.start { + point = crate::movement::down_by_rows( &snapshot.display_snapshot, point, - (visible_rows.end - point.row()).0, + (visible_rows.start - point.row()).0, text::SelectionGoal::None, true, text_layout_details, @@ -821,6 +820,11 @@ impl HoverState { .0; } + if !visible_rows.contains(&point.row()) { + log::error!("Hover popover point out of bounds after moving"); + return None; + } + let mut elements = Vec::new(); if let Some(diagnostic_popover) = self.diagnostic_popover.as_ref() { From f73d6fe4ce1cd9c01220da77e071f1f0c12cccbe Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 31 Oct 2025 16:01:50 +0100 Subject: [PATCH 37/82] terminal: Kill the terminal child process, not the terminal process on exit (#41631) When rerunning a task, our process id fetching seems to sometimes return the previous terminal's process id when respawning the task, causing us to kill the new terminal once the previous one drops as we spawn a new one, then drop the old one. This results in rerun sometimes spawning a blank task as the terminal immediately exits. 
The fix here is simple, we actually want to kill the process running inside the terminal process, not the terminal process itself when we exit in the terminal. No relnotes as this was introduced yesterday in https://github.com/zed-industries/zed/pull/41562 Release Notes: - N/A --- crates/terminal/src/pty_info.rs | 23 +++++++++++++++-------- crates/terminal/src/terminal.rs | 2 +- crates/terminal_view/src/terminal_view.rs | 3 ++- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/crates/terminal/src/pty_info.rs b/crates/terminal/src/pty_info.rs index f8e8a122f845253175ba6f176c12408056163fdb..c92de2f23b83ba4dbedc6980ceb9b106d06467c1 100644 --- a/crates/terminal/src/pty_info.rs +++ b/crates/terminal/src/pty_info.rs @@ -15,6 +15,12 @@ pub struct ProcessIdGetter { fallback_pid: u32, } +impl ProcessIdGetter { + pub fn fallback_pid(&self) -> Pid { + Pid::from_u32(self.fallback_pid) + } +} + #[cfg(unix)] impl ProcessIdGetter { fn new(pty: &Pty) -> ProcessIdGetter { @@ -31,10 +37,6 @@ impl ProcessIdGetter { } Some(Pid::from_u32(pid as u32)) } - - pub fn fallback_pid(&self) -> u32 { - self.fallback_pid - } } #[cfg(windows)] @@ -66,10 +68,6 @@ impl ProcessIdGetter { } Some(Pid::from_u32(pid)) } - - pub fn fallback_pid(&self) -> u32 { - self.fallback_pid - } } #[derive(Clone, Debug)] @@ -122,10 +120,19 @@ impl PtyProcessInfo { } } + fn get_child(&self) -> Option<&Process> { + let pid = self.pid_getter.fallback_pid(); + self.system.process(pid) + } + pub(crate) fn kill_current_process(&mut self) -> bool { self.refresh().is_some_and(|process| process.kill()) } + pub(crate) fn kill_child_process(&mut self) -> bool { + self.get_child().is_some_and(|process| process.kill()) + } + fn load(&mut self) -> Option { let process = self.refresh()?; let cwd = process.cwd().map_or(PathBuf::new(), |p| p.to_owned()); diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 
f91a47630614b4736ebe5bb642d6430574c8c8ed..0550e4512a2c7594fc27dae37448cf6a09a5606a 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -2237,7 +2237,7 @@ unsafe fn append_text_to_term(term: &mut Term, text_lines: &[&str]) impl Drop for Terminal { fn drop(&mut self) { if let TerminalType::Pty { pty_tx, info } = &mut self.terminal_type { - info.kill_current_process(); + info.kill_child_process(); pty_tx.0.send(Msg::Shutdown).ok(); } } diff --git a/crates/terminal_view/src/terminal_view.rs b/crates/terminal_view/src/terminal_view.rs index 2a9720357b27b91f3a5ff7689f4cb0f16787031b..7208f28a80149d39d056a4767c7b8742f1147009 100644 --- a/crates/terminal_view/src/terminal_view.rs +++ b/crates/terminal_view/src/terminal_view.rs @@ -1141,7 +1141,8 @@ impl Item for TerminalView { let pid = terminal.pid_getter()?.fallback_pid(); Some(TabTooltipContent::Custom(Box::new(move |_window, cx| { - cx.new(|_| TerminalTooltip::new(title.clone(), pid)).into() + cx.new(|_| TerminalTooltip::new(title.clone(), pid.as_u32())) + .into() }))) } From f3a5ebc31501c4f46c5fe0478759ce21b6249c13 Mon Sep 17 00:00:00 2001 From: Dino Date: Fri, 31 Oct 2025 15:04:54 +0000 Subject: [PATCH 38/82] vim: Only focus when associated editor is also focused (#41487) Update `Vim::activate` to ensure that the `Vim.focused` method is only called if the associated editor is also focused. This ensures that the `VimEvent::Focused` event is only emitted when the editor is actually focused, preventing a bug where, after starting Zed, Vim's mode indicator would show that the mode was `Insert` even though it was in `Normal` mode in the main editor. 
Closes #41353 Release Notes: - Fixed vim's mode being shown as `Insert` right after opening Zed Co-authored-by: Conrad Irwin --- crates/vim/src/vim.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 91ce66d43e76f3a40a5e074f01527953def1b188..3310c1dab1ac3c05bc24aa1b56f94dcfa22511f8 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -944,9 +944,11 @@ impl Vim { change_list::register(editor, cx); digraph::register(editor, cx); - cx.defer_in(window, |vim, window, cx| { - vim.focused(false, window, cx); - }) + if editor.is_focused(window) { + cx.defer_in(window, |vim, window, cx| { + vim.focused(false, window, cx); + }) + } }) } From c42d54af175721913ea2b67ce5b8a268262812ae Mon Sep 17 00:00:00 2001 From: Dino Date: Fri, 31 Oct 2025 15:17:26 +0000 Subject: [PATCH 39/82] agent_ui: Autoscroll after inserting selections (#41370) Update the behavior of the `zed_actions::agent::AddSelectionToThread` action so that, after the selections are added to the current thread, the editor automatically scrolls to the cursor's position, fixing an issue where the inserted selection's UI component could wrap the cursor to the next line below, leaving it outside the viewable area. 
Closes #39694 Release Notes: - Improved the `agent: add selection to thread` action so as to automatically scroll to the cursor's position after selections are inserted --- crates/agent_ui/src/acp/message_editor.rs | 175 ++++++++++++++++++++++ 1 file changed, 175 insertions(+) diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index 90991182dc77e00c07fb7c7330695f72da9a2f44..856cc4d0d47d1e1d618c0056c771dfabe3c0bda4 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -16,6 +16,7 @@ use editor::{ MultiBuffer, ToOffset, actions::Paste, display_map::{Crease, CreaseId, FoldId}, + scroll::Autoscroll, }; use futures::{ FutureExt as _, @@ -591,6 +592,21 @@ impl MessageEditor { ), ); } + + // Take this explanation with a grain of salt but, with creases being + // inserted, GPUI's recomputes the editor layout in the next frames, so + // directly calling `editor.request_autoscroll` wouldn't work as + // expected. We're leveraging `cx.on_next_frame` to wait 2 frames and + // ensure that the layout has been recalculated so that the autoscroll + // request actually shows the cursor's new position. 
+ let editor = self.editor.clone(); + cx.on_next_frame(window, move |_, window, cx| { + cx.on_next_frame(window, move |_, _, cx| { + editor.update(cx, |editor, cx| { + editor.request_autoscroll(Autoscroll::fit(), cx) + }); + }); + }); } fn confirm_mention_for_thread( @@ -1030,6 +1046,7 @@ impl MessageEditor { self.editor.update(cx, |message_editor, cx| { message_editor.edit([(cursor_anchor..cursor_anchor, completion.new_text)], cx); + message_editor.request_autoscroll(Autoscroll::fit(), cx); }); if let Some(confirm) = completion.confirm { confirm(CompletionIntent::Complete, window, cx); @@ -2745,6 +2762,7 @@ mod tests { _ => panic!("Expected Text mention for small file"), } } + #[gpui::test] async fn test_insert_thread_summary(cx: &mut TestAppContext) { init_test(cx); @@ -2878,4 +2896,161 @@ mod tests { })] ); } + + #[gpui::test] + async fn test_autoscroll_after_insert_selections(cx: &mut TestAppContext) { + init_test(cx); + + let app_state = cx.update(AppState::test); + + cx.update(|cx| { + language::init(cx); + editor::init(cx); + workspace::init(app_state.clone(), cx); + Project::init_settings(cx); + }); + + app_state + .fs + .as_fake() + .insert_tree( + path!("/dir"), + json!({ + "test.txt": "line1\nline2\nline3\nline4\nline5\n", + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await; + let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx)); + let workspace = window.root(cx).unwrap(); + + let worktree = project.update(cx, |project, cx| { + let mut worktrees = project.worktrees(cx).collect::>(); + assert_eq!(worktrees.len(), 1); + worktrees.pop().unwrap() + }); + let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id()); + + let mut cx = VisualTestContext::from_window(*window, cx); + + // Open a regular editor with the created file, and select a portion of + // the text that will be used for the selections that are meant to be + // inserted in the agent panel. 
+ let editor = workspace + .update_in(&mut cx, |workspace, window, cx| { + workspace.open_path( + ProjectPath { + worktree_id, + path: rel_path("test.txt").into(), + }, + None, + false, + window, + cx, + ) + }) + .await + .unwrap() + .downcast::() + .unwrap(); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.change_selections(Default::default(), window, cx, |selections| { + selections.select_ranges([Point::new(0, 0)..Point::new(0, 5)]); + }); + }); + + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx)); + + // Create a new `MessageEditor`. The `EditorMode::full()` has to be used + // to ensure we have a fixed viewport, so we can eventually actually + // place the cursor outside of the visible area. + let message_editor = workspace.update_in(&mut cx, |workspace, window, cx| { + let workspace_handle = cx.weak_entity(); + let message_editor = cx.new(|cx| { + MessageEditor::new( + workspace_handle, + project.clone(), + history_store.clone(), + None, + Default::default(), + Default::default(), + "Test Agent".into(), + "Test", + EditorMode::full(), + window, + cx, + ) + }); + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item( + Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))), + true, + true, + None, + window, + cx, + ); + }); + + message_editor + }); + + message_editor.update_in(&mut cx, |message_editor, window, cx| { + message_editor.editor.update(cx, |editor, cx| { + // Update the Agent Panel's Message Editor text to have 100 + // lines, ensuring that the cursor is set at line 90 and that we + // then scroll all the way to the top, so the cursor's position + // remains off screen. 
+ let mut lines = String::new(); + for _ in 1..=100 { + lines.push_str(&"Another line in the agent panel's message editor\n"); + } + editor.set_text(lines.as_str(), window, cx); + editor.change_selections(Default::default(), window, cx, |selections| { + selections.select_ranges([Point::new(90, 0)..Point::new(90, 0)]); + }); + editor.set_scroll_position(gpui::Point::new(0., 0.), window, cx); + }); + }); + + cx.run_until_parked(); + + // Before proceeding, let's assert that the cursor is indeed off screen, + // otherwise the rest of the test doesn't make sense. + message_editor.update_in(&mut cx, |message_editor, window, cx| { + message_editor.editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(window, cx); + let cursor_row = editor.selections.newest::(&snapshot).head().row; + let scroll_top = snapshot.scroll_position().y as u32; + let visible_lines = editor.visible_line_count().unwrap() as u32; + let visible_range = scroll_top..(scroll_top + visible_lines); + + assert!(!visible_range.contains(&cursor_row)); + }) + }); + + // Now let's insert the selection in the Agent Panel's editor and + // confirm that, after the insertion, the cursor is now in the visible + // range. 
+ message_editor.update_in(&mut cx, |message_editor, window, cx| { + message_editor.insert_selections(window, cx); + }); + + cx.run_until_parked(); + + message_editor.update_in(&mut cx, |message_editor, window, cx| { + message_editor.editor.update(cx, |editor, cx| { + let snapshot = editor.snapshot(window, cx); + let cursor_row = editor.selections.newest::(&snapshot).head().row; + let scroll_top = snapshot.scroll_position().y as u32; + let visible_lines = editor.visible_line_count().unwrap() as u32; + let visible_range = scroll_top..(scroll_top + visible_lines); + + assert!(visible_range.contains(&cursor_row)); + }) + }); + } } From c1dea842fff6ff20707fd570c5ec068b87103da4 Mon Sep 17 00:00:00 2001 From: versecafe <147033096+versecafe@users.noreply.github.com> Date: Fri, 31 Oct 2025 09:12:46 -0700 Subject: [PATCH 40/82] agent: Model name context (#41490) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #41478 Release Notes: - Fixed #41478 Screenshot 2025-10-29 at 1 31 26 PM > caused by using haiku 4.5 from the anthropic provider and then swapping to sonnet 3.7 through zed, doing this does mess with prompt caching but a model swap already invalidates that so it shouldn't have any cost impact on end users --- crates/agent/src/edit_agent/evals.rs | 1 + crates/agent/src/templates.rs | 3 +++ crates/agent/src/templates/system_prompt.hbs | 6 ++++++ crates/agent/src/thread.rs | 1 + 4 files changed, 11 insertions(+) diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index 84cdd101f57546a0bfbc86a290bf1f453e69a979..2cc6a6b4242a07b688d1232cd39d13797c70b02b 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -1581,6 +1581,7 @@ impl EditAgentTest { let template = crate::SystemPromptTemplate { project: &project_context, available_tools: tool_names, + model_name: None, }; let templates = Templates::new(); template.render(&templates).unwrap() diff --git 
a/crates/agent/src/templates.rs b/crates/agent/src/templates.rs index 72a8f6633cb7bb926580dbb4f9e65ec032162d93..db787d834e63746fdbea9e837f4fd0615f85c984 100644 --- a/crates/agent/src/templates.rs +++ b/crates/agent/src/templates.rs @@ -38,6 +38,7 @@ pub struct SystemPromptTemplate<'a> { #[serde(flatten)] pub project: &'a prompt_store::ProjectContext, pub available_tools: Vec, + pub model_name: Option, } impl Template for SystemPromptTemplate<'_> { @@ -79,9 +80,11 @@ mod tests { let template = SystemPromptTemplate { project: &project, available_tools: vec!["echo".into()], + model_name: Some("test-model".to_string()), }; let templates = Templates::new(); let rendered = template.render(&templates).unwrap(); assert!(rendered.contains("## Fixing Diagnostics")); + assert!(rendered.contains("test-model")); } } diff --git a/crates/agent/src/templates/system_prompt.hbs b/crates/agent/src/templates/system_prompt.hbs index ca324fad7acccb3e50f1140c8f99d52319d159d4..4620647135631fdb367b0dc2604e89770a938c07 100644 --- a/crates/agent/src/templates/system_prompt.hbs +++ b/crates/agent/src/templates/system_prompt.hbs @@ -150,6 +150,12 @@ Otherwise, follow debugging best practices: Operating System: {{os}} Default Shell: {{shell}} +{{#if model_name}} +## Model Information + +You are powered by the model named {{model_name}}. 
+ +{{/if}} {{#if (or has_rules has_user_rules)}} ## User's Custom Instructions diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 64e512690beeaebd4a343bc5f2df473c795aed3f..4c0fb00163744e66b5644a0fe76b1aa853fb8237 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1928,6 +1928,7 @@ impl Thread { let system_prompt = SystemPromptTemplate { project: self.project_context.read(cx), available_tools, + model_name: self.model.as_ref().map(|m| m.name().0.to_string()), } .render(&self.templates) .context("failed to build system prompt") From 4e6a562efedda5f3f46de3b1a2319d77b256e342 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 31 Oct 2025 17:39:55 +0100 Subject: [PATCH 41/82] editor: Fix `refresh_linked_ranges` panics due to old snapshot use (#41657) Fixes ZED-29Z Release Notes: - Fixed panic in `refresh_linked_ranges` --- crates/editor/src/linked_editing_ranges.rs | 21 ++++++++------ crates/text/src/text.rs | 32 ++++++++++++++++------ 2 files changed, 35 insertions(+), 18 deletions(-) diff --git a/crates/editor/src/linked_editing_ranges.rs b/crates/editor/src/linked_editing_ranges.rs index c883ec14fb4c50a11fb4dfba1031baebf4637f11..ab16fe7eb4bce28ef6bfee2c2bde1d52fda86561 100644 --- a/crates/editor/src/linked_editing_ranges.rs +++ b/crates/editor/src/linked_editing_ranges.rs @@ -1,5 +1,5 @@ use collections::HashMap; -use gpui::{Context, Window}; +use gpui::{AppContext, Context, Window}; use itertools::Itertools; use std::{ops::Range, time::Duration}; use text::{AnchorRangeExt, BufferId, ToPoint}; @@ -59,8 +59,9 @@ pub(super) fn refresh_linked_ranges( let mut applicable_selections = Vec::new(); editor .update(cx, |editor, cx| { - let selections = editor.selections.all::(&editor.display_snapshot(cx)); - let snapshot = editor.buffer.read(cx).snapshot(cx); + let display_snapshot = editor.display_snapshot(cx); + let selections = editor.selections.all::(&display_snapshot); + let snapshot = display_snapshot.buffer_snapshot(); 
let buffer = editor.buffer.read(cx); for selection in selections { let cursor_position = selection.head(); @@ -90,14 +91,16 @@ pub(super) fn refresh_linked_ranges( let highlights = project .update(cx, |project, cx| { let mut linked_edits_tasks = vec![]; - for (buffer, start, end) in &applicable_selections { - let snapshot = buffer.read(cx).snapshot(); - let buffer_id = buffer.read(cx).remote_id(); - let linked_edits_task = project.linked_edits(buffer, *start, cx); - let highlights = move || async move { + let cx = cx.to_async(); + let highlights = async move { let edits = linked_edits_task.await.log_err()?; + let snapshot = cx + .read_entity(&buffer, |buffer, _| buffer.snapshot()) + .ok()?; + let buffer_id = snapshot.remote_id(); + // Find the range containing our current selection. // We might not find one, because the selection contains both the start and end of the contained range // (think of selecting <`html>foo` - even though there's a matching closing tag, the selection goes beyond the range of the opening tag) @@ -128,7 +131,7 @@ pub(super) fn refresh_linked_ranges( siblings.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0, &snapshot)); Some((buffer_id, siblings)) }; - linked_edits_tasks.push(highlights()); + linked_edits_tasks.push(highlights); } linked_edits_tasks }) diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 9d3034c0e9603ee37dca802e545af9a593b930a1..de559075403f53468e0b0cbeb4ecdc1754f4375b 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2354,6 +2354,7 @@ impl BufferSnapshot { self.visible_text.len() } else { debug_assert!(anchor.buffer_id == Some(self.remote_id)); + debug_assert!(self.version.observed(anchor.timestamp)); let anchor_key = InsertionFragmentKey { timestamp: anchor.timestamp, split_offset: anchor.offset, @@ -2377,10 +2378,7 @@ impl BufferSnapshot { .item() .filter(|insertion| insertion.timestamp == anchor.timestamp) else { - panic!( - "invalid anchor {:?}. 
buffer id: {}, version: {:?}", - anchor, self.remote_id, self.version - ); + self.panic_bad_anchor(anchor); }; let (start, _, item) = self @@ -2399,13 +2397,29 @@ impl BufferSnapshot { } } - fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator { - self.try_fragment_id_for_anchor(anchor).unwrap_or_else(|| { + #[cold] + fn panic_bad_anchor(&self, anchor: &Anchor) -> ! { + if anchor.buffer_id.is_some_and(|id| id != self.remote_id) { + panic!( + "invalid anchor - buffer id does not match: anchor {anchor:?}; buffer id: {}, version: {:?}", + self.remote_id, self.version + ); + } else if !self.version.observed(anchor.timestamp) { + panic!( + "invalid anchor - snapshot has not observed lamport: {:?}; version: {:?}", + anchor, self.version + ); + } else { panic!( "invalid anchor {:?}. buffer id: {}, version: {:?}", - anchor, self.remote_id, self.version, - ) - }) + anchor, self.remote_id, self.version + ); + } + } + + fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator { + self.try_fragment_id_for_anchor(anchor) + .unwrap_or_else(|| self.panic_bad_anchor(anchor)) } fn try_fragment_id_for_anchor(&self, anchor: &Anchor) -> Option<&Locator> { From 1cb512f336e62afe6e44cdd0bc476cb50eb2eb29 Mon Sep 17 00:00:00 2001 From: versecafe <147033096+versecafe@users.noreply.github.com> Date: Fri, 31 Oct 2025 09:43:45 -0700 Subject: [PATCH 42/82] bedrock: Fix duplicate region input (#41341) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #41313 Release Notes: - Fixes #41313 Screenshot 2025-10-27 at 10 23 37 PM Co-authored-by: Richard Feldman --- crates/language_models/src/provider/bedrock.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index f3e265e925822b2de7950af9fbef5b121da3ed82..5699dd8e6693c26bd62f65fb160e0e30a62dda63 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ 
b/crates/language_models/src/provider/bedrock.rs @@ -1221,7 +1221,6 @@ impl Render for ConfigurationView { ) ) .child(self.render_static_credentials_ui()) - .child(self.region_editor.clone()) .child( Label::new( format!("You can also assign the {ZED_BEDROCK_ACCESS_KEY_ID_VAR}, {ZED_BEDROCK_SECRET_ACCESS_KEY_VAR} AND {ZED_BEDROCK_REGION_VAR} environment variables and restart Zed."), From cf31b736f796fbea669e751a8ad6fcd906e8d85f Mon Sep 17 00:00:00 2001 From: Andrew Farkas <6060305+HactarCE@users.noreply.github.com> Date: Fri, 31 Oct 2025 12:48:21 -0400 Subject: [PATCH 43/82] Add Andrew to REVIEWERS.conl (#41662) Release Notes: - N/A --- REVIEWERS.conl | 1 + 1 file changed, 1 insertion(+) diff --git a/REVIEWERS.conl b/REVIEWERS.conl index 45c73ffe2abefae08789fe6235ba8fd5d7244187..4d21f1be5c8d18df96ef67cb6f1c22568cc73bd9 100644 --- a/REVIEWERS.conl +++ b/REVIEWERS.conl @@ -19,6 +19,7 @@ = @dinocosta = @smitbarmase = @cole-miller + = @HactarCE vim = @ConradIrwin From 34e0c97dbc3bb61abd8608a0751470d46e726d41 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 31 Oct 2025 10:51:06 -0600 Subject: [PATCH 44/82] Generate dwarf files for builds again (#41651) Closes #ISSUE Release Notes: - N/A --- script/bundle-mac | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/script/bundle-mac b/script/bundle-mac index 0b4c1285fb21915c24bfeafecd9d8f1e190681fc..901d59f1d011a0ed2b20065bd86739605a81a487 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -278,30 +278,36 @@ function sign_binary() { /usr/bin/codesign --deep --force --timestamp --options runtime --entitlements crates/zed/resources/zed.entitlements --sign "$IDENTITY" "${binary_path}" -v fi } -cp target/${target_triple}/${target_dir}/zed "${app_path}/Contents/MacOS/zed" -cp target/${target_triple}/${target_dir}/cli "${app_path}/Contents/MacOS/cli" -sign_app_binaries - -sign_binary "target/$target_triple/release/remote_server" -gzip -f --stdout --best 
target/$target_triple/release/remote_server > target/zed-remote-server-macos-$remote_server_arch.gz -function upload_debug_info() { +function upload_debug_symbols() { if [[ -n "${SENTRY_AUTH_TOKEN:-}" ]]; then echo "Uploading zed debug symbols to sentry..." + exe_path="target/${target_triple}/release/Zed" + if ! dsymutil --flat "target/${target_triple}/${target_dir}/zed" 2> target/dsymutil.log; then + echo "dsymutil failed" + cat target/dsymutil.log + exit 1 + fi + if ! dsymutil --flat "target/${target_triple}/${target_dir}/remote_server" 2> target/dsymutil.log; then + echo "dsymutil failed" + cat target/dsymutil.log + exit 1 + fi # note: this uploads the unstripped binary which is needed because it contains # .eh_frame data for stack unwinding. see https://github.com/getsentry/symbolic/issues/783 sentry-cli debug-files upload --include-sources --wait -p zed -o zed-dev \ - "target/${target_triple}/${target_dir}/zed" \ - "target/${target_triple}/${target_dir}/remote_server" \ - "target/${target_triple}/${target_dir}/zed.dwarf" + "target/${target_triple}/${target_dir}/zed.dwarf" \ + "target/${target_triple}/${target_dir}/remote_server.dwarf" else echo "missing SENTRY_AUTH_TOKEN. skipping sentry upload." fi } -if command -v sentry-cli >/dev/null 2>&1; then - upload_debug_info -else - echo "sentry-cli not found. skipping sentry upload." 
- echo "install with: 'curl -sL https://sentry.io/get-cli | bash'" -fi +upload_debug_symbols + +cp target/${target_triple}/${target_dir}/zed "${app_path}/Contents/MacOS/zed" +cp target/${target_triple}/${target_dir}/cli "${app_path}/Contents/MacOS/cli" +sign_app_binaries + +sign_binary "target/$target_triple/release/remote_server" +gzip -f --stdout --best target/$target_triple/release/remote_server > target/zed-remote-server-macos-$remote_server_arch.gz From 12d71b37bbf73a2f6fe3c9aa3aeddfa1a9db390e Mon Sep 17 00:00:00 2001 From: tidely <43219534+tidely@users.noreply.github.com> Date: Fri, 31 Oct 2025 20:12:02 +0200 Subject: [PATCH 45/82] ollama: Add button for refreshing available models (#38181) Closes #17524 This PR adds a button to the bottom right corner of the ollama settings ui. It resets the available ollama models, also resets the "Connected" state in the process. This means it can be used to check if the connection is still valid as well. It's a question whether we should clear the available models on ALL `fetch_models` calls, since these only happen during auth anyway. Ollama is a local model provider which means clicking the refresh button often only flashes the "not connected" state because the latency of the request is so low. This accentuates changes in the UI, however I don't think there's a way around this without adding some rather cumbersome deferred ui updates. I've attached the refresh button to the "Connected" `ButtonLike`, since I don't think automatic UI spacing should separate these elements. I think this is okay because the "Connected" isn't actually something that the user can interact with. Before: image After: image Alternative approach: There was also a suggestion to simply add a entry to the command palette, however none of the other providers have this ability currently either so I went with this approach. The current approach also makes it more discoverable to the user. 
Release Notes: - Added a button for refreshing available ollama models --------- Co-authored-by: Bennet Bo Fenner --- crates/language_models/src/provider/ollama.rs | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index 2150966c1af0fdb1bdcc028cba67bcb7b7cbf89f..6341baa6f36db36a180d14c957b49dadd901e9a0 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -906,6 +906,16 @@ impl Render for ConfigurationView { .child(Icon::new(IconName::Check).color(Color::Success)) .child(Label::new("Connected")) .into_any_element(), + ) + .child( + IconButton::new("refresh-models", IconName::RotateCcw) + .tooltip(Tooltip::text("Refresh models")) + .on_click(cx.listener(|this, _, _, cx| { + this.state.update(cx, |state, _| { + state.fetched_models.clear(); + }); + this.retry_connection(cx); + })), ), ) } else { From 3a4aba1df2b8aa800adf6a2800270297196c11b1 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Fri, 31 Oct 2025 11:23:25 -0700 Subject: [PATCH 46/82] gh-workflow release (#41502) Closes #ISSUE Rewrite our release pipeline to be generated by `gh-workflow` Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- .github/workflows/ci.yml | 1 + .github/workflows/release.yml | 478 ++++++++++++++++++ .github/workflows/run_bundling.yml | 30 +- tooling/xtask/src/tasks/workflows.rs | 2 + tooling/xtask/src/tasks/workflows/release.rs | 207 ++++++++ .../xtask/src/tasks/workflows/run_bundling.rs | 80 ++- .../xtask/src/tasks/workflows/run_tests.rs | 5 +- tooling/xtask/src/tasks/workflows/steps.rs | 9 +- tooling/xtask/src/tasks/workflows/vars.rs | 8 +- 9 files changed, 790 insertions(+), 30 deletions(-) create mode 100644 .github/workflows/release.yml create mode 100644 tooling/xtask/src/tasks/workflows/release.rs diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4e1d5d59c551976c94272b682250e100ed3957ed..448c81bcdb99680f8ac4fd3b824ba22bd0e53e91 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -4,6 +4,7 @@ on: push: tags: - "v*" + - "!v00.00.00-test" # todo! remove concurrency: # Allow only one workflow per any non-`main` branch. diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000000000000000000000000000000000000..49fe8f82b1fb1b395c70b0813f79ea882cf83860 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,478 @@ +# Generated from xtask::workflows::release +# Rebuild with `cargo xtask workflows`. 
+name: release +on: + push: + tags: + - v* +jobs: + run_tests_mac: + if: github.repository_owner == 'zed-industries' + runs-on: self-mini-macos + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::clippy + run: ./script/clippy + shell: bash -euxo pipefail {0} + - name: steps::cargo_install_nextest + run: cargo install cargo-nextest --locked + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 + shell: bash -euxo pipefail {0} + - name: steps::cargo_nextest + run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + shell: bash -euxo pipefail {0} + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + run_tests_linux: + if: github.repository_owner == 'zed-industries' + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::clippy + run: ./script/clippy + shell: bash -euxo pipefail {0} + - name: steps::cargo_install_nextest + run: cargo 
install cargo-nextest --locked + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 100 + shell: bash -euxo pipefail {0} + - name: steps::cargo_nextest + run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + shell: bash -euxo pipefail {0} + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + run_tests_windows: + if: github.repository_owner == 'zed-industries' + runs-on: self-32vcpu-windows-2022 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + New-Item -ItemType Directory -Path "./../.cargo" -Force + Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml" + shell: pwsh + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::clippy + run: ./script/clippy.ps1 + shell: pwsh + - name: steps::cargo_install_nextest + run: cargo install cargo-nextest --locked + shell: pwsh + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than.ps1 250 + shell: pwsh + - name: steps::cargo_nextest + run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final + shell: pwsh + - name: steps::cleanup_cargo_config + if: always() + run: | + Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue + shell: pwsh + timeout-minutes: 60 + check_scripts: + if: github.repository_owner == 'zed-industries' + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: run_tests::check_scripts::run_shellcheck + run: ./script/shellcheck-scripts error + shell: bash -euxo pipefail {0} + - id: 
get_actionlint + name: run_tests::check_scripts::download_actionlint + run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) + shell: bash -euxo pipefail {0} + - name: run_tests::check_scripts::run_actionlint + run: | + ${{ steps.get_actionlint.outputs.executable }} -color + shell: bash -euxo pipefail {0} + - name: run_tests::check_scripts::check_xtask_workflows + run: | + cargo xtask workflows + if ! git diff --exit-code .github; then + echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'" + echo "Please run 'cargo xtask workflows' locally and commit the changes" + exit 1 + fi + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + create_draft_release: + if: github.repository_owner == 'zed-industries' + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + fetch-depth: 25 + ref: ${{ github.ref }} + - name: script/determine-release-channel + run: script/determine-release-channel + shell: bash -euxo pipefail {0} + - name: mkdir -p target/ + run: mkdir -p target/ + shell: bash -euxo pipefail {0} + - name: script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true + run: script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true + shell: bash -euxo pipefail {0} + - name: script/create-draft-release target/release-notes.md + run: script/create-draft-release target/release-notes.md + shell: bash -euxo pipefail {0} + timeout-minutes: 60 + bundle_linux_arm64: + needs: + - run_tests_linux + - check_scripts + runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: 
matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: ./script/bundle-linux + run: ./script/bundle-linux + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz + path: target/release/zed-*.tar.gz + if-no-files-found: error + - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz + path: target/zed-remote-server-*.gz + if-no-files-found: error + outputs: + zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz + remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz + timeout-minutes: 60 + bundle_linux_x86_64: + needs: + - run_tests_linux + - check_scripts + runs-on: namespace-profile-32x64-ubuntu-2004 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} 
+ - name: ./script/bundle-linux + run: ./script/bundle-linux + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz + path: target/release/zed-*.tar.gz + if-no-files-found: error + - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz + path: target/zed-remote-server-*.gz + if-no-files-found: error + outputs: + zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz + remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz + timeout-minutes: 60 + bundle_mac_arm64: + needs: + - run_tests_mac + - check_scripts + runs-on: self-mini-macos + env: + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: 
steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 + shell: bash -euxo pipefail {0} + - name: run_bundling::bundle_mac + run: ./script/bundle-mac aarch64-apple-darwin + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg + path: target/aarch64-apple-darwin/release/Zed.dmg + if-no-files-found: error + - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz + path: target/zed-remote-server-macos-aarch64.gz + if-no-files-found: error + outputs: + zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg + remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz + timeout-minutes: 60 + bundle_mac_x86_64: + needs: + - run_tests_mac + - check_scripts + runs-on: self-mini-macos + env: + MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} + MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} + APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} + APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} + APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_node + uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 + with: + node-version: '20' + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: 
${{ secrets.SENTRY_AUTH_TOKEN }} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 300 + shell: bash -euxo pipefail {0} + - name: run_bundling::bundle_mac + run: ./script/bundle-mac x86_64-apple-darwin + shell: bash -euxo pipefail {0} + - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg + path: target/x86_64-apple-darwin/release/Zed.dmg + if-no-files-found: error + - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz + path: target/zed-remote-server-macos-x86_64.gz + if-no-files-found: error + outputs: + zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg + remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz + timeout-minutes: 60 + bundle_windows_arm64: + needs: + - run_tests_windows + - check_scripts + runs-on: self-32vcpu-windows-2022 + env: + AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} + AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} + AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} + ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }} + CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }} + ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} + FILE_DIGEST: SHA256 + TIMESTAMP_DIGEST: SHA256 + TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: 
matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: run_bundling::bundle_windows + run: script/bundle-windows.ps1 -Architecture aarch64 + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} + - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe + path: ${{ env.SETUP_PATH }} + if-no-files-found: error + outputs: + zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe + timeout-minutes: 60 + bundle_windows_x86_64: + needs: + - run_tests_windows + - check_scripts + runs-on: self-32vcpu-windows-2022 + env: + AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} + AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} + AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} + ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }} + CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }} + ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} + FILE_DIGEST: SHA256 + TIMESTAMP_DIGEST: SHA256 + TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_sentry + uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b + with: + token: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: run_bundling::bundle_windows + run: script/bundle-windows.ps1 -Architecture x86_64 + shell: pwsh + working-directory: ${{ env.ZED_WORKSPACE }} + - name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe' + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe + path: ${{ 
env.SETUP_PATH }} + if-no-files-found: error + outputs: + zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe + timeout-minutes: 60 + upload_release_assets: + needs: + - create_draft_release + - bundle_linux_arm64 + - bundle_linux_x86_64 + - bundle_mac_arm64 + - bundle_mac_x86_64 + - bundle_windows_arm64 + - bundle_windows_x86_64 + runs-on: namespace-profile-4x8-ubuntu-2204 + steps: + - name: release::upload_release_assets::download_workflow_artifacts + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 + with: + path: ./artifacts/ + - name: ls -lR ./artifacts + run: ls -lR ./artifacts + shell: bash -euxo pipefail {0} + - name: release::upload_release_assets::prep_release_artifacts + run: |- + mkdir -p release-artifacts/ + + mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.dmg + mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.dmg + mv ./artifacts/${{ needs.bundle_windows_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.exe + mv ./artifacts/${{ needs.bundle_windows_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.exe + mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.zed }}/* release-artifacts/zed-linux-aarch64.tar.gz + mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.zed }}/* release-artifacts/zed-linux-x86_64.tar.gz + mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-x86_64.gz + mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-aarch64.gz + mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-x86_64.gz + mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-aarch64.gz + shell: bash -euxo pipefail {0} + - name: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed 
release-artifacts/* + run: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/* + shell: bash -euxo pipefail {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + auto_release_preview: + needs: + - upload_release_assets + if: | + false + && startsWith(github.ref, 'refs/tags/v') + && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false + run: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false + shell: bash -euxo pipefail {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: release::create_sentry_release + uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c + with: + environment: production + env: + SENTRY_ORG: zed-dev + SENTRY_PROJECT: zed + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index 9766c7c14b64007692cfb1c68efead5b23382426..a6d563b5b12faa2d5f2cf03b644cfcacbdd17400 100644 --- a/.github/workflows/run_bundling.yml +++ b/.github/workflows/run_bundling.yml @@ -48,11 +48,16 @@ jobs: with: name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg path: target/x86_64-apple-darwin/release/Zed.dmg + if-no-files-found: error - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz path: target/zed-remote-server-macos-x86_64.gz + if-no-files-found: error + outputs: + zed: Zed_${{ github.event.pull_request.head.sha || github.sha 
}}-x86_64.dmg + remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz timeout-minutes: 60 bundle_mac_arm64: if: |- @@ -89,11 +94,16 @@ jobs: with: name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg path: target/aarch64-apple-darwin/release/Zed.dmg + if-no-files-found: error - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz path: target/zed-remote-server-macos-aarch64.gz + if-no-files-found: error + outputs: + zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg + remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz timeout-minutes: 60 bundle_linux_x86_64: if: |- @@ -123,11 +133,16 @@ jobs: with: name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz path: target/release/zed-*.tar.gz + if-no-files-found: error - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz - path: target/release/zed-remote-server-*.tar.gz + path: target/zed-remote-server-*.gz + if-no-files-found: error + outputs: + zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz + remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz timeout-minutes: 60 bundle_linux_arm64: if: |- @@ -157,11 +172,16 @@ jobs: with: name: zed-${{ github.event.pull_request.head.sha || github.sha 
}}-aarch64-unknown-linux-gnu.tar.gz path: target/release/zed-*.tar.gz + if-no-files-found: error - name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz' uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz - path: target/release/zed-remote-server-*.tar.gz + path: target/zed-remote-server-*.gz + if-no-files-found: error + outputs: + zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz + remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz timeout-minutes: 60 bundle_windows_x86_64: if: |- @@ -196,6 +216,9 @@ jobs: with: name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe path: ${{ env.SETUP_PATH }} + if-no-files-found: error + outputs: + zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe timeout-minutes: 60 bundle_windows_arm64: if: |- @@ -230,6 +253,9 @@ jobs: with: name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe path: ${{ env.SETUP_PATH }} + if-no-files-found: error + outputs: + zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe timeout-minutes: 60 concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index f29a590ca19d8be4781aaba9d7fd23d90933f34c..a8472606ffd6aea48775f3fca28f9c30b2223cc5 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -9,6 +9,7 @@ mod nix_build; mod release_nightly; mod run_bundling; +mod release; mod run_tests; mod runners; mod steps; @@ -25,6 +26,7 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { ("run_bundling.yml", 
run_bundling::run_bundling()), ("release_nightly.yml", release_nightly::release_nightly()), ("run_tests.yml", run_tests::run_tests()), + ("release.yml", release::release()), ("compare_perf.yml", compare_perf::compare_perf()), ]; fs::create_dir_all(dir) diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs new file mode 100644 index 0000000000000000000000000000000000000000..1a6533d0a4c9ede450930b0f0561562cf2556c45 --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -0,0 +1,207 @@ +use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow}; + +use crate::tasks::workflows::{ + run_bundling, run_tests, runners, + steps::{self, NamedJob, dependant_job, named, release_job}, + vars, +}; + +pub(crate) fn release() -> Workflow { + let macos_tests = run_tests::run_platform_tests(runners::Platform::Mac); + let linux_tests = run_tests::run_platform_tests(runners::Platform::Linux); + let windows_tests = run_tests::run_platform_tests(runners::Platform::Windows); + let check_scripts = run_tests::check_scripts(); + + let create_draft_release = create_draft_release(); + + let bundle = ReleaseBundleJobs { + linux_arm64: bundle_linux_arm64(&[&linux_tests, &check_scripts]), + linux_x86_64: bundle_linux_x86_64(&[&linux_tests, &check_scripts]), + mac_arm64: bundle_mac_arm64(&[&macos_tests, &check_scripts]), + mac_x86_64: bundle_mac_x86_64(&[&macos_tests, &check_scripts]), + windows_arm64: bundle_windows_arm64(&[&windows_tests, &check_scripts]), + windows_x86_64: bundle_windows_x86_64(&[&windows_tests, &check_scripts]), + }; + + let upload_release_assets = upload_release_assets(&[&create_draft_release], &bundle); + + let auto_release_preview = auto_release_preview(&[&upload_release_assets]); + + named::workflow() + .on(Event::default().push(Push::default().tags(vec!["v*".to_string()]))) + .concurrency(vars::one_workflow_per_non_main_branch()) + .add_job(macos_tests.name, macos_tests.job) + 
.add_job(linux_tests.name, linux_tests.job) + .add_job(windows_tests.name, windows_tests.job) + .add_job(check_scripts.name, check_scripts.job) + .add_job(create_draft_release.name, create_draft_release.job) + .add_job(bundle.linux_arm64.name, bundle.linux_arm64.job) + .add_job(bundle.linux_x86_64.name, bundle.linux_x86_64.job) + .add_job(bundle.mac_arm64.name, bundle.mac_arm64.job) + .add_job(bundle.mac_x86_64.name, bundle.mac_x86_64.job) + .add_job(bundle.windows_arm64.name, bundle.windows_arm64.job) + .add_job(bundle.windows_x86_64.name, bundle.windows_x86_64.job) + .add_job(upload_release_assets.name, upload_release_assets.job) + .add_job(auto_release_preview.name, auto_release_preview.job) +} + +fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob { + named::job( + dependant_job(deps) + .runs_on(runners::LINUX_SMALL) + .cond(Expression::new(indoc::indoc!( + r#" + false + && startsWith(github.ref, 'refs/tags/v') + && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') + "# // todo(ci-release) enable + ))) + .add_step( + steps::script( + r#"gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false"#, + ) + .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}")), + ) + .add_step(create_sentry_release()), + ) +} + +fn create_sentry_release() -> Step { + named::uses( + "getsentry", + "action-release", + "526942b68292201ac6bbb99b9a0747d4abee354c", // v3 + ) + .add_env(("SENTRY_ORG", "zed-dev")) + .add_env(("SENTRY_PROJECT", "zed")) + .add_env(("SENTRY_AUTH_TOKEN", "${{ secrets.SENTRY_AUTH_TOKEN }}")) + .add_with(("environment", "production")) +} + +struct ReleaseBundleJobs { + linux_arm64: NamedJob, + linux_x86_64: NamedJob, + mac_arm64: NamedJob, + mac_x86_64: NamedJob, + windows_arm64: NamedJob, + windows_x86_64: NamedJob, +} + +fn upload_release_assets(deps: &[&NamedJob], bundle_jobs: &ReleaseBundleJobs) -> NamedJob { + fn download_workflow_artifacts() -> Step { + named::uses( + "actions", + "download-artifact", + 
"018cc2cf5baa6db3ef3c5f8a56943fffe632ef53", // v6.0.0 + ) + .add_with(("path", "./artifacts/")) + } + + fn prep_release_artifacts(bundle: &ReleaseBundleJobs) -> Step { + let assets = [ + (&bundle.mac_x86_64.name, "zed", "Zed-x86_64.dmg"), + (&bundle.mac_arm64.name, "zed", "Zed-aarch64.dmg"), + (&bundle.windows_x86_64.name, "zed", "Zed-x86_64.exe"), + (&bundle.windows_arm64.name, "zed", "Zed-aarch64.exe"), + (&bundle.linux_arm64.name, "zed", "zed-linux-aarch64.tar.gz"), + (&bundle.linux_x86_64.name, "zed", "zed-linux-x86_64.tar.gz"), + ( + &bundle.linux_x86_64.name, + "remote-server", + "zed-remote-server-linux-x86_64.gz", + ), + ( + &bundle.linux_arm64.name, + "remote-server", + "zed-remote-server-linux-aarch64.gz", + ), + ( + &bundle.mac_x86_64.name, + "remote-server", + "zed-remote-server-macos-x86_64.gz", + ), + ( + &bundle.mac_arm64.name, + "remote-server", + "zed-remote-server-macos-aarch64.gz", + ), + ]; + + let mut script_lines = vec!["mkdir -p release-artifacts/\n".to_string()]; + for (job_name, artifact_kind, release_artifact_name) in assets { + let artifact_path = + ["${{ needs.", job_name, ".outputs.", artifact_kind, " }}"].join(""); + let mv_command = format!( + "mv ./artifacts/{artifact_path}/* release-artifacts/{release_artifact_name}" + ); + script_lines.push(mv_command) + } + + named::bash(&script_lines.join("\n")) + } + + let mut deps = deps.to_vec(); + deps.extend([ + &bundle_jobs.linux_arm64, + &bundle_jobs.linux_x86_64, + &bundle_jobs.mac_arm64, + &bundle_jobs.mac_x86_64, + &bundle_jobs.windows_arm64, + &bundle_jobs.windows_x86_64, + ]); + + named::job( + dependant_job(&deps) + .runs_on(runners::LINUX_MEDIUM) + .add_step(download_workflow_artifacts()) + .add_step(steps::script("ls -lR ./artifacts")) + .add_step(prep_release_artifacts(bundle_jobs)) + .add_step( + steps::script("gh release upload \"$GITHUB_REF_NAME\" --repo=zed-industries/zed release-artifacts/*") + .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}")), + ), + ) +} + +fn 
create_draft_release() -> NamedJob { + named::job( + release_job(&[]) + .runs_on(runners::LINUX_SMALL) + // We need to fetch more than one commit so that `script/draft-release-notes` + // is able to diff between the current and previous tag. + // + // 25 was chosen arbitrarily. + .add_step( + steps::checkout_repo() + .add_with(("fetch-depth", 25)) + .add_with(("clean", false)) + .add_with(("ref", "${{ github.ref }}")), + ) + .add_step(steps::script("script/determine-release-channel")) // export RELEASE_CHANNEL and RELEASE_VERSION + .add_step(steps::script("mkdir -p target/")) + .add_step(steps::script(r#"script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true"#)) + .add_step(steps::script("script/create-draft-release target/release-notes.md")), + ) +} + +fn bundle_mac_x86_64(deps: &[&NamedJob]) -> NamedJob { + named::job(run_bundling::bundle_mac_job(runners::Arch::X86_64, deps)) +} +fn bundle_mac_arm64(deps: &[&NamedJob]) -> NamedJob { + named::job(run_bundling::bundle_mac_job(runners::Arch::ARM64, deps)) +} +fn bundle_linux_x86_64(deps: &[&NamedJob]) -> NamedJob { + named::job(run_bundling::bundle_linux_job(runners::Arch::X86_64, deps)) +} +fn bundle_linux_arm64(deps: &[&NamedJob]) -> NamedJob { + named::job(run_bundling::bundle_linux_job(runners::Arch::ARM64, deps)) +} +fn bundle_windows_x86_64(deps: &[&NamedJob]) -> NamedJob { + named::job(run_bundling::bundle_windows_job( + runners::Arch::X86_64, + deps, + )) +} +fn bundle_windows_arm64(deps: &[&NamedJob]) -> NamedJob { + named::job(run_bundling::bundle_windows_job(runners::Arch::ARM64, deps)) +} diff --git a/tooling/xtask/src/tasks/workflows/run_bundling.rs b/tooling/xtask/src/tasks/workflows/run_bundling.rs index 2e83678967ca030ec64493ec0d802ba42664496b..2e8990faf8b5fef7a9b4fbcdc2ad2fe91bfed4b2 100644 --- a/tooling/xtask/src/tasks/workflows/run_bundling.rs +++ b/tooling/xtask/src/tasks/workflows/run_bundling.rs @@ -1,10 +1,11 @@ use crate::tasks::workflows::{ - 
steps::{FluentBuilder, named}, + steps::{FluentBuilder, NamedJob, dependant_job, named}, vars::{mac_bundle_envs, windows_bundle_envs}, }; use super::{runners, steps, vars}; use gh_workflow::*; +use indexmap::IndexMap; pub fn run_bundling() -> Workflow { named::workflow() @@ -22,32 +23,47 @@ pub fn run_bundling() -> Workflow { .add_env(("RUST_BACKTRACE", "1")) .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT)) - .add_job("bundle_mac_x86_64", bundle_mac_job(runners::Arch::X86_64)) - .add_job("bundle_mac_arm64", bundle_mac_job(runners::Arch::ARM64)) - .add_job("bundle_linux_x86_64", bundle_linux(runners::Arch::X86_64)) - .add_job("bundle_linux_arm64", bundle_linux(runners::Arch::ARM64)) + .add_job( + "bundle_mac_x86_64", + bundle_mac_job(runners::Arch::X86_64, &[]), + ) + .add_job( + "bundle_mac_arm64", + bundle_mac_job(runners::Arch::ARM64, &[]), + ) + .add_job( + "bundle_linux_x86_64", + bundle_linux_job(runners::Arch::X86_64, &[]), + ) + .add_job( + "bundle_linux_arm64", + bundle_linux_job(runners::Arch::ARM64, &[]), + ) .add_job( "bundle_windows_x86_64", - bundle_windows_job(runners::Arch::X86_64), + bundle_windows_job(runners::Arch::X86_64, &[]), ) .add_job( "bundle_windows_arm64", - bundle_windows_job(runners::Arch::ARM64), + bundle_windows_job(runners::Arch::ARM64, &[]), ) } -fn bundle_job() -> Job { - Job::default() - .cond(Expression::new( +fn bundle_job(deps: &[&NamedJob]) -> Job { + dependant_job(deps) + .when(deps.len() == 0, |job| + job.cond(Expression::new( "(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') || (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))", - )) + ))) .timeout_minutes(60u32) } -fn bundle_mac_job(arch: runners::Arch) -> Job { +pub(crate) fn bundle_mac_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job { use vars::GITHUB_SHA; - bundle_job() + let 
artifact_name = format!("Zed_{GITHUB_SHA}-{arch}.dmg"); + let remote_server_artifact_name = format!("zed-remote-server-{GITHUB_SHA}-macos-{arch}.gz"); + bundle_job(deps) .runs_on(runners::MAC_DEFAULT) .envs(mac_bundle_envs()) .add_step(steps::checkout_repo()) @@ -56,27 +72,35 @@ fn bundle_mac_job(arch: runners::Arch) -> Job { .add_step(steps::clear_target_dir_if_large(runners::Platform::Mac)) .add_step(bundle_mac(arch)) .add_step(steps::upload_artifact( - &format!("Zed_{GITHUB_SHA}-{arch}.dmg"), + &artifact_name, &format!("target/{arch}-apple-darwin/release/Zed.dmg"), )) .add_step(steps::upload_artifact( - &format!("zed-remote-server-{GITHUB_SHA}-macos-{arch}.gz"), + &remote_server_artifact_name, &format!("target/zed-remote-server-macos-{arch}.gz"), )) + .outputs( + [ + ("zed".to_string(), artifact_name), + ("remote-server".to_string(), remote_server_artifact_name), + ] + .into_iter() + .collect::>(), + ) } pub fn bundle_mac(arch: runners::Arch) -> Step { named::bash(&format!("./script/bundle-mac {arch}-apple-darwin")) } -fn bundle_linux(arch: runners::Arch) -> Job { +pub(crate) fn bundle_linux_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job { let artifact_name = format!("zed-{}-{}.tar.gz", vars::GITHUB_SHA, arch.triple()); let remote_server_artifact_name = format!( "zed-remote-server-{}-{}.tar.gz", vars::GITHUB_SHA, arch.triple() ); - bundle_job() + bundle_job(deps) .runs_on(arch.linux_bundler()) .add_step(steps::checkout_repo()) .add_step(steps::setup_sentry()) @@ -88,22 +112,36 @@ fn bundle_linux(arch: runners::Arch) -> Job { )) .add_step(steps::upload_artifact( &remote_server_artifact_name, - "target/release/zed-remote-server-*.tar.gz", + "target/zed-remote-server-*.gz", )) + .outputs( + [ + ("zed".to_string(), artifact_name), + ("remote-server".to_string(), remote_server_artifact_name), + ] + .into_iter() + .collect::>(), + ) } -fn bundle_windows_job(arch: runners::Arch) -> Job { +pub(crate) fn bundle_windows_job(arch: runners::Arch, deps: &[&NamedJob]) -> 
Job { use vars::GITHUB_SHA; - bundle_job() + let artifact_name = format!("Zed_{GITHUB_SHA}-{arch}.exe"); + bundle_job(deps) .runs_on(runners::WINDOWS_DEFAULT) .envs(windows_bundle_envs()) .add_step(steps::checkout_repo()) .add_step(steps::setup_sentry()) .add_step(bundle_windows(arch)) .add_step(steps::upload_artifact( - &format!("Zed_{GITHUB_SHA}-{arch}.exe"), + &artifact_name, "${{ env.SETUP_PATH }}", )) + .outputs( + [("zed".to_string(), artifact_name)] + .into_iter() + .collect::>(), + ) } fn bundle_windows(arch: runners::Arch) -> Step { diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 3328d857fb22e174a0e452626e0caf54b58065de..88874754706661939490fc470c58d8a0c867c0d8 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -302,9 +302,6 @@ pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob { NamedJob { name: format!("run_tests_{platform}"), job: release_job(&[]) - .cond(Expression::new( - "github.repository_owner == 'zed-industries'", - )) .runs_on(runner) .add_step(steps::checkout_repo()) .add_step(steps::setup_cargo_config(platform)) @@ -436,7 +433,7 @@ fn check_docs() -> NamedJob { ) } -fn check_scripts() -> NamedJob { +pub(crate) fn check_scripts() -> NamedJob { fn download_actionlint() -> Step { named::bash( "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)", diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index 14ee5cc5b50a464d1fcc54d7497906926f85321e..5a6196b599ec56b68948afc55316175779877a48 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -95,6 +95,7 @@ pub fn upload_artifact(name: &str, path: &str) -> Step { ) .add_with(("name", name)) .add_with(("path", path)) + .add_with(("if-no-files-found", "error")) } pub fn clear_target_dir_if_large(platform: 
Platform) -> Step { @@ -156,11 +157,15 @@ pub(crate) struct NamedJob { // } pub(crate) fn release_job(deps: &[&NamedJob]) -> Job { - let job = Job::default() + dependant_job(deps) .cond(Expression::new( "github.repository_owner == 'zed-industries'", )) - .timeout_minutes(60u32); + .timeout_minutes(60u32) +} + +pub(crate) fn dependant_job(deps: &[&NamedJob]) -> Job { + let job = Job::default(); if deps.len() > 0 { job.needs(deps.iter().map(|j| j.name.clone()).collect::>()) } else { diff --git a/tooling/xtask/src/tasks/workflows/vars.rs b/tooling/xtask/src/tasks/workflows/vars.rs index 257bf31b5e981cbfccfddfa77939b9a0f2c3f603..b852e12400098c3d49f806c0010458d123ad24fa 100644 --- a/tooling/xtask/src/tasks/workflows/vars.rs +++ b/tooling/xtask/src/tasks/workflows/vars.rs @@ -1,6 +1,6 @@ use std::cell::RefCell; -use gh_workflow::{Env, Expression}; +use gh_workflow::{Concurrency, Env, Expression}; use crate::tasks::workflows::steps::NamedJob; @@ -62,6 +62,12 @@ pub fn windows_bundle_envs() -> Env { .add("TIMESTAMP_SERVER", "http://timestamp.acs.microsoft.com") } +pub(crate) fn one_workflow_per_non_main_branch() -> Concurrency { + Concurrency::default() + .group("${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}") + .cancel_in_progress(true) +} + // Represents a pattern to check for changed files and corresponding output variable pub(crate) struct PathCondition { pub name: &'static str, From a05358f47fa03502a86d997766c47d888c75b249 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 31 Oct 2025 12:58:47 -0600 Subject: [PATCH 47/82] Delete old ci.yml (#41668) The new one is much better Release Notes: - N/A --- .github/workflows/ci.yml | 842 ------------------ .github/workflows/danger.yml | 4 +- .github/workflows/nix_build.yml | 97 -- .github/workflows/release.yml | 2 +- .github/workflows/release_nightly.yml | 32 +- .github/workflows/run_tests.yml | 14 +- tooling/xtask/src/tasks/workflows/danger.rs | 72 +- 
.../xtask/src/tasks/workflows/nix_build.rs | 98 +- tooling/xtask/src/tasks/workflows/release.rs | 42 +- .../src/tasks/workflows/release_nightly.rs | 77 +- .../xtask/src/tasks/workflows/run_bundling.rs | 2 +- 11 files changed, 161 insertions(+), 1121 deletions(-) delete mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/nix_build.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 448c81bcdb99680f8ac4fd3b824ba22bd0e53e91..0000000000000000000000000000000000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,842 +0,0 @@ -name: CI - -on: - push: - tags: - - "v*" - - "!v00.00.00-test" # todo! remove - -concurrency: - # Allow only one workflow per any non-`main` branch. - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} - cancel-in-progress: true - -env: - CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: 0 - RUST_BACKTRACE: 1 - DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }} - DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }} - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - -jobs: - job_spec: - name: Decide which jobs to run - if: github.repository_owner == 'zed-industries' - outputs: - run_tests: ${{ steps.filter.outputs.run_tests }} - run_license: ${{ steps.filter.outputs.run_license }} - run_docs: ${{ steps.filter.outputs.run_docs }} - run_nix: ${{ steps.filter.outputs.run_nix }} - run_actionlint: ${{ steps.filter.outputs.run_actionlint }} - runs-on: - - namespace-profile-2x4-ubuntu-2404 - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - # 350 is arbitrary; ~10days of history on main (5secs); full history is ~25secs - fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }} - - name: Fetch git history and generate 
output filters - id: filter - run: | - if [ -z "$GITHUB_BASE_REF" ]; then - echo "Not in a PR context (i.e., push to main/stable/preview)" - COMPARE_REV="$(git rev-parse HEAD~1)" - else - echo "In a PR context comparing to pull_request.base.ref" - git fetch origin "$GITHUB_BASE_REF" --depth=350 - COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)" - fi - CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})" - - # Specify anything which should potentially skip full test suite in this regex: - # - docs/ - # - script/update_top_ranking_issues/ - # - .github/ISSUE_TEMPLATE/ - # - .github/workflows/ (except .github/workflows/ci.yml) - SKIP_REGEX='^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))' - - echo "$CHANGED_FILES" | grep -qvP "$SKIP_REGEX" && \ - echo "run_tests=true" >> "$GITHUB_OUTPUT" || \ - echo "run_tests=false" >> "$GITHUB_OUTPUT" - - echo "$CHANGED_FILES" | grep -qP '^docs/' && \ - echo "run_docs=true" >> "$GITHUB_OUTPUT" || \ - echo "run_docs=false" >> "$GITHUB_OUTPUT" - - echo "$CHANGED_FILES" | grep -qP '^\.github/(workflows/|actions/|actionlint.yml)' && \ - echo "run_actionlint=true" >> "$GITHUB_OUTPUT" || \ - echo "run_actionlint=false" >> "$GITHUB_OUTPUT" - - echo "$CHANGED_FILES" | grep -qP '^(Cargo.lock|script/.*licenses)' && \ - echo "run_license=true" >> "$GITHUB_OUTPUT" || \ - echo "run_license=false" >> "$GITHUB_OUTPUT" - - echo "$CHANGED_FILES" | grep -qP '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' && \ - echo "$GITHUB_REF_NAME" | grep -qvP '^v[0-9]+\.[0-9]+\.[0-9x](-pre)?$' && \ - echo "run_nix=true" >> "$GITHUB_OUTPUT" || \ - echo "run_nix=false" >> "$GITHUB_OUTPUT" - - migration_checks: - name: Check Postgres and Protobuf migrations, mergability - needs: [job_spec] - if: | - github.repository_owner == 'zed-industries' && - needs.job_spec.outputs.run_tests == 'true' - timeout-minutes: 60 - runs-on: - - self-mini-macos - steps: - - name: Checkout repo - 
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - fetch-depth: 0 # fetch full history - - - name: Remove untracked files - run: git clean -df - - - name: Find modified migrations - shell: bash -euxo pipefail {0} - run: | - export SQUAWK_GITHUB_TOKEN=${{ github.token }} - . ./script/squawk - - - name: Ensure fresh merge - shell: bash -euxo pipefail {0} - run: | - if [ -z "$GITHUB_BASE_REF" ]; - then - echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV" - else - git checkout -B temp - git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp" - echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV" - fi - - - uses: bufbuild/buf-setup-action@v1 - with: - version: v1.29.0 - - uses: bufbuild/buf-breaking-action@v1 - with: - input: "crates/proto/proto/" - against: "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/" - - style: - timeout-minutes: 60 - name: Check formatting and spelling - needs: [job_spec] - if: github.repository_owner == 'zed-industries' - runs-on: - - namespace-profile-4x8-ubuntu-2204 - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0 - with: - version: 9 - - - name: Prettier Check on /docs - working-directory: ./docs - run: | - pnpm dlx "prettier@${PRETTIER_VERSION}" . --check || { - echo "To fix, run from the root of the Zed repo:" - echo " cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .." 
- false - } - env: - PRETTIER_VERSION: 3.5.0 - - - name: Prettier Check on default.json - run: | - pnpm dlx "prettier@${PRETTIER_VERSION}" assets/settings/default.json --check || { - echo "To fix, run from the root of the Zed repo:" - echo " pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --write" - false - } - env: - PRETTIER_VERSION: 3.5.0 - - # To support writing comments that they will certainly be revisited. - - name: Check for todo! and FIXME comments - run: script/check-todos - - - name: Check modifier use in keymaps - run: script/check-keymaps - - - name: Run style checks - uses: ./.github/actions/check_style - - - name: Check for typos - uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1 # v1.38.1 - with: - config: ./typos.toml - - check_docs: - timeout-minutes: 60 - name: Check docs - needs: [job_spec] - if: | - github.repository_owner == 'zed-industries' && - (needs.job_spec.outputs.run_tests == 'true' || needs.job_spec.outputs.run_docs == 'true') - runs-on: - - namespace-profile-8x16-ubuntu-2204 - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Configure CI - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - - name: Build docs - uses: ./.github/actions/build_docs - - actionlint: - runs-on: namespace-profile-2x4-ubuntu-2404 - if: github.repository_owner == 'zed-industries' && needs.job_spec.outputs.run_actionlint == 'true' - needs: [job_spec] - steps: - - uses: actions/checkout@v4 - - name: Download actionlint - id: get_actionlint - run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) - shell: bash - - name: Check workflow files - run: ${{ steps.get_actionlint.outputs.executable }} -color - shell: bash - - macos_tests: - timeout-minutes: 60 - name: (macOS) Run Clippy and tests - needs: [job_spec] - if: | - github.repository_owner == 
'zed-industries' && - needs.job_spec.outputs.run_tests == 'true' - runs-on: - - self-mini-macos - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Configure CI - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - - name: Check that Cargo.lock is up to date - run: | - cargo update --locked --workspace - - - name: cargo clippy - run: ./script/clippy - - - name: Install cargo-machete - uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2 - with: - command: install - args: cargo-machete@0.7.0 - - - name: Check unused dependencies - uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386 # v2 - with: - command: machete - - - name: Check licenses - run: | - script/check-licenses - if [[ "${{ needs.job_spec.outputs.run_license }}" == "true" ]]; then - script/generate-licenses /tmp/zed_licenses_output - fi - - - name: Check for new vulnerable dependencies - if: github.event_name == 'pull_request' - uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8 # v4 - with: - license-check: false - - - name: Run tests - uses: ./.github/actions/run_tests - - - name: Build collab - # we should do this on a linux x86 machinge - run: cargo build -p collab - - - name: Build other binaries and features - run: | - cargo build --workspace --bins --examples - - # Since the macOS runners are stateful, so we need to remove the config file to prevent potential bug. 
- - name: Clean CI config file - if: always() - run: rm -rf ./../.cargo - - linux_tests: - timeout-minutes: 60 - name: (Linux) Run Clippy and tests - needs: [job_spec] - if: | - github.repository_owner == 'zed-industries' && - needs.job_spec.outputs.run_tests == 'true' - runs-on: - - namespace-profile-16x32-ubuntu-2204 - steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Cache dependencies - uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - # cache-provider: "buildjet" - - - name: Install Linux dependencies - run: ./script/linux - - - name: Configure CI - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - - name: cargo clippy - run: ./script/clippy - - - name: Run tests - uses: ./.github/actions/run_tests - - - name: Build other binaries and features - run: | - cargo build -p zed - cargo check -p workspace - cargo check -p gpui --examples - - # Even the Linux runner is not stateful, in theory there is no need to do this cleanup. - # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code - # to clean up the config file, I’ve included the cleanup code here as a precaution. - # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution. - - name: Clean CI config file - if: always() - run: rm -rf ./../.cargo - - doctests: - # Nextest currently doesn't support doctests, so run them separately and in parallel. 
- timeout-minutes: 60 - name: (Linux) Run doctests - needs: [job_spec] - if: | - github.repository_owner == 'zed-industries' && - needs.job_spec.outputs.run_tests == 'true' - runs-on: - - namespace-profile-16x32-ubuntu-2204 - steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Cache dependencies - uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - # cache-provider: "buildjet" - - - name: Install Linux dependencies - run: ./script/linux - - - name: Configure CI - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - - name: Run doctests - run: cargo test --workspace --doc --no-fail-fast - - - name: Clean CI config file - if: always() - run: rm -rf ./../.cargo - - build_remote_server: - timeout-minutes: 60 - name: (Linux) Build Remote Server - needs: [job_spec] - if: | - github.repository_owner == 'zed-industries' && - needs.job_spec.outputs.run_tests == 'true' - runs-on: - - namespace-profile-16x32-ubuntu-2204 - steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Cache dependencies - uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - # cache-provider: "buildjet" - - - name: Install Clang & Mold - run: ./script/remote-server && ./script/install-mold 2.34.0 - - - name: Configure CI - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - - name: Build Remote Server - run: cargo build -p remote_server - - - name: Clean CI config file - if: always() - run: rm -rf ./../.cargo - - windows_tests: - timeout-minutes: 60 - 
name: (Windows) Run Clippy and tests - needs: [job_spec] - if: | - github.repository_owner == 'zed-industries' && - needs.job_spec.outputs.run_tests == 'true' - runs-on: [self-32vcpu-windows-2022] - steps: - - name: Environment Setup - run: | - $RunnerDir = Split-Path -Parent $env:RUNNER_WORKSPACE - Write-Output ` - "RUSTUP_HOME=$RunnerDir\.rustup" ` - "CARGO_HOME=$RunnerDir\.cargo" ` - "PATH=$RunnerDir\.cargo\bin;$env:PATH" ` - >> $env:GITHUB_ENV - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Configure CI - run: | - New-Item -ItemType Directory -Path "./../.cargo" -Force - Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml" - - - name: cargo clippy - run: | - .\script\clippy.ps1 - - - name: Run tests - uses: ./.github/actions/run_tests_windows - - - name: Build Zed - run: cargo build - - - name: Limit target directory size - run: ./script/clear-target-dir-if-larger-than.ps1 250 - - - name: Clean CI config file - if: always() - run: Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue - - tests_pass: - name: Tests Pass - runs-on: namespace-profile-2x4-ubuntu-2404 - needs: - - job_spec - - style - - check_docs - - actionlint - - migration_checks - # run_tests: If adding required tests, add them here and to script below. - - linux_tests - - build_remote_server - - macos_tests - - windows_tests - if: | - github.repository_owner == 'zed-industries' && - always() - steps: - - name: Check all tests passed - run: | - # Check dependent jobs... 
- RET_CODE=0 - # Always check style - [[ "${{ needs.style.result }}" != 'success' ]] && { RET_CODE=1; echo "style tests failed"; } - - if [[ "${{ needs.job_spec.outputs.run_docs }}" == "true" ]]; then - [[ "${{ needs.check_docs.result }}" != 'success' ]] && { RET_CODE=1; echo "docs checks failed"; } - fi - - if [[ "${{ needs.job_spec.outputs.run_actionlint }}" == "true" ]]; then - [[ "${{ needs.actionlint.result }}" != 'success' ]] && { RET_CODE=1; echo "actionlint checks failed"; } - fi - - # Only check test jobs if they were supposed to run - if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then - [[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; } - [[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; } - [[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; } - [[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; } - # This check is intentionally disabled. See: https://github.com/zed-industries/zed/pull/28431 - # [[ "${{ needs.migration_checks.result }}" != 'success' ]] && { RET_CODE=1; echo "Migration Checks failed"; } - fi - if [[ "$RET_CODE" -eq 0 ]]; then - echo "All tests passed successfully!" 
- fi - exit $RET_CODE - - bundle-mac: - timeout-minutes: 120 - name: Create a macOS bundle - runs-on: - - self-mini-macos - if: startsWith(github.ref, 'refs/tags/v') - needs: [macos_tests] - env: - MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} - MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }} - APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }} - APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }} - APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }} - steps: - - name: Install Node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 - with: - node-version: "18" - - - name: Setup Sentry CLI - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 - with: - token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - # We need to fetch more than one commit so that `script/draft-release-notes` - # is able to diff between the current and previous tag. - # - # 25 was chosen arbitrarily. - fetch-depth: 25 - clean: false - ref: ${{ github.ref }} - - - name: Limit target directory size - run: script/clear-target-dir-if-larger-than 300 - - - name: Determine version and release channel - run: | - # This exports RELEASE_CHANNEL into env (GITHUB_ENV) - script/determine-release-channel - - - name: Draft release notes - run: | - mkdir -p target/ - # Ignore any errors that occur while drafting release notes to not fail the build. 
- script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true - script/create-draft-release target/release-notes.md - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Create macOS app bundle (aarch64) - run: script/bundle-mac aarch64-apple-darwin - - - name: Create macOS app bundle (x64) - run: script/bundle-mac x86_64-apple-darwin - - - name: Rename binaries - run: | - mv target/aarch64-apple-darwin/release/Zed.dmg target/aarch64-apple-darwin/release/Zed-aarch64.dmg - mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg - - - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - name: Upload app bundle to release - if: ${{ env.RELEASE_CHANNEL == 'preview' || env.RELEASE_CHANNEL == 'stable' }} - with: - draft: true - prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} - files: | - target/zed-remote-server-macos-x86_64.gz - target/zed-remote-server-macos-aarch64.gz - target/aarch64-apple-darwin/release/Zed-aarch64.dmg - target/x86_64-apple-darwin/release/Zed-x86_64.dmg - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - bundle-linux-x86_x64: - timeout-minutes: 60 - name: Linux x86_x64 release bundle - runs-on: - - namespace-profile-16x32-ubuntu-2004 # ubuntu 20.04 for minimal glibc - if: | - ( startsWith(github.ref, 'refs/tags/v') ) - needs: [linux_tests] - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Install Linux dependencies - run: ./script/linux && ./script/install-mold 2.34.0 - - - name: Setup Sentry CLI - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 - with: - token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - - - name: Determine version and release channel - run: | - # This exports RELEASE_CHANNEL into env (GITHUB_ENV) - script/determine-release-channel - - - name: Create Linux .tar.gz bundle - run: script/bundle-linux - - - 
name: Upload Artifacts to release - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - with: - draft: true - prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} - files: | - target/zed-remote-server-linux-x86_64.gz - target/release/zed-linux-x86_64.tar.gz - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - bundle-linux-aarch64: # this runs on ubuntu22.04 - timeout-minutes: 60 - name: Linux arm64 release bundle - runs-on: - - namespace-profile-8x32-ubuntu-2004-arm-m4 # ubuntu 20.04 for minimal glibc - if: | - startsWith(github.ref, 'refs/tags/v') - needs: [linux_tests] - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Install Linux dependencies - run: ./script/linux - - - name: Setup Sentry CLI - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 - with: - token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - - - name: Determine version and release channel - run: | - # This exports RELEASE_CHANNEL into env (GITHUB_ENV) - script/determine-release-channel - - - name: Create and upload Linux .tar.gz bundles - run: script/bundle-linux - - - name: Upload Artifacts to release - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - with: - draft: true - prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} - files: | - target/zed-remote-server-linux-aarch64.gz - target/release/zed-linux-aarch64.tar.gz - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - freebsd: - timeout-minutes: 60 - runs-on: github-8vcpu-ubuntu-2404 - if: | - false && ( startsWith(github.ref, 'refs/tags/v') ) - needs: [linux_tests] - name: Build Zed on FreeBSD - steps: - - uses: actions/checkout@v4 - - name: Build FreeBSD remote-server - id: freebsd-build - uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0 - with: - usesh: true - release: 13.5 - copyback: true - prepare: | - pkg install -y \ - bash curl jq git \ - 
rustup-init cmake-core llvm-devel-lite pkgconf protobuf # ibx11 alsa-lib rust-bindgen-cli - run: | - freebsd-version - sysctl hw.model - sysctl hw.ncpu - sysctl hw.physmem - sysctl hw.usermem - git config --global --add safe.directory /home/runner/work/zed/zed - rustup-init --profile minimal --default-toolchain none -y - . "$HOME/.cargo/env" - ./script/bundle-freebsd - mkdir -p out/ - mv "target/zed-remote-server-freebsd-x86_64.gz" out/ - rm -rf target/ - cargo clean - - - name: Upload Artifact to Workflow - zed-remote-server (run-bundling) - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4 - if: contains(github.event.pull_request.labels.*.name, 'run-bundling') - with: - name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-freebsd.gz - path: out/zed-remote-server-freebsd-x86_64.gz - - - name: Upload Artifacts to release - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }} - with: - draft: true - prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} - files: | - out/zed-remote-server-freebsd-x86_64.gz - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - nix-build: - name: Build with Nix - uses: ./.github/workflows/nix_build.yml - needs: [job_spec] - if: github.repository_owner == 'zed-industries' && - (contains(github.event.pull_request.labels.*.name, 'run-nix') || - needs.job_spec.outputs.run_nix == 'true') - secrets: inherit - - bundle-windows-x64: - timeout-minutes: 120 - name: Create a Windows installer for x86_64 - runs-on: [self-32vcpu-windows-2022] - if: | - ( startsWith(github.ref, 'refs/tags/v') ) - needs: [windows_tests] - env: - AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} - AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} - AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} - ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }} - 
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }} - ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} - FILE_DIGEST: SHA256 - TIMESTAMP_DIGEST: SHA256 - TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com" - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Setup Sentry CLI - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 - with: - token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - - - name: Determine version and release channel - working-directory: ${{ env.ZED_WORKSPACE }} - run: | - # This exports RELEASE_CHANNEL into env (GITHUB_ENV) - script/determine-release-channel.ps1 - - - name: Build Zed installer - working-directory: ${{ env.ZED_WORKSPACE }} - run: script/bundle-windows.ps1 - - - name: Upload Artifacts to release - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - with: - draft: true - prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} - files: ${{ env.SETUP_PATH }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - bundle-windows-aarch64: - timeout-minutes: 120 - name: Create a Windows installer for aarch64 - runs-on: [self-32vcpu-windows-2022] - if: | - ( startsWith(github.ref, 'refs/tags/v') ) - needs: [windows_tests] - env: - AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }} - AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }} - AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }} - ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }} - CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }} - ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }} - FILE_DIGEST: SHA256 - TIMESTAMP_DIGEST: SHA256 - TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com" - steps: - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Setup Sentry CLI - uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b #v2 
- with: - token: ${{ SECRETS.SENTRY_AUTH_TOKEN }} - - - name: Determine version and release channel - working-directory: ${{ env.ZED_WORKSPACE }} - run: | - # This exports RELEASE_CHANNEL into env (GITHUB_ENV) - script/determine-release-channel.ps1 - - - name: Build Zed installer - working-directory: ${{ env.ZED_WORKSPACE }} - run: script/bundle-windows.ps1 -Architecture aarch64 - - - name: Upload Artifacts to release - uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1 - with: - draft: true - prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }} - files: ${{ env.SETUP_PATH }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - auto-release-preview: - name: Auto release preview - if: | - false - && startsWith(github.ref, 'refs/tags/v') - && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') - needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64, bundle-windows-aarch64] - runs-on: - - self-mini-macos - steps: - - name: gh release - run: gh release edit "$GITHUB_REF_NAME" --draft=false - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - - name: Create Sentry release - uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c # v3 - env: - SENTRY_ORG: zed-dev - SENTRY_PROJECT: zed - SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} - with: - environment: production diff --git a/.github/workflows/danger.yml b/.github/workflows/danger.yml index 1134167e05e29ffebfcf176b4f8c6cfc1b9e862d..054767e5f1fd86c2a5b8fa2112802e797ec10f6e 100644 --- a/.github/workflows/danger.yml +++ b/.github/workflows/danger.yml @@ -29,10 +29,10 @@ jobs: node-version: '20' cache: pnpm cache-dependency-path: script/danger/pnpm-lock.yaml - - name: danger::install_deps + - name: danger::danger_job::install_deps run: pnpm install --dir script/danger shell: bash -euxo pipefail {0} - - name: danger::run + - name: danger::danger_job::run run: pnpm run --dir script/danger danger ci shell: bash -euxo pipefail {0} 
env: diff --git a/.github/workflows/nix_build.yml b/.github/workflows/nix_build.yml deleted file mode 100644 index 4dd45bd3a740a43785e0284f0b86b2cdef50c1c7..0000000000000000000000000000000000000000 --- a/.github/workflows/nix_build.yml +++ /dev/null @@ -1,97 +0,0 @@ -# Generated from xtask::workflows::nix_build -# Rebuild with `cargo xtask workflows`. -name: nix_build -env: - CARGO_TERM_COLOR: always - RUST_BACKTRACE: '1' - CARGO_INCREMENTAL: '0' -on: - pull_request: - branches: - - '**' - paths: - - nix/** - - flake.* - - Cargo.* - - rust-toolchain.toml - - .cargo/config.toml - push: - branches: - - main - - v[0-9]+.[0-9]+.x - paths: - - nix/** - - flake.* - - Cargo.* - - rust-toolchain.toml - - .cargo/config.toml - workflow_call: {} -jobs: - build_nix_linux_x86_64: - if: github.repository_owner == 'zed-industries' - runs-on: namespace-profile-32x64-ubuntu-2004 - env: - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} - GIT_LFS_SKIP_SMUDGE: '1' - steps: - - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - with: - clean: false - - name: nix_build::install_nix - uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f - with: - github_access_token: ${{ secrets.GITHUB_TOKEN }} - - name: nix_build::cachix_action - uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad - with: - name: zed - authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} - cachixArgs: -v - pushFilter: -zed-editor-[0-9.]*-nightly - - name: nix_build::build - run: nix build .#debug -L --accept-flake-config - shell: bash -euxo pipefail {0} - timeout-minutes: 60 - continue-on-error: true - build_nix_mac_aarch64: - if: github.repository_owner == 'zed-industries' - runs-on: self-mini-macos - env: - ZED_CLIENT_CHECKSUM_SEED: ${{ 
secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }} - GIT_LFS_SKIP_SMUDGE: '1' - steps: - - name: steps::checkout_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - with: - clean: false - - name: nix_build::set_path - run: | - echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" - echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" - shell: bash -euxo pipefail {0} - - name: nix_build::cachix_action - uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad - with: - name: zed - authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} - cachixArgs: -v - pushFilter: -zed-editor-[0-9.]*-nightly - - name: nix_build::build - run: nix build .#debug -L --accept-flake-config - shell: bash -euxo pipefail {0} - - name: nix_build::limit_store - run: |- - if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then - nix-collect-garbage -d || true - fi - shell: bash -euxo pipefail {0} - timeout-minutes: 60 - continue-on-error: true -concurrency: - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} - cancel-in-progress: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 49fe8f82b1fb1b395c70b0813f79ea882cf83860..3068232c1a8096fef89de5a8defb4c2073c4a01a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -465,7 +465,7 @@ jobs: shell: bash -euxo pipefail {0} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: release::create_sentry_release + - name: release::auto_release_preview::create_sentry_release uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c with: environment: production diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 
80e6534e70e8f7169514fb8cc569f7b11488cd88..20230fb499ea9fa892a316bd1762424869004262 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -201,9 +201,6 @@ jobs: uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: release_nightly::add_rust_to_path - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - shell: bash -euxo pipefail {0} - name: ./script/linux run: ./script/linux shell: bash -euxo pipefail {0} @@ -242,9 +239,6 @@ jobs: uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b with: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - - name: release_nightly::add_rust_to_path - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - shell: bash -euxo pipefail {0} - name: ./script/linux run: ./script/linux shell: bash -euxo pipefail {0} @@ -298,11 +292,11 @@ jobs: "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: release_nightly::build_zed_installer + - name: run_bundling::bundle_windows run: script/bundle-windows.ps1 -Architecture x86_64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: release_nightly::upload_zed_nightly_windows + - name: release_nightly::upload_zed_nightly run: script/upload-nightly.ps1 -Architecture x86_64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} @@ -340,11 +334,11 @@ jobs: "nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL" shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: release_nightly::build_zed_installer + - name: run_bundling::bundle_windows run: script/bundle-windows.ps1 -Architecture aarch64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} - - name: release_nightly::upload_zed_nightly_windows + - name: release_nightly::upload_zed_nightly run: script/upload-nightly.ps1 -Architecture aarch64 shell: pwsh working-directory: ${{ env.ZED_WORKSPACE }} @@ -365,17 +359,17 @@ jobs: uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: nix_build::install_nix + - name: nix_build::build_nix::install_nix uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f with: github_access_token: ${{ secrets.GITHUB_TOKEN }} - - name: nix_build::cachix_action + - name: nix_build::build_nix::cachix_action uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad with: name: zed authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} cachixArgs: -v - - name: nix_build::build + - name: nix_build::build_nix::build run: nix build .#default -L --accept-flake-config shell: bash -euxo pipefail {0} timeout-minutes: 60 @@ -396,21 +390,21 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: nix_build::set_path + - name: nix_build::build_nix::set_path run: | echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" shell: bash -euxo pipefail {0} - - name: nix_build::cachix_action + - name: nix_build::build_nix::cachix_action uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad with: name: zed authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} cachixArgs: -v - - name: nix_build::build + - name: nix_build::build_nix::build run: nix build .#default -L --accept-flake-config shell: bash -euxo pipefail {0} - - name: nix_build::limit_store + - name: nix_build::build_nix::limit_store run: |- if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then nix-collect-garbage -d || true @@ -434,7 +428,7 @@ jobs: with: clean: false fetch-depth: 0 - - name: release_nightly::update_nightly_tag + - name: release_nightly::update_nightly_tag_job::update_nightly_tag run: | if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then echo "Nightly tag already points to current commit. Skipping tagging." 
@@ -445,7 +439,7 @@ jobs: git tag -f nightly git push origin nightly --force shell: bash -euxo pipefail {0} - - name: release_nightly::create_sentry_release + - name: release_nightly::update_nightly_tag_job::create_sentry_release uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c with: environment: production diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 63c882bf7b0cf447bfd641002bcf67667bbea8b6..51ff9ee331ca69b251bb00905e22213527cbf118 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -444,18 +444,18 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: nix_build::install_nix + - name: nix_build::build_nix::install_nix uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f with: github_access_token: ${{ secrets.GITHUB_TOKEN }} - - name: nix_build::cachix_action + - name: nix_build::build_nix::cachix_action uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad with: name: zed authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} cachixArgs: -v pushFilter: -zed-editor-[0-9.]*-nightly - - name: nix_build::build + - name: nix_build::build_nix::build run: nix build .#debug -L --accept-flake-config shell: bash -euxo pipefail {0} timeout-minutes: 60 @@ -475,22 +475,22 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - - name: nix_build::set_path + - name: nix_build::build_nix::set_path run: | echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" shell: bash -euxo pipefail {0} - - name: nix_build::cachix_action + - name: nix_build::build_nix::cachix_action uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad with: name: zed authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} cachixArgs: -v pushFilter: -zed-editor-[0-9.]*-nightly - - name: nix_build::build + - name: nix_build::build_nix::build 
run: nix build .#debug -L --accept-flake-config shell: bash -euxo pipefail {0} - - name: nix_build::limit_store + - name: nix_build::build_nix::limit_store run: |- if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then nix-collect-garbage -d || true diff --git a/tooling/xtask/src/tasks/workflows/danger.rs b/tooling/xtask/src/tasks/workflows/danger.rs index 6ae7543a76480b37ca53d96d3f682c06b3d073e8..eed2cba732292e5851468766084e846f366b3edc 100644 --- a/tooling/xtask/src/tasks/workflows/danger.rs +++ b/tooling/xtask/src/tasks/workflows/danger.rs @@ -1,11 +1,13 @@ use gh_workflow::*; -use crate::tasks::workflows::steps::named; +use crate::tasks::workflows::steps::{NamedJob, named}; use super::{runners, steps}; /// Generates the danger.yml workflow pub fn danger() -> Workflow { + let danger = danger_job(); + named::workflow() .on( Event::default().pull_request(PullRequest::default().add_branch("main").types([ @@ -15,39 +17,43 @@ pub fn danger() -> Workflow { PullRequestType::Edited, ])), ) - .add_job( - "danger", - Job::default() - .cond(Expression::new( - "github.repository_owner == 'zed-industries'", - )) - .runs_on(runners::LINUX_SMALL) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_pnpm()) - .add_step( - steps::setup_node() - .add_with(("cache", "pnpm")) - .add_with(("cache-dependency-path", "script/danger/pnpm-lock.yaml")), - ) - .add_step(install_deps()) - .add_step(run()), - ) + .add_job(danger.name, danger.job) } -pub fn install_deps() -> Step { - named::bash("pnpm install --dir script/danger") -} +fn danger_job() -> NamedJob { + pub fn install_deps() -> Step { + named::bash("pnpm install --dir script/danger") + } + + pub fn run() -> Step { + named::bash("pnpm run --dir script/danger danger ci") + // This GitHub token is not used, but the value needs to be here to prevent + // Danger from throwing an error. 
+ .add_env(("GITHUB_TOKEN", "not_a_real_token")) + // All requests are instead proxied through an instance of + // https://github.com/maxdeviant/danger-proxy that allows Danger to securely + // authenticate with GitHub while still being able to run on PRs from forks. + .add_env(( + "DANGER_GITHUB_API_BASE_URL", + "https://danger-proxy.fly.dev/github", + )) + } -pub fn run() -> Step { - named::bash("pnpm run --dir script/danger danger ci") - // This GitHub token is not used, but the value needs to be here to prevent - // Danger from throwing an error. - .add_env(("GITHUB_TOKEN", "not_a_real_token")) - // All requests are instead proxied through an instance of - // https://github.com/maxdeviant/danger-proxy that allows Danger to securely - // authenticate with GitHub while still being able to run on PRs from forks. - .add_env(( - "DANGER_GITHUB_API_BASE_URL", - "https://danger-proxy.fly.dev/github", - )) + NamedJob { + name: "danger".to_string(), + job: Job::default() + .cond(Expression::new( + "github.repository_owner == 'zed-industries'", + )) + .runs_on(runners::LINUX_SMALL) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_pnpm()) + .add_step( + steps::setup_node() + .add_with(("cache", "pnpm")) + .add_with(("cache-dependency-path", "script/danger/pnpm-lock.yaml")), + ) + .add_step(install_deps()) + .add_step(run()), + } } diff --git a/tooling/xtask/src/tasks/workflows/nix_build.rs b/tooling/xtask/src/tasks/workflows/nix_build.rs index 85d13ff62fdd3c62ba2db47dfb93c750f29c17c2..3c6818106335aac712bbf4c282107e735cd5c631 100644 --- a/tooling/xtask/src/tasks/workflows/nix_build.rs +++ b/tooling/xtask/src/tasks/workflows/nix_build.rs @@ -14,6 +14,55 @@ pub(crate) fn build_nix( cachix_filter: Option<&str>, deps: &[&NamedJob], ) -> NamedJob { + // on our macs we manually install nix. 
for some reason the cachix action is running + // under a non-login /bin/bash shell which doesn't source the proper script to add the + // nix profile to PATH, so we manually add them here + pub fn set_path() -> Step { + named::bash(indoc! {r#" + echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" + echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" + "#}) + } + + pub fn install_nix() -> Step { + named::uses( + "cachix", + "install-nix-action", + "02a151ada4993995686f9ed4f1be7cfbb229e56f", // v31 + ) + .add_with(("github_access_token", vars::GITHUB_TOKEN)) + } + + pub fn cachix_action(cachix_filter: Option<&str>) -> Step { + let mut step = named::uses( + "cachix", + "cachix-action", + "0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad", // v16 + ) + .add_with(("name", "zed")) + .add_with(("authToken", vars::CACHIX_AUTH_TOKEN)) + .add_with(("cachixArgs", "-v")); + if let Some(cachix_filter) = cachix_filter { + step = step.add_with(("pushFilter", cachix_filter)); + } + step + } + + pub fn build(flake_output: &str) -> Step { + named::bash(&format!( + "nix build .#{} -L --accept-flake-config", + flake_output + )) + } + + pub fn limit_store() -> Step { + named::bash(indoc! {r#" + if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then + nix-collect-garbage -d || true + fi"# + }) + } + let runner = match platform { Platform::Windows => unimplemented!(), Platform::Linux => runners::LINUX_X86_BUNDLER, @@ -55,52 +104,3 @@ pub(crate) fn build_nix( job, } } - -// on our macs we manually install nix. for some reason the cachix action is running -// under a non-login /bin/bash shell which doesn't source the proper script to add the -// nix profile to PATH, so we manually add them here -pub fn set_path() -> Step { - named::bash(indoc! 
{r#" - echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH" - echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH" - "#}) -} - -pub fn install_nix() -> Step { - named::uses( - "cachix", - "install-nix-action", - "02a151ada4993995686f9ed4f1be7cfbb229e56f", // v31 - ) - .add_with(("github_access_token", vars::GITHUB_TOKEN)) -} - -pub fn cachix_action(cachix_filter: Option<&str>) -> Step { - let mut step = named::uses( - "cachix", - "cachix-action", - "0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad", // v16 - ) - .add_with(("name", "zed")) - .add_with(("authToken", vars::CACHIX_AUTH_TOKEN)) - .add_with(("cachixArgs", "-v")); - if let Some(cachix_filter) = cachix_filter { - step = step.add_with(("pushFilter", cachix_filter)); - } - step -} - -pub fn build(flake_output: &str) -> Step { - named::bash(&format!( - "nix build .#{} -L --accept-flake-config", - flake_output - )) -} - -pub fn limit_store() -> Step { - named::bash(indoc! {r#" - if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then - nix-collect-garbage -d || true - fi"# - }) -} diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index 1a6533d0a4c9ede450930b0f0561562cf2556c45..54790e4d864646d7e8d484f9f9b5ec8cc9726796 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -45,7 +45,28 @@ pub(crate) fn release() -> Workflow { .add_job(auto_release_preview.name, auto_release_preview.job) } +struct ReleaseBundleJobs { + linux_arm64: NamedJob, + linux_x86_64: NamedJob, + mac_arm64: NamedJob, + mac_x86_64: NamedJob, + windows_arm64: NamedJob, + windows_x86_64: NamedJob, +} + fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob { + fn create_sentry_release() -> Step { + named::uses( + "getsentry", + "action-release", + "526942b68292201ac6bbb99b9a0747d4abee354c", // v3 + ) + .add_env(("SENTRY_ORG", "zed-dev")) + .add_env(("SENTRY_PROJECT", "zed")) + .add_env(("SENTRY_AUTH_TOKEN", "${{ 
secrets.SENTRY_AUTH_TOKEN }}")) + .add_with(("environment", "production")) + } + named::job( dependant_job(deps) .runs_on(runners::LINUX_SMALL) @@ -66,27 +87,6 @@ fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob { ) } -fn create_sentry_release() -> Step { - named::uses( - "getsentry", - "action-release", - "526942b68292201ac6bbb99b9a0747d4abee354c", // v3 - ) - .add_env(("SENTRY_ORG", "zed-dev")) - .add_env(("SENTRY_PROJECT", "zed")) - .add_env(("SENTRY_AUTH_TOKEN", "${{ secrets.SENTRY_AUTH_TOKEN }}")) - .add_with(("environment", "production")) -} - -struct ReleaseBundleJobs { - linux_arm64: NamedJob, - linux_x86_64: NamedJob, - mac_arm64: NamedJob, - mac_x86_64: NamedJob, - windows_arm64: NamedJob, - windows_x86_64: NamedJob, -} - fn upload_release_assets(deps: &[&NamedJob], bundle_jobs: &ReleaseBundleJobs) -> NamedJob { fn download_workflow_artifacts() -> Step { named::uses( diff --git a/tooling/xtask/src/tasks/workflows/release_nightly.rs b/tooling/xtask/src/tasks/workflows/release_nightly.rs index 7d7de5b289572c49eeccc103979a7518bec82d44..7079cdbf028c31a27eb9cc230c7c93eb67367680 100644 --- a/tooling/xtask/src/tasks/workflows/release_nightly.rs +++ b/tooling/xtask/src/tasks/workflows/release_nightly.rs @@ -1,6 +1,6 @@ use crate::tasks::workflows::{ nix_build::build_nix, - run_bundling::bundle_mac, + run_bundling::{bundle_mac, bundle_windows}, run_tests::run_platform_tests, runners::{Arch, Platform}, steps::NamedJob, @@ -135,7 +135,6 @@ fn bundle_linux_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { .runs_on(arch.linux_bundler()) .add_step(steps::checkout_repo()) .add_step(steps::setup_sentry()) - .add_step(add_rust_to_path()) .add_step(steps::script("./script/linux")); // todo(ci) can we do this on arm too? 
@@ -163,12 +162,37 @@ fn bundle_windows_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob { .add_step(steps::checkout_repo()) .add_step(steps::setup_sentry()) .add_step(set_release_channel_to_nightly(platform)) - .add_step(build_zed_installer(arch)) - .add_step(upload_zed_nightly_windows(arch)), + .add_step(bundle_windows(arch)) + .add_step(upload_zed_nightly(platform, arch)), } } fn update_nightly_tag_job(deps: &[&NamedJob]) -> NamedJob { + fn update_nightly_tag() -> Step { + named::bash(indoc::indoc! {r#" + if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then + echo "Nightly tag already points to current commit. Skipping tagging." + exit 0 + fi + git config user.name github-actions + git config user.email github-actions@github.com + git tag -f nightly + git push origin nightly --force + "#}) + } + + fn create_sentry_release() -> Step { + named::uses( + "getsentry", + "action-release", + "526942b68292201ac6bbb99b9a0747d4abee354c", // v3 + ) + .add_env(("SENTRY_ORG", "zed-dev")) + .add_env(("SENTRY_PROJECT", "zed")) + .add_env(("SENTRY_AUTH_TOKEN", vars::SENTRY_AUTH_TOKEN)) + .add_with(("environment", "production")) + } + NamedJob { name: "update_nightly_tag".to_owned(), job: steps::release_job(deps) @@ -197,10 +221,6 @@ fn set_release_channel_to_nightly(platform: Platform) -> Step { } } -fn add_rust_to_path() -> Step { - named::bash(r#"echo "$HOME/.cargo/bin" >> "$GITHUB_PATH""#) -} - fn upload_zed_nightly(platform: Platform, arch: Arch) -> Step { match platform { Platform::Linux => named::bash(&format!("script/upload-nightly linux-targz {arch}")), @@ -214,44 +234,3 @@ fn upload_zed_nightly(platform: Platform, arch: Arch) -> Step { } } } - -fn build_zed_installer(arch: Arch) -> Step { - let cmd = match arch { - Arch::X86_64 => "script/bundle-windows.ps1 -Architecture x86_64", - Arch::ARM64 => "script/bundle-windows.ps1 -Architecture aarch64", - }; - named::pwsh(cmd).working_directory("${{ env.ZED_WORKSPACE }}") -} - -fn 
upload_zed_nightly_windows(arch: Arch) -> Step { - let cmd = match arch { - Arch::X86_64 => "script/upload-nightly.ps1 -Architecture x86_64", - Arch::ARM64 => "script/upload-nightly.ps1 -Architecture aarch64", - }; - named::pwsh(cmd).working_directory("${{ env.ZED_WORKSPACE }}") -} - -fn update_nightly_tag() -> Step { - named::bash(indoc::indoc! {r#" - if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then - echo "Nightly tag already points to current commit. Skipping tagging." - exit 0 - fi - git config user.name github-actions - git config user.email github-actions@github.com - git tag -f nightly - git push origin nightly --force - "#}) -} - -fn create_sentry_release() -> Step { - named::uses( - "getsentry", - "action-release", - "526942b68292201ac6bbb99b9a0747d4abee354c", // v3 - ) - .add_env(("SENTRY_ORG", "zed-dev")) - .add_env(("SENTRY_PROJECT", "zed")) - .add_env(("SENTRY_AUTH_TOKEN", vars::SENTRY_AUTH_TOKEN)) - .add_with(("environment", "production")) -} diff --git a/tooling/xtask/src/tasks/workflows/run_bundling.rs b/tooling/xtask/src/tasks/workflows/run_bundling.rs index 2e8990faf8b5fef7a9b4fbcdc2ad2fe91bfed4b2..be163b215bace76c411145d0ad04d8a9dd7788ca 100644 --- a/tooling/xtask/src/tasks/workflows/run_bundling.rs +++ b/tooling/xtask/src/tasks/workflows/run_bundling.rs @@ -144,7 +144,7 @@ pub(crate) fn bundle_windows_job(arch: runners::Arch, deps: &[&NamedJob]) -> Job ) } -fn bundle_windows(arch: runners::Arch) -> Step { +pub fn bundle_windows(arch: runners::Arch) -> Step { let step = match arch { runners::Arch::X86_64 => named::pwsh("script/bundle-windows.ps1 -Architecture x86_64"), runners::Arch::ARM64 => named::pwsh("script/bundle-windows.ps1 -Architecture aarch64"), From 743a9cf25813a36c4d7059bedb54029b4901f111 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 31 Oct 2025 16:45:39 -0300 Subject: [PATCH 48/82] Add included agents in extensions search (#41679) Given agent servers will soon 
be a thing, I'm adding Claude Code, Gemini CLI, and Codex CLI as included agents in case anyone comes first to search them as extensions before looking up on the agent panel. Release Notes: - N/A --- crates/extensions_ui/src/extensions_ui.rs | 24 +++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index cf59f7d200962b2e541c429c7918f622d6e06587..539f2d8864134effdf0a3edcdefa4ca213b7eff3 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -225,6 +225,9 @@ impl ExtensionFilter { #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] enum Feature { + AgentClaude, + AgentCodex, + AgentGemini, ExtensionRuff, ExtensionTailwind, Git, @@ -244,6 +247,9 @@ fn keywords_by_feature() -> &'static BTreeMap> { static KEYWORDS_BY_FEATURE: OnceLock>> = OnceLock::new(); KEYWORDS_BY_FEATURE.get_or_init(|| { BTreeMap::from_iter([ + (Feature::AgentClaude, vec!["claude", "claude code"]), + (Feature::AgentCodex, vec!["codex", "codex cli"]), + (Feature::AgentGemini, vec!["gemini", "gemini cli"]), (Feature::ExtensionRuff, vec!["ruff"]), (Feature::ExtensionTailwind, vec!["tail", "tailwind"]), (Feature::Git, vec!["git"]), @@ -1422,6 +1428,24 @@ impl ExtensionsPage { for feature in &self.upsells { let banner = match feature { + Feature::AgentClaude => self.render_feature_upsell_banner( + "Claude Code support is built-in to Zed!".into(), + "https://zed.dev/docs/ai/external-agents#claude-code".into(), + false, + cx, + ), + Feature::AgentCodex => self.render_feature_upsell_banner( + "Codex CLI support is built-in to Zed!".into(), + "https://zed.dev/docs/ai/external-agents#codex-cli".into(), + false, + cx, + ), + Feature::AgentGemini => self.render_feature_upsell_banner( + "Gemini CLI support is built-in to Zed!".into(), + "https://zed.dev/docs/ai/external-agents#gemini-cli".into(), + false, + cx, + ), Feature::ExtensionRuff => 
self.render_feature_upsell_banner( "Ruff (linter for Python) support is built-in to Zed!".into(), "https://zed.dev/docs/languages/python#code-formatting--linting".into(), From d3d71995074282d7d3b4557230374a1044fefdb3 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Fri, 31 Oct 2025 13:29:13 -0700 Subject: [PATCH 49/82] Fix `release.yml` workflow (#41675) Closes #ISSUE Release Notes: - N/A *or* Added/Fixed/Improved ... --- .github/workflows/release.yml | 8 +++++--- tooling/xtask/src/tasks/workflows/release.rs | 17 ++++++++++++++--- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3068232c1a8096fef89de5a8defb4c2073c4a01a..e34242b8089431ac98d21abebc5400a7db041907 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -162,12 +162,14 @@ jobs: - name: mkdir -p target/ run: mkdir -p target/ shell: bash -euxo pipefail {0} - - name: script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true - run: script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true + - name: release::create_draft_release::generate_release_notes + run: node --redirect-warnings=/dev/null ./script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md shell: bash -euxo pipefail {0} - - name: script/create-draft-release target/release-notes.md + - name: release::create_draft_release::create_release run: script/create-draft-release target/release-notes.md shell: bash -euxo pipefail {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} timeout-minutes: 60 bundle_linux_arm64: needs: diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index 54790e4d864646d7e8d484f9f9b5ec8cc9726796..b5aff555e0f55971e77aa754e23d783938950953 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -164,6 
+164,17 @@ fn upload_release_assets(deps: &[&NamedJob], bundle_jobs: &ReleaseBundleJobs) -> } fn create_draft_release() -> NamedJob { + fn generate_release_notes() -> Step { + named::bash( + r#"node --redirect-warnings=/dev/null ./script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md"#, + ) + } + + fn create_release() -> Step { + named::bash("script/create-draft-release target/release-notes.md") + .add_env(("GITHUB_TOKEN", "${{ secrets.GITHUB_TOKEN }}")) + } + named::job( release_job(&[]) .runs_on(runners::LINUX_SMALL) @@ -177,10 +188,10 @@ fn create_draft_release() -> NamedJob { .add_with(("clean", false)) .add_with(("ref", "${{ github.ref }}")), ) - .add_step(steps::script("script/determine-release-channel")) // export RELEASE_CHANNEL and RELEASE_VERSION + .add_step(steps::script("script/determine-release-channel")) .add_step(steps::script("mkdir -p target/")) - .add_step(steps::script(r#"script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true"#)) - .add_step(steps::script("script/create-draft-release target/release-notes.md")), + .add_step(generate_release_notes()) + .add_step(create_release()), ) } From bf79592465c1878e49aaa66c70459db267a51172 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 31 Oct 2025 18:15:52 -0300 Subject: [PATCH 50/82] git_ui: Adjust stash picker (#41688) Just tidying it up by removing the unnecessary eye icon buttons in all list items and adding that action in the form of a button in the footer, closer to all other actions. Also reordering the footer buttons so that the likely most common action is in the far right. 
Release Notes: - N/A --- crates/git_ui/src/stash_picker.rs | 99 ++++++++++++------------------- 1 file changed, 37 insertions(+), 62 deletions(-) diff --git a/crates/git_ui/src/stash_picker.rs b/crates/git_ui/src/stash_picker.rs index 58f17d7a3bb087ff058878f7889d6d83bc1727a6..aa958ab62da6793c7e6fc7fd7b9f51d4ab3c33aa 100644 --- a/crates/git_ui/src/stash_picker.rs +++ b/crates/git_ui/src/stash_picker.rs @@ -5,16 +5,14 @@ use git::stash::StashEntry; use gpui::{ Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement, Render, - SharedString, Styled, Subscription, Task, WeakEntity, Window, actions, rems, svg, + SharedString, Styled, Subscription, Task, WeakEntity, Window, actions, rems, }; use picker::{Picker, PickerDelegate}; use project::git_store::{Repository, RepositoryEvent}; use std::sync::Arc; use time::{OffsetDateTime, UtcOffset}; use time_format; -use ui::{ - ButtonLike, HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*, -}; +use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*}; use util::ResultExt; use workspace::notifications::DetachAndPromptErr; use workspace::{ModalView, Workspace}; @@ -434,7 +432,7 @@ impl PickerDelegate for StashListDelegate { ix: usize, selected: bool, _window: &mut Window, - cx: &mut Context>, + _cx: &mut Context>, ) -> Option { let entry_match = &self.matches[ix]; @@ -446,23 +444,14 @@ impl PickerDelegate for StashListDelegate { .into_any_element(); let branch_name = entry_match.entry.branch.clone().unwrap_or_default(); - let branch_label = h_flex() + let branch_info = h_flex() .gap_1p5() .w_full() .child( - h_flex() - .gap_0p5() - .child( - Icon::new(IconName::GitBranch) - .color(Color::Muted) - .size(IconSize::Small), - ) - .child( - Label::new(branch_name) - .truncate() - .color(Color::Muted) - .size(LabelSize::Small), - ), + Label::new(branch_name) + 
.truncate() + .color(Color::Muted) + .size(LabelSize::Small), ) .child( Label::new("•") @@ -476,42 +465,12 @@ impl PickerDelegate for StashListDelegate { .size(LabelSize::Small), ); - let show_button = div() - .group("show-button-hover") - .child( - ButtonLike::new("show-button") - .child( - svg() - .size(IconSize::Medium.rems()) - .flex_none() - .path(IconName::Eye.path()) - .text_color(Color::Default.color(cx)) - .group_hover("show-button-hover", |this| { - this.text_color(Color::Accent.color(cx)) - }) - .hover(|this| this.text_color(Color::Accent.color(cx))), - ) - .tooltip(Tooltip::for_action_title("Show Stash", &ShowStashItem)) - .on_click(cx.listener(move |picker, _, window, cx| { - cx.stop_propagation(); - picker.delegate.show_stash_at(ix, window, cx); - })), - ) - .into_any_element(); - Some( ListItem::new(SharedString::from(format!("stash-{ix}"))) .inset(true) .spacing(ListItemSpacing::Sparse) .toggle_state(selected) - .end_slot(show_button) - .child( - v_flex() - .w_full() - .overflow_hidden() - .child(stash_label) - .child(branch_label.into_element()), - ) + .child(v_flex().w_full().child(stash_label).child(branch_info)) .tooltip(Tooltip::text(format!( "stash@{{{}}}", entry_match.entry.index @@ -535,15 +494,35 @@ impl PickerDelegate for StashListDelegate { .border_t_1() .border_color(cx.theme().colors().border_variant) .child( - Button::new("apply-stash", "Apply") + Button::new("drop-stash", "Drop") .key_binding( - KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) - .map(|kb| kb.size(rems_from_px(12.))), + KeyBinding::for_action_in( + &stash_picker::DropStashItem, + &focus_handle, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), ) .on_click(|_, window, cx| { - window.dispatch_action(menu::Confirm.boxed_clone(), cx) + window.dispatch_action(stash_picker::DropStashItem.boxed_clone(), cx) }), ) + .child( + Button::new("view-stash", "View") + .key_binding( + KeyBinding::for_action_in( + &stash_picker::ShowStashItem, + &focus_handle, + cx, + ) + 
.map(|kb| kb.size(rems_from_px(12.))), + ) + .on_click(cx.listener(move |picker, _, window, cx| { + cx.stop_propagation(); + let selected_ix = picker.delegate.selected_index(); + picker.delegate.show_stash_at(selected_ix, window, cx); + })), + ) .child( Button::new("pop-stash", "Pop") .key_binding( @@ -555,17 +534,13 @@ impl PickerDelegate for StashListDelegate { }), ) .child( - Button::new("drop-stash", "Drop") + Button::new("apply-stash", "Apply") .key_binding( - KeyBinding::for_action_in( - &stash_picker::DropStashItem, - &focus_handle, - cx, - ) - .map(|kb| kb.size(rems_from_px(12.))), + KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx) + .map(|kb| kb.size(rems_from_px(12.))), ) .on_click(|_, window, cx| { - window.dispatch_action(stash_picker::DropStashItem.boxed_clone(), cx) + window.dispatch_action(menu::Confirm.boxed_clone(), cx) }), ) .into_any(), From d406409b72f57eda6dfdd64f70aa36cab598f304 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 31 Oct 2025 18:18:22 -0300 Subject: [PATCH 51/82] Fix categorization of agent server extensions (#41689) We missed making extensions that provide agent servers fill the `provides` field with `agent-servers`, and thus, filtering for this type of extension in both the app and site wouldn't return anything. 
Release Notes: - N/A --- crates/extension_cli/src/main.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/extension_cli/src/main.rs b/crates/extension_cli/src/main.rs index 1dd65fe446232effc932a497601212cd039b6eed..524e14b0cedcebef259948d73b530236525180c0 100644 --- a/crates/extension_cli/src/main.rs +++ b/crates/extension_cli/src/main.rs @@ -145,6 +145,10 @@ fn extension_provides(manifest: &ExtensionManifest) -> BTreeSet Date: Fri, 31 Oct 2025 22:47:49 +0100 Subject: [PATCH 52/82] git: Make GitPanel more responsive to long-running staging ops (#41667) Currently, this only applies to long-running individually selected unstaged files in the git panel. Next up I would like to make this work for `Stage All`/`Unstage All` however this will most likely require pushing `PendingOperation` into `GitStore` (from the `GitPanel`). Release Notes: - N/A --- crates/git_ui/src/git_panel.rs | 71 +++++++------ crates/project/src/git_store.rs | 171 ++++++++++++++++++-------------- 2 files changed, 135 insertions(+), 107 deletions(-) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 9ff8602a18fd1a7eec5804deecee5c21921c6eee..1be561db9aa3e201e4c98d2e1c4572ed930d5034 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -286,6 +286,12 @@ struct PendingOperation { op_id: usize, } +impl PendingOperation { + fn contains_path(&self, path: &RepoPath) -> bool { + self.entries.iter().any(|p| &p.repo_path == path) + } +} + pub struct GitPanel { pub(crate) active_repository: Option>, pub(crate) commit_editor: Entity, @@ -1240,19 +1246,21 @@ impl GitPanel { }; let (stage, repo_paths) = match entry { GitListEntry::Status(status_entry) => { - if status_entry.status.staging().is_fully_staged() { + let repo_paths = vec![status_entry.clone()]; + let stage = if let Some(status) = self.entry_staging(&status_entry) { + !status.is_fully_staged() + } else if status_entry.status.staging().is_fully_staged() { if let Some(op) = 
self.bulk_staging.clone() && op.anchor == status_entry.repo_path { self.bulk_staging = None; } - - (false, vec![status_entry.clone()]) + false } else { self.set_bulk_staging_anchor(status_entry.repo_path.clone(), cx); - - (true, vec![status_entry.clone()]) - } + true + }; + (stage, repo_paths) } GitListEntry::Header(section) => { let goal_staged_state = !self.header_state(section.header).selected(); @@ -2677,10 +2685,7 @@ impl GitPanel { if self.pending.iter().any(|pending| { pending.target_status == TargetStatus::Reverted && !pending.finished - && pending - .entries - .iter() - .any(|pending| pending.repo_path == entry.repo_path) + && pending.contains_path(&entry.repo_path) }) { continue; } @@ -2731,10 +2736,7 @@ impl GitPanel { last_pending_staged = pending.entries.first().cloned(); } if let Some(single_staged) = &single_staged_entry - && pending - .entries - .iter() - .any(|entry| entry.repo_path == single_staged.repo_path) + && pending.contains_path(&single_staged.repo_path) { pending_status_for_single_staged = Some(pending.target_status); } @@ -2797,7 +2799,7 @@ impl GitPanel { && let Some(index) = bulk_staging_anchor_new_index && let Some(entry) = self.entries.get(index) && let Some(entry) = entry.status_entry() - && self.entry_staging(entry) == StageStatus::Staged + && self.entry_staging(entry).unwrap_or(entry.staging) == StageStatus::Staged { self.bulk_staging = bulk_staging; } @@ -2845,39 +2847,47 @@ impl GitPanel { self.entry_count += 1; if repo.had_conflict_on_last_merge_head_change(&status_entry.repo_path) { self.conflicted_count += 1; - if self.entry_staging(status_entry).has_staged() { + if self + .entry_staging(status_entry) + .unwrap_or(status_entry.staging) + .has_staged() + { self.conflicted_staged_count += 1; } } else if status_entry.status.is_created() { self.new_count += 1; - if self.entry_staging(status_entry).has_staged() { + if self + .entry_staging(status_entry) + .unwrap_or(status_entry.staging) + .has_staged() + { self.new_staged_count += 
1; } } else { self.tracked_count += 1; - if self.entry_staging(status_entry).has_staged() { + if self + .entry_staging(status_entry) + .unwrap_or(status_entry.staging) + .has_staged() + { self.tracked_staged_count += 1; } } } } - fn entry_staging(&self, entry: &GitStatusEntry) -> StageStatus { + fn entry_staging(&self, entry: &GitStatusEntry) -> Option { for pending in self.pending.iter().rev() { - if pending - .entries - .iter() - .any(|pending_entry| pending_entry.repo_path == entry.repo_path) - { + if pending.contains_path(&entry.repo_path) { match pending.target_status { - TargetStatus::Staged => return StageStatus::Staged, - TargetStatus::Unstaged => return StageStatus::Unstaged, + TargetStatus::Staged => return Some(StageStatus::Staged), + TargetStatus::Unstaged => return Some(StageStatus::Unstaged), TargetStatus::Reverted => continue, TargetStatus::Unchanged => continue, } } } - entry.staging + None } pub(crate) fn has_staged_changes(&self) -> bool { @@ -3710,7 +3720,8 @@ impl GitPanel { let ix = self.entry_by_path(&repo_path, cx)?; let entry = self.entries.get(ix)?; - let entry_staging = self.entry_staging(entry.status_entry()?); + let status = entry.status_entry()?; + let entry_staging = self.entry_staging(status).unwrap_or(status.staging); let checkbox = Checkbox::new("stage-file", entry_staging.as_bool().into()) .disabled(!self.has_write_access(cx)) @@ -4004,8 +4015,8 @@ impl GitPanel { let checkbox_id: ElementId = ElementId::Name(format!("entry_{}_{}_checkbox", display_name, ix).into()); - let entry_staging = self.entry_staging(entry); - let mut is_staged: ToggleState = self.entry_staging(entry).as_bool().into(); + let entry_staging = self.entry_staging(entry).unwrap_or(entry.staging); + let mut is_staged: ToggleState = entry_staging.as_bool().into(); if self.show_placeholders && !self.has_staged_changes() && !entry.status.is_created() { is_staged = ToggleState::Selected; } diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs 
index 03642df3b4f395e190d03feb04203f7595aaf3cf..e29710682b45125ff06a0cc8390e768a11289c6d 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -3717,20 +3717,15 @@ impl Repository { Some(self.git_store.upgrade()?.read(cx).buffer_store.clone()) } - pub fn stage_entries( + fn save_buffers<'a>( &self, - entries: Vec, + entries: impl IntoIterator, cx: &mut Context, - ) -> Task> { - if entries.is_empty() { - return Task::ready(Ok(())); - } - let id = self.id; - + ) -> Vec>> { let mut save_futures = Vec::new(); if let Some(buffer_store) = self.buffer_store(cx) { buffer_store.update(cx, |buffer_store, cx| { - for path in &entries { + for path in entries { let Some(project_path) = self.repo_path_to_project_path(path, cx) else { continue; }; @@ -3746,37 +3741,64 @@ impl Repository { } }) } + save_futures + } + + pub fn stage_entries( + &self, + entries: Vec, + cx: &mut Context, + ) -> Task> { + if entries.is_empty() { + return Task::ready(Ok(())); + } + let id = self.id; + let save_tasks = self.save_buffers(&entries, cx); + let paths = entries + .iter() + .map(|p| p.as_unix_str()) + .collect::>() + .join(" "); + let status = format!("git add {paths}"); + let job_key = match entries.len() { + 1 => Some(GitJobKey::WriteIndex(entries[0].clone())), + _ => None, + }; cx.spawn(async move |this, cx| { - for save_future in save_futures { - save_future.await?; + for save_task in save_tasks { + save_task.await?; } this.update(cx, |this, _| { - this.send_job(None, move |git_repo, _cx| async move { - match git_repo { - RepositoryState::Local { - backend, - environment, - .. 
- } => backend.stage_paths(entries, environment.clone()).await, - RepositoryState::Remote { project_id, client } => { - client - .request(proto::Stage { - project_id: project_id.0, - repository_id: id.to_proto(), - paths: entries - .into_iter() - .map(|repo_path| repo_path.to_proto()) - .collect(), - }) - .await - .context("sending stage request")?; + this.send_keyed_job( + job_key, + Some(status.into()), + move |git_repo, _cx| async move { + match git_repo { + RepositoryState::Local { + backend, + environment, + .. + } => backend.stage_paths(entries, environment.clone()).await, + RepositoryState::Remote { project_id, client } => { + client + .request(proto::Stage { + project_id: project_id.0, + repository_id: id.to_proto(), + paths: entries + .into_iter() + .map(|repo_path| repo_path.to_proto()) + .collect(), + }) + .await + .context("sending stage request")?; - Ok(()) + Ok(()) + } } - } - }) + }, + ) })? .await??; @@ -3793,57 +3815,52 @@ impl Repository { return Task::ready(Ok(())); } let id = self.id; - - let mut save_futures = Vec::new(); - if let Some(buffer_store) = self.buffer_store(cx) { - buffer_store.update(cx, |buffer_store, cx| { - for path in &entries { - let Some(project_path) = self.repo_path_to_project_path(path, cx) else { - continue; - }; - if let Some(buffer) = buffer_store.get_by_path(&project_path) - && buffer - .read(cx) - .file() - .is_some_and(|file| file.disk_state().exists()) - && buffer.read(cx).has_unsaved_edits() - { - save_futures.push(buffer_store.save_buffer(buffer, cx)); - } - } - }) - } + let save_tasks = self.save_buffers(&entries, cx); + let paths = entries + .iter() + .map(|p| p.as_unix_str()) + .collect::>() + .join(" "); + let status = format!("git reset {paths}"); + let job_key = match entries.len() { + 1 => Some(GitJobKey::WriteIndex(entries[0].clone())), + _ => None, + }; cx.spawn(async move |this, cx| { - for save_future in save_futures { - save_future.await?; + for save_task in save_tasks { + save_task.await?; } 
this.update(cx, |this, _| { - this.send_job(None, move |git_repo, _cx| async move { - match git_repo { - RepositoryState::Local { - backend, - environment, - .. - } => backend.unstage_paths(entries, environment).await, - RepositoryState::Remote { project_id, client } => { - client - .request(proto::Unstage { - project_id: project_id.0, - repository_id: id.to_proto(), - paths: entries - .into_iter() - .map(|repo_path| repo_path.to_proto()) - .collect(), - }) - .await - .context("sending unstage request")?; + this.send_keyed_job( + job_key, + Some(status.into()), + move |git_repo, _cx| async move { + match git_repo { + RepositoryState::Local { + backend, + environment, + .. + } => backend.unstage_paths(entries, environment).await, + RepositoryState::Remote { project_id, client } => { + client + .request(proto::Unstage { + project_id: project_id.0, + repository_id: id.to_proto(), + paths: entries + .into_iter() + .map(|repo_path| repo_path.to_proto()) + .collect(), + }) + .await + .context("sending unstage request")?; - Ok(()) + Ok(()) + } } - } - }) + }, + ) })? .await??; From ecbdffc84f1165323f256e8485ae84320550c759 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Fri, 31 Oct 2025 19:02:51 -0400 Subject: [PATCH 53/82] debugger: Fix Debugpy attach with connect session startup (#41690) Closes #38345, #34882, #33280 Debugpy has four distinct configuration scenarios, which are: 1. launch 2. attach with process id 3. attach with listen 4. attach with connect Spawning Debugpy directly works with the first three scenarios but not with "attach with connect". Which requires host/port arguments being passed in both with an attach request and when starting up Debugpy. This PR passes in the right arguments when spawning Debugpy in an attach with connect scenario, thus fixing the bug. 
The VsCode extension comment that explains this: https://github.com/microsoft/vscode-python-debugger/blob/98f5b93ee4259f99b679d7c45163ba7babcbff6d/src/extension/debugger/adapter/factory.ts#L43-L51 Release Notes: - debugger: Fix Python attach-based sessions not working with `connect` or `port` arguments --- Cargo.lock | 3 + crates/dap_adapters/Cargo.toml | 4 + crates/dap_adapters/src/dap_adapters.rs | 62 ++++++ crates/dap_adapters/src/python.rs | 240 ++++++++++++++++++++++-- 4 files changed, 295 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 25a22e64c6db0632ca1357cebe02f0bbe04fa0a8..ec55e4af77f78a9476b147744a9973d758d0e630 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4528,12 +4528,15 @@ dependencies = [ "fs", "futures 0.3.31", "gpui", + "http_client", "json_dotpath", "language", "log", + "node_runtime", "paths", "serde", "serde_json", + "settings", "smol", "task", "util", diff --git a/crates/dap_adapters/Cargo.toml b/crates/dap_adapters/Cargo.toml index 253674c0f3da16574b4303faf679abeb310756d8..7bdf39c74a43165d252d5d53d2afef776bf63f29 100644 --- a/crates/dap_adapters/Cargo.toml +++ b/crates/dap_adapters/Cargo.toml @@ -41,6 +41,10 @@ util.workspace = true [dev-dependencies] dap = { workspace = true, features = ["test-support"] } +fs = { workspace = true, features = ["test-support"] } gpui = { workspace = true, features = ["test-support"] } +http_client.workspace = true +node_runtime.workspace = true +settings = { workspace = true, features = ["test-support"] } task = { workspace = true, features = ["test-support"] } util = { workspace = true, features = ["test-support"] } diff --git a/crates/dap_adapters/src/dap_adapters.rs b/crates/dap_adapters/src/dap_adapters.rs index a4e6beb2495ebe1eec9f08ddb8394b498c0ae410..d8a706ba414af2c9e0beb1cffe8357bcece1dc52 100644 --- a/crates/dap_adapters/src/dap_adapters.rs +++ b/crates/dap_adapters/src/dap_adapters.rs @@ -4,6 +4,8 @@ mod go; mod javascript; mod python; +#[cfg(test)] +use std::path::PathBuf; 
use std::sync::Arc; use anyhow::Result; @@ -38,3 +40,63 @@ pub fn init(cx: &mut App) { } }) } + +#[cfg(test)] +struct MockDelegate { + worktree_root: PathBuf, +} + +#[cfg(test)] +impl MockDelegate { + fn new() -> Arc { + Arc::new(Self { + worktree_root: PathBuf::from("/tmp/test"), + }) + } +} + +#[cfg(test)] +#[async_trait::async_trait] +impl adapters::DapDelegate for MockDelegate { + fn worktree_id(&self) -> settings::WorktreeId { + settings::WorktreeId::from_usize(0) + } + + fn worktree_root_path(&self) -> &std::path::Path { + &self.worktree_root + } + + fn http_client(&self) -> Arc { + unimplemented!("Not needed for tests") + } + + fn node_runtime(&self) -> node_runtime::NodeRuntime { + unimplemented!("Not needed for tests") + } + + fn toolchain_store(&self) -> Arc { + unimplemented!("Not needed for tests") + } + + fn fs(&self) -> Arc { + unimplemented!("Not needed for tests") + } + + fn output_to_console(&self, _msg: String) {} + + async fn which(&self, _command: &std::ffi::OsStr) -> Option { + None + } + + async fn read_text_file(&self, _path: &util::rel_path::RelPath) -> Result { + Ok(String::new()) + } + + async fn shell_env(&self) -> collections::HashMap { + collections::HashMap::default() + } + + fn is_headless(&self) -> bool { + false + } +} diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs index 66005db77029bd28c66f458bef7f1d2a1ad7a685..e718f66c78099044baed837da0ddc7bfa96ffa1c 100644 --- a/crates/dap_adapters/src/python.rs +++ b/crates/dap_adapters/src/python.rs @@ -23,6 +23,11 @@ use std::{ use util::command::new_smol_command; use util::{ResultExt, paths::PathStyle, rel_path::RelPath}; +enum DebugpyLaunchMode<'a> { + Normal, + AttachWithConnect { host: Option<&'a str> }, +} + #[derive(Default)] pub(crate) struct PythonDebugAdapter { base_venv_path: OnceCell, String>>, @@ -36,10 +41,11 @@ impl PythonDebugAdapter { const LANGUAGE_NAME: &'static str = "Python"; - async fn generate_debugpy_arguments( - host: &Ipv4Addr, + 
async fn generate_debugpy_arguments<'a>( + host: &'a Ipv4Addr, port: u16, - user_installed_path: Option<&Path>, + launch_mode: DebugpyLaunchMode<'a>, + user_installed_path: Option<&'a Path>, user_args: Option>, ) -> Result> { let mut args = if let Some(user_installed_path) = user_installed_path { @@ -62,7 +68,20 @@ impl PythonDebugAdapter { args.extend(if let Some(args) = user_args { args } else { - vec![format!("--host={}", host), format!("--port={}", port)] + match launch_mode { + DebugpyLaunchMode::Normal => { + vec![format!("--host={}", host), format!("--port={}", port)] + } + DebugpyLaunchMode::AttachWithConnect { host } => { + let mut args = vec!["connect".to_string()]; + + if let Some(host) = host { + args.push(format!("{host}:")); + } + args.push(format!("{port}")); + args + } + } }); Ok(args) } @@ -315,7 +334,46 @@ impl PythonDebugAdapter { user_env: Option>, python_from_toolchain: Option, ) -> Result { - let tcp_connection = config.tcp_connection.clone().unwrap_or_default(); + let mut tcp_connection = config.tcp_connection.clone().unwrap_or_default(); + + let (config_port, config_host) = config + .config + .get("connect") + .map(|value| { + ( + value + .get("port") + .and_then(|val| val.as_u64().map(|p| p as u16)), + value.get("host").and_then(|val| val.as_str()), + ) + }) + .unwrap_or_else(|| { + ( + config + .config + .get("port") + .and_then(|port| port.as_u64().map(|p| p as u16)), + config.config.get("host").and_then(|host| host.as_str()), + ) + }); + + let is_attach_with_connect = if config + .config + .get("request") + .is_some_and(|val| val.as_str().is_some_and(|request| request == "attach")) + { + if tcp_connection.host.is_some() && config_host.is_some() { + bail!("Cannot have two different hosts in debug configuration") + } else if tcp_connection.port.is_some() && config_port.is_some() { + bail!("Cannot have two different ports in debug configuration") + } + + tcp_connection.port = config_port; + DebugpyLaunchMode::AttachWithConnect { host: 
config_host } + } else { + DebugpyLaunchMode::Normal + }; + let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?; let python_path = if let Some(toolchain) = python_from_toolchain { @@ -330,6 +388,7 @@ impl PythonDebugAdapter { let arguments = Self::generate_debugpy_arguments( &host, port, + is_attach_with_connect, user_installed_path.as_deref(), user_args, ) @@ -824,7 +883,148 @@ mod tests { use util::path; use super::*; - use std::{net::Ipv4Addr, path::PathBuf}; + use task::TcpArgumentsTemplate; + + #[gpui::test] + async fn test_tcp_connection_conflict_with_connect_args() { + let adapter = PythonDebugAdapter { + base_venv_path: OnceCell::new(), + debugpy_whl_base_path: OnceCell::new(), + }; + + let config_with_port_conflict = json!({ + "request": "attach", + "connect": { + "port": 5679 + } + }); + + let tcp_connection = TcpArgumentsTemplate { + host: None, + port: Some(5678), + timeout: None, + }; + + let task_def = DebugTaskDefinition { + label: "test".into(), + adapter: PythonDebugAdapter::ADAPTER_NAME.into(), + config: config_with_port_conflict, + tcp_connection: Some(tcp_connection.clone()), + }; + + let result = adapter + .get_installed_binary( + &MockDelegate::new(), + &task_def, + None, + None, + None, + Some("python3".to_string()), + ) + .await; + + assert!(result.is_err()); + assert!( + result + .unwrap_err() + .to_string() + .contains("Cannot have two different ports") + ); + + let host = Ipv4Addr::new(127, 0, 0, 1); + let config_with_host_conflict = json!({ + "request": "attach", + "connect": { + "host": "192.168.1.1", + "port": 5678 + } + }); + + let tcp_connection_with_host = TcpArgumentsTemplate { + host: Some(host), + port: None, + timeout: None, + }; + + let task_def_host = DebugTaskDefinition { + label: "test".into(), + adapter: PythonDebugAdapter::ADAPTER_NAME.into(), + config: config_with_host_conflict, + tcp_connection: Some(tcp_connection_with_host), + }; + + let result_host = adapter + .get_installed_binary( + 
&MockDelegate::new(), + &task_def_host, + None, + None, + None, + Some("python3".to_string()), + ) + .await; + + assert!(result_host.is_err()); + assert!( + result_host + .unwrap_err() + .to_string() + .contains("Cannot have two different hosts") + ); + } + + #[gpui::test] + async fn test_attach_with_connect_mode_generates_correct_arguments() { + let host = Ipv4Addr::new(127, 0, 0, 1); + let port = 5678; + + let args_without_host = PythonDebugAdapter::generate_debugpy_arguments( + &host, + port, + DebugpyLaunchMode::AttachWithConnect { host: None }, + None, + None, + ) + .await + .unwrap(); + + let expected_suffix = path!("debug_adapters/Debugpy/debugpy/adapter"); + assert!(args_without_host[0].ends_with(expected_suffix)); + assert_eq!(args_without_host[1], "connect"); + assert_eq!(args_without_host[2], "5678"); + + let args_with_host = PythonDebugAdapter::generate_debugpy_arguments( + &host, + port, + DebugpyLaunchMode::AttachWithConnect { + host: Some("192.168.1.100"), + }, + None, + None, + ) + .await + .unwrap(); + + assert!(args_with_host[0].ends_with(expected_suffix)); + assert_eq!(args_with_host[1], "connect"); + assert_eq!(args_with_host[2], "192.168.1.100:"); + assert_eq!(args_with_host[3], "5678"); + + let args_normal = PythonDebugAdapter::generate_debugpy_arguments( + &host, + port, + DebugpyLaunchMode::Normal, + None, + None, + ) + .await + .unwrap(); + + assert!(args_normal[0].ends_with(expected_suffix)); + assert_eq!(args_normal[1], "--host=127.0.0.1"); + assert_eq!(args_normal[2], "--port=5678"); + assert!(!args_normal.contains(&"connect".to_string())); + } #[gpui::test] async fn test_debugpy_install_path_cases() { @@ -833,15 +1033,25 @@ mod tests { // Case 1: User-defined debugpy path (highest precedence) let user_path = PathBuf::from("/custom/path/to/debugpy/src/debugpy/adapter"); - let user_args = - PythonDebugAdapter::generate_debugpy_arguments(&host, port, Some(&user_path), None) - .await - .unwrap(); + let user_args = 
PythonDebugAdapter::generate_debugpy_arguments( + &host, + port, + DebugpyLaunchMode::Normal, + Some(&user_path), + None, + ) + .await + .unwrap(); - // Case 2: Venv-installed debugpy (uses -m debugpy.adapter) - let venv_args = PythonDebugAdapter::generate_debugpy_arguments(&host, port, None, None) - .await - .unwrap(); + let venv_args = PythonDebugAdapter::generate_debugpy_arguments( + &host, + port, + DebugpyLaunchMode::Normal, + None, + None, + ) + .await + .unwrap(); assert_eq!(user_args[0], "/custom/path/to/debugpy/src/debugpy/adapter"); assert_eq!(user_args[1], "--host=127.0.0.1"); @@ -856,6 +1066,7 @@ mod tests { let user_args = PythonDebugAdapter::generate_debugpy_arguments( &host, port, + DebugpyLaunchMode::Normal, Some(&user_path), Some(vec!["foo".into()]), ) @@ -864,6 +1075,7 @@ mod tests { let venv_args = PythonDebugAdapter::generate_debugpy_arguments( &host, port, + DebugpyLaunchMode::Normal, None, Some(vec!["foo".into()]), ) From 04f7b08ab9dc496c6e2f3948fdf6469709151312 Mon Sep 17 00:00:00 2001 From: Joe Innes Date: Sat, 1 Nov 2025 05:24:59 +0100 Subject: [PATCH 54/82] Give visual feedback when an operation is pending (#41686) Currently, if a commit operation takes some time, there's no visual feedback in the UI that anything's happening. This PR changes the colour of the text on the button to the `Color::Disabled` colour when a commit operation is pending. 
Release Notes: - Improved UI feedback when a commit is in progress --------- Co-authored-by: Danilo Leal --- crates/git_ui/src/git_panel.rs | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 1be561db9aa3e201e4c98d2e1c4572ed930d5034..0c43058c067aa9b6abcc333e58f7e4933d783b73 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -58,8 +58,8 @@ use std::{collections::HashSet, sync::Arc, time::Duration, usize}; use strum::{IntoEnumIterator, VariantNames}; use time::OffsetDateTime; use ui::{ - Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, IconPosition, Label, LabelSize, - PopoverMenu, ScrollAxes, Scrollbars, SplitButton, Tooltip, WithScrollbar, prelude::*, + ButtonLike, Checkbox, CommonAnimationExt, ContextMenu, ElevationIndex, PopoverMenu, ScrollAxes, + Scrollbars, SplitButton, Tooltip, WithScrollbar, prelude::*, }; use util::paths::PathStyle; use util::{ResultExt, TryFutureExt, maybe}; @@ -3505,6 +3505,12 @@ impl GitPanel { let amend = self.amend_pending(); let signoff = self.signoff_enabled; + let label_color = if self.pending_commit.is_some() { + Color::Disabled + } else { + Color::Default + }; + div() .id("commit-wrapper") .on_hover(cx.listener(move |this, hovered, _, cx| { @@ -3513,14 +3519,15 @@ impl GitPanel { cx.notify() })) .child(SplitButton::new( - ui::ButtonLike::new_rounded_left(ElementId::Name( + ButtonLike::new_rounded_left(ElementId::Name( format!("split-button-left-{}", title).into(), )) - .layer(ui::ElevationIndex::ModalSurface) - .size(ui::ButtonSize::Compact) + .layer(ElevationIndex::ModalSurface) + .size(ButtonSize::Compact) .child( - div() - .child(Label::new(title).size(LabelSize::Small)) + Label::new(title) + .size(LabelSize::Small) + .color(label_color) .mr_0p5(), ) .on_click({ From ade0f1342cdfa7dd90c57f30286c88b418e370cd Mon Sep 17 00:00:00 2001 From: Dijana Pavlovic Date: Sat, 1 Nov 2025 
05:29:58 +0100 Subject: [PATCH 55/82] agent_ui: Prevent mode selector tooltip from going off-screen (#41589) Closes #41458 Dynamically position mode selector tooltip to prevent clipping. Position tooltip on the right when panel is docked left, otherwise on the left. This ensures the tooltip remains visible regardless of panel position. **Note:** The tooltip currently vertically aligns with the bottom of the menu rather than individual items. It would be great if it could be aligned with the option it explains, but that doesn't seem trivial to implement and may not be important enough at the moment. Before: Screenshot 2025-10-30 at 22 21 09 After: Screenshot 2025-10-30 at 17 26 52 Screenshot 2025-10-30 at 17 27 15 Release Notes: - Improved the agent panel's mode selector by preventing it from going off-screen when the panel is docked to the left. --- crates/agent_ui/src/acp/mode_selector.rs | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/crates/agent_ui/src/acp/mode_selector.rs b/crates/agent_ui/src/acp/mode_selector.rs index 36970a29ab7fd30f175d8128f8bbd3c55b71b605..aed151de728ce2e802154a73d4add9681a410933 100644 --- a/crates/agent_ui/src/acp/mode_selector.rs +++ b/crates/agent_ui/src/acp/mode_selector.rs @@ -1,8 +1,10 @@ use acp_thread::AgentSessionModes; use agent_client_protocol as acp; use agent_servers::AgentServer; +use agent_settings::AgentSettings; use fs::Fs; use gpui::{Context, Entity, FocusHandle, WeakEntity, Window, prelude::*}; +use settings::Settings as _; use std::{rc::Rc, sync::Arc}; use ui::{ Button, ContextMenu, ContextMenuEntry, DocumentationEdge, DocumentationSide, KeyBinding, @@ -84,6 +86,14 @@ impl ModeSelector { let current_mode = self.connection.current_mode(); let default_mode = self.agent_server.default_mode(cx); + let settings = AgentSettings::get_global(cx); + let side = match settings.dock { + settings::DockPosition::Left => DocumentationSide::Right, + settings::DockPosition::Bottom |
settings::DockPosition::Right => { + DocumentationSide::Left + } + }; + for mode in all_modes { let is_selected = &mode.id == ¤t_mode; let is_default = Some(&mode.id) == default_mode.as_ref(); @@ -91,7 +101,7 @@ impl ModeSelector { .toggleable(IconPosition::End, is_selected); let entry = if let Some(description) = &mode.description { - entry.documentation_aside(DocumentationSide::Left, DocumentationEdge::Bottom, { + entry.documentation_aside(side, DocumentationEdge::Bottom, { let description = description.clone(); move |cx| { From 1552e13799fae7b4efda83bf0bb84f951a1a6319 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 31 Oct 2025 22:55:12 -0600 Subject: [PATCH 56/82] Fix telemetry in release builds (#41695) This was inadvertently broken in v0.211.1-pre when we rewrote the release build Release Notes: - N/A --- .github/workflows/release.yml | 6 ++++++ tooling/xtask/src/tasks/workflows/release.rs | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e34242b8089431ac98d21abebc5400a7db041907..56ea875d5cefec240d554984a8c0e1c1992a574c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,6 +1,12 @@ # Generated from xtask::workflows::release # Rebuild with `cargo xtask workflows`. 
name: release +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: '0' + RUST_BACKTRACE: '1' + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} on: push: tags: diff --git a/tooling/xtask/src/tasks/workflows/release.rs b/tooling/xtask/src/tasks/workflows/release.rs index b5aff555e0f55971e77aa754e23d783938950953..c3d86192fd0e17c268a235292cf24223622094e1 100644 --- a/tooling/xtask/src/tasks/workflows/release.rs +++ b/tooling/xtask/src/tasks/workflows/release.rs @@ -30,6 +30,11 @@ pub(crate) fn release() -> Workflow { named::workflow() .on(Event::default().push(Push::default().tags(vec!["v*".to_string()]))) .concurrency(vars::one_workflow_per_non_main_branch()) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_env(("CARGO_INCREMENTAL", "0")) + .add_env(("RUST_BACKTRACE", "1")) + .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) + .add_env(("ZED_MINIDUMP_ENDPOINT", vars::ZED_SENTRY_MINIDUMP_ENDPOINT)) .add_job(macos_tests.name, macos_tests.job) .add_job(linux_tests.name, linux_tests.job) .add_job(windows_tests.name, windows_tests.job) From 03e0581ee87d58cf77b9ecdc6523900d1ca256b1 Mon Sep 17 00:00:00 2001 From: Charles McLaughlin Date: Sat, 1 Nov 2025 07:14:12 -0700 Subject: [PATCH 57/82] agent_ui: Show notifications also when the panel is hidden (#40942) Currently Zed only displays agent notifications (e.g. when the agent completes a task) if the user has switched apps and Zed is not in the foreground. This PR adds support for the scenario where the agent finishes a long-running task and the user is busy coding within Zed on something else. Release Notes: - If agent notifications are turned on, they will now also be displayed when the agent panel is hidden, in complement to them showing when the Zed window is in the background.
--------- Co-authored-by: Danilo Leal --- crates/agent_ui/src/acp/thread_view.rs | 120 ++++++++++++++++++++++++- crates/agent_ui/src/agent_panel.rs | 19 ++++ 2 files changed, 137 insertions(+), 2 deletions(-) diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 3638faf9336f79d692f820df39266ab7b85360a8..0ae60ebe0df91c61eb5c968d5ee23ec18ef87187 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -4571,14 +4571,29 @@ impl AcpThreadView { window: &mut Window, cx: &mut Context, ) { - if window.is_window_active() || !self.notifications.is_empty() { + if !self.notifications.is_empty() { + return; + } + + let settings = AgentSettings::get_global(cx); + + let window_is_inactive = !window.is_window_active(); + let panel_is_hidden = self + .workspace + .upgrade() + .map(|workspace| AgentPanel::is_hidden(&workspace, cx)) + .unwrap_or(true); + + let should_notify = window_is_inactive || panel_is_hidden; + + if !should_notify { return; } // TODO: Change this once we have title summarization for external agents. 
let title = self.agent.name(); - match AgentSettings::get_global(cx).notify_when_agent_waiting { + match settings.notify_when_agent_waiting { NotifyWhenAgentWaiting::PrimaryScreen => { if let Some(primary) = cx.primary_display() { self.pop_up(icon, caption.into(), title, window, primary, cx); @@ -5892,6 +5907,107 @@ pub(crate) mod tests { ); } + #[gpui::test] + async fn test_notification_when_panel_hidden(cx: &mut TestAppContext) { + init_test(cx); + + let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await; + + add_to_workspace(thread_view.clone(), cx); + + let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); + + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Hello", window, cx); + }); + + // Window is active (don't deactivate), but panel will be hidden + // Note: In the test environment, the panel is not actually added to the dock, + // so is_agent_panel_hidden will return true + + thread_view.update_in(cx, |thread_view, window, cx| { + thread_view.send(window, cx); + }); + + cx.run_until_parked(); + + // Should show notification because window is active but panel is hidden + assert!( + cx.windows() + .iter() + .any(|window| window.downcast::().is_some()), + "Expected notification when panel is hidden" + ); + } + + #[gpui::test] + async fn test_notification_still_works_when_window_inactive(cx: &mut TestAppContext) { + init_test(cx); + + let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await; + + let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Hello", window, cx); + }); + + // Deactivate window - should show notification regardless of setting + cx.deactivate_window(); + + thread_view.update_in(cx, |thread_view, window, cx| { + thread_view.send(window, cx); + }); + + cx.run_until_parked(); + + // Should still show notification when window is 
inactive (existing behavior) + assert!( + cx.windows() + .iter() + .any(|window| window.downcast::().is_some()), + "Expected notification when window is inactive" + ); + } + + #[gpui::test] + async fn test_notification_respects_never_setting(cx: &mut TestAppContext) { + init_test(cx); + + // Set notify_when_agent_waiting to Never + cx.update(|cx| { + AgentSettings::override_global( + AgentSettings { + notify_when_agent_waiting: NotifyWhenAgentWaiting::Never, + ..AgentSettings::get_global(cx).clone() + }, + cx, + ); + }); + + let (thread_view, cx) = setup_thread_view(StubAgentServer::default_response(), cx).await; + + let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone()); + message_editor.update_in(cx, |editor, window, cx| { + editor.set_text("Hello", window, cx); + }); + + // Window is active + + thread_view.update_in(cx, |thread_view, window, cx| { + thread_view.send(window, cx); + }); + + cx.run_until_parked(); + + // Should NOT show notification because notify_when_agent_waiting is Never + assert!( + !cx.windows() + .iter() + .any(|window| window.downcast::().is_some()), + "Expected no notification when notify_when_agent_waiting is Never" + ); + } + async fn setup_thread_view( agent: impl AgentServer + 'static, cx: &mut TestAppContext, diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 997a2bec09aa2a0ae39909c909c7de80771c5055..173059ee535d4417cd0ff493842d889559b85ef4 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -729,6 +729,25 @@ impl AgentPanel { &self.context_server_registry } + pub fn is_hidden(workspace: &Entity, cx: &App) -> bool { + let workspace_read = workspace.read(cx); + + workspace_read + .panel::(cx) + .map(|panel| { + let panel_id = Entity::entity_id(&panel); + + let is_visible = workspace_read.all_docks().iter().any(|dock| { + dock.read(cx) + .visible_panel() + .is_some_and(|visible_panel| visible_panel.panel_id() == panel_id) + }); + + 
!is_visible + }) + .unwrap_or(true) + } + fn active_thread_view(&self) -> Option<&Entity> { match &self.active_view { ActiveView::ExternalAgentThread { thread_view, .. } => Some(thread_view), From d6b58bb948fdcd92b22abb65ebfcae07fd40f94b Mon Sep 17 00:00:00 2001 From: Mark Christiansen Date: Sun, 2 Nov 2025 01:14:21 +1100 Subject: [PATCH 58/82] agent_ui: Use agent font size tokens for thread markdown rendering (#41610) Release Notes: - N/A --- Previously, agent markdown rendering used hardcoded font sizes (TextSize::Default and TextSize::Small) which ignored the agent_ui_font_size and agent_buffer_font_size settings. This updates the markdown style to respect these settings. This pull request adds support for customizing the font size of code blocks in agent responses, making it possible to set a distinct font size for code within the agent panel. The changes ensure that if the new setting is not specified, the font size will fall back to the agent UI font size, maintaining consistent appearance. (I am a frontend developer without any Rust knowledge so this is co-authored with Claude Code) **Theme settings extension:** * Added a new `agent_buffer_code_font_size` setting to `ThemeSettingsContent`, `ThemeSettings`, and the default settings JSON, allowing users to specify the font size for code blocks in agent responses. [[1]](diffhunk://#diff-a3bba02a485aba48e8e9a9d85485332378aa4fe29a0c50d11ae801ecfa0a56a4R69-R72) [[2]](diffhunk://#diff-aed3a9217587d27844c57ac8aff4a749f1fb1fc5d54926ef5065bf85f8fd633aR118-R119) [[3]](diffhunk://#diff-42e01d7aacb60673842554e30970b4ddbbaee7a2ec2c6f2be1c0b08b0dd89631R82-R83) * Updated the VSCode import logic to recognize and import the new `agent_buffer_code_font_size` setting. **Font size application in agent UI:** * Modified the agent UI rendering logic in `thread_view.rs` to use the new `agent_buffer_code_font_size` for code blocks, and to fall back to the agent UI font size if unset. 
[[1]](diffhunk://#diff-f73942e8d4f8c4d4d173d57d7c58bb653c4bb6ae7079533ee501750cdca27d98L5584-R5584) [[2]](diffhunk://#diff-f73942e8d4f8c4d4d173d57d7c58bb653c4bb6ae7079533ee501750cdca27d98L5596-R5598) * Implemented a helper method in `ThemeSettings` to retrieve the code block font size, with fallback logic to ensure a value is always used. * Updated the settings application logic to propagate the new code block font size setting throughout the theme system. ### Example Screenshots ![Screenshot 2025-10-31 at 12 38 28](https://github.com/user-attachments/assets/cbc34232-ab1f-40bf-a006-689678380e47) ![Screenshot 2025-10-31 at 12 37 45](https://github.com/user-attachments/assets/372b5cf8-2df8-425a-b052-12136de7c6bd) --------- Co-authored-by: Danilo Leal --- crates/agent_ui/src/acp/thread_view.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 0ae60ebe0df91c61eb5c968d5ee23ec18ef87187..a4b3106fa9d9ded053ff2f33b720ec3b10512d01 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -5596,7 +5596,7 @@ fn default_markdown_style( let theme_settings = ThemeSettings::get_global(cx); let colors = cx.theme().colors(); - let buffer_font_size = TextSize::Small.rems(cx); + let buffer_font_size = theme_settings.agent_buffer_font_size(cx); let mut text_style = window.text_style(); let line_height = buffer_font_size * 1.75; @@ -5608,9 +5608,9 @@ fn default_markdown_style( }; let font_size = if buffer_font { - TextSize::Small.rems(cx) + theme_settings.agent_buffer_font_size(cx) } else { - TextSize::Default.rems(cx) + theme_settings.agent_ui_font_size(cx) }; let text_color = if muted_text { From 06bdb28517d085d5c7635dacc15d4731a1b97d66 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Sat, 1 Nov 2025 16:35:04 -0300 Subject: [PATCH 59/82] zeta cli: Add convert-example command (#41608) Adds a `convert-example` subcommand to the zeta cli 
that converts eval examples from/to `json`, `toml`, and `md` formats. Release Notes: - N/A --------- Co-authored-by: Max Brunsfeld --- Cargo.lock | 2 + .../src/cloud_zeta2_prompt.rs | 2 +- crates/zeta2/src/related_excerpts.rs | 2 +- crates/zeta_cli/Cargo.toml | 2 + crates/zeta_cli/src/example.rs | 355 ++++++++++++++++++ crates/zeta_cli/src/main.rs | 17 + 6 files changed, 378 insertions(+), 2 deletions(-) create mode 100644 crates/zeta_cli/src/example.rs diff --git a/Cargo.lock b/Cargo.lock index ec55e4af77f78a9476b147744a9973d758d0e630..c0eea670a77f03c4dbb5afdb7d1197b6d9b76159 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21757,6 +21757,7 @@ dependencies = [ "polars", "project", "prompt_store", + "pulldown-cmark 0.12.2", "release_channel", "reqwest_client", "serde", @@ -21766,6 +21767,7 @@ dependencies = [ "smol", "soa-rs", "terminal_view", + "toml 0.8.23", "util", "watch", "zeta", diff --git a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs index a0df39b50eb6753397f5afd37aa30b71b853b9c5..6caf9941845146dc0c30c4606f677e5ec816c137 100644 --- a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs +++ b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs @@ -212,7 +212,7 @@ pub fn write_codeblock<'a>( include_line_numbers: bool, output: &'a mut String, ) { - writeln!(output, "`````path={}", path.display()).unwrap(); + writeln!(output, "`````{}", path.display()).unwrap(); write_excerpts( excerpts, sorted_insertions, diff --git a/crates/zeta2/src/related_excerpts.rs b/crates/zeta2/src/related_excerpts.rs index dd27992274ae2b25ec07e2a47dc8a60b46f5f3f2..44388251e32678ff8d1b3ce594ab35996b235759 100644 --- a/crates/zeta2/src/related_excerpts.rs +++ b/crates/zeta2/src/related_excerpts.rs @@ -64,7 +64,7 @@ const SEARCH_PROMPT: &str = indoc! 
{r#" ## Current cursor context - `````path={current_file_path} + `````{current_file_path} {cursor_excerpt} ````` diff --git a/crates/zeta_cli/Cargo.toml b/crates/zeta_cli/Cargo.toml index 19dafefbdcf8ed577a54e686b6b0c4ed90cf4512..a54298366614c3633cf527cc5746480e66c6caae 100644 --- a/crates/zeta_cli/Cargo.toml +++ b/crates/zeta_cli/Cargo.toml @@ -39,8 +39,10 @@ paths.workspace = true polars = { version = "0.51", features = ["lazy", "dtype-struct", "parquet"] } project.workspace = true prompt_store.workspace = true +pulldown-cmark.workspace = true release_channel.workspace = true reqwest_client.workspace = true +toml.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true diff --git a/crates/zeta_cli/src/example.rs b/crates/zeta_cli/src/example.rs new file mode 100644 index 0000000000000000000000000000000000000000..de95bbe8d0c97df7c12ce04f75de35ed41a660e4 --- /dev/null +++ b/crates/zeta_cli/src/example.rs @@ -0,0 +1,355 @@ +use std::{ + borrow::Cow, + env, + fmt::{self, Display}, + fs, + io::Write, + mem, + path::{Path, PathBuf}, +}; + +use anyhow::{Context as _, Result}; +use clap::ValueEnum; +use gpui::http_client::Url; +use pulldown_cmark::CowStr; +use serde::{Deserialize, Serialize}; + +const CURSOR_POSITION_HEADING: &str = "Cursor Position"; +const EDIT_HISTORY_HEADING: &str = "Edit History"; +const EXPECTED_PATCH_HEADING: &str = "Expected Patch"; +const EXPECTED_EXCERPTS_HEADING: &str = "Expected Excerpts"; +const REPOSITORY_URL_FIELD: &str = "repository_url"; +const REVISION_FIELD: &str = "revision"; + +#[derive(Debug)] +pub struct NamedExample { + pub name: String, + pub example: Example, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Example { + pub repository_url: String, + pub revision: String, + pub cursor_path: PathBuf, + pub cursor_position: String, + pub edit_history: Vec, + pub expected_patch: String, + pub expected_excerpts: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct 
ExpectedExcerpt { + path: PathBuf, + text: String, +} + +#[derive(ValueEnum, Debug, Clone)] +pub enum ExampleFormat { + Json, + Toml, + Md, +} + +impl NamedExample { + pub fn load(path: impl AsRef) -> Result { + let path = path.as_ref(); + let content = std::fs::read_to_string(path)?; + let ext = path.extension(); + + match ext.and_then(|s| s.to_str()) { + Some("json") => Ok(Self { + name: path.file_name().unwrap_or_default().display().to_string(), + example: serde_json::from_str(&content)?, + }), + Some("toml") => Ok(Self { + name: path.file_name().unwrap_or_default().display().to_string(), + example: toml::from_str(&content)?, + }), + Some("md") => Self::parse_md(&content), + Some(_) => { + anyhow::bail!("Unrecognized example extension: {}", ext.unwrap().display()); + } + None => { + anyhow::bail!( + "Failed to determine example type since the file does not have an extension." + ); + } + } + } + + pub fn parse_md(input: &str) -> Result { + use pulldown_cmark::{CodeBlockKind, Event, HeadingLevel, Parser, Tag, TagEnd}; + + let parser = Parser::new(input); + + let mut named = NamedExample { + name: String::new(), + example: Example { + repository_url: String::new(), + revision: String::new(), + cursor_path: PathBuf::new(), + cursor_position: String::new(), + edit_history: Vec::new(), + expected_patch: String::new(), + expected_excerpts: Vec::new(), + }, + }; + + let mut text = String::new(); + let mut current_section = String::new(); + let mut block_info: CowStr = "".into(); + + for event in parser { + match event { + Event::Text(line) => { + text.push_str(&line); + + if !named.name.is_empty() + && current_section.is_empty() + // in h1 section + && let Some((field, value)) = line.split_once('=') + { + match field.trim() { + REPOSITORY_URL_FIELD => { + named.example.repository_url = value.trim().to_string(); + } + REVISION_FIELD => { + named.example.revision = value.trim().to_string(); + } + _ => { + eprintln!("Warning: Unrecognized field `{field}`"); + } + } + } + } 
+ Event::End(TagEnd::Heading(HeadingLevel::H1)) => { + if !named.name.is_empty() { + anyhow::bail!( + "Found multiple H1 headings. There should only be one with the name of the example." + ); + } + named.name = mem::take(&mut text); + } + Event::End(TagEnd::Heading(HeadingLevel::H2)) => { + current_section = mem::take(&mut text); + } + Event::End(TagEnd::Heading(level)) => { + anyhow::bail!("Unexpected heading level: {level}"); + } + Event::Start(Tag::CodeBlock(kind)) => { + match kind { + CodeBlockKind::Fenced(info) => { + block_info = info; + } + CodeBlockKind::Indented => { + anyhow::bail!("Unexpected indented codeblock"); + } + }; + } + Event::Start(_) => { + text.clear(); + block_info = "".into(); + } + Event::End(TagEnd::CodeBlock) => { + if current_section.eq_ignore_ascii_case(EDIT_HISTORY_HEADING) { + named.example.edit_history.push(mem::take(&mut text)); + } else if current_section.eq_ignore_ascii_case(CURSOR_POSITION_HEADING) { + let path = PathBuf::from(block_info.trim()); + named.example.cursor_path = path; + named.example.cursor_position = mem::take(&mut text); + } else if current_section.eq_ignore_ascii_case(EXPECTED_PATCH_HEADING) { + named.example.expected_patch = mem::take(&mut text); + } else if current_section.eq_ignore_ascii_case(EXPECTED_EXCERPTS_HEADING) { + let path = PathBuf::from(block_info.trim()); + named.example.expected_excerpts.push(ExpectedExcerpt { + path, + text: mem::take(&mut text), + }); + } else { + eprintln!("Warning: Unrecognized section `{current_section:?}`") + } + } + _ => {} + } + } + + if named.example.cursor_path.as_path() == Path::new("") + || named.example.cursor_position.is_empty() + { + anyhow::bail!("Missing cursor position codeblock"); + } + + Ok(named) + } + + pub fn write(&self, format: ExampleFormat, mut out: impl Write) -> Result<()> { + match format { + ExampleFormat::Json => Ok(serde_json::to_writer(out, &self.example)?), + ExampleFormat::Toml => { + 
Ok(out.write_all(toml::to_string_pretty(&self.example)?.as_bytes())?) + } + ExampleFormat::Md => Ok(write!(out, "{}", self)?), + } + } + + #[allow(unused)] + pub async fn setup_worktree(&self) -> Result { + let worktrees_dir = env::current_dir()?.join("target").join("zeta-worktrees"); + let repos_dir = env::current_dir()?.join("target").join("zeta-repos"); + fs::create_dir_all(&repos_dir)?; + fs::create_dir_all(&worktrees_dir)?; + + let (repo_owner, repo_name) = self.repo_name()?; + + let repo_dir = repos_dir.join(repo_owner.as_ref()).join(repo_name.as_ref()); + if !repo_dir.is_dir() { + fs::create_dir_all(&repo_dir)?; + run_git(&repo_dir, &["init"]).await?; + run_git( + &repo_dir, + &["remote", "add", "origin", &self.example.repository_url], + ) + .await?; + } + + run_git( + &repo_dir, + &["fetch", "--depth", "1", "origin", &self.example.revision], + ) + .await?; + + let worktree_path = worktrees_dir.join(&self.name); + + if worktree_path.is_dir() { + run_git(&worktree_path, &["clean", "--force", "-d"]).await?; + run_git(&worktree_path, &["reset", "--hard", "HEAD"]).await?; + run_git(&worktree_path, &["checkout", &self.example.revision]).await?; + } else { + let worktree_path_string = worktree_path.to_string_lossy(); + run_git( + &repo_dir, + &[ + "worktree", + "add", + "-f", + &worktree_path_string, + &self.example.revision, + ], + ) + .await?; + } + + Ok(worktree_path) + } + + #[allow(unused)] + fn repo_name(&self) -> Result<(Cow<'_, str>, Cow<'_, str>)> { + // git@github.com:owner/repo.git + if self.example.repository_url.contains('@') { + let (owner, repo) = self + .example + .repository_url + .split_once(':') + .context("expected : in git url")? 
+ .1 + .split_once('/') + .context("expected / in git url")?; + Ok(( + Cow::Borrowed(owner), + Cow::Borrowed(repo.trim_end_matches(".git")), + )) + // http://github.com/owner/repo.git + } else { + let url = Url::parse(&self.example.repository_url)?; + let mut segments = url.path_segments().context("empty http url")?; + let owner = segments + .next() + .context("expected owner path segment")? + .to_string(); + let repo = segments + .next() + .context("expected repo path segment")? + .trim_end_matches(".git") + .to_string(); + assert!(segments.next().is_none()); + + Ok((owner.into(), repo.into())) + } + } +} + +async fn run_git(repo_path: &Path, args: &[&str]) -> Result { + let output = smol::process::Command::new("git") + .current_dir(repo_path) + .args(args) + .output() + .await?; + + anyhow::ensure!( + output.status.success(), + "`git {}` within `{}` failed with status: {}\nstderr:\n{}\nstdout:\n{}", + args.join(" "), + repo_path.display(), + output.status, + String::from_utf8_lossy(&output.stderr), + String::from_utf8_lossy(&output.stdout), + ); + Ok(String::from_utf8(output.stdout)?.trim().to_string()) +} + +impl Display for NamedExample { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "# {}\n\n", self.name)?; + write!( + f, + "{REPOSITORY_URL_FIELD} = {}\n", + self.example.repository_url + )?; + write!(f, "{REVISION_FIELD} = {}\n\n", self.example.revision)?; + + write!( + f, + "## {CURSOR_POSITION_HEADING}\n\n`````{}\n{}`````\n", + self.example.cursor_path.display(), + self.example.cursor_position + )?; + write!(f, "## {EDIT_HISTORY_HEADING}\n\n")?; + + if !self.example.edit_history.is_empty() { + write!(f, "`````diff\n")?; + for item in &self.example.edit_history { + write!(f, "{item}")?; + } + write!(f, "`````\n")?; + } + + if !self.example.expected_patch.is_empty() { + write!( + f, + "\n## {EXPECTED_PATCH_HEADING}\n\n`````diff\n{}`````\n", + self.example.expected_patch + )?; + } + + if !self.example.expected_excerpts.is_empty() { + 
write!(f, "\n## {EXPECTED_EXCERPTS_HEADING}\n\n")?; + + for excerpt in &self.example.expected_excerpts { + write!( + f, + "`````{}{}\n{}`````\n\n", + excerpt + .path + .extension() + .map(|ext| format!("{} ", ext.to_string_lossy())) + .unwrap_or_default(), + excerpt.path.display(), + excerpt.text + )?; + } + } + + Ok(()) + } +} diff --git a/crates/zeta_cli/src/main.rs b/crates/zeta_cli/src/main.rs index 7a6d4b26dc87cd9db7d40fe2745520ee5f574ea6..8f19287744697e9f0d2ffd520be8a814790b8345 100644 --- a/crates/zeta_cli/src/main.rs +++ b/crates/zeta_cli/src/main.rs @@ -1,8 +1,10 @@ +mod example; mod headless; mod source_location; mod syntax_retrieval_stats; mod util; +use crate::example::{ExampleFormat, NamedExample}; use crate::syntax_retrieval_stats::retrieval_stats; use ::serde::Serialize; use ::util::paths::PathStyle; @@ -22,6 +24,7 @@ use language_model::LanguageModelRegistry; use project::{Project, Worktree}; use reqwest_client::ReqwestClient; use serde_json::json; +use std::io; use std::{collections::HashSet, path::PathBuf, process::exit, str::FromStr, sync::Arc}; use zeta2::{ContextMode, LlmContextOptions, SearchToolQuery}; @@ -48,6 +51,11 @@ enum Command { #[command(subcommand)] command: Zeta2Command, }, + ConvertExample { + path: PathBuf, + #[arg(long, value_enum, default_value_t = ExampleFormat::Md)] + output_format: ExampleFormat, + }, } #[derive(Subcommand, Debug)] @@ -641,6 +649,15 @@ fn main() { } }, }, + Command::ConvertExample { + path, + output_format, + } => { + let example = NamedExample::load(path).unwrap(); + example.write(output_format, io::stdout()).unwrap(); + let _ = cx.update(|cx| cx.quit()); + return; + } }; match result { From 07dcb8f2bb0a28290a2a05d57995cb9ca2a5bf97 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Sat, 1 Nov 2025 17:30:13 -0400 Subject: [PATCH 60/82] debugger: Add program and module path fallbacks for debugpy toolchain (#40975) Fixes the Debugpy toolchain detection bug in #40324 
When detecting what toolchain (venv) to use in the Debugpy configuration stage, we used to only base it off of the current working directory argument passed to the config. This is wrong behavior for cases like mono repos, where the correct virtual environment to use is nested in another folder. This PR fixes this issue by adding the program and module fields as fallbacks to check for virtual environments. We also added support for program/module relative paths as well when cwd is not None. Release Notes: - debugger: Improve mono repo virtual environment detection with Debugpy --------- Co-authored-by: Remco Smits --- crates/dap_adapters/src/dap_adapters.rs | 86 +++++++++++++------------ crates/dap_adapters/src/python.rs | 75 ++++++++++++++------- 2 files changed, 96 insertions(+), 65 deletions(-) diff --git a/crates/dap_adapters/src/dap_adapters.rs b/crates/dap_adapters/src/dap_adapters.rs index d8a706ba414af2c9e0beb1cffe8357bcece1dc52..2ab9cabc198c4b036301cb92e1f544ae640b898d 100644 --- a/crates/dap_adapters/src/dap_adapters.rs +++ b/crates/dap_adapters/src/dap_adapters.rs @@ -42,61 +42,63 @@ pub fn init(cx: &mut App) { } #[cfg(test)] -struct MockDelegate { - worktree_root: PathBuf, -} +mod test_mocks { + use super::*; -#[cfg(test)] -impl MockDelegate { - fn new() -> Arc { - Arc::new(Self { - worktree_root: PathBuf::from("/tmp/test"), - }) + pub(crate) struct MockDelegate { + worktree_root: PathBuf, } -} -#[cfg(test)] -#[async_trait::async_trait] -impl adapters::DapDelegate for MockDelegate { - fn worktree_id(&self) -> settings::WorktreeId { - settings::WorktreeId::from_usize(0) + impl MockDelegate { + pub(crate) fn new() -> Arc { + Arc::new(Self { + worktree_root: PathBuf::from("/tmp/test"), + }) + } } - fn worktree_root_path(&self) -> &std::path::Path { - &self.worktree_root - } + #[async_trait::async_trait] + impl adapters::DapDelegate for MockDelegate { + fn worktree_id(&self) -> settings::WorktreeId { + settings::WorktreeId::from_usize(0) + } - fn 
http_client(&self) -> Arc { - unimplemented!("Not needed for tests") - } + fn worktree_root_path(&self) -> &std::path::Path { + &self.worktree_root + } - fn node_runtime(&self) -> node_runtime::NodeRuntime { - unimplemented!("Not needed for tests") - } + fn http_client(&self) -> Arc { + unimplemented!("Not needed for tests") + } - fn toolchain_store(&self) -> Arc { - unimplemented!("Not needed for tests") - } + fn node_runtime(&self) -> node_runtime::NodeRuntime { + unimplemented!("Not needed for tests") + } - fn fs(&self) -> Arc { - unimplemented!("Not needed for tests") - } + fn toolchain_store(&self) -> Arc { + unimplemented!("Not needed for tests") + } - fn output_to_console(&self, _msg: String) {} + fn fs(&self) -> Arc { + unimplemented!("Not needed for tests") + } - async fn which(&self, _command: &std::ffi::OsStr) -> Option { - None - } + fn output_to_console(&self, _msg: String) {} - async fn read_text_file(&self, _path: &util::rel_path::RelPath) -> Result { - Ok(String::new()) - } + async fn which(&self, _command: &std::ffi::OsStr) -> Option { + None + } - async fn shell_env(&self) -> collections::HashMap { - collections::HashMap::default() - } + async fn read_text_file(&self, _path: &util::rel_path::RelPath) -> Result { + Ok(String::new()) + } - fn is_headless(&self) -> bool { - false + async fn shell_env(&self) -> collections::HashMap { + collections::HashMap::default() + } + + fn is_headless(&self) -> bool { + false + } } } diff --git a/crates/dap_adapters/src/python.rs b/crates/dap_adapters/src/python.rs index e718f66c78099044baed837da0ddc7bfa96ffa1c..4d81e5ba851305ae3adc2ee0a6ab6a29f43edd62 100644 --- a/crates/dap_adapters/src/python.rs +++ b/crates/dap_adapters/src/python.rs @@ -824,29 +824,58 @@ impl DebugAdapter for PythonDebugAdapter { .await; } - let base_path = config - .config - .get("cwd") - .and_then(|cwd| { - RelPath::new( - cwd.as_str() - .map(Path::new)? 
- .strip_prefix(delegate.worktree_root_path()) - .ok()?, - PathStyle::local(), - ) - .ok() + let base_paths = ["cwd", "program", "module"] + .into_iter() + .filter_map(|key| { + config.config.get(key).and_then(|cwd| { + RelPath::new( + cwd.as_str() + .map(Path::new)? + .strip_prefix(delegate.worktree_root_path()) + .ok()?, + PathStyle::local(), + ) + .ok() + }) }) - .unwrap_or_else(|| RelPath::empty().into()); - let toolchain = delegate - .toolchain_store() - .active_toolchain( - delegate.worktree_id(), - base_path.into_arc(), - language::LanguageName::new(Self::LANGUAGE_NAME), - cx, + .chain( + // While Debugpy's wiki saids absolute paths are required, but it actually supports relative paths when cwd is passed in. + // (Which should always be the case because Zed defaults to the cwd worktree root) + // So we want to check that these relative paths find toolchains as well. Otherwise, they won't be checked + // because the strip prefix in the iteration above will return an error + config + .config + .get("cwd") + .map(|_| { + ["program", "module"].into_iter().filter_map(|key| { + config.config.get(key).and_then(|value| { + let path = Path::new(value.as_str()?); + RelPath::new(path, PathStyle::local()).ok() + }) + }) + }) + .into_iter() + .flatten(), ) - .await; + .chain([RelPath::empty().into()]); + + let mut toolchain = None; + + for base_path in base_paths { + if let Some(found_toolchain) = delegate + .toolchain_store() + .active_toolchain( + delegate.worktree_id(), + base_path.into_arc(), + language::LanguageName::new(Self::LANGUAGE_NAME), + cx, + ) + .await + { + toolchain = Some(found_toolchain); + break; + } + } self.fetch_debugpy_whl(toolchain.clone(), delegate) .await @@ -914,7 +943,7 @@ mod tests { let result = adapter .get_installed_binary( - &MockDelegate::new(), + &test_mocks::MockDelegate::new(), &task_def, None, None, @@ -955,7 +984,7 @@ mod tests { let result_host = adapter .get_installed_binary( - &MockDelegate::new(), + 
&test_mocks::MockDelegate::new(), &task_def_host, None, None, From df15d2d2feada0bd474d814fcd4514e8000be62c Mon Sep 17 00:00:00 2001 From: Haojian Wu Date: Sun, 2 Nov 2025 00:52:32 +0100 Subject: [PATCH 61/82] Fix doc typos (#41727) Release Notes: - N/A --- docs/src/extensions/icon-themes.md | 2 +- docs/src/extensions/languages.md | 2 +- docs/src/languages/rego.md | 2 +- docs/src/snippets.md | 2 +- docs/src/visual-customization.md | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/src/extensions/icon-themes.md b/docs/src/extensions/icon-themes.md index 697723a59677c25dd14982a1c7f7cf92d1950a70..676cae59cd343a3f73ce5e0504e370e92c699d2b 100644 --- a/docs/src/extensions/icon-themes.md +++ b/docs/src/extensions/icon-themes.md @@ -11,7 +11,7 @@ The [Material Icon Theme](https://github.com/zed-extensions/material-icon-theme) There are two important directories for an icon theme extension: - `icon_themes`: This directory will contain one or more JSON files containing the icon theme definitions. -- `icons`: This directory contains the icons assets that will be distributed with the extension. You can created subdirectories in this directory, if so desired. +- `icons`: This directory contains the icon assets that will be distributed with the extension. You can created subdirectories in this directory, if so desired. Each icon theme file should adhere to the JSON schema specified at [`https://zed.dev/schema/icon_themes/v0.3.0.json`](https://zed.dev/schema/icon_themes/v0.3.0.json). diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index 5c63b880c875701e1721b8d6298dc49da6b45a98..7eb6a355dbfcafaa01ca885789d41e28c474d2f4 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -324,7 +324,7 @@ This query marks number and string values in key-value pairs and arrays for reda The `runnables.scm` file defines rules for detecting runnable code. 
-Here's an example from an `runnables.scm` file for JSON: +Here's an example from a `runnables.scm` file for JSON: ```scheme ( diff --git a/docs/src/languages/rego.md b/docs/src/languages/rego.md index 21192a5c53a2e05a34754eb80421d60fc77467ac..14231c65620ee2c88ac3bb100d6ac91b941c80f4 100644 --- a/docs/src/languages/rego.md +++ b/docs/src/languages/rego.md @@ -7,7 +7,7 @@ Rego language support in Zed is provided by the community-maintained [Rego exten ## Installation -The extensions is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information. +The extension is largely based on the [Regal](https://docs.styra.com/regal/language-server) language server which should be installed to make use of the extension. Read the [getting started](https://docs.styra.com/regal#getting-started) instructions for more information. ## Configuration diff --git a/docs/src/snippets.md b/docs/src/snippets.md index 21aed43452318863b735a9b46cd5399a8bfca1c6..29ecd9bc850b919dbc63a87e2f1bf9477901a33d 100644 --- a/docs/src/snippets.md +++ b/docs/src/snippets.md @@ -1,6 +1,6 @@ # Snippets -Use the {#action snippets::ConfigureSnippets} action to create a new snippets file or edit a existing snippets file for a specified [scope](#scopes). +Use the {#action snippets::ConfigureSnippets} action to create a new snippets file or edit an existing snippets file for a specified [scope](#scopes). The snippets are located in `~/.config/zed/snippets` directory to which you can navigate to with the {#action snippets::OpenFolder} action. 
diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index b353377dd764d2506abd4cce46352df3ca47dfcb..dc50588cde659b4e580822ddfd7eaf8951f63ea7 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -6,7 +6,7 @@ See [Configuring Zed](./configuring-zed.md) for additional information and other ## Themes -Use may install zed extensions providing [Themes](./themes.md) and [Icon Themes](./icon-themes.md) via {#action zed::Extensions} from the command palette or menu. +User may install zed extensions providing [Themes](./themes.md) and [Icon Themes](./icon-themes.md) via {#action zed::Extensions} from the command palette or menu. You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings: From 2408f767f48973aed1a4fbc7e51a352416d2b79c Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Sat, 1 Nov 2025 19:45:44 -0700 Subject: [PATCH 62/82] gh-workflow unit evals (#41637) Closes #ISSUE Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- .github/workflows/eval.yml | 71 ----------- .github/workflows/run_agent_evals.yml | 62 ++++++++++ .github/workflows/run_unit_evals.yml | 63 ++++++++++ .github/workflows/unit_evals.yml | 86 ------------- script/run-unit-evals | 5 + tooling/xtask/src/tasks/workflows.rs | 3 + .../src/tasks/workflows/run_agent_evals.rs | 113 ++++++++++++++++++ 7 files changed, 246 insertions(+), 157 deletions(-) delete mode 100644 .github/workflows/eval.yml create mode 100644 .github/workflows/run_agent_evals.yml create mode 100644 .github/workflows/run_unit_evals.yml delete mode 100644 .github/workflows/unit_evals.yml create mode 100755 script/run-unit-evals create mode 100644 tooling/xtask/src/tasks/workflows/run_agent_evals.rs diff --git a/.github/workflows/eval.yml b/.github/workflows/eval.yml deleted file mode 100644 index b5da9e7b7c8e293fb565f4de269a1ae266c19692..0000000000000000000000000000000000000000 --- a/.github/workflows/eval.yml +++ /dev/null @@ -1,71 +0,0 @@ -name: Run Agent Eval - -on: - schedule: - - cron: "0 0 * * *" - - pull_request: - branches: - - "**" - types: [synchronize, reopened, labeled] - - workflow_dispatch: - -concurrency: - # Allow only one workflow per any non-`main` branch. 
- group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} - cancel-in-progress: true - -env: - CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: 0 - RUST_BACKTRACE: 1 - ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - ZED_EVAL_TELEMETRY: 1 - -jobs: - run_eval: - timeout-minutes: 60 - name: Run Agent Eval - if: > - github.repository_owner == 'zed-industries' && - (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval')) - runs-on: - - namespace-profile-16x32-ubuntu-2204 - steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Cache dependencies - uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - # cache-provider: "buildjet" - - - name: Install Linux dependencies - run: ./script/linux - - - name: Configure CI - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - - name: Compile eval - run: cargo build --package=eval - - - name: Run eval - run: cargo run --package=eval -- --repetitions=8 --concurrency=1 - - # Even the Linux runner is not stateful, in theory there is no need to do this cleanup. - # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code - # to clean up the config file, I’ve included the cleanup code here as a precaution. - # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution. 
- - name: Clean CI config file - if: always() - run: rm -rf ./../.cargo diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml new file mode 100644 index 0000000000000000000000000000000000000000..67a050cd59c973ecd674fc3f6fe7ea4da436428f --- /dev/null +++ b/.github/workflows/run_agent_evals.yml @@ -0,0 +1,62 @@ +# Generated from xtask::workflows::run_agent_evals +# Rebuild with `cargo xtask workflows`. +name: run_agent_evals +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: '0' + RUST_BACKTRACE: '1' + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_EVAL_TELEMETRY: '1' +on: + pull_request: + types: + - synchronize + - reopened + - labeled + branches: + - '**' + schedule: + - cron: 0 0 * * * + workflow_dispatch: {} +jobs: + agent_evals: + if: | + github.repository_owner == 'zed-industries' && + (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval')) + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::cache_rust_dependencies + uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: cargo build --package=eval + run: cargo build --package=eval + shell: bash -euxo pipefail {0} + - name: run_agent_evals::agent_evals::run_eval + run: cargo run --package=eval -- --repetitions=8 --concurrency=1 + shell: bash -euxo pipefail {0} + - name: steps::cleanup_cargo_config + if: 
always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + timeout-minutes: 60 +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/run_unit_evals.yml b/.github/workflows/run_unit_evals.yml new file mode 100644 index 0000000000000000000000000000000000000000..b94d54e1639c0255dbfcf9921c85ff48b8d5a476 --- /dev/null +++ b/.github/workflows/run_unit_evals.yml @@ -0,0 +1,63 @@ +# Generated from xtask::workflows::run_agent_evals +# Rebuild with `cargo xtask workflows`. +name: run_agent_evals +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: '0' + RUST_BACKTRACE: '1' + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} +on: + schedule: + - cron: 47 1 * * 2 + workflow_dispatch: {} +jobs: + unit_evals: + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies + uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::cargo_install_nextest + run: cargo install cargo-nextest --locked + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 100 + shell: bash -euxo pipefail {0} + - name: ./script/run-unit-evals + run: ./script/run-unit-evals + shell: bash -euxo pipefail {0} + env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + - name: 
run_agent_evals::unit_evals::send_failure_to_slack + if: ${{ failure() }} + uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52 + with: + method: chat.postMessage + token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }} + payload: | + channel: C04UDRNNJFQ + text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/unit_evals.yml b/.github/workflows/unit_evals.yml deleted file mode 100644 index 53ed33a1af300d6b641b3b9430de0bb6846b27cc..0000000000000000000000000000000000000000 --- a/.github/workflows/unit_evals.yml +++ /dev/null @@ -1,86 +0,0 @@ -name: Run Unit Evals - -on: - schedule: - # GitHub might drop jobs at busy times, so we choose a random time in the middle of the night. - - cron: "47 1 * * 2" - workflow_dispatch: - -concurrency: - # Allow only one workflow per any non-`main` branch. 
- group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} - cancel-in-progress: true - -env: - CARGO_TERM_COLOR: always - CARGO_INCREMENTAL: 0 - RUST_BACKTRACE: 1 - ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} - -jobs: - unit_evals: - if: github.repository_owner == 'zed-industries' - timeout-minutes: 60 - name: Run unit evals - runs-on: - - namespace-profile-16x32-ubuntu-2204 - steps: - - name: Add Rust to the PATH - run: echo "$HOME/.cargo/bin" >> "$GITHUB_PATH" - - - name: Checkout repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - with: - clean: false - - - name: Cache dependencies - uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2 - with: - save-if: ${{ github.ref == 'refs/heads/main' }} - # cache-provider: "buildjet" - - - name: Install Linux dependencies - run: ./script/linux - - - name: Configure CI - run: | - mkdir -p ./../.cargo - cp ./.cargo/ci-config.toml ./../.cargo/config.toml - - - name: Install Rust - shell: bash -euxo pipefail {0} - run: | - cargo install cargo-nextest --locked - - - name: Install Node - uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4 - with: - node-version: "18" - - - name: Limit target directory size - shell: bash -euxo pipefail {0} - run: script/clear-target-dir-if-larger-than 100 - - - name: Run unit evals - shell: bash -euxo pipefail {0} - run: cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)' - env: - ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} - - - name: Send failure message to Slack channel if needed - if: ${{ failure() }} - uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52 - with: - method: chat.postMessage - token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }} - payload: | - channel: C04UDRNNJFQ - text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id 
}}" - - # Even the Linux runner is not stateful, in theory there is no need to do this cleanup. - # But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code - # to clean up the config file, I’ve included the cleanup code here as a precaution. - # While it’s not strictly necessary at this moment, I believe it’s better to err on the side of caution. - - name: Clean CI config file - if: always() - run: rm -rf ./../.cargo diff --git a/script/run-unit-evals b/script/run-unit-evals new file mode 100755 index 0000000000000000000000000000000000000000..02481e1ce9dde7d2cbde9603f663093bf7a2ee38 --- /dev/null +++ b/script/run-unit-evals @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)' diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index a8472606ffd6aea48775f3fca28f9c30b2223cc5..538724bcd9648b89d303a6eff834d08ffb3bf18a 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -10,6 +10,7 @@ mod release_nightly; mod run_bundling; mod release; +mod run_agent_evals; mod run_tests; mod runners; mod steps; @@ -28,6 +29,8 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { ("run_tests.yml", run_tests::run_tests()), ("release.yml", release::release()), ("compare_perf.yml", compare_perf::compare_perf()), + ("run_unit_evals.yml", run_agent_evals::run_unit_evals()), + ("run_agent_evals.yml", run_agent_evals::run_agent_evals()), ]; fs::create_dir_all(dir) .with_context(|| format!("Failed to create directory: {}", dir.display()))?; diff --git a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs new file mode 100644 index 0000000000000000000000000000000000000000..b83aee8457ef61c7430431c6de6f654d9559423e --- /dev/null +++ b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs @@ 
-0,0 +1,113 @@ +use gh_workflow::{ + Event, Expression, Job, PullRequest, PullRequestType, Run, Schedule, Step, Use, Workflow, + WorkflowDispatch, +}; + +use crate::tasks::workflows::{ + runners::{self, Platform}, + steps::{self, FluentBuilder as _, NamedJob, named, setup_cargo_config}, + vars, +}; + +pub(crate) fn run_agent_evals() -> Workflow { + let agent_evals = agent_evals(); + + named::workflow() + .on(Event::default() + .schedule([Schedule::default().cron("0 0 * * *")]) + .pull_request(PullRequest::default().add_branch("**").types([ + PullRequestType::Synchronize, + PullRequestType::Reopened, + PullRequestType::Labeled, + ])) + .workflow_dispatch(WorkflowDispatch::default())) + .concurrency(vars::one_workflow_per_non_main_branch()) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_env(("CARGO_INCREMENTAL", 0)) + .add_env(("RUST_BACKTRACE", 1)) + .add_env(("ANTHROPIC_API_KEY", "${{ secrets.ANTHROPIC_API_KEY }}")) + .add_env(( + "ZED_CLIENT_CHECKSUM_SEED", + "${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}", + )) + .add_env(("ZED_EVAL_TELEMETRY", 1)) + .add_job(agent_evals.name, agent_evals.job) +} + +fn agent_evals() -> NamedJob { + fn run_eval() -> Step { + named::bash("cargo run --package=eval -- --repetitions=8 --concurrency=1") + } + + named::job( + Job::default() + .cond(Expression::new(indoc::indoc!{r#" + github.repository_owner == 'zed-industries' && + (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval')) + "#})) + .runs_on(runners::LINUX_DEFAULT) + .timeout_minutes(60_u32) + .add_step(steps::checkout_repo()) + .add_step(steps::cache_rust_dependencies()) + .map(steps::install_linux_dependencies) + .add_step(setup_cargo_config(Platform::Linux)) + .add_step(steps::script("cargo build --package=eval")) + .add_step(run_eval()) + .add_step(steps::cleanup_cargo_config(Platform::Linux)) + ) +} + +pub(crate) fn run_unit_evals() -> Workflow { + let unit_evals = unit_evals(); + + named::workflow() + .on(Event::default() + 
.schedule([ + // GitHub might drop jobs at busy times, so we choose a random time in the middle of the night. + Schedule::default().cron("47 1 * * 2"), + ]) + .workflow_dispatch(WorkflowDispatch::default())) + .concurrency(vars::one_workflow_per_non_main_branch()) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_env(("CARGO_INCREMENTAL", 0)) + .add_env(("RUST_BACKTRACE", 1)) + .add_env(( + "ZED_CLIENT_CHECKSUM_SEED", + "${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}", + )) + .add_job(unit_evals.name, unit_evals.job) +} + +fn unit_evals() -> NamedJob { + fn send_failure_to_slack() -> Step { + named::uses( + "slackapi", + "slack-github-action", + "b0fa283ad8fea605de13dc3f449259339835fc52", + ) + .if_condition(Expression::new("${{ failure() }}")) + .add_with(("method", "chat.postMessage")) + .add_with(("token", "${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}")) + .add_with(("payload", indoc::indoc!{r#" + channel: C04UDRNNJFQ + text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" + "#})) + } + + named::job( + Job::default() + .runs_on(runners::LINUX_DEFAULT) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(Platform::Linux)) + .add_step(steps::cache_rust_dependencies()) + .map(steps::install_linux_dependencies) + .add_step(steps::cargo_install_nextest(Platform::Linux)) + .add_step(steps::clear_target_dir_if_large(Platform::Linux)) + .add_step( + steps::script("./script/run-unit-evals") + .add_env(("ANTHROPIC_API_KEY", "${{ secrets.ANTHROPIC_API_KEY }}")), + ) + .add_step(send_failure_to_slack()) + .add_step(steps::cleanup_cargo_config(Platform::Linux)), + ) +} From 548cdfde3a3343825b9f92c76ddcb8c582b9a1d0 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Sun, 2 Nov 2025 00:37:02 -0400 Subject: [PATCH 63/82] Delete release process docs (#41733) These have been migrated to the README.md [here](https://github.com/zed-industries/release_notes). These don't need to be public. 
Putting them in the same repo where we draft (`release_notes`) means less jumping around and allows us to include additional information we might not want to make public. Release Notes: - N/A --- docs/src/SUMMARY.md | 1 - docs/src/development.md | 1 - docs/src/development/releases.md | 147 ------------------------------- 3 files changed, 149 deletions(-) delete mode 100644 docs/src/development/releases.md diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index 9e5bbb1413fec9b021d73dce0f002c1e039c5da9..1a4783cdf5342c0ab92d4eea45260c416fc68cd8 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -165,6 +165,5 @@ - [Local Collaboration](./development/local-collaboration.md) - [Using Debuggers](./development/debuggers.md) - [Glossary](./development/glossary.md) -- [Release Process](./development/releases.md) - [Release Notes](./development/release-notes.md) - [Debugging Crashes](./development/debugging-crashes.md) diff --git a/docs/src/development.md b/docs/src/development.md index 6cb5f0b8271ab0347d33ee0cf634b60e790f3ba0..31bb245ac42f80c830a0faba405323d1097e3f51 100644 --- a/docs/src/development.md +++ b/docs/src/development.md @@ -88,7 +88,6 @@ in-depth examples and explanations. ## Contributor links - [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md) -- [Releases](./development/releases.md) - [Debugging Crashes](./development/debugging-crashes.md) - [Code of Conduct](https://zed.dev/code-of-conduct) - [Zed Contributor License](https://zed.dev/cla) diff --git a/docs/src/development/releases.md b/docs/src/development/releases.md deleted file mode 100644 index 6cb3deb31680f8c038195c93ebf12fe6699354e2..0000000000000000000000000000000000000000 --- a/docs/src/development/releases.md +++ /dev/null @@ -1,147 +0,0 @@ -# Zed Releases - -Read about Zed's [release channels here](https://zed.dev/faq#what-are-the-release-channels). - -## Wednesday Release Process - -You will need write access to the Zed repository to do this. 
- -Credentials for various services used in this process can be found in 1Password. - -Use the `releases` Slack channel to notify the team that releases will be starting. -This is mostly a formality on Wednesday's minor update releases, but can be beneficial when doing patch releases, as other devs may have landed fixes they'd like to cherry pick. - -### Starting the Builds - -1. Checkout `main` and ensure your working copy is clean. - -1. Run `git fetch && git pull` to ensure you have the latest commits locally. - -1. Run `git fetch --tags --force` to forcibly ensure your local tags are in sync with the remote. - -1. Run `./script/get-stable-channel-release-notes` and store output locally. - -1. Run `./script/bump-zed-minor-versions`. - - - Push the tags and branches as instructed. - -1. Run `./script/get-preview-channel-changes` and store output locally. - -> **Note:** Always prioritize the stable release. -> If you've completed aggregating stable release notes, you can move on to working on aggregating preview release notes, but once the stable build has finished, work through the rest of the stable steps to fully publish. -> Preview can be finished up after. - -### Stable Release - -1. Aggregate stable release notes. - - - Follow the instructions at the end of the script and aggregate the release notes into one structure. - -1. Once the stable release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the stable release notes into it and **save**. - - - **Do not publish the draft!** - -1. Check the stable release assets. - - - Ensure the stable release job has finished without error. - - Ensure the draft has the proper number of assets—releases currently have 12 assets each (as of v0.211). - - Download the artifacts for the stable release draft and test that you can run them locally. - -1. Publish the stable draft on [GitHub Releases](https://github.com/zed-industries/zed/releases). 
- - - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. - The release will be public once the rebuild has completed. - -1. Post the stable release notes to social media. - - - Bluesky and X posts will already be built as drafts in [Buffer](https://buffer.com). - - Double-check links. - - Publish both, one at a time, ensuring both are posted to each respective platform. - -1. Send the stable release notes email. - - - The email broadcast will already be built as a draft in [Kit](https://kit.com). - - Double-check links. - - Publish the email. - -### Preview Release - -1. Aggregate preview release notes. - - - Take the script's output and build release notes by organizing each release note line into a category. - - Use a prior release for the initial outline. - - Make sure to append the `Credit` line, if present, to the end of each release note line. - -1. Once the preview release draft is up on [GitHub Releases](https://github.com/zed-industries/zed/releases), paste the preview release notes into it and **save**. - - - **Do not publish the draft!** - -1. Check the preview release assets. - - - Ensure the preview release job has finished without error. - - Ensure the draft has the proper number of assets—releases currently have 12 assets each (as of v0.211). - - Download the artifacts for the preview release draft and test that you can run them locally. - -1. Publish the preview draft on [GitHub Releases](https://github.com/zed-industries/zed/releases). - - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. - The release will be public once the rebuild has completed. - -### Prep Content for Next Week's Stable Release - -1. Build social media posts based on the popular items in preview. - - - Draft the copy in the [tweets](https://zed.dev/channel/tweets-23331) channel. - - Create the preview media (videos, screenshots). 
- - For features that you film videos around, try to create alternative photo-only versions to be used in the email, as videos and GIFs aren't great for email. - - Store all created media in `Feature Media` in our Google Drive. - - Build X and Bluesky post drafts (copy and media) in [Buffer](https://buffer.com), to be sent for next week's stable release. - - **Note: These are preview items and you may discover bugs.** - **This is a very good time to report these findings to the team!** - -1. Build email based on the popular items in preview. - - - You can reuse the copy and photo media from the preview social media posts. - - Create a draft email in [Kit](https://kit.com), to be sent for next week's stable release. - -## Patch Release Process - -If your PR fixes a panic or a crash, you should cherry-pick it to the current stable and preview branches. -If your PR fixes a regression in recently released code, you should cherry-pick it to preview. - -You will need write access to the Zed repository to do this: - ---- - -1. Send a PR containing your change to `main` as normal. - -1. Once it is merged, cherry-pick the commit locally to either of the release branches (`v0.XXX.x`). - - - In some cases, you may have to handle a merge conflict. - More often than not, this will happen when cherry-picking to stable, as the stable branch is more "stale" than the preview branch. - -1. After the commit is cherry-picked, run `./script/trigger-release {preview|stable}`. - This will bump the version numbers, create a new release tag, and kick off a release build. - - - This can also be run from the [GitHub Actions UI](https://github.com/zed-industries/zed/actions/workflows/bump_patch_version.yml): - ![](https://github.com/zed-industries/zed/assets/1486634/9e31ae95-09e1-4c7f-9591-944f4f5b63ea) - -1. Once release drafts are up on [GitHub Releases](https://github.com/zed-industries/zed/releases), proofread and edit the release notes as needed and **save**. 
- - - **Do not publish the drafts, yet.** - -1. Check the release assets. - - - Ensure the stable / preview release jobs have finished without error. - - Ensure each draft has the proper number of assets—releases currently have 10 assets each. - - Download the artifacts for each release draft and test that you can run them locally. - -1. Publish stable / preview drafts, one at a time. - - Use [Vercel](https://vercel.com/zed-industries/zed-dev) to check the progress of the website rebuild. - The release will be public once the rebuild has completed. - -## Nightly release process - -In addition to the public releases, we also have a nightly build that we encourage employees to use. -Nightly is released by cron once a day, and can be shipped as often as you'd like. -There are no release notes or announcements, so you can just merge your changes to main and run `./script/trigger-release nightly`. From d5421ba1a8b880d6baf911c6e38c7c322dc267df Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Sun, 2 Nov 2025 02:35:11 -0500 Subject: [PATCH 64/82] windows: Fix click bleeding through collab follow (#41726) On Windows, clicking on a collab user icon in the title bar would minimize/expand Zed because the click would bleed through to the title bar. This PR fixes this by stopping propagation. 
#### Before (On MacOS with double clicks to mimic the same behavior) https://github.com/user-attachments/assets/5a91f7ff-265a-4575-aa23-00b8d30daeed #### After (On MacOS with double clicks to mimic the same behavior) https://github.com/user-attachments/assets/e9fcb98f-4855-4f21-8926-2d306d256f1c Release Notes: - Windows: Fix clicking on user icon in title bar to follow minimizing/expanding Zed Co-authored-by: Remco Smits --- crates/title_bar/src/collab.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 070952d1cec346e4ec41e26f69895b65cd74f082..16a0389efa46429d91c79f4eb1e99f62d01753b5 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -220,6 +220,8 @@ impl TitleBar { .on_click({ let peer_id = collaborator.peer_id; cx.listener(move |this, _, window, cx| { + cx.stop_propagation(); + this.workspace .update(cx, |workspace, cx| { if is_following { From d887e2050fc90af3d3210e6bd1eee5989431b811 Mon Sep 17 00:00:00 2001 From: Xiaobo Liu Date: Sun, 2 Nov 2025 16:15:01 +0800 Subject: [PATCH 65/82] windows: Hide background helpers behind CREATE_NO_WINDOW (#41737) Close https://github.com/zed-industries/zed/issues/41538 Release Notes: - Fixed some processes on windows not spawning with CREATE_NO_WINDOW --------- Signed-off-by: Xiaobo Liu --- Cargo.lock | 1 + crates/auto_update/Cargo.toml | 1 + crates/auto_update/src/auto_update.rs | 2 +- crates/fs/src/fs.rs | 2 +- 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c0eea670a77f03c4dbb5afdb7d1197b6d9b76159..3dc7b2337edcb1d155a56f241b517db5a2ad8045 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1339,6 +1339,7 @@ dependencies = [ "settings", "smol", "tempfile", + "util", "which 6.0.3", "workspace", ] diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml index 08db9f8a97bb0783da987f84991ad1aaa62c2141..630be043dca120ca76b2552f0a729a03a684f934 100644 --- 
a/crates/auto_update/Cargo.toml +++ b/crates/auto_update/Cargo.toml @@ -26,6 +26,7 @@ serde_json.workspace = true settings.workspace = true smol.workspace = true tempfile.workspace = true +util.workspace = true workspace.workspace = true [target.'cfg(not(target_os = "windows"))'.dependencies] diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 9f93dd27900e4b90de8c6d61d41b3b6c287eaaf0..331a58414958a48feaad70babee2dc2ea3b730e0 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -962,7 +962,7 @@ pub async fn finalize_auto_update_on_quit() { .parent() .map(|p| p.join("tools").join("auto_update_helper.exe")) { - let mut command = smol::process::Command::new(helper); + let mut command = util::command::new_smol_command(helper); command.arg("--launch"); command.arg("false"); if let Ok(mut cmd) = command.spawn() { diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index c794303ef71232d5a162b51ec8db7d472328b767..0202b2134f4fd0d3f983b2c67e97414a44457143 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -377,7 +377,7 @@ impl Fs for RealFs { #[cfg(windows)] if smol::fs::metadata(&target).await?.is_dir() { - let status = smol::process::Command::new("cmd") + let status = new_smol_command("cmd") .args(["/C", "mklink", "/J"]) .args([path, target.as_path()]) .status() From a9bc890497f1edaf4f177385cf96785de60e910c Mon Sep 17 00:00:00 2001 From: Mayank Verma Date: Sun, 2 Nov 2025 21:26:58 +0530 Subject: [PATCH 66/82] ui: Fix popover menu not restoring focus to the previously focused element (#41751) Closes #26548 Here's a before/after comparison: https://github.com/user-attachments/assets/21d49db7-28bb-4fe2-bdaf-e86b6400ae7a Release Notes: - Fixed popover menus not restoring focus to the previously focused element --- crates/ui/src/components/popover_menu.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ui/src/components/popover_menu.rs 
b/crates/ui/src/components/popover_menu.rs index 439b53f0388114aa37adcf5277e87744e6f4f9e4..b1a52bec8fdf1f7030b5b321bed7702d602ff212 100644 --- a/crates/ui/src/components/popover_menu.rs +++ b/crates/ui/src/components/popover_menu.rs @@ -270,11 +270,11 @@ fn show_menu( window: &mut Window, cx: &mut App, ) { + let previous_focus_handle = window.focused(cx); let Some(new_menu) = (builder)(window, cx) else { return; }; let menu2 = menu.clone(); - let previous_focus_handle = window.focused(cx); window .subscribe(&new_menu, cx, move |modal, _: &DismissEvent, window, cx| { From 12fe12b5acfaabfd6d93912f82408b026708ad65 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Sun, 2 Nov 2025 14:30:37 -0300 Subject: [PATCH 67/82] docs: Update theme, icon theme, and visual customization pages (#41761) Some housekeeping updates: - Update hardcoded actions/keybindings so they're pulled from the repo - Mention settings window when useful - Add more info about agent panel's font size - Break sentences in individual lines Release Notes: - N/A --- docs/src/icon-themes.md | 10 ++++++---- docs/src/themes.md | 23 +++++++++++++---------- docs/src/visual-customization.md | 24 +++++++++++++++++------- 3 files changed, 36 insertions(+), 21 deletions(-) diff --git a/docs/src/icon-themes.md b/docs/src/icon-themes.md index e035c7171ef84d77f3d18ae704af8b369c23947e..72fc51b834acc7f4cd03eee83246f9d7b1f9b756 100644 --- a/docs/src/icon-themes.md +++ b/docs/src/icon-themes.md @@ -4,19 +4,21 @@ Zed comes with a built-in icon theme, with more icon themes available as extensi ## Selecting an Icon Theme -See what icon themes are installed and preview them via the Icon Theme Selector, which you can open from the command palette with "icon theme selector: toggle". +See what icon themes are installed and preview them via the Icon Theme Selector, which you can open from the command palette with `icon theme selector: toggle`. 
Navigating through the icon theme list by moving up and down will change the icon theme in real time and hitting enter will save it to your settings file. ## Installing more Icon Themes -More icon themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions). +More icon themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions?filter=icon-themes). ## Configuring Icon Themes -Your selected icon theme is stored in your settings file. You can open your settings file from the command palette with `zed: open settings file` (bound to `cmd-alt-,` on macOS and `ctrl-alt-,` on Linux). +Your selected icon theme is stored in your settings file. +You can open your settings file from the command palette with {#action zed::OpenSettingsFile} (bound to {#kb zed::OpenSettingsFile}). -Just like with themes, Zed allows for configuring different icon themes for light and dark mode. You can set the mode to `"light"` or `"dark"` to ignore the current system mode. +Just like with themes, Zed allows for configuring different icon themes for light and dark mode. +You can set the mode to `"light"` or `"dark"` to ignore the current system mode. ```json [settings] { diff --git a/docs/src/themes.md b/docs/src/themes.md index 00c2a9571c82c044864d181f8547f2d28ef1a489..0bbea57ebfd7c9d55031c2ca9ff31b67b360bcdd 100644 --- a/docs/src/themes.md +++ b/docs/src/themes.md @@ -4,21 +4,23 @@ Zed comes with a number of built-in themes, with more themes available as extens ## Selecting a Theme -See what themes are installed and preview them via the Theme Selector, which you can open from the command palette with "theme selector: Toggle" (bound to `cmd-k cmd-t` on macOS and `ctrl-k ctrl-t` on Linux). 
+See what themes are installed and preview them via the Theme Selector, which you can open from the command palette with `theme selector: toggle` (bound to {#kb theme_selector::Toggle}). Navigating through the theme list by moving up and down will change the theme in real time and hitting enter will save it to your settings file. ## Installing more Themes -More themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions). +More themes are available from the Extensions page, which you can access via the command palette with `zed: extensions` or the [Zed website](https://zed.dev/extensions?filter=themes). Many popular themes have been ported to Zed, and if you're struggling to choose one, visit [zed-themes.com](https://zed-themes.com), a third-party gallery with visible previews for many of them. ## Configuring a Theme -Your selected theme is stored in your settings file. You can open your settings file from the command palette with `zed: open settings file` (bound to `cmd-alt-,` on macOS and `ctrl-alt-,` on Linux). +Your selected theme is stored in your settings file. +You can open your settings file from the command palette with {#action zed::OpenSettingsFile} (bound to {#kb zed::OpenSettingsFile}). -By default, Zed maintains two themes: one for light mode and one for dark mode. You can set the mode to `"dark"` or `"light"` to ignore the current system mode. +By default, Zed maintains two themes: one for light mode and one for dark mode. +You can set the mode to `"dark"` or `"light"` to ignore the current system mode. ```json [settings] { @@ -32,7 +34,8 @@ By default, Zed maintains two themes: one for light mode and one for dark mode. ## Theme Overrides -To override specific attributes of a theme, use the `theme_overrides` setting. This setting can be used to configure theme-specific overrides. 
+To override specific attributes of a theme, use the `theme_overrides` setting. +This setting can be used to configure theme-specific overrides. For example, add the following to your `settings.json` if you wish to override the background color of the editor and display comments and doc comments as italics: @@ -54,17 +57,17 @@ For example, add the following to your `settings.json` if you wish to override t } ``` -To see a comprehensive list of list of captures (like `comment` and `comment.doc`) see: [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting). +To see a comprehensive list of captures (like `comment` and `comment.doc`) see [Language Extensions: Syntax highlighting](./extensions/languages.md#syntax-highlighting). -To see a list of available theme attributes look at the JSON file for your theme. For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes. +To see a list of available theme attributes look at the JSON file for your theme. +For example, [assets/themes/one/one.json](https://github.com/zed-industries/zed/blob/main/assets/themes/one/one.json) for the default One Dark and One Light themes. ## Local Themes Store new themes locally by placing them in the `~/.config/zed/themes` directory (macOS and Linux) or `%USERPROFILE%\AppData\Roaming\Zed\themes\` (Windows). -For example, to create a new theme called `my-cool-theme`, create a file called `my-cool-theme.json` in that directory. It will be available in the theme selector the next time Zed loads. - -Find more themes at [zed-themes.com](https://zed-themes.com). +For example, to create a new theme called `my-cool-theme`, create a file called `my-cool-theme.json` in that directory. +It will be available in the theme selector the next time Zed loads.
## Theme Development diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index dc50588cde659b4e580822ddfd7eaf8951f63ea7..509e47863357fa71081d8c70e34fa68d841e09f8 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -1,14 +1,14 @@ # Visual Customization -Various aspects of Zed's visual layout can be configured via Zed settings.json which you can access via {#action zed::OpenSettings} ({#kb zed::OpenSettings}). +Various aspects of Zed's visual layout can be configured via either the settings window or the `settings.json` file, which you can access via {#action zed::OpenSettings} ({#kb zed::OpenSettings}) and {#action zed::OpenSettingsFile} ({#kb zed::OpenSettingsFile}) respectively. See [Configuring Zed](./configuring-zed.md) for additional information and other non-visual settings. ## Themes -User may install zed extensions providing [Themes](./themes.md) and [Icon Themes](./icon-themes.md) via {#action zed::Extensions} from the command palette or menu. +You can install many [themes](./themes.md) and [icon themes](./icon-themes.md) in form of extensions by running {#action zed::Extensions} from the command palette. -You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and ({#action icon_theme_selector::Toggle}) which will modify the following settings: +You can preview/choose amongst your installed themes and icon themes with {#action theme_selector::Toggle} ({#kb theme_selector::Toggle}) and {#action icon_theme_selector::Toggle} ({#kb icon_theme_selector::Toggle}) which will modify the following settings: ```json [settings] { @@ -61,15 +61,20 @@ If you would like to use distinct themes for light mode/dark mode that can be se "line_height": "standard", }, - // Agent Panel Font Settings - "agent_font_size": 15 + // Controls the font size for agent responses in the agent panel. 
+ // If not specified, it falls back to the UI font size. + "agent_ui_font_size": 15, + // Controls the font size for the agent panel's message editor, user message, + // and any other snippet of code. + "agent_buffer_font_size": 12 ``` ### Font ligatures By default Zed enable font ligatures which will visually combines certain adjacent characters. -For example `=>` will be displayed as `→` and `!=` will be `≠`. This is purely cosmetic and the individual characters remain unchanged. +For example `=>` will be displayed as `→` and `!=` will be `≠`. +This is purely cosmetic and the individual characters remain unchanged. To disable this behavior use: @@ -464,7 +469,12 @@ Project panel can be shown/hidden with {#action project_panel::ToggleFocus} ({#k "default_width": 640, // Default width (left/right docked) "default_height": 320, // Default height (bottom docked) }, - "agent_font_size": 16 + // Controls the font size for agent responses in the agent panel. + // If not specified, it falls back to the UI font size. + "agent_ui_font_size": 15, + // Controls the font size for the agent panel's message editor, user message, + // and any other snippet of code. + "agent_buffer_font_size": 12 ``` See [Zed AI Documentation](./ai/overview.md) for additional non-visual AI settings. From 00ff89f00fc771c41c36bb577c6e50893e4a9625 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Sun, 2 Nov 2025 14:30:50 -0300 Subject: [PATCH 68/82] agent_ui: Make single file review actions match panel (#41718) When we introduced the ACP-based agent panel, the condition that the "review" | "reject" | "keep" buttons observed to be displayed got mismatched between the panel and the pane (when in the single file review scenario). In the panel, the buttons appear as soon as there are changed buffers, whereas in the pane, they appear when response generation is done. 
I believe that making them appear at the same time, observing the same condition, is the desired behavior. Thus, I think the panel behavior is more correct, because there are loads of times where agent response generation isn't technically done (e.g., when there's a command waiting for permission to be run) but the _file edit_ has already been performed and is in a good state to be already accepted or rejected. So, this is what this PR is doing; effectively removing the "generating" state from the agent diff, and switching to `EditorState::Reviewing` when there are changed buffers. Release Notes: - Improved agent edit single file reviews by making the "reject" and "accept" buttons appear at the same time. --- crates/agent_ui/src/agent_diff.rs | 26 ++++++-------------------- 1 file changed, 6 insertions(+), 20 deletions(-) diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index a0f117b0bf30abee9d2182cf8c3fadd10099b1f0..63eb2ac49731a5e57b4eae5bf33b821b2e223c25 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -70,14 +70,6 @@ impl AgentDiffThread { } } - fn is_generating(&self, cx: &App) -> bool { - match self { - AgentDiffThread::AcpThread(thread) => { - thread.read(cx).status() == acp_thread::ThreadStatus::Generating - } - } - } - fn has_pending_edit_tool_uses(&self, cx: &App) -> bool { match self { AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(), @@ -970,9 +962,7 @@ impl AgentDiffToolbar { None => ToolbarItemLocation::Hidden, Some(AgentDiffToolbarItem::Pane(_)) => ToolbarItemLocation::PrimaryRight, Some(AgentDiffToolbarItem::Editor { state, .. 
}) => match state { - EditorState::Generating | EditorState::Reviewing => { - ToolbarItemLocation::PrimaryRight - } + EditorState::Reviewing => ToolbarItemLocation::PrimaryRight, EditorState::Idle => ToolbarItemLocation::Hidden, }, } @@ -1050,7 +1040,6 @@ impl Render for AgentDiffToolbar { let content = match state { EditorState::Idle => return Empty.into_any(), - EditorState::Generating => vec![spinner_icon], EditorState::Reviewing => vec![ h_flex() .child( @@ -1222,7 +1211,6 @@ pub struct AgentDiff { pub enum EditorState { Idle, Reviewing, - Generating, } struct WorkspaceThread { @@ -1545,15 +1533,11 @@ impl AgentDiff { multibuffer.add_diff(diff_handle.clone(), cx); }); - let new_state = if thread.is_generating(cx) { - EditorState::Generating - } else { - EditorState::Reviewing - }; + let reviewing_state = EditorState::Reviewing; let previous_state = self .reviewing_editors - .insert(weak_editor.clone(), new_state.clone()); + .insert(weak_editor.clone(), reviewing_state.clone()); if previous_state.is_none() { editor.update(cx, |editor, cx| { @@ -1566,7 +1550,9 @@ impl AgentDiff { unaffected.remove(weak_editor); } - if new_state == EditorState::Reviewing && previous_state != Some(new_state) { + if reviewing_state == EditorState::Reviewing + && previous_state != Some(reviewing_state) + { // Jump to first hunk when we enter review mode editor.update(cx, |editor, cx| { let snapshot = multibuffer.read(cx).snapshot(cx); From 9909b59bd03388b5e99e4ddc2de3b7409ab667ed Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Sun, 2 Nov 2025 14:56:23 -0300 Subject: [PATCH 69/82] agent_ui: Improve the "go to file" affordance in the edit bar (#41762) This PR makes it clearer that you can click on the file path to open the corresponding file in the agent panel's "edit bar", which is the element that shows up in the panel as soon as agent-made edits happen. 
Release Notes: - agent panel: Improved the "go to file" affordance in the edit bar. --- crates/agent_ui/src/acp/thread_view.rs | 34 +++++++++++++++++--------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index a4b3106fa9d9ded053ff2f33b720ec3b10512d01..5c575de401daf26bd7815bc49d923072243ee980 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -3631,6 +3631,7 @@ impl AcpThreadView { .child( h_flex() .id("edits-container") + .cursor_pointer() .gap_1() .child(Disclosure::new("edits-disclosure", expanded)) .map(|this| { @@ -3770,6 +3771,7 @@ impl AcpThreadView { Label::new(name.to_string()) .size(LabelSize::XSmall) .buffer_font(cx) + .ml_1p5() }); let file_icon = FileIcons::get_icon(path.as_std_path(), cx) @@ -3801,14 +3803,30 @@ impl AcpThreadView { }) .child( h_flex() + .id(("file-name-row", index)) .relative() - .id(("file-name", index)) .pr_8() - .gap_1p5() .w_full() .overflow_x_scroll() - .child(file_icon) - .child(h_flex().gap_0p5().children(file_name).children(file_path)) + .child( + h_flex() + .id(("file-name-path", index)) + .cursor_pointer() + .pr_0p5() + .gap_0p5() + .hover(|s| s.bg(cx.theme().colors().element_hover)) + .rounded_xs() + .child(file_icon) + .children(file_name) + .children(file_path) + .tooltip(Tooltip::text("Go to File")) + .on_click({ + let buffer = buffer.clone(); + cx.listener(move |this, _, window, cx| { + this.open_edited_buffer(&buffer, window, cx); + }) + }), + ) .child( div() .absolute() @@ -3818,13 +3836,7 @@ impl AcpThreadView { .bottom_0() .right_0() .bg(overlay_gradient), - ) - .on_click({ - let buffer = buffer.clone(); - cx.listener(move |this, _, window, cx| { - this.open_edited_buffer(&buffer, window, cx); - }) - }), + ), ) .child( h_flex() From 4e7ba8e6807e2493ee118c03063545c61722c4d3 Mon Sep 17 00:00:00 2001 From: Xiaobo Liu Date: Mon, 3 Nov 2025 02:11:21 +0800 Subject: [PATCH 
70/82] acp_tools: Add vertical scrollbar to ACP logs (#41740) Release Notes: - N/A --------- Signed-off-by: Xiaobo Liu Co-authored-by: Danilo Leal --- crates/acp_tools/src/acp_tools.rs | 51 ++++++++++++++++++------------- 1 file changed, 29 insertions(+), 22 deletions(-) diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs index a40bcbd93c878a85c85d7edd312e713988234966..7615784676c7d9ff1782a6e9537e608cb927154d 100644 --- a/crates/acp_tools/src/acp_tools.rs +++ b/crates/acp_tools/src/acp_tools.rs @@ -19,7 +19,7 @@ use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; use project::Project; use settings::Settings; use theme::ThemeSettings; -use ui::{Tooltip, prelude::*}; +use ui::{Tooltip, WithScrollbar, prelude::*}; use util::ResultExt as _; use workspace::{ Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace, @@ -291,17 +291,19 @@ impl AcpTools { let expanded = self.expanded.contains(&index); v_flex() + .id(index) + .group("message") + .cursor_pointer() + .font_buffer(cx) .w_full() - .px_4() .py_3() - .border_color(colors.border) - .border_b_1() + .pl_4() + .pr_5() .gap_2() .items_start() - .font_buffer(cx) .text_size(base_size) - .id(index) - .group("message") + .border_color(colors.border) + .border_b_1() .hover(|this| this.bg(colors.element_background.opacity(0.5))) .on_click(cx.listener(move |this, _, _, cx| { if this.expanded.contains(&index) { @@ -323,15 +325,14 @@ impl AcpTools { h_flex() .w_full() .gap_2() - .items_center() .flex_shrink_0() .child(match message.direction { - acp::StreamMessageDirection::Incoming => { - ui::Icon::new(ui::IconName::ArrowDown).color(Color::Error) - } - acp::StreamMessageDirection::Outgoing => { - ui::Icon::new(ui::IconName::ArrowUp).color(Color::Success) - } + acp::StreamMessageDirection::Incoming => Icon::new(IconName::ArrowDown) + .color(Color::Error) + .size(IconSize::Small), + acp::StreamMessageDirection::Outgoing => 
Icon::new(IconName::ArrowUp) + .color(Color::Success) + .size(IconSize::Small), }) .child( Label::new(message.name.clone()) @@ -501,7 +502,7 @@ impl Focusable for AcpTools { } impl Render for AcpTools { - fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { v_flex() .track_focus(&self.focus_handle) .size_full() @@ -516,13 +517,19 @@ impl Render for AcpTools { .child("No messages recorded yet") .into_any() } else { - list( - connection.list_state.clone(), - cx.processor(Self::render_message), - ) - .with_sizing_behavior(gpui::ListSizingBehavior::Auto) - .flex_grow() - .into_any() + div() + .size_full() + .flex_grow() + .child( + list( + connection.list_state.clone(), + cx.processor(Self::render_message), + ) + .with_sizing_behavior(gpui::ListSizingBehavior::Auto) + .size_full(), + ) + .vertical_scrollbar_for(connection.list_state.clone(), window, cx) + .into_any() } } None => h_flex() From f7153bbe8a869d0e2b25efed64fd7e4217899b63 Mon Sep 17 00:00:00 2001 From: Aero Date: Mon, 3 Nov 2025 03:05:29 +0800 Subject: [PATCH 71/82] agent_ui: Add delete button for compatible API-based LLM providers (#41739) Discussion: https://github.com/zed-industries/zed/discussions/41736 Release Notes: - agent panel: Added the ability to remove OpenAI-compatible LLM providers directly from the UI. 
--------- Co-authored-by: Danilo Leal --- crates/agent_ui/src/agent_configuration.rs | 87 +++++++++++++++++++++- 1 file changed, 83 insertions(+), 4 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 61f8ee60a794cbd6622759a89efb6f40c8f1503d..7781bdc17bfd57996b196c3f7d684c2d11493776 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -23,16 +23,18 @@ use language::LanguageRegistry; use language_model::{ LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry, ZED_CLOUD_PROVIDER_ID, }; +use language_models::AllLanguageModelSettings; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{ agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME}, context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore}, }; use rope::Rope; -use settings::{SettingsStore, update_settings_file}; +use settings::{Settings, SettingsStore, update_settings_file}; use ui::{ - Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, - Indicator, PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*, + Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, + ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch, + SwitchColor, Tooltip, WithScrollbar, prelude::*, }; use util::ResultExt as _; use workspace::{Workspace, create_and_open_local_file}; @@ -304,10 +306,76 @@ impl AgentConfiguration { } })), ) - }), + }) + .when( + is_expanded && is_removable_provider(&provider.id(), cx), + |this| { + this.child( + Button::new( + SharedString::from(format!("delete-provider-{provider_id}")), + "Remove Provider", + ) + .full_width() + .style(ButtonStyle::Outlined) + .icon_position(IconPosition::Start) + .icon(IconName::Trash) + .icon_size(IconSize::Small) + 
.icon_color(Color::Muted) + .label_size(LabelSize::Small) + .on_click(cx.listener({ + let provider = provider.clone(); + move |this, _event, window, cx| { + this.delete_provider(provider.clone(), window, cx); + } + })), + ) + }, + ), ) } + fn delete_provider( + &mut self, + provider: Arc, + window: &mut Window, + cx: &mut Context, + ) { + let fs = self.fs.clone(); + let provider_id = provider.id(); + + cx.spawn_in(window, async move |_, cx| { + cx.update(|_window, cx| { + update_settings_file(fs.clone(), cx, { + let provider_id = provider_id.clone(); + move |settings, _| { + if let Some(ref mut openai_compatible) = settings + .language_models + .as_mut() + .and_then(|lm| lm.openai_compatible.as_mut()) + { + let key_to_remove: Arc = Arc::from(provider_id.0.as_ref()); + openai_compatible.remove(&key_to_remove); + } + } + }); + }) + .log_err(); + + cx.update(|_window, cx| { + LanguageModelRegistry::global(cx).update(cx, { + let provider_id = provider_id.clone(); + move |registry, cx| { + registry.unregister_provider(provider_id, cx); + } + }) + }) + .log_err(); + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + fn render_provider_configuration_section( &mut self, cx: &mut Context, @@ -1225,3 +1293,14 @@ fn find_text_in_buffer( None } } + +// OpenAI-compatible providers are user-configured and can be removed, +// whereas built-in providers (like Anthropic, OpenAI, Google, etc.) can't. +// +// If in the future we have more "API-compatible-type" of providers, +// they should be included here as removable providers. 
+fn is_removable_provider(provider_id: &LanguageModelProviderId, cx: &App) -> bool { + AllLanguageModelSettings::get_global(cx) + .openai_compatible + .contains_key(provider_id.0.as_ref()) +} From deacd3e92280d124858b8da304884283a81f5ccf Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 3 Nov 2025 00:54:47 -0300 Subject: [PATCH 72/82] extension_ui: Fix card label truncation (#41784) Closes https://github.com/zed-industries/zed/issues/41763 Release Notes: - N/A --- crates/extensions_ui/src/extensions_ui.rs | 25 ++++++++++------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/crates/extensions_ui/src/extensions_ui.rs b/crates/extensions_ui/src/extensions_ui.rs index 539f2d8864134effdf0a3edcdefa4ca213b7eff3..3a7e1a80dd348d97a54f1dce21794760a2399740 100644 --- a/crates/extensions_ui/src/extensions_ui.rs +++ b/crates/extensions_ui/src/extensions_ui.rs @@ -805,25 +805,22 @@ impl ExtensionsPage { ) .child( h_flex() - .gap_2() + .gap_1() .justify_between() .child( - h_flex() - .gap_1() - .child( - Icon::new(IconName::Person) - .size(IconSize::XSmall) - .color(Color::Muted), - ) - .child( - Label::new(extension.manifest.authors.join(", ")) - .size(LabelSize::Small) - .color(Color::Muted) - .truncate(), - ), + Icon::new(IconName::Person) + .size(IconSize::XSmall) + .color(Color::Muted), + ) + .child( + Label::new(extension.manifest.authors.join(", ")) + .size(LabelSize::Small) + .color(Color::Muted) + .truncate(), ) .child( h_flex() + .ml_auto() .gap_1() .child( IconButton::new( From 73b75a7765cd8bf77ae15d6c7fc60df007f35ee1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=80=E1=B4=8D=E1=B4=9B=E1=B4=8F=E1=B4=80=E1=B4=87?= =?UTF-8?q?=CA=80?= Date: Mon, 3 Nov 2025 14:47:20 +0800 Subject: [PATCH 73/82] Support Gitee avatars in git blame (#41783) Part of https://github.com/zed-industries/zed/issues/11043. CleanShot 2025-11-03 at 10 39
08@2x Release Notes: - Added Support for showing Gitee avatars in git blame --- .../src/providers/gitee.rs | 81 ++++++++++++++++++- 1 file changed, 78 insertions(+), 3 deletions(-) diff --git a/crates/git_hosting_providers/src/providers/gitee.rs b/crates/git_hosting_providers/src/providers/gitee.rs index e2bcb6668240fa43120555f9b3c11a10dd1418d7..120a360cb19615e11e0ea4829a6fcd68665e4fcc 100644 --- a/crates/git_hosting_providers/src/providers/gitee.rs +++ b/crates/git_hosting_providers/src/providers/gitee.rs @@ -1,5 +1,11 @@ -use std::str::FromStr; - +use std::{str::FromStr, sync::Arc}; + +use anyhow::{Context as _, Result, bail}; +use async_trait::async_trait; +use futures::AsyncReadExt; +use gpui::SharedString; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request}; +use serde::Deserialize; use url::Url; use git::{ @@ -9,6 +15,55 @@ use git::{ pub struct Gitee; +#[derive(Debug, Deserialize)] +struct CommitDetails { + author: Option, +} + +#[derive(Debug, Deserialize)] +struct Author { + avatar_url: String, +} + +impl Gitee { + async fn fetch_gitee_commit_author( + &self, + repo_owner: &str, + repo: &str, + commit: &str, + client: &Arc, + ) -> Result> { + let url = format!("https://gitee.com/api/v5/repos/{repo_owner}/{repo}/commits/{commit}"); + + let request = Request::get(&url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); + + let mut response = client + .send(request.body(AsyncBody::default())?) 
+ .await + .with_context(|| format!("error fetching Gitee commit details at {:?}", url))?; + + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let body_str = std::str::from_utf8(&body)?; + + serde_json::from_str::(body_str) + .map(|commit| commit.author) + .context("failed to deserialize Gitee commit details") + } +} + +#[async_trait] impl GitHostingProvider for Gitee { fn name(&self) -> String { "Gitee".to_string() @@ -19,7 +74,7 @@ impl GitHostingProvider for Gitee { } fn supports_avatars(&self) -> bool { - false + true } fn format_line_number(&self, line: u32) -> String { @@ -80,6 +135,26 @@ impl GitHostingProvider for Gitee { ); permalink } + + async fn commit_author_avatar_url( + &self, + repo_owner: &str, + repo: &str, + commit: SharedString, + http_client: Arc, + ) -> Result> { + let commit = commit.to_string(); + let avatar_url = self + .fetch_gitee_commit_author(repo_owner, repo, &commit, &http_client) + .await? + .map(|author| -> Result { + let mut url = Url::parse(&author.avatar_url)?; + url.set_query(Some("width=128")); + Ok(url) + }) + .transpose()?; + Ok(avatar_url) + } } #[cfg(test)] From dc503e997589c6d5cf4475ca36e5bbb75aa57b76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=80=E1=B4=8D=E1=B4=9B=E1=B4=8F=E1=B4=80=E1=B4=87?= =?UTF-8?q?=CA=80?= Date: Mon, 3 Nov 2025 14:51:04 +0800 Subject: [PATCH 74/82] Support GitLab and self-hosted GitLab avatars in git blame (#41747) Part of #11043. 
Release Notes: - Added Support for showing GitLab and self-hosted GitLab avatars in git blame --- Cargo.lock | 1 + crates/git_hosting_providers/Cargo.toml | 1 + .../src/providers/gitlab.rs | 131 +++++++++++++++++- 3 files changed, 127 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3dc7b2337edcb1d155a56f241b517db5a2ad8045..05a060a2d430618d00af4933936cdc43f2cd5a40 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7078,6 +7078,7 @@ dependencies = [ "serde_json", "settings", "url", + "urlencoding", "util", ] diff --git a/crates/git_hosting_providers/Cargo.toml b/crates/git_hosting_providers/Cargo.toml index 2b3e8f235ff6e5f351c1875107443f51838c6da9..851556151e285975cb1eb7d3d33244d7e11b5663 100644 --- a/crates/git_hosting_providers/Cargo.toml +++ b/crates/git_hosting_providers/Cargo.toml @@ -23,6 +23,7 @@ serde.workspace = true serde_json.workspace = true settings.workspace = true url.workspace = true +urlencoding.workspace = true util.workspace = true [dev-dependencies] diff --git a/crates/git_hosting_providers/src/providers/gitlab.rs b/crates/git_hosting_providers/src/providers/gitlab.rs index d18af7cccae058a7b9746f7dfe86beef8d6fda94..af3bb17494a79056db0fd4c531f67b77a31e0954 100644 --- a/crates/git_hosting_providers/src/providers/gitlab.rs +++ b/crates/git_hosting_providers/src/providers/gitlab.rs @@ -1,6 +1,11 @@ -use std::str::FromStr; - -use anyhow::{Result, bail}; +use std::{str::FromStr, sync::Arc}; + +use anyhow::{Context as _, Result, bail}; +use async_trait::async_trait; +use futures::AsyncReadExt; +use gpui::SharedString; +use http_client::{AsyncBody, HttpClient, HttpRequestExt, Request}; +use serde::Deserialize; use url::Url; use git::{ @@ -10,6 +15,16 @@ use git::{ use crate::get_host_from_git_remote_url; +#[derive(Debug, Deserialize)] +struct CommitDetails { + author_email: String, +} + +#[derive(Debug, Deserialize)] +struct AvatarInfo { + avatar_url: String, +} + #[derive(Debug)] pub struct Gitlab { name: String, @@ -46,8 +61,79 @@ 
impl Gitlab { Url::parse(&format!("https://{}", host))?, )) } + + async fn fetch_gitlab_commit_author( + &self, + repo_owner: &str, + repo: &str, + commit: &str, + client: &Arc, + ) -> Result> { + let Some(host) = self.base_url.host_str() else { + bail!("failed to get host from gitlab base url"); + }; + let project_path = format!("{}/{}", repo_owner, repo); + let project_path_encoded = urlencoding::encode(&project_path); + let url = format!( + "https://{host}/api/v4/projects/{project_path_encoded}/repository/commits/{commit}" + ); + + let request = Request::get(&url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); + + let mut response = client + .send(request.body(AsyncBody::default())?) + .await + .with_context(|| format!("error fetching GitLab commit details at {:?}", url))?; + + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let body_str = std::str::from_utf8(&body)?; + + let author_email = serde_json::from_str::(body_str) + .map(|commit| commit.author_email) + .context("failed to deserialize GitLab commit details")?; + + let avatar_info_url = format!("https://{host}/api/v4/avatar?email={author_email}"); + + let request = Request::get(&avatar_info_url) + .header("Content-Type", "application/json") + .follow_redirects(http_client::RedirectPolicy::FollowAll); + + let mut response = client + .send(request.body(AsyncBody::default())?) 
+ .await + .with_context(|| format!("error fetching GitLab avatar info at {:?}", url))?; + + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + + if response.status().is_client_error() { + let text = String::from_utf8_lossy(body.as_slice()); + bail!( + "status error {}, response: {text:?}", + response.status().as_u16() + ); + } + + let body_str = std::str::from_utf8(&body)?; + + serde_json::from_str::>(body_str) + .context("failed to deserialize GitLab avatar info") + } } +#[async_trait] impl GitHostingProvider for Gitlab { fn name(&self) -> String { self.name.clone() @@ -58,7 +144,7 @@ impl GitHostingProvider for Gitlab { } fn supports_avatars(&self) -> bool { - false + true } fn format_line_number(&self, line: u32) -> String { @@ -122,6 +208,39 @@ impl GitHostingProvider for Gitlab { ); permalink } + + async fn commit_author_avatar_url( + &self, + repo_owner: &str, + repo: &str, + commit: SharedString, + http_client: Arc, + ) -> Result> { + let commit = commit.to_string(); + let avatar_url = self + .fetch_gitlab_commit_author(repo_owner, repo, &commit, &http_client) + .await? 
+ .map(|author| -> Result { + let mut url = Url::parse(&author.avatar_url)?; + if let Some(host) = url.host_str() { + let size_query = if host.contains("gravatar") || host.contains("libravatar") { + Some("s=128") + } else if self + .base_url + .host_str() + .is_some_and(|base_host| host.contains(base_host)) + { + Some("width=128") + } else { + None + }; + url.set_query(size_query); + } + Ok(url) + }) + .transpose()?; + Ok(avatar_url) + } } #[cfg(test)] @@ -134,8 +253,8 @@ mod tests { #[test] fn test_invalid_self_hosted_remote_url() { let remote_url = "https://gitlab.com/zed-industries/zed.git"; - let github = Gitlab::from_remote_url(remote_url); - assert!(github.is_err()); + let gitlab = Gitlab::from_remote_url(remote_url); + assert!(gitlab.is_err()); } #[test] From f2b539598e369dc166c69dcb3fc13628af7903ca Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 3 Nov 2025 11:02:31 +0100 Subject: [PATCH 75/82] sum_tree: Spawn less tasks in `SumTree::from_iter_async` (#41793) Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- Cargo.lock | 2 +- crates/rope/src/rope.rs | 22 ++++-------- crates/sum_tree/Cargo.toml | 2 +- crates/sum_tree/src/sum_tree.rs | 64 ++++++++++++++++++++------------- 4 files changed, 48 insertions(+), 42 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 05a060a2d430618d00af4933936cdc43f2cd5a40..1ee239c3740bb0fba40f81b9cdae77a46d62d535 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -16367,7 +16367,7 @@ dependencies = [ "arrayvec", "ctor", "futures 0.3.31", - "itertools 0.14.0", + "futures-lite 1.13.0", "log", "pollster 0.4.0", "rand 0.9.2", diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index b515f46ea89ddd5f8f29ca7d462b48fe8fff1d38..3c1c360be7dd2dbb55394f04d89e512a860bf668 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -323,21 +323,13 @@ impl Rope { const PARALLEL_THRESHOLD: usize = 4 * (2 * sum_tree::TREE_BASE); if new_chunks.len() >= PARALLEL_THRESHOLD { - let cx2 = executor.clone(); - executor - .scoped(|scope| { - // SAFETY: transmuting to 'static is safe because the future is scoped - // and the underlying string data cannot go out of scope because dropping the scope - // will wait for the task to finish - let new_chunks = - unsafe { std::mem::transmute::, Vec<&'static str>>(new_chunks) }; - - let async_extend = self - .chunks - .async_extend(new_chunks.into_iter().map(Chunk::new), cx2); - - scope.spawn(async_extend); - }) + // SAFETY: transmuting to 'static is sound here. We block on the future making use of this + // and we know that the result of this computation is not stashing the static reference + // away. 
+ let new_chunks = + unsafe { std::mem::transmute::, Vec<&'static str>>(new_chunks) }; + self.chunks + .async_extend(new_chunks.into_iter().map(Chunk::new), executor) .await; } else { self.chunks diff --git a/crates/sum_tree/Cargo.toml b/crates/sum_tree/Cargo.toml index fd39bd4d83c65501b4731f31d3f357a3ff7f6fa3..46c645aceb8e20b51e1a901a9881aafd7dfa446f 100644 --- a/crates/sum_tree/Cargo.toml +++ b/crates/sum_tree/Cargo.toml @@ -17,7 +17,7 @@ doctest = false arrayvec = "0.7.1" log.workspace = true futures.workspace = true -itertools.workspace = true +futures-lite.workspace = true [dev-dependencies] ctor.workspace = true diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 8562766b1b49ac8eb1e3c816f210d1a60cae2aed..b1590a2def72be6da45735228213905bce2624f7 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -4,15 +4,15 @@ mod tree_map; use arrayvec::ArrayVec; pub use cursor::{Cursor, FilterCursor, Iter}; use futures::{StreamExt, stream}; -use itertools::Itertools as _; +use futures_lite::future::yield_now; use std::marker::PhantomData; use std::mem; use std::{cmp::Ordering, fmt, iter::FromIterator, sync::Arc}; pub use tree_map::{MapSeekTarget, TreeMap, TreeSet}; -#[cfg(test)] +#[cfg(all(test, not(rust_analyzer)))] pub const TREE_BASE: usize = 2; -#[cfg(not(test))] +#[cfg(not(all(test, not(rust_analyzer))))] pub const TREE_BASE: usize = 6; pub trait BackgroundSpawn { @@ -316,30 +316,44 @@ impl SumTree { T: 'static + Send + Sync, for<'a> T::Summary: Summary = ()> + Send + Sync, S: BackgroundSpawn, - I: IntoIterator, + I: IntoIterator, { - let mut futures = vec![]; - let chunks = iter.into_iter().chunks(2 * TREE_BASE); - for chunk in chunks.into_iter() { - let items: ArrayVec = chunk.into_iter().collect(); - futures.push(async move { - let item_summaries: ArrayVec = - items.iter().map(|item| item.summary(())).collect(); - let mut summary = item_summaries[0].clone(); - for item_summary in &item_summaries[1..] 
{ - ::add_summary(&mut summary, item_summary, ()); - } - SumTree(Arc::new(Node::Leaf { - summary, - items, - item_summaries, - })) - }); + let iter = iter.into_iter(); + let num_leaves = iter.len().div_ceil(2 * TREE_BASE); + + if num_leaves == 0 { + return Self::new(()); } - let mut nodes = futures::stream::iter(futures) + let mut nodes = stream::iter(iter) + .chunks(num_leaves.div_ceil(4)) + .map(|chunk| async move { + let mut chunk = chunk.into_iter(); + let mut leaves = vec![]; + loop { + let items: ArrayVec = + chunk.by_ref().take(2 * TREE_BASE).collect(); + if items.is_empty() { + break; + } + let item_summaries: ArrayVec = + items.iter().map(|item| item.summary(())).collect(); + let mut summary = item_summaries[0].clone(); + for item_summary in &item_summaries[1..] { + ::add_summary(&mut summary, item_summary, ()); + } + leaves.push(SumTree(Arc::new(Node::Leaf { + summary, + items, + item_summaries, + }))); + yield_now().await; + } + leaves + }) .map(|future| spawn.background_spawn(future)) .buffered(4) + .flat_map(|it| stream::iter(it.into_iter())) .collect::>() .await; @@ -622,7 +636,7 @@ impl SumTree { pub async fn async_extend(&mut self, iter: I, spawn: S) where S: BackgroundSpawn, - I: IntoIterator + 'static, + I: IntoIterator, T: 'static + Send + Sync, for<'b> T::Summary: Summary = ()> + Send + Sync, { @@ -1126,7 +1140,7 @@ mod tests { let rng = &mut rng; let mut tree = SumTree::::default(); - let count = rng.random_range(0..10); + let count = rng.random_range(0..128); if rng.random() { tree.extend(rng.sample_iter(StandardUniform).take(count), ()); } else { @@ -1140,7 +1154,7 @@ mod tests { for _ in 0..num_operations { let splice_end = rng.random_range(0..tree.extent::(()).0 + 1); let splice_start = rng.random_range(0..splice_end + 1); - let count = rng.random_range(0..10); + let count = rng.random_range(0..128); let tree_end = tree.extent::(()); let new_items = rng .sample_iter(StandardUniform) From 3a058138c1e4b7e18e9282588d0a898b64f4a12b Mon Sep 17 
00:00:00 2001 From: Oleksiy Syvokon Date: Mon, 3 Nov 2025 12:22:51 +0200 Subject: [PATCH 76/82] Fix Sonnet's regression with inserting `` (#41800) Sometimes, inside the edit agent, Sonnet thinks that it's doing a tool call and closes its response with `` instead of properly closing . A better but more labor-intensive way of fixing this would be switching to streaming tool calls for LLMs that support it. Closes #39921 Release Notes: - Fixed Sonnet's regression with inserting `` sometimes --- crates/agent/src/edit_agent/edit_parser.rs | 46 +++++++++++++++++++++- 1 file changed, 45 insertions(+), 1 deletion(-) diff --git a/crates/agent/src/edit_agent/edit_parser.rs b/crates/agent/src/edit_agent/edit_parser.rs index 8411171ba4ea491d2603014a0715ce471b34e36f..425bf93efff115d4daef380e3f82abcdb8c0746f 100644 --- a/crates/agent/src/edit_agent/edit_parser.rs +++ b/crates/agent/src/edit_agent/edit_parser.rs @@ -13,7 +13,15 @@ const EDITS_END_TAG: &str = ""; const SEARCH_MARKER: &str = "<<<<<<< SEARCH"; const SEPARATOR_MARKER: &str = "======="; const REPLACE_MARKER: &str = ">>>>>>> REPLACE"; -const END_TAGS: [&str; 3] = [OLD_TEXT_END_TAG, NEW_TEXT_END_TAG, EDITS_END_TAG]; +const SONNET_PARAMETER_INVOKE_1: &str = "\n"; +const SONNET_PARAMETER_INVOKE_2: &str = ""; +const END_TAGS: [&str; 5] = [ + OLD_TEXT_END_TAG, + NEW_TEXT_END_TAG, + EDITS_END_TAG, + SONNET_PARAMETER_INVOKE_1, // Remove this after switching to streaming tool call + SONNET_PARAMETER_INVOKE_2, +]; #[derive(Debug)] pub enum EditParserEvent { @@ -547,6 +555,37 @@ mod tests { ); } + #[gpui::test(iterations = 1000)] + fn test_xml_edits_with_closing_parameter_invoke(mut rng: StdRng) { + // This case is a regression with Claude Sonnet 4.5. + // Sometimes Sonnet thinks that it's doing a tool call + // and closes its response with '' + // instead of properly closing + + let mut parser = EditParser::new(EditFormat::XmlTags); + assert_eq!( + parse_random_chunks( + indoc! 
{" + some textupdated text + "}, + &mut parser, + &mut rng + ), + vec![Edit { + old_text: "some text".to_string(), + new_text: "updated text".to_string(), + line_hint: None, + },] + ); + assert_eq!( + parser.finish(), + EditParserMetrics { + tags: 2, + mismatched_tags: 1 + } + ); + } + #[gpui::test(iterations = 1000)] fn test_xml_nested_tags(mut rng: StdRng) { let mut parser = EditParser::new(EditFormat::XmlTags); @@ -1035,6 +1074,11 @@ mod tests { last_ix = chunk_ix; } + if new_text.is_some() { + pending_edit.new_text = new_text.take().unwrap(); + edits.push(pending_edit); + } + edits } } From bc3c88e7375c650e31f489628dc8ff9ccdddce6a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 3 Nov 2025 11:41:53 +0100 Subject: [PATCH 77/82] Revert "windows: Don't flood windows message queue with gpui messages" (#41803) Reverts zed-industries/zed#41595 Closes #41704 --- .../gpui/src/platform/windows/dispatcher.rs | 26 +++++-------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/crates/gpui/src/platform/windows/dispatcher.rs b/crates/gpui/src/platform/windows/dispatcher.rs index 6759a573e6c04ecf943f6cc17616743bcab4ef28..8d3e6305f6b4bb60f6c282280bafa7f76f59eecb 100644 --- a/crates/gpui/src/platform/windows/dispatcher.rs +++ b/crates/gpui/src/platform/windows/dispatcher.rs @@ -80,27 +80,15 @@ impl PlatformDispatcher for WindowsDispatcher { } fn dispatch_on_main_thread(&self, runnable: Runnable) { - let was_empty = self.main_sender.is_empty(); match self.main_sender.send(runnable) { Ok(_) => unsafe { - // Only send a `WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD` to the - // queue if we have no runnables queued up yet, otherwise we - // risk filling the message queue with gpui messages causing us - // to starve the message loop of system messages, resulting in a - // process hang. - // - // When the message loop receives a - // `WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD` message we drain the - // runnable queue entirely. 
- if was_empty { - PostMessageW( - Some(self.platform_window_handle.as_raw()), - WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, - WPARAM(self.validation_number), - LPARAM(0), - ) - .log_err(); - } + PostMessageW( + Some(self.platform_window_handle.as_raw()), + WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD, + WPARAM(self.validation_number), + LPARAM(0), + ) + .log_err(); }, Err(runnable) => { // NOTE: Runnable may wrap a Future that is !Send. From c8f9db2e2425c490ffa34aa22d5c9fbcea0836e3 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 3 Nov 2025 11:50:05 +0100 Subject: [PATCH 78/82] remote: Fix more quoting issues with nushell (#41547) https://github.com/zed-industries/zed/pull/40084#issuecomment-3464159871 Closes https://github.com/zed-industries/zed/pull/41547 Release Notes: - Fixed remoting not working when the remote has nu set as its shell --- crates/languages/src/python.rs | 2 +- crates/project/src/terminals.rs | 17 ++-- crates/remote/src/transport/ssh.rs | 111 ++++++++++++++++++------ crates/remote/src/transport/wsl.rs | 64 ++++++-------- crates/util/src/shell.rs | 130 +++++++++++++++++++++++++++++ 5 files changed, 246 insertions(+), 78 deletions(-) diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index a87f17795f5b6a1d69368d826688a6ed48309d23..40054abeec298930033cd208c93492cf3354e346 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -1211,7 +1211,7 @@ impl ToolchainLister for PythonToolchainProvider { activation_script.extend(match shell { ShellKind::Fish => Some(format!("\"{pyenv}\" shell - fish {version}")), ShellKind::Posix => Some(format!("\"{pyenv}\" shell - sh {version}")), - ShellKind::Nushell => Some(format!("\"{pyenv}\" shell - nu {version}")), + ShellKind::Nushell => Some(format!("^\"{pyenv}\" shell - nu {version}")), ShellKind::PowerShell => None, ShellKind::Csh => None, ShellKind::Tcsh => None, diff --git a/crates/project/src/terminals.rs b/crates/project/src/terminals.rs index 
5ea9824916520cfb53673f82f17c1d0e5d31ede3..17564b17dd4d6623d7ca72fadbd0aa8defd1f9cc 100644 --- a/crates/project/src/terminals.rs +++ b/crates/project/src/terminals.rs @@ -8,7 +8,6 @@ use remote::RemoteClient; use settings::{Settings, SettingsLocation}; use smol::channel::bounded; use std::{ - borrow::Cow, path::{Path, PathBuf}, sync::Arc, }; @@ -122,6 +121,7 @@ impl Project { let lang_registry = self.languages.clone(); cx.spawn(async move |project, cx| { let shell_kind = ShellKind::new(&shell, is_windows); + let activation_script = maybe!(async { for toolchain in toolchains { let Some(toolchain) = toolchain.await else { @@ -143,14 +143,8 @@ impl Project { .update(cx, move |_, cx| { let format_to_run = || { if let Some(command) = &spawn_task.command { - let mut command: Option> = shell_kind.try_quote(command); - if let Some(command) = &mut command - && command.starts_with('"') - && let Some(prefix) = shell_kind.command_prefix() - { - *command = Cow::Owned(format!("{prefix}{command}")); - } - + let command = shell_kind.prepend_command_prefix(command); + let command = shell_kind.try_quote_prefix_aware(&command); let args = spawn_task .args .iter() @@ -172,12 +166,13 @@ impl Project { let activation_script = activation_script.join(&format!("{separator} ")); let to_run = format_to_run(); + + let arg = format!("{activation_script}{separator} {to_run}"); + let args = shell_kind.args_for_shell(false, arg); let shell = remote_client .read(cx) .shell() .unwrap_or_else(get_default_system_shell); - let arg = format!("{activation_script}{separator} {to_run}"); - let args = shell_kind.args_for_shell(false, arg); create_remote_shell( Some((&shell, &args)), diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 86d93ac2454a41a45d531dd8076066988634e5ce..18a4f64de28d1665deb4c788d7e4673e1e3b9ec5 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -39,6 +39,7 @@ pub(crate) struct SshRemoteConnection { 
ssh_platform: RemotePlatform, ssh_path_style: PathStyle, ssh_shell: String, + ssh_shell_kind: ShellKind, ssh_default_system_shell: String, _temp_dir: TempDir, } @@ -241,6 +242,7 @@ impl RemoteConnection for SshRemoteConnection { let Self { ssh_path_style, socket, + ssh_shell_kind, ssh_shell, .. } = self; @@ -254,6 +256,7 @@ impl RemoteConnection for SshRemoteConnection { env, *ssh_path_style, ssh_shell, + *ssh_shell_kind, socket.ssh_args(), ) } @@ -367,7 +370,7 @@ impl RemoteConnection for SshRemoteConnection { let ssh_proxy_process = match self .socket - .ssh_command("env", &proxy_args) + .ssh_command(self.ssh_shell_kind, "env", &proxy_args) // IMPORTANT: we kill this process when we drop the task that uses it. .kill_on_drop(true) .spawn() @@ -490,6 +493,13 @@ impl SshRemoteConnection { _ => PathStyle::Posix, }; let ssh_default_system_shell = String::from("/bin/sh"); + let ssh_shell_kind = ShellKind::new( + &ssh_shell, + match ssh_platform.os { + "windows" => true, + _ => false, + }, + ); let mut this = Self { socket, @@ -499,6 +509,7 @@ impl SshRemoteConnection { ssh_path_style, ssh_platform, ssh_shell, + ssh_shell_kind, ssh_default_system_shell, }; @@ -563,7 +574,11 @@ impl SshRemoteConnection { if self .socket - .run_command(&dst_path.display(self.path_style()), &["version"]) + .run_command( + self.ssh_shell_kind, + &dst_path.display(self.path_style()), + &["version"], + ) .await .is_ok() { @@ -632,7 +647,11 @@ impl SshRemoteConnection { ) -> Result<()> { if let Some(parent) = tmp_path_gz.parent() { self.socket - .run_command("mkdir", &["-p", parent.display(self.path_style()).as_ref()]) + .run_command( + self.ssh_shell_kind, + "mkdir", + &["-p", parent.display(self.path_style()).as_ref()], + ) .await?; } @@ -641,6 +660,7 @@ impl SshRemoteConnection { match self .socket .run_command( + self.ssh_shell_kind, "curl", &[ "-f", @@ -660,13 +680,19 @@ impl SshRemoteConnection { { Ok(_) => {} Err(e) => { - if self.socket.run_command("which", &["curl"]).await.is_ok() { + 
if self + .socket + .run_command(self.ssh_shell_kind, "which", &["curl"]) + .await + .is_ok() + { return Err(e); } match self .socket .run_command( + self.ssh_shell_kind, "wget", &[ "--header=Content-Type: application/json", @@ -681,7 +707,12 @@ impl SshRemoteConnection { { Ok(_) => {} Err(e) => { - if self.socket.run_command("which", &["wget"]).await.is_ok() { + if self + .socket + .run_command(self.ssh_shell_kind, "which", &["wget"]) + .await + .is_ok() + { return Err(e); } else { anyhow::bail!("Neither curl nor wget is available"); @@ -703,7 +734,11 @@ impl SshRemoteConnection { ) -> Result<()> { if let Some(parent) = tmp_path_gz.parent() { self.socket - .run_command("mkdir", &["-p", parent.display(self.path_style()).as_ref()]) + .run_command( + self.ssh_shell_kind, + "mkdir", + &["-p", parent.display(self.path_style()).as_ref()], + ) .await?; } @@ -750,7 +785,7 @@ impl SshRemoteConnection { format!("chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",) }; let args = shell_kind.args_for_shell(false, script.to_string()); - self.socket.run_command("sh", &args).await?; + self.socket.run_command(shell_kind, "sh", &args).await?; Ok(()) } @@ -894,11 +929,16 @@ impl SshSocket { // Furthermore, some setups (e.g. Coder) will change directory when SSH'ing // into a machine. You must use `cd` to get back to $HOME. 
// You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'" - fn ssh_command(&self, program: &str, args: &[impl AsRef]) -> process::Command { - let shell_kind = ShellKind::Posix; + fn ssh_command( + &self, + shell_kind: ShellKind, + program: &str, + args: &[impl AsRef], + ) -> process::Command { let mut command = util::command::new_smol_command("ssh"); + let program = shell_kind.prepend_command_prefix(program); let mut to_run = shell_kind - .try_quote(program) + .try_quote_prefix_aware(&program) .expect("shell quoting") .into_owned(); for arg in args { @@ -920,8 +960,13 @@ impl SshSocket { command } - async fn run_command(&self, program: &str, args: &[impl AsRef]) -> Result { - let output = self.ssh_command(program, args).output().await?; + async fn run_command( + &self, + shell_kind: ShellKind, + program: &str, + args: &[impl AsRef], + ) -> Result { + let output = self.ssh_command(shell_kind, program, args).output().await?; anyhow::ensure!( output.status.success(), "failed to run command: {}", @@ -994,12 +1039,7 @@ impl SshSocket { } async fn platform(&self, shell: ShellKind) -> Result { - let program = if shell == ShellKind::Nushell { - "^uname" - } else { - "uname" - }; - let uname = self.run_command(program, &["-sm"]).await?; + let uname = self.run_command(shell, "uname", &["-sm"]).await?; let Some((os, arch)) = uname.split_once(" ") else { anyhow::bail!("unknown uname: {uname:?}") }; @@ -1030,7 +1070,10 @@ impl SshSocket { } async fn shell(&self) -> String { - match self.run_command("sh", &["-c", "echo $SHELL"]).await { + match self + .run_command(ShellKind::Posix, "sh", &["-c", "echo $SHELL"]) + .await + { Ok(shell) => shell.trim().to_owned(), Err(e) => { log::error!("Failed to get shell: {e}"); @@ -1256,11 +1299,11 @@ fn build_command( ssh_env: HashMap, ssh_path_style: PathStyle, ssh_shell: &str, + ssh_shell_kind: ShellKind, ssh_args: Vec, ) -> Result { use std::fmt::Write as _; - let shell_kind = ShellKind::new(ssh_shell, false); let mut exec = 
String::new(); if let Some(working_dir) = working_dir { let working_dir = RemotePathBuf::new(working_dir, ssh_path_style).to_string(); @@ -1270,12 +1313,24 @@ fn build_command( const TILDE_PREFIX: &'static str = "~/"; if working_dir.starts_with(TILDE_PREFIX) { let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/"); - write!(exec, "cd \"$HOME/{working_dir}\" && ",)?; + write!( + exec, + "cd \"$HOME/{working_dir}\" {} ", + ssh_shell_kind.sequential_and_commands_separator() + )?; } else { - write!(exec, "cd \"{working_dir}\" && ",)?; + write!( + exec, + "cd \"{working_dir}\" {} ", + ssh_shell_kind.sequential_and_commands_separator() + )?; } } else { - write!(exec, "cd && ")?; + write!( + exec, + "cd {} ", + ssh_shell_kind.sequential_and_commands_separator() + )?; }; write!(exec, "exec env ")?; @@ -1284,7 +1339,7 @@ fn build_command( exec, "{}={} ", k, - shell_kind.try_quote(v).context("shell quoting")? + ssh_shell_kind.try_quote(v).context("shell quoting")? )?; } @@ -1292,12 +1347,12 @@ fn build_command( write!( exec, "{}", - shell_kind - .try_quote(&input_program) + ssh_shell_kind + .try_quote_prefix_aware(&input_program) .context("shell quoting")? 
)?; for arg in input_args { - let arg = shell_kind.try_quote(&arg).context("shell quoting")?; + let arg = ssh_shell_kind.try_quote(&arg).context("shell quoting")?; write!(exec, " {}", &arg)?; } } else { @@ -1341,6 +1396,7 @@ mod tests { env.clone(), PathStyle::Posix, "/bin/fish", + ShellKind::Fish, vec!["-p".to_string(), "2222".to_string()], )?; @@ -1370,6 +1426,7 @@ mod tests { env.clone(), PathStyle::Posix, "/bin/fish", + ShellKind::Fish, vec!["-p".to_string(), "2222".to_string()], )?; diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index e6827347914cc35e266080dab7c83fd182e16a64..1bfa5e640d991f939456418750b633d87cbde3f6 100644 --- a/crates/remote/src/transport/wsl.rs +++ b/crates/remote/src/transport/wsl.rs @@ -44,6 +44,7 @@ pub(crate) struct WslRemoteConnection { remote_binary_path: Option>, platform: RemotePlatform, shell: String, + shell_kind: ShellKind, default_system_shell: String, connection_options: WslConnectionOptions, can_exec: bool, @@ -73,16 +74,17 @@ impl WslRemoteConnection { remote_binary_path: None, platform: RemotePlatform { os: "", arch: "" }, shell: String::new(), + shell_kind: ShellKind::Posix, default_system_shell: String::from("/bin/sh"), can_exec: true, }; delegate.set_status(Some("Detecting WSL environment"), cx); this.shell = this.detect_shell().await?; - let shell = ShellKind::new(&this.shell, false); - this.can_exec = this.detect_can_exec(shell).await?; - this.platform = this.detect_platform(shell).await?; + this.shell_kind = ShellKind::new(&this.shell, false); + this.can_exec = this.detect_can_exec().await?; + this.platform = this.detect_platform().await?; this.remote_binary_path = Some( - this.ensure_server_binary(&delegate, release_channel, version, commit, shell, cx) + this.ensure_server_binary(&delegate, release_channel, version, commit, cx) .await?, ); log::debug!("Detected WSL environment: {this:#?}"); @@ -90,20 +92,16 @@ impl WslRemoteConnection { Ok(this) } - async fn detect_can_exec(&self, 
shell: ShellKind) -> Result { + async fn detect_can_exec(&self) -> Result { let options = &self.connection_options; - let program = if shell == ShellKind::Nushell { - "^uname" - } else { - "uname" - }; + let program = self.shell_kind.prepend_command_prefix("uname"); let args = &["-m"]; - let output = wsl_command_impl(options, program, args, true) + let output = wsl_command_impl(options, &program, args, true) .output() .await?; if !output.status.success() { - let output = wsl_command_impl(options, program, args, false) + let output = wsl_command_impl(options, &program, args, false) .output() .await?; @@ -120,14 +118,9 @@ impl WslRemoteConnection { Ok(true) } } - async fn detect_platform(&self, shell: ShellKind) -> Result { - let arch_str = if shell == ShellKind::Nushell { - // https://github.com/nushell/nushell/issues/12570 - self.run_wsl_command("sh", &["-c", "uname -m"]) - } else { - self.run_wsl_command("uname", &["-m"]) - } - .await?; + async fn detect_platform(&self) -> Result { + let program = self.shell_kind.prepend_command_prefix("uname"); + let arch_str = self.run_wsl_command(&program, &["-m"]).await?; let arch_str = arch_str.trim().to_string(); let arch = match arch_str.as_str() { "x86_64" => "x86_64", @@ -163,7 +156,6 @@ impl WslRemoteConnection { release_channel: ReleaseChannel, version: SemanticVersion, commit: Option, - shell: ShellKind, cx: &mut AsyncApp, ) -> Result> { let version_str = match release_channel { @@ -186,12 +178,9 @@ impl WslRemoteConnection { if let Some(parent) = dst_path.parent() { let parent = parent.display(PathStyle::Posix); - if shell == ShellKind::Nushell { - self.run_wsl_command("mkdir", &[&parent]).await - } else { - self.run_wsl_command("mkdir", &["-p", &parent]).await - } - .map_err(|e| anyhow!("Failed to create directory: {}", e))?; + self.run_wsl_command("mkdir", &["-p", &parent]) + .await + .map_err(|e| anyhow!("Failed to create directory: {}", e))?; } #[cfg(debug_assertions)] @@ -206,7 +195,7 @@ impl WslRemoteConnection 
{ )) .unwrap(), ); - self.upload_file(&remote_server_path, &tmp_path, delegate, &shell, cx) + self.upload_file(&remote_server_path, &tmp_path, delegate, cx) .await?; self.extract_and_install(&tmp_path, &dst_path, delegate, cx) .await?; @@ -239,8 +228,7 @@ impl WslRemoteConnection { ); let tmp_path = RelPath::unix(&tmp_path).unwrap(); - self.upload_file(&src_path, &tmp_path, delegate, &shell, cx) - .await?; + self.upload_file(&src_path, &tmp_path, delegate, cx).await?; self.extract_and_install(&tmp_path, &dst_path, delegate, cx) .await?; @@ -252,19 +240,15 @@ impl WslRemoteConnection { src_path: &Path, dst_path: &RelPath, delegate: &Arc, - shell: &ShellKind, cx: &mut AsyncApp, ) -> Result<()> { delegate.set_status(Some("Uploading remote server to WSL"), cx); if let Some(parent) = dst_path.parent() { let parent = parent.display(PathStyle::Posix); - if *shell == ShellKind::Nushell { - self.run_wsl_command("mkdir", &[&parent]).await - } else { - self.run_wsl_command("mkdir", &["-p", &parent]).await - } - .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?; + self.run_wsl_command("mkdir", &["-p", &parent]) + .await + .map_err(|e| anyhow!("Failed to create directory when uploading file: {}", e))?; } let t0 = Instant::now(); @@ -441,7 +425,7 @@ impl RemoteConnection for WslRemoteConnection { bail!("WSL shares the network interface with the host system"); } - let shell_kind = ShellKind::new(&self.shell, false); + let shell_kind = self.shell_kind; let working_dir = working_dir .map(|working_dir| RemotePathBuf::new(working_dir, PathStyle::Posix).to_string()) .unwrap_or("~".to_string()); @@ -461,7 +445,9 @@ impl RemoteConnection for WslRemoteConnection { write!( exec, "{}", - shell_kind.try_quote(&program).context("shell quoting")? + shell_kind + .try_quote_prefix_aware(&program) + .context("shell quoting")? 
)?; for arg in args { let arg = shell_kind.try_quote(&arg).context("shell quoting")?; diff --git a/crates/util/src/shell.rs b/crates/util/src/shell.rs index 7ab214d5105fb81c930954a1aaf9c4aa6fb865c5..e2da1c394b7d151a9ac4c7059c7d4f25e0d5fea5 100644 --- a/crates/util/src/shell.rs +++ b/crates/util/src/shell.rs @@ -408,6 +408,15 @@ impl ShellKind { } } + pub fn prepend_command_prefix<'a>(&self, command: &'a str) -> Cow<'a, str> { + match self.command_prefix() { + Some(prefix) if !command.starts_with(prefix) => { + Cow::Owned(format!("{prefix}{command}")) + } + _ => Cow::Borrowed(command), + } + } + pub const fn sequential_commands_separator(&self) -> char { match self { ShellKind::Cmd => '&', @@ -422,6 +431,20 @@ impl ShellKind { } } + pub const fn sequential_and_commands_separator(&self) -> &'static str { + match self { + ShellKind::Cmd + | ShellKind::Posix + | ShellKind::Csh + | ShellKind::Tcsh + | ShellKind::Rc + | ShellKind::Fish + | ShellKind::PowerShell + | ShellKind::Xonsh => "&&", + ShellKind::Nushell => ";", + } + } + pub fn try_quote<'a>(&self, arg: &'a str) -> Option> { shlex::try_quote(arg).ok().map(|arg| match self { // If we are running in PowerShell, we want to take extra care when escaping strings. @@ -438,6 +461,42 @@ impl ShellKind { }) } + /// Quotes the given argument if necessary, taking into account the command prefix. + /// + /// In other words, this will consider quoting arg without its command prefix to not break the command. + /// You should use this over `try_quote` when you want to quote a shell command. 
+ pub fn try_quote_prefix_aware<'a>(&self, arg: &'a str) -> Option> { + if let Some(char) = self.command_prefix() { + if let Some(arg) = arg.strip_prefix(char) { + // we have a command that is prefixed + for quote in ['\'', '"'] { + if let Some(arg) = arg + .strip_prefix(quote) + .and_then(|arg| arg.strip_suffix(quote)) + { + // and the command itself is wrapped as a literal, that + // means the prefix exists to interpret a literal as a + // command. So strip the quotes, quote the command, and + // re-add the quotes if they are missing after requoting + let quoted = self.try_quote(arg)?; + return Some(if quoted.starts_with(['\'', '"']) { + Cow::Owned(self.prepend_command_prefix("ed).into_owned()) + } else { + Cow::Owned( + self.prepend_command_prefix(&format!("{quote}{quoted}{quote}")) + .into_owned(), + ) + }); + } + } + return self + .try_quote(arg) + .map(|quoted| Cow::Owned(self.prepend_command_prefix("ed).into_owned())); + } + } + self.try_quote(arg) + } + pub fn split(&self, input: &str) -> Option> { shlex::split(input) } @@ -525,4 +584,75 @@ mod tests { "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"".to_string() ); } + + #[test] + fn test_try_quote_nu_command() { + let shell_kind = ShellKind::Nushell; + assert_eq!( + shell_kind.try_quote("'uname'").unwrap().into_owned(), + "\"'uname'\"".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("'uname'") + .unwrap() + .into_owned(), + "\"'uname'\"".to_string() + ); + assert_eq!( + shell_kind.try_quote("^uname").unwrap().into_owned(), + "'^uname'".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("^uname") + .unwrap() + .into_owned(), + "^uname".to_string() + ); + assert_eq!( + shell_kind.try_quote("^'uname'").unwrap().into_owned(), + "'^'\"'uname\'\"".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("^'uname'") + .unwrap() + .into_owned(), + "^'uname'".to_string() + ); + assert_eq!( + 
shell_kind.try_quote("'uname a'").unwrap().into_owned(), + "\"'uname a'\"".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("'uname a'") + .unwrap() + .into_owned(), + "\"'uname a'\"".to_string() + ); + assert_eq!( + shell_kind.try_quote("^'uname a'").unwrap().into_owned(), + "'^'\"'uname a'\"".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("^'uname a'") + .unwrap() + .into_owned(), + "^'uname a'".to_string() + ); + assert_eq!( + shell_kind.try_quote("uname").unwrap().into_owned(), + "uname".to_string() + ); + assert_eq!( + shell_kind + .try_quote_prefix_aware("uname") + .unwrap() + .into_owned(), + "uname".to_string() + ); + } } From b625263989201a1eab65bf08dd73ffea121600af Mon Sep 17 00:00:00 2001 From: kallyaleksiev <67751757+kallyaleksiev@users.noreply.github.com> Date: Mon, 3 Nov 2025 11:13:20 +0000 Subject: [PATCH 79/82] remote: Close window when SSH connection fails (#41782) ## Context This PR closes issue https://github.com/zed-industries/zed/issues/41781 It essentially `matches` the result of opening the connection here https://github.com/zed-industries/zed/blob/f7153bbe8a869d0e2b25efed64fd7e4217899b63/crates/recent_projects/src/remote_connections.rs#L650 and adds a Close / Retry alert that upon 'Close' closes the new window if the result is an error --- .../recent_projects/src/remote_connections.rs | 50 ++++++++++++++++++- 1 file changed, 48 insertions(+), 2 deletions(-) diff --git a/crates/recent_projects/src/remote_connections.rs b/crates/recent_projects/src/remote_connections.rs index c371b27ce1dcfe665d96f548bca2c893559005ec..7c8557f9dac2131a84c54cc60657e105d2839658 100644 --- a/crates/recent_projects/src/remote_connections.rs +++ b/crates/recent_projects/src/remote_connections.rs @@ -574,6 +574,7 @@ pub async fn open_remote_project( open_options: workspace::OpenOptions, cx: &mut AsyncApp, ) -> Result<()> { + let created_new_window = open_options.replace_window.is_none(); let window = if let Some(window) = 
open_options.replace_window { window } else { @@ -648,7 +649,45 @@ pub async fn open_remote_project( let Some(delegate) = delegate else { break }; let remote_connection = - remote::connect(connection_options.clone(), delegate.clone(), cx).await?; + match remote::connect(connection_options.clone(), delegate.clone(), cx).await { + Ok(connection) => connection, + Err(e) => { + window + .update(cx, |workspace, _, cx| { + if let Some(ui) = workspace.active_modal::(cx) { + ui.update(cx, |modal, cx| modal.finished(cx)) + } + }) + .ok(); + log::error!("Failed to open project: {e:?}"); + let response = window + .update(cx, |_, window, cx| { + window.prompt( + PromptLevel::Critical, + match connection_options { + RemoteConnectionOptions::Ssh(_) => "Failed to connect over SSH", + RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL", + }, + Some(&e.to_string()), + &["Retry", "Cancel"], + cx, + ) + })? + .await; + + if response == Ok(0) { + continue; + } + + if created_new_window { + window + .update(cx, |_, window, _| window.remove_window()) + .ok(); + } + break; + } + }; + let (paths, paths_with_positions) = determine_paths_with_positions(&remote_connection, paths.clone()).await; @@ -686,7 +725,7 @@ pub async fn open_remote_project( RemoteConnectionOptions::Wsl(_) => "Failed to connect to WSL", }, Some(&e.to_string()), - &["Retry", "Ok"], + &["Retry", "Cancel"], cx, ) })? 
@@ -694,7 +733,14 @@ pub async fn open_remote_project( if response == Ok(0) { continue; } + + if created_new_window { + window + .update(cx, |_, window, _| window.remove_window()) + .ok(); + } } + Ok(items) => { for (item, path) in items.into_iter().zip(paths_with_positions) { let Some(item) = item else { From 50504793e6562b239dcd9399bc668445fae55f29 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 3 Nov 2025 13:07:13 +0100 Subject: [PATCH 80/82] file_finder: Fix highlighting panic in open path prompt (#41808) Closes https://github.com/zed-industries/zed/issues/41249 Couldn't quite come up with a test case here but verified it works. Release Notes: - Fixed a panic in file finder when deleting characters --- crates/file_finder/src/open_path_prompt.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index 694ef1eaceb720c3b63d4ca9d243ab73e9442970..f29c0e6cd20f423dd9073abced0182f272b588c9 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -711,7 +711,9 @@ impl PickerDelegate for OpenPathDelegate { match &self.directory_state { DirectoryState::List { parent_path, .. 
} => { - let (label, indices) = if *parent_path == self.prompt_root { + let (label, indices) = if is_current_dir_candidate { + ("open this directory".to_string(), vec![]) + } else if *parent_path == self.prompt_root { match_positions.iter_mut().for_each(|position| { *position += self.prompt_root.len(); }); @@ -719,8 +721,6 @@ impl PickerDelegate for OpenPathDelegate { format!("{}{}", self.prompt_root, candidate.path.string), match_positions, ) - } else if is_current_dir_candidate { - ("open this directory".to_string(), vec![]) } else { (candidate.path.string, match_positions) }; From f8b414c22cdb52256a89e97f917fd387e657ba92 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 3 Nov 2025 13:28:32 +0100 Subject: [PATCH 81/82] zed: Reduce number of rayon threads, spawn with bigger stacks (#41812) We already do this for the cli and remote server but forgot to do so for the main binary Release Notes: - N/A *or* Added/Fixed/Improved ... --- Cargo.lock | 1 + crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 7 +++++++ 3 files changed, 9 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 1ee239c3740bb0fba40f81b9cdae77a46d62d535..947d90338afea01f0096585c2b1fb74ac907bbf1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21233,6 +21233,7 @@ dependencies = [ "project_symbols", "prompt_store", "proto", + "rayon", "recent_projects", "release_channel", "remote", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 9f6196c1482bcff2db9b7812dfb75b1471fec273..ebb3d8beb321cb6ee42cec84ddf7f456672a0265 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -73,6 +73,7 @@ gpui = { workspace = true, features = [ "windows-manifest", ] } gpui_tokio.workspace = true +rayon.workspace = true edit_prediction_button.workspace = true http_client.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index b873a58d3b61338b25c5908c2f87b62acb95d6f6..18903d888090bbc1fa0955d46417486a3f9fe13b 100644 --- a/crates/zed/src/main.rs +++ 
b/crates/zed/src/main.rs @@ -257,6 +257,13 @@ pub fn main() { return; } + rayon::ThreadPoolBuilder::new() + .num_threads(4) + .stack_size(10 * 1024 * 1024) + .thread_name(|ix| format!("RayonWorker{}", ix)) + .build_global() + .unwrap(); + log::info!( "========== starting zed version {}, sha {} ==========", app_version, From 28f30fc851549a535a670ee980890ba766b51f47 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 3 Nov 2025 15:54:53 +0200 Subject: [PATCH 82/82] Fix racy inlay hints queries (#41816) Follow-up of https://github.com/zed-industries/zed/pull/40183 Release Notes: - (Preview only) Fixed inlay hints duplicating when multiple editors are open for the same buffer --------- Co-authored-by: Lukas Wirth --- crates/collab/src/tests/editor_tests.rs | 24 +-- crates/editor/src/editor.rs | 10 +- crates/editor/src/inlays/inlay_hints.rs | 198 ++++++++--------- crates/project/src/lsp_store.rs | 201 ++++++++++-------- .../project/src/lsp_store/inlay_hint_cache.rs | 54 ++++- crates/project/src/project.rs | 15 +- crates/project/src/project_tests.rs | 8 - crates/proto/proto/lsp.proto | 1 + crates/search/src/project_search.rs | 165 ++++++++++++-- 9 files changed, 447 insertions(+), 229 deletions(-) diff --git a/crates/collab/src/tests/editor_tests.rs b/crates/collab/src/tests/editor_tests.rs index 73fdd8da8890d62f7da39f944edfe333d2c983aa..bdc024aaca7242ab0fe261e3b673bf4d0efe23b1 100644 --- a/crates/collab/src/tests/editor_tests.rs +++ b/crates/collab/src/tests/editor_tests.rs @@ -39,6 +39,7 @@ use std::{ Arc, atomic::{self, AtomicBool, AtomicUsize}, }, + time::Duration, }; use text::Point; use util::{path, rel_path::rel_path, uri}; @@ -1817,14 +1818,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( settings.project.all_languages.defaults.inlay_hints = Some(InlayHintSettingsContent { enabled: Some(true), - show_value_hints: Some(true), - edit_debounce_ms: Some(0), - scroll_debounce_ms: Some(0), - show_type_hints: Some(true), - show_parameter_hints: 
Some(false), - show_other_hints: Some(true), - show_background: Some(false), - toggle_on_modifiers_press: None, + ..InlayHintSettingsContent::default() }) }); }); @@ -1834,15 +1828,8 @@ async fn test_mutual_editor_inlay_hint_cache_update( store.update_user_settings(cx, |settings| { settings.project.all_languages.defaults.inlay_hints = Some(InlayHintSettingsContent { - show_value_hints: Some(true), enabled: Some(true), - edit_debounce_ms: Some(0), - scroll_debounce_ms: Some(0), - show_type_hints: Some(true), - show_parameter_hints: Some(false), - show_other_hints: Some(true), - show_background: Some(false), - toggle_on_modifiers_press: None, + ..InlayHintSettingsContent::default() }) }); }); @@ -1935,6 +1922,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( }); let fake_language_server = fake_language_servers.next().await.unwrap(); let editor_a = file_a.await.unwrap().downcast::().unwrap(); + executor.advance_clock(Duration::from_millis(100)); executor.run_until_parked(); let initial_edit = edits_made.load(atomic::Ordering::Acquire); @@ -1955,6 +1943,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( .downcast::() .unwrap(); + executor.advance_clock(Duration::from_millis(100)); executor.run_until_parked(); editor_b.update(cx_b, |editor, cx| { assert_eq!( @@ -1973,6 +1962,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( }); cx_b.focus(&editor_b); + executor.advance_clock(Duration::from_secs(1)); executor.run_until_parked(); editor_a.update(cx_a, |editor, cx| { assert_eq!( @@ -1996,6 +1986,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( }); cx_a.focus(&editor_a); + executor.advance_clock(Duration::from_secs(1)); executor.run_until_parked(); editor_a.update(cx_a, |editor, cx| { assert_eq!( @@ -2017,6 +2008,7 @@ async fn test_mutual_editor_inlay_hint_cache_update( .into_response() .expect("inlay refresh request failed"); + executor.advance_clock(Duration::from_secs(1)); executor.run_until_parked(); editor_a.update(cx_a, |editor, cx| { 
assert_eq!( diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ed6b8ec2eca4dcb558bc832ac56b92af8791712c..d6779d8a328b6bd06b91f945e0e5ae7d7b4bd12b 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1832,9 +1832,15 @@ impl Editor { project::Event::RefreshCodeLens => { // we always query lens with actions, without storing them, always refreshing them } - project::Event::RefreshInlayHints(server_id) => { + project::Event::RefreshInlayHints { + server_id, + request_id, + } => { editor.refresh_inlay_hints( - InlayHintRefreshReason::RefreshRequested(*server_id), + InlayHintRefreshReason::RefreshRequested { + server_id: *server_id, + request_id: *request_id, + }, cx, ); } diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 74fe9988763b976f315624b8e1ab36110e2137ee..4fd673a8d2f049da94f90adfcdfbd8cd3263d12d 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -1,5 +1,4 @@ use std::{ - collections::hash_map, ops::{ControlFlow, Range}, time::Duration, }; @@ -49,8 +48,8 @@ pub struct LspInlayHintData { allowed_hint_kinds: HashSet>, invalidate_debounce: Option, append_debounce: Option, - hint_refresh_tasks: HashMap>, Vec>>>, - hint_chunk_fetched: HashMap>)>, + hint_refresh_tasks: HashMap>>, + hint_chunk_fetching: HashMap>)>, invalidate_hints_for_buffers: HashSet, pub added_hints: HashMap>, } @@ -63,7 +62,7 @@ impl LspInlayHintData { enabled_in_settings: settings.enabled, hint_refresh_tasks: HashMap::default(), added_hints: HashMap::default(), - hint_chunk_fetched: HashMap::default(), + hint_chunk_fetching: HashMap::default(), invalidate_hints_for_buffers: HashSet::default(), invalidate_debounce: debounce_value(settings.edit_debounce_ms), append_debounce: debounce_value(settings.scroll_debounce_ms), @@ -99,9 +98,8 @@ impl LspInlayHintData { pub fn clear(&mut self) { self.hint_refresh_tasks.clear(); - 
self.hint_chunk_fetched.clear(); + self.hint_chunk_fetching.clear(); self.added_hints.clear(); - self.invalidate_hints_for_buffers.clear(); } /// Checks inlay hint settings for enabled hint kinds and general enabled state. @@ -199,7 +197,7 @@ impl LspInlayHintData { ) { for buffer_id in removed_buffer_ids { self.hint_refresh_tasks.remove(buffer_id); - self.hint_chunk_fetched.remove(buffer_id); + self.hint_chunk_fetching.remove(buffer_id); } } } @@ -211,7 +209,10 @@ pub enum InlayHintRefreshReason { SettingsChange(InlayHintSettings), NewLinesShown, BufferEdited(BufferId), - RefreshRequested(LanguageServerId), + RefreshRequested { + server_id: LanguageServerId, + request_id: Option, + }, ExcerptsRemoved(Vec), } @@ -296,7 +297,7 @@ impl Editor { | InlayHintRefreshReason::Toggle(_) | InlayHintRefreshReason::SettingsChange(_) => true, InlayHintRefreshReason::NewLinesShown - | InlayHintRefreshReason::RefreshRequested(_) + | InlayHintRefreshReason::RefreshRequested { .. } | InlayHintRefreshReason::ExcerptsRemoved(_) => false, InlayHintRefreshReason::BufferEdited(buffer_id) => { let Some(affected_language) = self @@ -370,48 +371,45 @@ impl Editor { let Some(buffer) = multi_buffer.read(cx).buffer(buffer_id) else { continue; }; - let fetched_tasks = inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default(); + + let (fetched_for_version, fetched_chunks) = inlay_hints + .hint_chunk_fetching + .entry(buffer_id) + .or_default(); if visible_excerpts .buffer_version - .changed_since(&fetched_tasks.0) + .changed_since(fetched_for_version) { - fetched_tasks.1.clear(); - fetched_tasks.0 = visible_excerpts.buffer_version.clone(); + *fetched_for_version = visible_excerpts.buffer_version.clone(); + fetched_chunks.clear(); inlay_hints.hint_refresh_tasks.remove(&buffer_id); } - let applicable_chunks = - semantics_provider.applicable_inlay_chunks(&buffer, &visible_excerpts.ranges, cx); + let known_chunks = if ignore_previous_fetches { + None + } else { + 
Some((fetched_for_version.clone(), fetched_chunks.clone())) + }; - match inlay_hints + let mut applicable_chunks = + semantics_provider.applicable_inlay_chunks(&buffer, &visible_excerpts.ranges, cx); + applicable_chunks.retain(|chunk| fetched_chunks.insert(chunk.clone())); + if applicable_chunks.is_empty() && !ignore_previous_fetches { + continue; + } + inlay_hints .hint_refresh_tasks .entry(buffer_id) .or_default() - .entry(applicable_chunks) - { - hash_map::Entry::Occupied(mut o) => { - if invalidate_cache.should_invalidate() || ignore_previous_fetches { - o.get_mut().push(spawn_editor_hints_refresh( - buffer_id, - invalidate_cache, - ignore_previous_fetches, - debounce, - visible_excerpts, - cx, - )); - } - } - hash_map::Entry::Vacant(v) => { - v.insert(Vec::new()).push(spawn_editor_hints_refresh( - buffer_id, - invalidate_cache, - ignore_previous_fetches, - debounce, - visible_excerpts, - cx, - )); - } - } + .push(spawn_editor_hints_refresh( + buffer_id, + invalidate_cache, + debounce, + visible_excerpts, + known_chunks, + applicable_chunks, + cx, + )); } } @@ -506,9 +504,13 @@ impl Editor { } InlayHintRefreshReason::NewLinesShown => InvalidationStrategy::None, InlayHintRefreshReason::BufferEdited(_) => InvalidationStrategy::BufferEdited, - InlayHintRefreshReason::RefreshRequested(server_id) => { - InvalidationStrategy::RefreshRequested(*server_id) - } + InlayHintRefreshReason::RefreshRequested { + server_id, + request_id, + } => InvalidationStrategy::RefreshRequested { + server_id: *server_id, + request_id: *request_id, + }, }; match &mut self.inlay_hints { @@ -718,44 +720,29 @@ impl Editor { fn inlay_hints_for_buffer( &mut self, invalidate_cache: InvalidationStrategy, - ignore_previous_fetches: bool, buffer_excerpts: VisibleExcerpts, + known_chunks: Option<(Global, HashSet>)>, cx: &mut Context, ) -> Option, anyhow::Result)>>> { let semantics_provider = self.semantics_provider()?; - let inlay_hints = self.inlay_hints.as_mut()?; - let buffer_id = 
buffer_excerpts.buffer.read(cx).remote_id(); let new_hint_tasks = semantics_provider .inlay_hints( invalidate_cache, buffer_excerpts.buffer, buffer_excerpts.ranges, - inlay_hints - .hint_chunk_fetched - .get(&buffer_id) - .filter(|_| !ignore_previous_fetches && !invalidate_cache.should_invalidate()) - .cloned(), + known_chunks, cx, ) .unwrap_or_default(); - let (known_version, known_chunks) = - inlay_hints.hint_chunk_fetched.entry(buffer_id).or_default(); - if buffer_excerpts.buffer_version.changed_since(known_version) { - known_chunks.clear(); - *known_version = buffer_excerpts.buffer_version; - } - - let mut hint_tasks = Vec::new(); + let mut hint_tasks = None; for (row_range, new_hints_task) in new_hint_tasks { - let inserted = known_chunks.insert(row_range.clone()); - if inserted || ignore_previous_fetches || invalidate_cache.should_invalidate() { - hint_tasks.push(cx.spawn(async move |_, _| (row_range, new_hints_task.await))); - } + hint_tasks + .get_or_insert_with(Vec::new) + .push(cx.spawn(async move |_, _| (row_range, new_hints_task.await))); } - - Some(hint_tasks) + hint_tasks } fn apply_fetched_hints( @@ -793,20 +780,28 @@ impl Editor { let excerpts = self.buffer.read(cx).excerpt_ids(); let hints_to_insert = new_hints .into_iter() - .filter_map(|(chunk_range, hints_result)| match hints_result { - Ok(new_hints) => Some(new_hints), - Err(e) => { - log::error!( - "Failed to query inlays for buffer row range {chunk_range:?}, {e:#}" - ); - if let Some((for_version, chunks_fetched)) = - inlay_hints.hint_chunk_fetched.get_mut(&buffer_id) - { - if for_version == &query_version { - chunks_fetched.remove(&chunk_range); + .filter_map(|(chunk_range, hints_result)| { + let chunks_fetched = inlay_hints.hint_chunk_fetching.get_mut(&buffer_id); + match hints_result { + Ok(new_hints) => { + if new_hints.is_empty() { + if let Some((_, chunks_fetched)) = chunks_fetched { + chunks_fetched.remove(&chunk_range); + } } + Some(new_hints) + } + Err(e) => { + log::error!( + 
"Failed to query inlays for buffer row range {chunk_range:?}, {e:#}" + ); + if let Some((for_version, chunks_fetched)) = chunks_fetched { + if for_version == &query_version { + chunks_fetched.remove(&chunk_range); + } + } + None } - None } }) .flat_map(|hints| hints.into_values()) @@ -856,9 +851,10 @@ struct VisibleExcerpts { fn spawn_editor_hints_refresh( buffer_id: BufferId, invalidate_cache: InvalidationStrategy, - ignore_previous_fetches: bool, debounce: Option, buffer_excerpts: VisibleExcerpts, + known_chunks: Option<(Global, HashSet>)>, + applicable_chunks: Vec>, cx: &mut Context<'_, Editor>, ) -> Task<()> { cx.spawn(async move |editor, cx| { @@ -869,12 +865,7 @@ fn spawn_editor_hints_refresh( let query_version = buffer_excerpts.buffer_version.clone(); let Some(hint_tasks) = editor .update(cx, |editor, cx| { - editor.inlay_hints_for_buffer( - invalidate_cache, - ignore_previous_fetches, - buffer_excerpts, - cx, - ) + editor.inlay_hints_for_buffer(invalidate_cache, buffer_excerpts, known_chunks, cx) }) .ok() else { @@ -882,6 +873,19 @@ fn spawn_editor_hints_refresh( }; let hint_tasks = hint_tasks.unwrap_or_default(); if hint_tasks.is_empty() { + editor + .update(cx, |editor, _| { + if let Some((_, hint_chunk_fetching)) = editor + .inlay_hints + .as_mut() + .and_then(|inlay_hints| inlay_hints.hint_chunk_fetching.get_mut(&buffer_id)) + { + for applicable_chunks in &applicable_chunks { + hint_chunk_fetching.remove(applicable_chunks); + } + } + }) + .ok(); return; } let new_hints = join_all(hint_tasks).await; @@ -1102,7 +1106,10 @@ pub mod tests { editor .update(cx, |editor, _window, cx| { editor.refresh_inlay_hints( - InlayHintRefreshReason::RefreshRequested(fake_server.server.server_id()), + InlayHintRefreshReason::RefreshRequested { + server_id: fake_server.server.server_id(), + request_id: Some(1), + }, cx, ); }) @@ -1958,15 +1965,8 @@ pub mod tests { async fn test_large_buffer_inlay_requests_split(cx: &mut gpui::TestAppContext) { init_test(cx, |settings| { 
settings.defaults.inlay_hints = Some(InlayHintSettingsContent { - show_value_hints: Some(true), enabled: Some(true), - edit_debounce_ms: Some(0), - scroll_debounce_ms: Some(0), - show_type_hints: Some(true), - show_parameter_hints: Some(true), - show_other_hints: Some(true), - show_background: Some(false), - toggle_on_modifiers_press: None, + ..InlayHintSettingsContent::default() }) }); @@ -2044,6 +2044,7 @@ pub mod tests { cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx)); cx.executor().run_until_parked(); let _fake_server = fake_servers.next().await.unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); let ranges = lsp_request_ranges @@ -2129,6 +2130,7 @@ pub mod tests { ); }) .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); editor.update(cx, |_, _, _| { let ranges = lsp_request_ranges @@ -2145,6 +2147,7 @@ pub mod tests { editor.handle_input("++++more text++++", window, cx); }) .unwrap(); + cx.executor().advance_clock(Duration::from_secs(1)); cx.executor().run_until_parked(); editor.update(cx, |editor, _window, cx| { let mut ranges = lsp_request_ranges.lock().drain(..).collect::>(); @@ -3887,7 +3890,10 @@ let c = 3;"# editor .update(cx, |editor, _, cx| { editor.refresh_inlay_hints( - InlayHintRefreshReason::RefreshRequested(fake_server.server.server_id()), + InlayHintRefreshReason::RefreshRequested { + server_id: fake_server.server.server_id(), + request_id: Some(1), + }, cx, ); }) @@ -4022,7 +4028,7 @@ let c = 3;"# let mut all_fetched_hints = Vec::new(); for buffer in editor.buffer.read(cx).all_buffers() { lsp_store.update(cx, |lsp_store, cx| { - let hints = &lsp_store.latest_lsp_data(&buffer, cx).inlay_hints(); + let hints = lsp_store.latest_lsp_data(&buffer, cx).inlay_hints(); all_cached_labels.extend(hints.all_cached_hints().into_iter().map(|hint| { let mut label = hint.text().to_string(); if hint.padding_left { diff --git 
a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 762070796f068fb01b19522b4a506eb693b9bd63..02c1aca737ee63e83334e172767539a256df7c90 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -853,23 +853,32 @@ impl LocalLspStore { language_server .on_request::({ let lsp_store = lsp_store.clone(); + let request_id = Arc::new(AtomicUsize::new(0)); move |(), cx| { - let this = lsp_store.clone(); + let lsp_store = lsp_store.clone(); + let request_id = request_id.clone(); let mut cx = cx.clone(); async move { - this.update(&mut cx, |lsp_store, cx| { - cx.emit(LspStoreEvent::RefreshInlayHints(server_id)); - lsp_store - .downstream_client - .as_ref() - .map(|(client, project_id)| { - client.send(proto::RefreshInlayHints { - project_id: *project_id, - server_id: server_id.to_proto(), + lsp_store + .update(&mut cx, |lsp_store, cx| { + let request_id = + Some(request_id.fetch_add(1, atomic::Ordering::AcqRel)); + cx.emit(LspStoreEvent::RefreshInlayHints { + server_id, + request_id, + }); + lsp_store + .downstream_client + .as_ref() + .map(|(client, project_id)| { + client.send(proto::RefreshInlayHints { + project_id: *project_id, + server_id: server_id.to_proto(), + request_id: request_id.map(|id| id as u64), + }) }) - }) - })? - .transpose()?; + })? 
+ .transpose()?; Ok(()) } } @@ -3659,7 +3668,10 @@ pub enum LspStoreEvent { new_language: Option>, }, Notification(String), - RefreshInlayHints(LanguageServerId), + RefreshInlayHints { + server_id: LanguageServerId, + request_id: Option, + }, RefreshCodeLens, DiagnosticsUpdated { server_id: LanguageServerId, @@ -6636,14 +6648,22 @@ impl LspStore { cx: &mut Context, ) -> HashMap, Task>> { let buffer_snapshot = buffer.read(cx).snapshot(); - let for_server = if let InvalidationStrategy::RefreshRequested(server_id) = invalidate { + let next_hint_id = self.next_hint_id.clone(); + let lsp_data = self.latest_lsp_data(&buffer, cx); + let mut lsp_refresh_requested = false; + let for_server = if let InvalidationStrategy::RefreshRequested { + server_id, + request_id, + } = invalidate + { + let invalidated = lsp_data + .inlay_hints + .invalidate_for_server_refresh(server_id, request_id); + lsp_refresh_requested = invalidated; Some(server_id) } else { None }; - let invalidate_cache = invalidate.should_invalidate(); - let next_hint_id = self.next_hint_id.clone(); - let lsp_data = self.latest_lsp_data(&buffer, cx); let existing_inlay_hints = &mut lsp_data.inlay_hints; let known_chunks = known_chunks .filter(|(known_version, _)| !lsp_data.buffer_version.changed_since(known_version)) @@ -6651,8 +6671,8 @@ impl LspStore { .unwrap_or_default(); let mut hint_fetch_tasks = Vec::new(); - let mut cached_inlay_hints = HashMap::default(); - let mut ranges_to_query = Vec::new(); + let mut cached_inlay_hints = None; + let mut ranges_to_query = None; let applicable_chunks = existing_inlay_hints .applicable_chunks(ranges.as_slice()) .filter(|chunk| !known_chunks.contains(&(chunk.start..chunk.end))) @@ -6667,12 +6687,12 @@ impl LspStore { match ( existing_inlay_hints .cached_hints(&row_chunk) - .filter(|_| !invalidate_cache) + .filter(|_| !lsp_refresh_requested) .cloned(), existing_inlay_hints .fetched_hints(&row_chunk) .as_ref() - .filter(|_| !invalidate_cache) + .filter(|_| 
!lsp_refresh_requested) .cloned(), ) { (None, None) => { @@ -6681,19 +6701,18 @@ impl LspStore { } else { Point::new(row_chunk.end, 0) }; - ranges_to_query.push(( + ranges_to_query.get_or_insert_with(Vec::new).push(( row_chunk, buffer_snapshot.anchor_before(Point::new(row_chunk.start, 0)) ..buffer_snapshot.anchor_after(end), )); } - (None, Some(fetched_hints)) => { - hint_fetch_tasks.push((row_chunk, fetched_hints.clone())) - } + (None, Some(fetched_hints)) => hint_fetch_tasks.push((row_chunk, fetched_hints)), (Some(cached_hints), None) => { for (server_id, cached_hints) in cached_hints { if for_server.is_none_or(|for_server| for_server == server_id) { cached_inlay_hints + .get_or_insert_with(HashMap::default) .entry(row_chunk.start..row_chunk.end) .or_insert_with(HashMap::default) .entry(server_id) @@ -6703,10 +6722,11 @@ impl LspStore { } } (Some(cached_hints), Some(fetched_hints)) => { - hint_fetch_tasks.push((row_chunk, fetched_hints.clone())); + hint_fetch_tasks.push((row_chunk, fetched_hints)); for (server_id, cached_hints) in cached_hints { if for_server.is_none_or(|for_server| for_server == server_id) { cached_inlay_hints + .get_or_insert_with(HashMap::default) .entry(row_chunk.start..row_chunk.end) .or_insert_with(HashMap::default) .entry(server_id) @@ -6718,18 +6738,18 @@ impl LspStore { } } - let cached_chunk_data = cached_inlay_hints - .into_iter() - .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints)))) - .collect(); - if hint_fetch_tasks.is_empty() && ranges_to_query.is_empty() { - cached_chunk_data + if hint_fetch_tasks.is_empty() + && ranges_to_query + .as_ref() + .is_none_or(|ranges| ranges.is_empty()) + && let Some(cached_inlay_hints) = cached_inlay_hints + { + cached_inlay_hints + .into_iter() + .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints)))) + .collect() } else { - if invalidate_cache { - lsp_data.inlay_hints.clear(); - } - - for (chunk, range_to_query) in ranges_to_query { + for (chunk, range_to_query) in 
ranges_to_query.into_iter().flatten() { let next_hint_id = next_hint_id.clone(); let buffer = buffer.clone(); let new_inlay_hints = cx @@ -6745,31 +6765,38 @@ impl LspStore { let update_cache = !lsp_data .buffer_version .changed_since(&buffer.read(cx).version()); - new_hints_by_server - .into_iter() - .map(|(server_id, new_hints)| { - let new_hints = new_hints - .into_iter() - .map(|new_hint| { - ( - InlayId::Hint(next_hint_id.fetch_add( - 1, - atomic::Ordering::AcqRel, - )), - new_hint, - ) - }) - .collect::>(); - if update_cache { - lsp_data.inlay_hints.insert_new_hints( - chunk, - server_id, - new_hints.clone(), - ); - } - (server_id, new_hints) - }) - .collect() + if new_hints_by_server.is_empty() { + if update_cache { + lsp_data.inlay_hints.invalidate_for_chunk(chunk); + } + HashMap::default() + } else { + new_hints_by_server + .into_iter() + .map(|(server_id, new_hints)| { + let new_hints = new_hints + .into_iter() + .map(|new_hint| { + ( + InlayId::Hint(next_hint_id.fetch_add( + 1, + atomic::Ordering::AcqRel, + )), + new_hint, + ) + }) + .collect::>(); + if update_cache { + lsp_data.inlay_hints.insert_new_hints( + chunk, + server_id, + new_hints.clone(), + ); + } + (server_id, new_hints) + }) + .collect() + } }) }) .map_err(Arc::new) @@ -6781,22 +6808,25 @@ impl LspStore { hint_fetch_tasks.push((chunk, new_inlay_hints)); } - let mut combined_data = cached_chunk_data; - combined_data.extend(hint_fetch_tasks.into_iter().map(|(chunk, hints_fetch)| { - ( - chunk.start..chunk.end, - cx.spawn(async move |_, _| { - hints_fetch.await.map_err(|e| { - if e.error_code() != ErrorCode::Internal { - anyhow!(e.error_code()) - } else { - anyhow!("{e:#}") - } - }) - }), - ) - })); - combined_data + cached_inlay_hints + .unwrap_or_default() + .into_iter() + .map(|(row_chunk, hints)| (row_chunk, Task::ready(Ok(hints)))) + .chain(hint_fetch_tasks.into_iter().map(|(chunk, hints_fetch)| { + ( + chunk.start..chunk.end, + cx.spawn(async move |_, _| { + hints_fetch.await.map_err(|e| 
{ + if e.error_code() != ErrorCode::Internal { + anyhow!(e.error_code()) + } else { + anyhow!("{e:#}") + } + }) + }), + ) + })) + .collect() } } @@ -9604,7 +9634,10 @@ impl LspStore { if let Some(work) = status.pending_work.remove(&token) && !work.is_disk_based_diagnostics_progress { - cx.emit(LspStoreEvent::RefreshInlayHints(language_server_id)); + cx.emit(LspStoreEvent::RefreshInlayHints { + server_id: language_server_id, + request_id: None, + }); } cx.notify(); } @@ -9743,9 +9776,10 @@ impl LspStore { mut cx: AsyncApp, ) -> Result { lsp_store.update(&mut cx, |_, cx| { - cx.emit(LspStoreEvent::RefreshInlayHints( - LanguageServerId::from_proto(envelope.payload.server_id), - )); + cx.emit(LspStoreEvent::RefreshInlayHints { + server_id: LanguageServerId::from_proto(envelope.payload.server_id), + request_id: envelope.payload.request_id.map(|id| id as usize), + }); })?; Ok(proto::Ack {}) } @@ -10972,7 +11006,6 @@ impl LspStore { language_server.name(), Some(key.worktree_id), )); - cx.emit(LspStoreEvent::RefreshInlayHints(server_id)); let server_capabilities = language_server.capabilities(); if let Some((downstream_client, project_id)) = self.downstream_client.as_ref() { diff --git a/crates/project/src/lsp_store/inlay_hint_cache.rs b/crates/project/src/lsp_store/inlay_hint_cache.rs index 7d3ec27e5af83c4d83b269c171943d90754bd1a6..51189d8fdae788c7c12546f2c9ac1735930c3095 100644 --- a/crates/project/src/lsp_store/inlay_hint_cache.rs +++ b/crates/project/src/lsp_store/inlay_hint_cache.rs @@ -19,7 +19,10 @@ pub enum InvalidationStrategy { /// Demands to re-query all inlay hints needed and invalidate all cached entries, but does not require instant update with invalidation. /// /// Despite nothing forbids language server from sending this request on every edit, it is expected to be sent only when certain internal server state update, invisible for the editor otherwise. 
- RefreshRequested(LanguageServerId), + RefreshRequested { + server_id: LanguageServerId, + request_id: Option, + }, /// Multibuffer excerpt(s) and/or singleton buffer(s) were edited at least on one place. /// Neither editor nor LSP is able to tell which open file hints' are not affected, so all of them have to be invalidated, re-queried and do that fast enough to avoid being slow, but also debounce to avoid loading hints on every fast keystroke sequence. BufferEdited, @@ -36,7 +39,7 @@ impl InvalidationStrategy { pub fn should_invalidate(&self) -> bool { matches!( self, - InvalidationStrategy::RefreshRequested(_) | InvalidationStrategy::BufferEdited + InvalidationStrategy::RefreshRequested { .. } | InvalidationStrategy::BufferEdited ) } } @@ -47,6 +50,7 @@ pub struct BufferInlayHints { hints_by_chunks: Vec>, fetches_by_chunks: Vec>, hints_by_id: HashMap, + latest_invalidation_requests: HashMap>, pub(super) hint_resolves: HashMap>>, } @@ -104,6 +108,7 @@ impl BufferInlayHints { Self { hints_by_chunks: vec![None; buffer_chunks.len()], fetches_by_chunks: vec![None; buffer_chunks.len()], + latest_invalidation_requests: HashMap::default(), hints_by_id: HashMap::default(), hint_resolves: HashMap::default(), snapshot, @@ -176,6 +181,7 @@ impl BufferInlayHints { self.fetches_by_chunks = vec![None; self.buffer_chunks.len()]; self.hints_by_id.clear(); self.hint_resolves.clear(); + self.latest_invalidation_requests.clear(); } pub fn insert_new_hints( @@ -222,4 +228,48 @@ impl BufferInlayHints { pub fn buffer_chunks_len(&self) -> usize { self.buffer_chunks.len() } + + pub(crate) fn invalidate_for_server_refresh( + &mut self, + for_server: LanguageServerId, + request_id: Option, + ) -> bool { + match self.latest_invalidation_requests.entry(for_server) { + hash_map::Entry::Occupied(mut o) => { + if request_id > *o.get() { + o.insert(request_id); + } else { + return false; + } + } + hash_map::Entry::Vacant(v) => { + v.insert(request_id); + } + } + + for (chunk_id, chunk_data) in 
self.hints_by_chunks.iter_mut().enumerate() { + if let Some(removed_hints) = chunk_data + .as_mut() + .and_then(|chunk_data| chunk_data.remove(&for_server)) + { + for (id, _) in removed_hints { + self.hints_by_id.remove(&id); + self.hint_resolves.remove(&id); + } + self.fetches_by_chunks[chunk_id] = None; + } + } + + true + } + + pub(crate) fn invalidate_for_chunk(&mut self, chunk: BufferChunk) { + self.fetches_by_chunks[chunk.id] = None; + if let Some(hints_by_server) = self.hints_by_chunks[chunk.id].take() { + for (hint_id, _) in hints_by_server.into_values().flatten() { + self.hints_by_id.remove(&hint_id); + self.hint_resolves.remove(&hint_id); + } + } + } } diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 7c7fe9a43091611a53dbde0ecbaf6691b7d768d0..e75a1bd395e9558c707ea5e27ab18d38c6b603be 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -337,7 +337,10 @@ pub enum Event { HostReshared, Reshared, Rejoined, - RefreshInlayHints(LanguageServerId), + RefreshInlayHints { + server_id: LanguageServerId, + request_id: Option, + }, RefreshCodeLens, RevealInProjectPanel(ProjectEntryId), SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>), @@ -3076,9 +3079,13 @@ impl Project { return; }; } - LspStoreEvent::RefreshInlayHints(server_id) => { - cx.emit(Event::RefreshInlayHints(*server_id)) - } + LspStoreEvent::RefreshInlayHints { + server_id, + request_id, + } => cx.emit(Event::RefreshInlayHints { + server_id: *server_id, + request_id: *request_id, + }), LspStoreEvent::RefreshCodeLens => cx.emit(Event::RefreshCodeLens), LspStoreEvent::LanguageServerPrompt(prompt) => { cx.emit(Event::LanguageServerPrompt(prompt.clone())) diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 3dc918d5a757af56038471e1a601d6f2cf7dbbe1..ee474895ec401c3da9f4294b8b34f0580f68d81e 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1815,10 +1815,6 @@ async fn 
test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) { fake_server .start_progress(format!("{}/0", progress_token)) .await; - assert_eq!( - events.next().await.unwrap(), - Event::RefreshInlayHints(fake_server.server.server_id()) - ); assert_eq!( events.next().await.unwrap(), Event::DiskBasedDiagnosticsStarted { @@ -1957,10 +1953,6 @@ async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppC Some(worktree_id) ) ); - assert_eq!( - events.next().await.unwrap(), - Event::RefreshInlayHints(fake_server.server.server_id()) - ); fake_server.start_progress(progress_token).await; assert_eq!( events.next().await.unwrap(), diff --git a/crates/proto/proto/lsp.proto b/crates/proto/proto/lsp.proto index 30059431094bf1b11c1e481979ed5ea651f1d40b..644e492ef6a5d639a99f75b18465ca93b0c0ef92 100644 --- a/crates/proto/proto/lsp.proto +++ b/crates/proto/proto/lsp.proto @@ -466,6 +466,7 @@ message ResolveInlayHintResponse { message RefreshInlayHints { uint64 project_id = 1; uint64 server_id = 2; + optional uint64 request_id = 3; } message CodeLens { diff --git a/crates/search/src/project_search.rs b/crates/search/src/project_search.rs index 0bb05ecb93cd5cc6c9730307792c1737531a39a5..042b58db0460f18668624b0ee8d3343e748244aa 100644 --- a/crates/search/src/project_search.rs +++ b/crates/search/src/project_search.rs @@ -2346,7 +2346,15 @@ pub fn perform_project_search( #[cfg(test)] pub mod tests { - use std::{ops::Deref as _, sync::Arc, time::Duration}; + use std::{ + ops::Deref as _, + path::PathBuf, + sync::{ + Arc, + atomic::{self, AtomicUsize}, + }, + time::Duration, + }; use super::*; use editor::{DisplayPoint, display_map::DisplayRow}; @@ -4247,6 +4255,8 @@ pub mod tests { ) .await; + let requests_count = Arc::new(AtomicUsize::new(0)); + let closure_requests_count = requests_count.clone(); let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let language_registry = project.read_with(cx, |project, _| 
project.languages().clone()); let language = rust_lang(); @@ -4258,21 +4268,26 @@ pub mod tests { inlay_hint_provider: Some(lsp::OneOf::Left(true)), ..lsp::ServerCapabilities::default() }, - initializer: Some(Box::new(|fake_server| { - fake_server.set_request_handler::( - move |_, _| async move { - Ok(Some(vec![lsp::InlayHint { - position: lsp::Position::new(0, 17), - label: lsp::InlayHintLabel::String(": i32".to_owned()), - kind: Some(lsp::InlayHintKind::TYPE), - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }])) - }, - ); + initializer: Some(Box::new(move |fake_server| { + let requests_count = closure_requests_count.clone(); + fake_server.set_request_handler::({ + move |_, _| { + let requests_count = requests_count.clone(); + async move { + requests_count.fetch_add(1, atomic::Ordering::Release); + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, 17), + label: lsp::InlayHintLabel::String(": i32".to_owned()), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + } + }); })), ..FakeLspAdapter::default() }, @@ -4286,7 +4301,7 @@ pub mod tests { }); perform_search(search_view, "let ", cx); - let _fake_server = fake_servers.next().await.unwrap(); + let fake_server = fake_servers.next().await.unwrap(); cx.executor().advance_clock(Duration::from_secs(1)); cx.executor().run_until_parked(); search_view @@ -4299,11 +4314,127 @@ pub mod tests { ); }) .unwrap(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 1, + "New hints should have been queried", + ); // Can do the 2nd search without any panics perform_search(search_view, "let ", cx); + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + search_view + .update(cx, |search_view, _, cx| { + assert_eq!( + search_view + .results_editor + .update(cx, |editor, cx| editor.display_text(cx)), + "\n\nfn main() { let a: 
i32 = 2; }\n" + ); + }) + .unwrap(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 2, + "We did drop the previous buffer when cleared the old project search results, hence another query was made", + ); + + let singleton_editor = window + .update(cx, |workspace, window, cx| { + workspace.open_abs_path( + PathBuf::from(path!("/dir/main.rs")), + workspace::OpenOptions::default(), + window, + cx, + ) + }) + .unwrap() + .await + .unwrap() + .downcast::() + .unwrap(); cx.executor().advance_clock(Duration::from_millis(100)); cx.executor().run_until_parked(); + singleton_editor.update(cx, |editor, cx| { + assert_eq!( + editor.display_text(cx), + "fn main() { let a: i32 = 2; }\n", + "Newly opened editor should have the correct text with hints", + ); + }); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 2, + "Opening the same buffer again should reuse the cached hints", + ); + + window + .update(cx, |_, window, cx| { + singleton_editor.update(cx, |editor, cx| { + editor.handle_input("test", window, cx); + }); + }) + .unwrap(); + + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + singleton_editor.update(cx, |editor, cx| { + assert_eq!( + editor.display_text(cx), + "testfn main() { l: i32et a = 2; }\n", + "Newly opened editor should have the correct text with hints", + ); + }); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 3, + "We have edited the buffer and should send a new request", + ); + + window + .update(cx, |_, window, cx| { + singleton_editor.update(cx, |editor, cx| { + editor.undo(&editor::actions::Undo, window, cx); + }); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 4, + "We have edited the buffer again and should send a new request again", + ); + singleton_editor.update(cx, |editor, cx| { + assert_eq!( + editor.display_text(cx), + "fn 
main() { let a: i32 = 2; }\n", + "Newly opened editor should have the correct text with hints", + ); + }); + project.update(cx, |_, cx| { + cx.emit(project::Event::RefreshInlayHints { + server_id: fake_server.server.server_id(), + request_id: Some(1), + }); + }); + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 5, + "After a simulated server refresh request, we should have sent another request", + ); + + perform_search(search_view, "let ", cx); + cx.executor().advance_clock(Duration::from_secs(1)); + cx.executor().run_until_parked(); + assert_eq!( + requests_count.load(atomic::Ordering::Acquire), + 5, + "New project search should reuse the cached hints", + ); search_view .update(cx, |search_view, _, cx| { assert_eq!(