From 9e87fefe3fae9dad2de38afdd4aa3e9ad102bd2b Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Feb 2026 13:16:33 -0700 Subject: [PATCH 001/548] Fix panic when folding immediately after a tab (#49920) fixes ZED-57B Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed a (rare) panic when a fold was created immediately following a tab character --- crates/editor/src/display_map/tab_map.rs | 63 ++++++++++++++++++------ 1 file changed, 49 insertions(+), 14 deletions(-) diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 45c52db37ee1e9e0e1e25b745a42a7338e2e714e..5ff3979e7da848ddba98f5b6f8d1ea26ad990a81 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -604,19 +604,16 @@ impl<'a> Iterator for TabChunks<'a> { #[ztracing::instrument(skip_all)] fn next(&mut self) -> Option { - if self.chunk.text.is_empty() { - if let Some(chunk) = self.fold_chunks.next() { - self.chunk = chunk; - if self.inside_leading_tab { - self.chunk.text = &self.chunk.text[1..]; - self.chunk.tabs >>= 1; - self.chunk.chars >>= 1; - self.chunk.newlines >>= 1; - self.inside_leading_tab = false; - self.input_column += 1; - } - } else { - return None; + while self.chunk.text.is_empty() { + let chunk = self.fold_chunks.next()?; + self.chunk = chunk; + if self.inside_leading_tab { + self.chunk.text = &self.chunk.text[1..]; + self.chunk.tabs >>= 1; + self.chunk.chars >>= 1; + self.chunk.newlines >>= 1; + self.inside_leading_tab = false; + self.input_column += 1; } } @@ -710,7 +707,7 @@ mod tests { use crate::{ MultiBuffer, display_map::{ - fold_map::{FoldMap, FoldOffset}, + 
fold_map::{FoldMap, FoldOffset, FoldPlaceholder}, inlay_map::InlayMap, }, }; @@ -1090,6 +1087,44 @@ mod tests { } } + #[gpui::test] + fn test_empty_chunk_after_leading_tab_trim(cx: &mut gpui::App) { + // We fold "hello" (offsets 1..6) so the fold map creates a + // transform boundary at offset 1, producing a 1-byte fold chunk + // for the tab. + let text = "\thello"; + let buffer = MultiBuffer::build_simple(text, cx); + let buffer_snapshot = buffer.read(cx).snapshot(cx); + let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); + let mut fold_map = FoldMap::new(inlay_snapshot.clone()).0; + + let (mut writer, _, _) = fold_map.write(inlay_snapshot.clone(), vec![]); + writer.fold(vec![( + MultiBufferOffset(1)..MultiBufferOffset(6), + FoldPlaceholder::test(), + )]); + let (fold_snapshot, _) = fold_map.read(inlay_snapshot, vec![]); + + let tab_size = NonZeroU32::new(4).unwrap(); + let (_, tab_snapshot) = TabMap::new(fold_snapshot, tab_size); + + // The tab at column 0 expands to 4 spaces (columns 0‥4). + // Seek starting at column 2 (middle of that tab) so that + // `inside_leading_tab = true` and `to_next_stop = 2`. + // Set the end just past the tab expansion so the iterator must + // process the tab byte from the fold chunk. + let max = tab_snapshot.max_point(); + let start = TabPoint::new(0, 2); + let end = max; + + // This should not panic. 
+ let result: String = tab_snapshot + .chunks(start..end, false, Highlights::default()) + .map(|c| c.text) + .collect(); + assert!(!result.is_empty()); + } + #[gpui::test(iterations = 100)] fn test_random_tabs(cx: &mut gpui::App, mut rng: StdRng) { let tab_size = NonZeroU32::new(rng.random_range(1..=4)).unwrap(); From c2504ad162847b7a3d8108a2ee5d9b7dfe1b9117 Mon Sep 17 00:00:00 2001 From: Xiaobo Liu Date: Tue, 24 Feb 2026 04:18:03 +0800 Subject: [PATCH 002/548] http_client: Fix Codex ACP install robustness by staging downloads (#45428) Release Notes: - Improved Codex ACP install robustness by staging downloads --------- Signed-off-by: Xiaobo Liu Co-authored-by: Conrad Irwin --- crates/http_client/src/github_download.rs | 96 ++++++++++++++++++++--- 1 file changed, 86 insertions(+), 10 deletions(-) diff --git a/crates/http_client/src/github_download.rs b/crates/http_client/src/github_download.rs index 02dee08b215e547d632caaf5f94b0872aa6aa20d..642bbf11c11ce8816a1506c3c4989dce434552d8 100644 --- a/crates/http_client/src/github_download.rs +++ b/crates/http_client/src/github_download.rs @@ -1,4 +1,8 @@ -use std::{path::Path, pin::Pin, task::Poll}; +use std::{ + path::{Path, PathBuf}, + pin::Pin, + task::Poll, +}; use anyhow::{Context, Result}; use async_compression::futures::bufread::GzipDecoder; @@ -40,11 +44,37 @@ pub async fn download_server_binary( asset_kind: AssetKind, ) -> Result<(), anyhow::Error> { log::info!("downloading github artifact from {url}"); + let Some(destination_parent) = destination_path.parent() else { + anyhow::bail!("destination path has no parent: {destination_path:?}"); + }; + + let staging_path = staging_path(destination_parent, asset_kind)?; let mut response = http_client .get(url, Default::default(), true) .await .with_context(|| format!("downloading release from {url}"))?; let body = response.body_mut(); + + if let Err(err) = extract_to_staging(body, digest, url, &staging_path, asset_kind).await { + cleanup_staging_path(&staging_path, 
asset_kind).await; + return Err(err); + } + + if let Err(err) = finalize_download(&staging_path, destination_path).await { + cleanup_staging_path(&staging_path, asset_kind).await; + return Err(err); + } + + Ok(()) +} + +async fn extract_to_staging( + body: impl AsyncRead + Unpin, + digest: Option<&str>, + url: &str, + staging_path: &Path, + asset_kind: AssetKind, +) -> Result<()> { match digest { Some(expected_sha_256) => { let temp_asset_file = tempfile::NamedTempFile::new() @@ -57,7 +87,7 @@ pub async fn download_server_binary( futures::io::copy(&mut BufReader::new(body), &mut writer) .await .with_context(|| { - format!("saving archive contents into the temporary file for {url}",) + format!("saving archive contents into the temporary file for {url}") })?; let asset_sha_256 = format!("{:x}", writer.hasher.finalize()); @@ -69,22 +99,68 @@ pub async fn download_server_binary( .writer .seek(std::io::SeekFrom::Start(0)) .await - .with_context(|| format!("seeking temporary file {destination_path:?}",))?; - stream_file_archive(&mut writer.writer, url, destination_path, asset_kind) + .with_context(|| format!("seeking temporary file for {url}"))?; + stream_file_archive(&mut writer.writer, url, staging_path, asset_kind) .await .with_context(|| { - format!("extracting downloaded asset for {url} into {destination_path:?}",) + format!("extracting downloaded asset for {url} into {staging_path:?}") + })?; + } + None => { + stream_response_archive(body, url, staging_path, asset_kind) + .await + .with_context(|| { + format!("extracting response for asset {url} into {staging_path:?}") })?; } - None => stream_response_archive(body, url, destination_path, asset_kind) - .await - .with_context(|| { - format!("extracting response for asset {url} into {destination_path:?}",) - })?, } Ok(()) } +fn staging_path(parent: &Path, asset_kind: AssetKind) -> Result { + match asset_kind { + AssetKind::TarGz | AssetKind::Zip => { + let dir = tempfile::Builder::new() + 
.prefix(".tmp-github-download-") + .tempdir_in(parent) + .with_context(|| format!("creating staging directory in {parent:?}"))?; + Ok(dir.keep()) + } + AssetKind::Gz => { + let path = tempfile::Builder::new() + .prefix(".tmp-github-download-") + .tempfile_in(parent) + .with_context(|| format!("creating staging file in {parent:?}"))? + .into_temp_path() + .keep() + .with_context(|| format!("persisting staging file in {parent:?}"))?; + Ok(path) + } + } +} + +async fn cleanup_staging_path(staging_path: &Path, asset_kind: AssetKind) { + match asset_kind { + AssetKind::TarGz | AssetKind::Zip => { + if let Err(err) = async_fs::remove_dir_all(staging_path).await { + log::warn!("failed to remove staging directory {staging_path:?}: {err:?}"); + } + } + AssetKind::Gz => { + if let Err(err) = async_fs::remove_file(staging_path).await { + log::warn!("failed to remove staging file {staging_path:?}: {err:?}"); + } + } + } +} + +async fn finalize_download(staging_path: &Path, destination_path: &Path) -> Result<()> { + async_fs::rename(staging_path, destination_path) + .await + .with_context(|| format!("renaming {staging_path:?} to {destination_path:?}"))?; + Ok(()) +} + async fn stream_response_archive( response: impl AsyncRead + Unpin, url: &str, From 97a8fe78283c53c9668d1aa54617d9f67ed976ce Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 23 Feb 2026 12:52:33 -0800 Subject: [PATCH 003/548] zeta: Consolidate logic for picking region sizes, use larger editable region (#49921) This will not affect how Zeta 2 behaves in production until we update Cloud to pull in the changes to the `zeta_prompt` crate. But from some early testing, it seems to improve behavior, not worsen it, even though the editable region size differs from the currently-deployed model's training data. 
Release Notes: - N/A --------- Co-authored-by: Oleksiy Syvokon Co-authored-by: Ben Kunkle Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- crates/edit_prediction/src/cursor_excerpt.rs | 52 ++++++------------ crates/edit_prediction/src/edit_prediction.rs | 4 +- crates/edit_prediction/src/zeta.rs | 43 +++++---------- .../edit_prediction_cli/src/format_prompt.rs | 54 +++++++++---------- crates/zeta_prompt/src/zeta_prompt.rs | 36 +++++++------ 5 files changed, 73 insertions(+), 116 deletions(-) diff --git a/crates/edit_prediction/src/cursor_excerpt.rs b/crates/edit_prediction/src/cursor_excerpt.rs index 900d78945ca6ab4fab9c9c60bf13009368c7c77b..18a13130720ca2a5bd313f94437a8c3cfab3c691 100644 --- a/crates/edit_prediction/src/cursor_excerpt.rs +++ b/crates/edit_prediction/src/cursor_excerpt.rs @@ -1,24 +1,15 @@ use language::{BufferSnapshot, Point}; use std::ops::Range; +use text::OffsetRangeExt as _; use zeta_prompt::ExcerptRanges; -/// Pre-computed Point ranges for all editable/context budget combinations. -pub struct ExcerptRangePoints { - pub editable_150: Range, - pub editable_180: Range, - pub editable_350: Range, - pub editable_150_context_350: Range, - pub editable_180_context_350: Range, - pub editable_350_context_150: Range, -} - /// Computes all range variants for a cursor position: editable ranges at 150, 180, and 350 /// token budgets, plus their corresponding context expansions. Returns the full excerpt range /// (union of all context ranges) and the individual sub-ranges as Points. 
pub fn compute_excerpt_ranges( position: Point, snapshot: &BufferSnapshot, -) -> (Range, ExcerptRangePoints) { +) -> (Range, Range, ExcerptRanges) { let editable_150 = compute_editable_range(snapshot, position, 150); let editable_180 = compute_editable_range(snapshot, position, 180); let editable_350 = compute_editable_range(snapshot, position, 350); @@ -44,37 +35,24 @@ pub fn compute_excerpt_ranges( let full_context = Point::new(full_start_row, 0)..Point::new(full_end_row, snapshot.line_len(full_end_row)); - let ranges = ExcerptRangePoints { - editable_150, - editable_180, - editable_350, - editable_150_context_350, - editable_180_context_350, - editable_350_context_150, - }; - - (full_context, ranges) -} + let full_context_offset_range = full_context.to_offset(snapshot); -/// Converts `ExcerptRangePoints` to byte-offset `ExcerptRanges` relative to `excerpt_start`. -pub fn excerpt_ranges_to_byte_offsets( - ranges: &ExcerptRangePoints, - excerpt_start: usize, - snapshot: &BufferSnapshot, -) -> ExcerptRanges { let to_offset = |range: &Range| -> Range { let start = range.start.to_offset(snapshot); let end = range.end.to_offset(snapshot); - (start - excerpt_start)..(end - excerpt_start) + (start - full_context_offset_range.start)..(end - full_context_offset_range.start) }; - ExcerptRanges { - editable_150: to_offset(&ranges.editable_150), - editable_180: to_offset(&ranges.editable_180), - editable_350: to_offset(&ranges.editable_350), - editable_150_context_350: to_offset(&ranges.editable_150_context_350), - editable_180_context_350: to_offset(&ranges.editable_180_context_350), - editable_350_context_150: to_offset(&ranges.editable_350_context_150), - } + + let ranges = ExcerptRanges { + editable_150: to_offset(&editable_150), + editable_180: to_offset(&editable_180), + editable_350: to_offset(&editable_350), + editable_150_context_350: to_offset(&editable_150_context_350), + editable_180_context_350: to_offset(&editable_180_context_350), + editable_350_context_150: 
to_offset(&editable_350_context_150), + }; + + (full_context, full_context_offset_range, ranges) } pub fn editable_and_context_ranges_for_cursor_position( diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index cb201870ef636f85e06e0e9cb2def4ec8a149db4..fb6af292fca3b610b5344da146fba558380ad22f 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -2212,9 +2212,7 @@ impl EditPredictionStore { { let http_client = client.http_client(); - let mut token = if let Ok(custom_token) = std::env::var("ZED_PREDICT_EDITS_TOKEN") { - Some(custom_token) - } else if require_auth { + let mut token = if require_auth { Some(llm_token.acquire(&client).await?) } else { llm_token.acquire(&client).await.ok() diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index b68c5df5d306f7db8a66e6be2d6c0847dcfa76dd..658071c9ccfbdf64a9a1ebead7724774cd5cc40e 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -1,4 +1,4 @@ -use crate::cursor_excerpt::{compute_excerpt_ranges, excerpt_ranges_to_byte_offsets}; +use crate::cursor_excerpt::compute_excerpt_ranges; use crate::prediction::EditPredictionResult; use crate::{ CurrentEditPrediction, DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId, @@ -11,7 +11,7 @@ use edit_prediction_types::PredictedCursorPosition; use futures::AsyncReadExt as _; use gpui::{App, AppContext as _, Task, http_client, prelude::*}; use language::language_settings::{OpenAiCompatibleEditPredictionSettings, all_language_settings}; -use language::{BufferSnapshot, OffsetRangeExt as _, ToOffset as _, ToPoint, text_diff}; +use language::{BufferSnapshot, ToOffset as _, ToPoint, text_diff}; use release_channel::AppVersion; use text::{Anchor, Bias}; @@ -24,19 +24,6 @@ use zeta_prompt::{ zeta1::{self, EDITABLE_REGION_END_MARKER}, }; -pub const MAX_CONTEXT_TOKENS: usize = 350; - -pub fn 
max_editable_tokens(format: ZetaFormat) -> usize { - match format { - ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered => 150, - ZetaFormat::V0114180EditableRegion => 180, - ZetaFormat::V0120GitMergeMarkers => 180, - ZetaFormat::V0131GitMergeMarkersPrefix => 180, - ZetaFormat::V0211Prefill => 180, - ZetaFormat::V0211SeedCoder => 180, - } -} - pub fn request_prediction_with_zeta( store: &mut EditPredictionStore, EditPredictionModelInput { @@ -359,7 +346,8 @@ pub fn zeta2_prompt_input( ) -> (std::ops::Range, zeta_prompt::ZetaPromptInput) { let cursor_point = cursor_offset.to_point(snapshot); - let (full_context, range_points) = compute_excerpt_ranges(cursor_point, snapshot); + let (full_context, full_context_offset_range, excerpt_ranges) = + compute_excerpt_ranges(cursor_point, snapshot); let related_files = crate::filter_redundant_excerpts( related_files, @@ -367,24 +355,17 @@ pub fn zeta2_prompt_input( full_context.start.row..full_context.end.row, ); - let full_context_start_offset = full_context.start.to_offset(snapshot); + let full_context_start_offset = full_context_offset_range.start; let full_context_start_row = full_context.start.row; - let excerpt_ranges = - excerpt_ranges_to_byte_offsets(&range_points, full_context_start_offset, snapshot); - - let editable_range = match preferred_model { - Some(EditPredictionModelKind::Zeta1) => &range_points.editable_350, - _ => match zeta_format { - ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered => &range_points.editable_150, - _ => &range_points.editable_180, - }, + let editable_offset_range = match preferred_model { + Some(EditPredictionModelKind::Zeta1) => excerpt_ranges.editable_350.clone(), + _ => zeta_prompt::excerpt_range_for_format(zeta_format, &excerpt_ranges).0, }; + let absolute_editable_range = full_context_start_offset + editable_offset_range.start + ..full_context_start_offset + editable_offset_range.end; - let editable_offset_range = editable_range.to_offset(snapshot); let 
cursor_offset_in_excerpt = cursor_offset - full_context_start_offset; - let editable_range_in_excerpt = (editable_offset_range.start - full_context_start_offset) - ..(editable_offset_range.end - full_context_start_offset); let prompt_input = zeta_prompt::ZetaPromptInput { cursor_path: excerpt_path, @@ -392,7 +373,7 @@ pub fn zeta2_prompt_input( .text_for_range(full_context) .collect::() .into(), - editable_range_in_excerpt, + editable_range_in_excerpt: editable_offset_range, cursor_offset_in_excerpt, excerpt_start_row: Some(full_context_start_row), events, @@ -402,7 +383,7 @@ pub fn zeta2_prompt_input( in_open_source_repo: is_open_source, can_collect_data, }; - (editable_offset_range, prompt_input) + (absolute_editable_range, prompt_input) } pub(crate) async fn send_custom_server_request( diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index 17d8aa68359a771da72558cd21523f13e2df4b38..a3e83e73bb9d69c22be33936ae73177022dfc165 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -6,14 +6,14 @@ use crate::{ retrieve_context::run_context_retrieval, }; use anyhow::{Context as _, Result, anyhow}; -use edit_prediction::{cursor_excerpt::editable_and_context_ranges_for_cursor_position, udiff}; +use edit_prediction::{cursor_excerpt::compute_excerpt_ranges, udiff}; use gpui::{AppContext, AsyncApp}; -use language::{Buffer, OffsetRangeExt, Point}; +use language::{Buffer, Point}; use similar::DiffableStr; use std::sync::Arc; use std::{fmt::Write as _, ops::Range}; -use zeta_prompt::ZetaFormat; use zeta_prompt::format_zeta_prompt; +use zeta_prompt::{ZetaFormat, excerpt_range_for_format}; pub async fn run_format_prompt( example: &mut Example, @@ -47,18 +47,15 @@ pub async fn run_format_prompt( let cursor_point = Point::new(prompt_inputs.cursor_row, prompt_inputs.cursor_column); let snapshot = cx.background_spawn(snapshot_fut).await; + let (_, _, 
excerpt_ranges) = compute_excerpt_ranges(cursor_point, &snapshot); + match args.provider { PredictionProvider::Teacher(_) | PredictionProvider::TeacherNonBatching(_) => { step_progress.set_substatus("formatting teacher prompt"); - let (editable_range, context_range) = editable_and_context_ranges_for_cursor_position( - cursor_point, - &snapshot, - edit_prediction::zeta::max_editable_tokens(ZetaFormat::default()), - edit_prediction::zeta::MAX_CONTEXT_TOKENS, - ); - let editable_range = editable_range.to_offset(&snapshot); - let context_range = context_range.to_offset(&snapshot); + let zeta_format = ZetaFormat::default(); + let (editable_range, context_range) = + excerpt_range_for_format(zeta_format, &excerpt_ranges); let prompt = TeacherPrompt::format_prompt(example, editable_range, context_range); example.prompt = Some(ExamplePrompt { @@ -69,17 +66,11 @@ pub async fn run_format_prompt( provider: args.provider, }); } - PredictionProvider::Zeta2(version) => { + PredictionProvider::Zeta2(zeta_format) => { step_progress.set_substatus("formatting zeta2 prompt"); - let (editable_range, context_range) = editable_and_context_ranges_for_cursor_position( - cursor_point, - &snapshot, - edit_prediction::zeta::max_editable_tokens(version), - edit_prediction::zeta::MAX_CONTEXT_TOKENS, - ); - let editable_range = editable_range.to_offset(&snapshot); - let context_range = context_range.to_offset(&snapshot); + let (editable_range, context_range) = + excerpt_range_for_format(zeta_format, &excerpt_ranges); let context_start = context_range.start; let cursor_offset_in_excerpt = prompt_inputs.cursor_offset - context_start; @@ -93,7 +84,7 @@ pub async fn run_format_prompt( excerpt_start_row: prompt_inputs.excerpt_start_row, events: prompt_inputs.edit_history.clone(), related_files: prompt_inputs.related_files.clone().unwrap_or_default(), - excerpt_ranges: None, + excerpt_ranges: Some(excerpt_ranges), preferred_model: None, in_open_source_repo: example .spec @@ -102,21 +93,24 @@ pub async 
fn run_format_prompt( .map_or(false, |input| input.in_open_source_repo), can_collect_data: false, }; - let prompt = format_zeta_prompt(&input, version); - let prefill = zeta_prompt::get_prefill(&input, version); + let prompt = format_zeta_prompt(&input, zeta_format); + let prefill = zeta_prompt::get_prefill(&input, zeta_format); let (expected_patch, expected_cursor_offset) = example .spec .expected_patches_with_cursor_positions() .into_iter() .next() .context("expected patches is empty")?; - let expected_output = - zeta2_output_for_patch(&input, &expected_patch, expected_cursor_offset, version)?; - let rejected_output = example - .spec - .rejected_patch - .as_ref() - .and_then(|patch| zeta2_output_for_patch(&input, patch, None, version).ok()); + let expected_output = zeta2_output_for_patch( + &input, + &expected_patch, + expected_cursor_offset, + zeta_format, + )?; + let rejected_output = + example.spec.rejected_patch.as_ref().and_then(|patch| { + zeta2_output_for_patch(&input, patch, None, zeta_format).ok() + }); example.prompt = Some(ExamplePrompt { input: prompt, diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index d8376b5ce312db1166667b89d69bfe9476543a8d..8a2c3477bc2e64fb056f403149e04244897b31cd 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -86,9 +86,9 @@ pub struct ZetaPromptInput { pub enum ZetaFormat { V0112MiddleAtEnd, V0113Ordered, - #[default] V0114180EditableRegion, V0120GitMergeMarkers, + #[default] V0131GitMergeMarkersPrefix, V0211Prefill, V0211SeedCoder, @@ -242,19 +242,11 @@ pub fn clean_zeta2_model_output(output: &str, format: ZetaFormat) -> &str { } } -fn resolve_cursor_region( - input: &ZetaPromptInput, +pub fn excerpt_range_for_format( format: ZetaFormat, -) -> (&str, Range, usize) { - let Some(ranges) = &input.excerpt_ranges else { - return ( - &input.cursor_excerpt, - input.editable_range_in_excerpt.clone(), - input.cursor_offset_in_excerpt, - ); - 
}; - - let (editable_range, context_range) = match format { + ranges: &ExcerptRanges, +) -> (Range, Range) { + match format { ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered => ( ranges.editable_150.clone(), ranges.editable_150_context_350.clone(), @@ -264,11 +256,25 @@ fn resolve_cursor_region( | ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill | ZetaFormat::V0211SeedCoder => ( - ranges.editable_180.clone(), - ranges.editable_180_context_350.clone(), + ranges.editable_350.clone(), + ranges.editable_350_context_150.clone(), ), + } +} + +fn resolve_cursor_region( + input: &ZetaPromptInput, + format: ZetaFormat, +) -> (&str, Range, usize) { + let Some(ranges) = &input.excerpt_ranges else { + return ( + &input.cursor_excerpt, + input.editable_range_in_excerpt.clone(), + input.cursor_offset_in_excerpt, + ); }; + let (editable_range, context_range) = excerpt_range_for_format(format, ranges); let context_start = context_range.start; let context_text = &input.cursor_excerpt[context_range]; let adjusted_editable = From a8043dcff8f28a0443d7ec238e7f020689ebe1ff Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 23 Feb 2026 16:28:40 -0500 Subject: [PATCH 004/548] editor: Prevent panic when attempting to resolve a breakpoint anchor against the wrong buffer (#49893) Closes ZED-4HY Release Notes: - Fixed a crash when using breakpoints. 
--- crates/project/src/debugger/breakpoint_store.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/project/src/debugger/breakpoint_store.rs b/crates/project/src/debugger/breakpoint_store.rs index d548781891598b229c430692bee36919bf7669fb..54f884aa5704bd256620f35eb0ea73dc53feeab5 100644 --- a/crates/project/src/debugger/breakpoint_store.rs +++ b/crates/project/src/debugger/breakpoint_store.rs @@ -628,6 +628,10 @@ impl BreakpointStore { file_breakpoints.breakpoints.iter().filter_map({ let range = range.clone(); move |bp| { + if !buffer_snapshot.can_resolve(bp.position()) { + return None; + } + if let Some(range) = &range && (bp.position().cmp(&range.start, buffer_snapshot).is_lt() || bp.position().cmp(&range.end, buffer_snapshot).is_gt()) From 778f2d34c7f840999f9ca7343005d110292d78aa Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Feb 2026 15:32:47 -0700 Subject: [PATCH 005/548] Fix panic in breadcrumbs (#49930) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Fixes https://zed-dev.sentry.io/issues/7286512714/?project=4509715135987712&query=is%3Aunresolved&referrer=issue-stream Release Notes: - Fix (rare) panic when breadcrumbs contain newlines --- crates/editor/src/element.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index d76fb175c32188439dea8d392248cf70793fd1c2..deedd07d3eddfd563d4fbdaf51311908ebc40c01 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -7835,7 +7835,7 @@ pub fn render_breadcrumb_text( return styled_element; } - StyledText::new(segment.text.replace('\n', "⏎")) + 
StyledText::new(segment.text.replace('\n', " ")) .with_default_highlights(&text_style, segment.highlights.unwrap_or_default()) .into_any() }); From 7d66703b80d7938ec666ca6a1d385fc3462209d3 Mon Sep 17 00:00:00 2001 From: Eric Holk Date: Mon, 23 Feb 2026 14:58:50 -0800 Subject: [PATCH 006/548] Remove thread title persistence (#49929) This was causing performance issues and is on its way out anyway. Release Notes: - N/A --- Cargo.lock | 2 - crates/sidebar/Cargo.toml | 2 - crates/sidebar/src/sidebar.rs | 123 ++++------------------------------ 3 files changed, 14 insertions(+), 113 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index aa52141ae7529bab47e8aeab6959525227cb04c5..430cbe0f580e9ad94767441fca687d69ce404099 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -15512,7 +15512,6 @@ dependencies = [ "acp_thread", "agent_ui", "chrono", - "db", "editor", "feature_flags", "fs", @@ -15521,7 +15520,6 @@ dependencies = [ "picker", "project", "recent_projects", - "serde_json", "settings", "theme", "ui", diff --git a/crates/sidebar/Cargo.toml b/crates/sidebar/Cargo.toml index f80a6f16725e00dfc353377e49339692be0837af..6165a41c68894df9ad60110663562df713a24470 100644 --- a/crates/sidebar/Cargo.toml +++ b/crates/sidebar/Cargo.toml @@ -19,10 +19,8 @@ test-support = [] acp_thread.workspace = true agent_ui.workspace = true chrono.workspace = true -db.workspace = true fs.workspace = true fuzzy.workspace = true -serde_json.workspace = true gpui.workspace = true picker.workspace = true project.workspace = true diff --git a/crates/sidebar/src/sidebar.rs b/crates/sidebar/src/sidebar.rs index 7ee487f5d491670c0b9ba34229a96a1c21718979..24974512cda12276b5fcdc51ebd71d091782dff6 100644 --- a/crates/sidebar/src/sidebar.rs +++ b/crates/sidebar/src/sidebar.rs @@ -1,7 +1,6 @@ use acp_thread::ThreadStatus; use agent_ui::{AgentPanel, AgentPanelEvent}; use chrono::{Datelike, Local, NaiveDate, TimeDelta}; -use db::kvp::KEY_VALUE_STORE; use fs::Fs; use fuzzy::StringMatchCandidate; @@ -38,8 +37,6 @@ struct 
AgentThreadInfo { icon: IconName, } -const LAST_THREAD_TITLES_KEY: &str = "sidebar-last-thread-titles"; - const DEFAULT_WIDTH: Pixels = px(320.0); const MIN_WIDTH: Pixels = px(200.0); const MAX_WIDTH: Pixels = px(800.0); @@ -54,12 +51,7 @@ struct WorkspaceThreadEntry { } impl WorkspaceThreadEntry { - fn new( - index: usize, - workspace: &Entity, - persisted_titles: &HashMap, - cx: &App, - ) -> Self { + fn new(index: usize, workspace: &Entity, cx: &App) -> Self { let workspace_ref = workspace.read(cx); let worktrees: Vec<_> = workspace_ref @@ -89,18 +81,7 @@ impl WorkspaceThreadEntry { .join("\n") .into(); - let thread_info = Self::thread_info(workspace, cx).or_else(|| { - if worktrees.is_empty() { - return None; - } - let path_key = sorted_paths_key(&worktrees); - let title = persisted_titles.get(&path_key)?; - Some(AgentThreadInfo { - title: SharedString::from(title.clone()), - status: AgentThreadStatus::Completed, - icon: IconName::ZedAgent, - }) - }); + let thread_info = Self::thread_info(workspace, cx); Self { index, @@ -243,15 +224,6 @@ impl WorkspacePickerDelegate { fn set_recent_projects(&mut self, recent_projects: Vec, cx: &App) { self.recent_project_thread_titles.clear(); - if let Some(map) = read_thread_title_map() { - for entry in &recent_projects { - let path_key = sorted_paths_key(&entry.paths); - if let Some(title) = map.get(&path_key) { - self.recent_project_thread_titles - .insert(entry.full_path.clone(), title.clone().into()); - } - } - } self.recent_projects = recent_projects; @@ -755,8 +727,8 @@ impl Sidebar { let subscription = cx.observe_in( &multi_workspace, window, - |this, multi_workspace, window, cx| { - this.queue_refresh(multi_workspace, window, cx); + |this, _multi_workspace, window, cx| { + this.update_entries(window, cx); }, ); @@ -793,7 +765,7 @@ impl Sidebar { test_recent_project_thread_titles: HashMap::new(), _fetch_recent_projects: fetch_recent_projects, }; - this.queue_refresh(this.multi_workspace.clone(), window, cx); + 
this.update_entries(window, cx); this } @@ -820,7 +792,7 @@ impl Sidebar { ProjectEvent::WorktreeAdded(_) | ProjectEvent::WorktreeRemoved(_) | ProjectEvent::WorktreeOrderChanged => { - this.queue_refresh(this.multi_workspace.clone(), window, cx); + this.update_entries(window, cx); } _ => {} }, @@ -834,16 +806,12 @@ impl Sidebar { multi_workspace: &MultiWorkspace, cx: &App, ) -> (Vec, usize) { - let persisted_titles = read_thread_title_map().unwrap_or_default(); - #[allow(unused_mut)] let mut entries: Vec = multi_workspace .workspaces() .iter() .enumerate() - .map(|(index, workspace)| { - WorkspaceThreadEntry::new(index, workspace, &persisted_titles, cx) - }) + .map(|(index, workspace)| WorkspaceThreadEntry::new(index, workspace, cx)) .collect(); #[cfg(any(test, feature = "test-support"))] @@ -916,14 +884,14 @@ impl Sidebar { &agent_panel, window, |this, _, _event: &AgentPanelEvent, window, cx| { - this.queue_refresh(this.multi_workspace.clone(), window, cx); + this.update_entries(window, cx); }, ) } else { // Panel hasn't loaded yet — observe the workspace so we // re-subscribe once the panel appears on its dock. 
cx.observe_in(workspace, window, |this, _, window, cx| { - this.queue_refresh(this.multi_workspace.clone(), window, cx); + this.update_entries(window, cx); }) } }) @@ -943,60 +911,16 @@ impl Sidebar { let agent_panel = workspace.read(cx).panel::(cx)?; let thread = agent_panel.read(cx).active_agent_thread(cx)?; Some(cx.observe_in(&thread, window, |this, _, window, cx| { - this.queue_refresh(this.multi_workspace.clone(), window, cx); + this.update_entries(window, cx); })) }) .collect() } - fn persist_thread_titles( - &self, - entries: &[WorkspaceThreadEntry], - multi_workspace: &Entity, - cx: &mut Context, - ) { - let mut map = read_thread_title_map().unwrap_or_default(); - let workspaces = multi_workspace.read(cx).workspaces().to_vec(); - let mut changed = false; - - for (workspace, entry) in workspaces.iter().zip(entries.iter()) { - if let Some(ref info) = entry.thread_info { - let paths: Vec<_> = workspace - .read(cx) - .worktrees(cx) - .map(|wt| wt.read(cx).abs_path()) - .collect(); - if paths.is_empty() { - continue; - } - let path_key = sorted_paths_key(&paths); - let title = info.title.to_string(); - if map.get(&path_key) != Some(&title) { - map.insert(path_key, title); - changed = true; - } - } - } - - if changed { - if let Some(json) = serde_json::to_string(&map).log_err() { - cx.background_spawn(async move { - KEY_VALUE_STORE - .write_kvp(LAST_THREAD_TITLES_KEY.into(), json) - .await - .log_err(); - }) - .detach(); - } - } - } - - fn queue_refresh( - &mut self, - multi_workspace: Entity, - window: &mut Window, - cx: &mut Context, - ) { + /// Reconciles the sidebar's displayed entries with the current state of all + /// workspaces and their agent threads. 
+ fn update_entries(&mut self, window: &mut Window, cx: &mut Context) { + let multi_workspace = self.multi_workspace.clone(); cx.defer_in(window, move |this, window, cx| { if !this.multi_workspace.read(cx).multi_workspace_enabled(cx) { return; @@ -1009,8 +933,6 @@ impl Sidebar { this.build_workspace_thread_entries(multi_workspace, cx) }); - this.persist_thread_titles(&entries, &multi_workspace, cx); - let had_notifications = !this.picker.read(cx).delegate.notified_workspaces.is_empty(); this.picker.update(cx, |picker, cx| { picker.delegate.set_entries(entries, active_index, cx); @@ -1046,23 +968,6 @@ impl Focusable for Sidebar { } } -fn sorted_paths_key>(paths: &[P]) -> String { - let mut sorted: Vec = paths - .iter() - .map(|p| p.as_ref().to_string_lossy().to_string()) - .collect(); - sorted.sort(); - sorted.join("\n") -} - -fn read_thread_title_map() -> Option> { - let json = KEY_VALUE_STORE - .read_kvp(LAST_THREAD_TITLES_KEY) - .log_err() - .flatten()?; - serde_json::from_str(&json).log_err() -} - impl Render for Sidebar { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let titlebar_height = ui::utils::platform_title_bar_height(window); From 512f1f60f2920dd7450866b42dfdb2e25fc2d9af Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 23 Feb 2026 19:28:13 -0500 Subject: [PATCH 007/548] git: Fix diff view buttons not working when project diff or branch diff is empty (#49884) Release Notes: - git: Fixed a bug where the buttons to toggle the split diff view would have no effect when the diff being viewed was empty. 
--- crates/editor/src/split.rs | 7 ++++++- crates/search/src/buffer_search.rs | 28 ++++++++++++++++++++-------- 2 files changed, 26 insertions(+), 9 deletions(-) diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs index 570f6c46ddd377b83b3ccda6c19664de48f2a7b7..abee7c6a482fedb66579f28b7636e7e2dc6504cd 100644 --- a/crates/editor/src/split.rs +++ b/crates/editor/src/split.rs @@ -841,7 +841,12 @@ impl SplittableEditor { }); } - fn toggle_split(&mut self, _: &ToggleSplitDiff, window: &mut Window, cx: &mut Context) { + pub fn toggle_split( + &mut self, + _: &ToggleSplitDiff, + window: &mut Window, + cx: &mut Context, + ) { if self.lhs.is_some() { self.unsplit(window, cx); } else { diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index eac830c140107ce822ca6149da296b9e7a7308f6..42b2344316e8d9abe19380b8a4aaaf3538fae9d1 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -109,7 +109,6 @@ impl Render for BufferSearchBar { .and_then(|weak| weak.upgrade()) .map(|splittable_editor| { let is_split = splittable_editor.read(cx).is_split(); - let focus_handle = splittable_editor.focus_handle(cx); h_flex() .gap_1() .child( @@ -137,7 +136,7 @@ impl Render for BufferSearchBar { .into_any() })) .on_click({ - let focus_handle = focus_handle.clone(); + let splittable_editor = splittable_editor.downgrade(); move |_, window, cx| { if window.modifiers().secondary() { update_settings_file( @@ -150,9 +149,15 @@ impl Render for BufferSearchBar { ); } if is_split { - focus_handle.focus(window, cx); - window - .dispatch_action(ToggleSplitDiff.boxed_clone(), cx); + splittable_editor + .update(cx, |editor, cx| { + editor.toggle_split( + &ToggleSplitDiff, + window, + cx, + ); + }) + .ok(); } } }), @@ -182,6 +187,7 @@ impl Render for BufferSearchBar { .into_any() })) .on_click({ + let splittable_editor = splittable_editor.downgrade(); move |_, window, cx| { if window.modifiers().secondary() { 
update_settings_file( @@ -194,9 +200,15 @@ impl Render for BufferSearchBar { ); } if !is_split { - focus_handle.focus(window, cx); - window - .dispatch_action(ToggleSplitDiff.boxed_clone(), cx); + splittable_editor + .update(cx, |editor, cx| { + editor.toggle_split( + &ToggleSplitDiff, + window, + cx, + ); + }) + .ok(); } } }), From 5ef898d61ebf71c8d9b10cb5c975551b5715ef67 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 23 Feb 2026 17:08:09 -0800 Subject: [PATCH 008/548] Fix handling of excerpt regions in EP CLI (#49936) Previously, we were not computing excerpt regions correctly for EP examples captured from prod. This PR fixes that, and also simplifies the data flow in the EP CLI. Examples either come from a concise spec (like the markdown evals), or are collected from prod. Either way, we compute from them a `ZetaPromptInput`, and the downstream steps like prompt-formatting and scoring are derived from that. Release Notes: - N/A --------- Co-authored-by: Ben Kunkle --- crates/edit_prediction/src/capture_example.rs | 98 +------ crates/edit_prediction/src/example_spec.rs | 78 +----- crates/edit_prediction_cli/src/example.rs | 21 +- .../edit_prediction_cli/src/format_prompt.rs | 85 +++--- .../edit_prediction_cli/src/load_project.rs | 53 ++-- crates/edit_prediction_cli/src/main.rs | 15 -- .../edit_prediction_cli/src/parse_output.rs | 14 +- crates/edit_prediction_cli/src/predict.rs | 4 +- .../edit_prediction_cli/src/pull_examples.rs | 254 +----------------- crates/edit_prediction_cli/src/qa.rs | 29 +- .../src/retrieve_context.rs | 49 +--- .../src/reversal_tracking.rs | 130 +++++---- crates/edit_prediction_cli/src/score.rs | 7 +- .../edit_prediction_cli/src/split_commit.rs | 4 +- crates/edit_prediction_cli/src/synthesize.rs | 2 +- .../src/edit_prediction_ui.rs | 10 +- crates/zeta_prompt/src/zeta_prompt.rs | 12 +- 17 files changed, 191 insertions(+), 674 deletions(-) diff --git a/crates/edit_prediction/src/capture_example.rs 
b/crates/edit_prediction/src/capture_example.rs index d9cccda9fee5abe93b3bff3823e2a6897c22f07f..0fbece7478068d26c0c1a8accf7e93aba8c83b9c 100644 --- a/crates/edit_prediction/src/capture_example.rs +++ b/crates/edit_prediction/src/capture_example.rs @@ -1,10 +1,6 @@ use crate::{ - StoredEvent, - cursor_excerpt::editable_and_context_ranges_for_cursor_position, - example_spec::{ - CapturedEvent, CapturedPromptInput, CapturedRelatedExcerpt, CapturedRelatedFile, - ExampleSpec, MAX_CURSOR_FILE_SIZE, - }, + StoredEvent, cursor_excerpt::editable_and_context_ranges_for_cursor_position, + example_spec::ExampleSpec, }; use anyhow::Result; use buffer_diff::BufferDiffSnapshot; @@ -13,14 +9,13 @@ use gpui::{App, Entity, Task}; use language::{Buffer, ToPoint as _}; use project::{Project, WorktreeId}; use std::{collections::hash_map, fmt::Write as _, ops::Range, path::Path, sync::Arc}; -use text::{BufferSnapshot as TextBufferSnapshot, Point, ToOffset as _}; +use text::{BufferSnapshot as TextBufferSnapshot, Point}; pub fn capture_example( project: Entity, buffer: Entity, cursor_anchor: language::Anchor, mut events: Vec, - related_files: Vec, populate_expected_patch: bool, cx: &mut App, ) -> Option>> { @@ -60,14 +55,6 @@ pub fn capture_example( .map(|s| s.to_string()) .unwrap_or_default(); - let full_cursor_offset = cursor_anchor.to_offset(&snapshot); - let cursor_point = cursor_anchor.to_point(&snapshot); - let cursor_file_content = if snapshot.len() <= MAX_CURSOR_FILE_SIZE { - Some(snapshot.text()) - } else { - None - }; - let (cursor_excerpt, cursor_offset_in_excerpt, cursor_excerpt_range) = cx .background_executor() .spawn(async move { compute_cursor_excerpt(&snapshot, cursor_anchor) }) @@ -109,56 +96,6 @@ pub fn capture_example( rejected_patch = Some(empty_patch); } - let prompt_input = cursor_file_content.map(|content| { - let captured_events: Vec = events - .iter() - .map(|stored_event| { - let zeta_prompt::Event::BufferChange { - path, - old_path, - diff, - predicted, - 
in_open_source_repo, - } = stored_event.event.as_ref(); - CapturedEvent { - path: strip_root_name(path, &root_name).into(), - old_path: strip_root_name(old_path, &root_name).into(), - diff: diff.clone(), - predicted: *predicted, - in_open_source_repo: *in_open_source_repo, - } - }) - .collect(); - - let captured_related_files: Vec = related_files - .iter() - .map(|rf| CapturedRelatedFile { - path: strip_root_name(&rf.path, &root_name).into(), - max_row: rf.max_row, - excerpts: rf - .excerpts - .iter() - .map(|e| CapturedRelatedExcerpt { - row_range: e.row_range.clone(), - text: e.text.to_string(), - }) - .collect(), - }) - .collect(); - - CapturedPromptInput { - cursor_file_content: content, - cursor_offset: full_cursor_offset, - cursor_row: cursor_point.row, - cursor_column: cursor_point.column, - excerpt_start_row: Some(0), - events: captured_events, - related_files: captured_related_files, - in_open_source_repo: false, - zed_version: None, - } - }); - let mut spec = ExampleSpec { name: generate_timestamp_name(), repository_url, @@ -171,7 +108,6 @@ pub fn capture_example( edit_history, expected_patches, rejected_patch, - captured_prompt_input: prompt_input, telemetry: None, human_feedback: Vec::new(), rating: None, @@ -466,7 +402,6 @@ mod tests { buffer.clone(), Anchor::MIN, events, - Vec::new(), true, cx, ) @@ -584,38 +519,11 @@ mod tests { "} .to_string() ), - captured_prompt_input: example.captured_prompt_input.clone(), telemetry: None, human_feedback: Vec::new(), rating: None, } ); - - let prompt_input = example - .captured_prompt_input - .expect("should have captured prompt input"); - assert!( - prompt_input.cursor_file_content.contains("fn main()"), - "cursor_file_content should contain file content" - ); - assert_eq!( - prompt_input.cursor_offset, 0, - "cursor at Anchor::MIN should be offset 0" - ); - assert_eq!( - prompt_input.cursor_row, 0, - "cursor at Anchor::MIN should be row 0" - ); - assert_eq!( - prompt_input.cursor_column, 0, - "cursor at 
Anchor::MIN should be column 0" - ); - assert!(prompt_input.events.len() > 0, "should have captured events"); - assert_eq!( - prompt_input.related_files.len(), - 0, - "should have no related files (none passed)" - ); } fn init_test(cx: &mut TestAppContext) { diff --git a/crates/edit_prediction/src/example_spec.rs b/crates/edit_prediction/src/example_spec.rs index 3053473fd0df5acf46d4bed25fc47e913437d8a6..77cbb92b9eb6bbeb54cc76f888d6266fb2d5fafa 100644 --- a/crates/edit_prediction/src/example_spec.rs +++ b/crates/edit_prediction/src/example_spec.rs @@ -1,7 +1,7 @@ use crate::udiff::DiffLine; use anyhow::{Context as _, Result}; use serde::{Deserialize, Serialize}; -use std::{borrow::Cow, fmt::Write as _, mem, ops::Range, path::Path, sync::Arc}; +use std::{borrow::Cow, fmt::Write as _, mem, path::Path, sync::Arc}; use telemetry_events::EditPredictionRating; pub const CURSOR_POSITION_MARKER: &str = "[CURSOR_POSITION]"; @@ -81,8 +81,6 @@ pub struct ExampleSpec { #[serde(default, skip_serializing_if = "Option::is_none")] pub rejected_patch: Option, #[serde(default, skip_serializing_if = "Option::is_none")] - pub captured_prompt_input: Option, - #[serde(default, skip_serializing_if = "Option::is_none")] pub telemetry: Option, #[serde(default, skip_serializing_if = "Vec::is_empty")] pub human_feedback: Vec, @@ -105,76 +103,6 @@ pub struct TelemetrySource { pub was_shown: bool, } -/// All data needed to run format_prompt without loading the project. 
-#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] -pub struct CapturedPromptInput { - pub cursor_file_content: String, - pub cursor_offset: usize, - pub cursor_row: u32, - pub cursor_column: u32, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub excerpt_start_row: Option, - pub events: Vec, - pub related_files: Vec, - #[serde(default)] - pub in_open_source_repo: bool, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub zed_version: Option, -} - -#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] -pub struct CapturedEvent { - pub path: Arc, - pub old_path: Arc, - pub diff: String, - pub predicted: bool, - #[serde(default)] - pub in_open_source_repo: bool, -} - -impl CapturedEvent { - pub fn to_event(&self) -> zeta_prompt::Event { - zeta_prompt::Event::BufferChange { - path: self.path.clone(), - old_path: self.old_path.clone(), - diff: self.diff.clone(), - predicted: self.predicted, - in_open_source_repo: self.in_open_source_repo, - } - } -} - -#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] -pub struct CapturedRelatedFile { - pub path: Arc, - pub max_row: u32, - pub excerpts: Vec, -} - -impl CapturedRelatedFile { - pub fn to_related_file(&self) -> zeta_prompt::RelatedFile { - zeta_prompt::RelatedFile { - path: self.path.clone(), - max_row: self.max_row, - in_open_source_repo: false, - excerpts: self - .excerpts - .iter() - .map(|e| zeta_prompt::RelatedExcerpt { - row_range: e.row_range.clone(), - text: e.text.clone().into(), - }) - .collect(), - } - } -} - -#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] -pub struct CapturedRelatedExcerpt { - pub row_range: Range, - pub text: String, -} - const REASONING_HEADING: &str = "Reasoning"; const UNCOMMITTED_DIFF_HEADING: &str = "Uncommitted Diff"; const EDIT_HISTORY_HEADING: &str = "Edit History"; @@ -320,7 +248,6 @@ impl ExampleSpec { edit_history: String::new(), expected_patches: Vec::new(), rejected_patch: None, - 
captured_prompt_input: None, telemetry: None, human_feedback: Vec::new(), rating: None, @@ -654,7 +581,6 @@ mod tests { edit_history: String::new(), expected_patches: Vec::new(), rejected_patch: None, - captured_prompt_input: None, telemetry: None, human_feedback: Vec::new(), rating: None, @@ -791,7 +717,6 @@ mod tests { edit_history: String::new(), expected_patches: Vec::new(), rejected_patch: None, - captured_prompt_input: None, telemetry: None, human_feedback: Vec::new(), rating: None, @@ -864,7 +789,6 @@ mod tests { edit_history: String::new(), expected_patches: Vec::new(), rejected_patch: None, - captured_prompt_input: None, telemetry: None, human_feedback: Vec::new(), rating: None, diff --git a/crates/edit_prediction_cli/src/example.rs b/crates/edit_prediction_cli/src/example.rs index 6fad9389dd6f4aa203c1ba9a13fe9e03089ee784..495ca26f97af5f2c2c1dc50ea339881853d9ebbc 100644 --- a/crates/edit_prediction_cli/src/example.rs +++ b/crates/edit_prediction_cli/src/example.rs @@ -15,9 +15,8 @@ use std::{ collections::VecDeque, io::Read, path::{Path, PathBuf}, - sync::Arc, }; -use zeta_prompt::RelatedFile; +use zeta_prompt::ZetaPromptInput; #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Example { @@ -27,7 +26,7 @@ pub struct Example { /// The full content of the file where an edit is being predicted, and the /// actual cursor offset. #[serde(skip_serializing_if = "Option::is_none")] - pub prompt_inputs: Option, + pub prompt_inputs: Option, /// The input and expected output from the edit prediction model. #[serde(skip_serializing_if = "Option::is_none")] @@ -46,6 +45,9 @@ pub struct Example { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub qa: Vec>, + /// The Zed version used to generate this example. + pub zed_version: Option, + /// The application state used to process this example. 
#[serde(skip)] pub state: Option, @@ -59,18 +61,6 @@ pub struct ExampleState { pub _open_buffers: OpenedBuffers, } -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct ExamplePromptInputs { - pub content: String, - pub cursor_row: u32, - pub cursor_column: u32, - pub cursor_offset: usize, - #[serde(default, skip_serializing_if = "Option::is_none")] - pub excerpt_start_row: Option, - pub edit_history: Vec>, - pub related_files: Option>, -} - #[derive(Clone, Debug, Serialize, Deserialize)] pub struct ExamplePrompt { pub input: String, @@ -340,5 +330,6 @@ fn parse_markdown_example(input: &str) -> Result { score: Vec::new(), qa: Vec::new(), state: None, + zed_version: None, }) } diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index a3e83e73bb9d69c22be33936ae73177022dfc165..1ed383c158b453cf75e661ec9dfe72ae6ed6537b 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -6,9 +6,8 @@ use crate::{ retrieve_context::run_context_retrieval, }; use anyhow::{Context as _, Result, anyhow}; -use edit_prediction::{cursor_excerpt::compute_excerpt_ranges, udiff}; -use gpui::{AppContext, AsyncApp}; -use language::{Buffer, Point}; +use edit_prediction::udiff; +use gpui::AsyncApp; use similar::DiffableStr; use std::sync::Arc; use std::{fmt::Write as _, ops::Range}; @@ -31,23 +30,10 @@ pub async fn run_format_prompt( .as_ref() .context("prompt_inputs must be set after context retrieval")?; - let language = app_state - .languages - .load_language_for_file_path(&example.spec.cursor_path) - .await - .ok(); - let snapshot_fut = cx.update(|cx| { - Buffer::build_snapshot( - prompt_inputs.content.as_str().into(), - language, - Some(app_state.languages.clone()), - cx, - ) - }); - let cursor_point = Point::new(prompt_inputs.cursor_row, prompt_inputs.cursor_column); - let snapshot = cx.background_spawn(snapshot_fut).await; - - let (_, _, excerpt_ranges) = 
compute_excerpt_ranges(cursor_point, &snapshot); + let excerpt_ranges = prompt_inputs + .excerpt_ranges + .as_ref() + .context("prompt_inputs must have excerpt_ranges")?; match args.provider { PredictionProvider::Teacher(_) | PredictionProvider::TeacherNonBatching(_) => { @@ -55,7 +41,7 @@ pub async fn run_format_prompt( let zeta_format = ZetaFormat::default(); let (editable_range, context_range) = - excerpt_range_for_format(zeta_format, &excerpt_ranges); + excerpt_range_for_format(zeta_format, excerpt_ranges); let prompt = TeacherPrompt::format_prompt(example, editable_range, context_range); example.prompt = Some(ExamplePrompt { @@ -70,27 +56,25 @@ pub async fn run_format_prompt( step_progress.set_substatus("formatting zeta2 prompt"); let (editable_range, context_range) = - excerpt_range_for_format(zeta_format, &excerpt_ranges); + excerpt_range_for_format(zeta_format, excerpt_ranges); let context_start = context_range.start; - let cursor_offset_in_excerpt = prompt_inputs.cursor_offset - context_start; + let cursor_offset_in_excerpt = prompt_inputs.cursor_offset_in_excerpt - context_start; let editable_range_in_excerpt = (editable_range.start - context_start)..(editable_range.end - context_start); let input = zeta_prompt::ZetaPromptInput { - cursor_path: example.spec.cursor_path.clone(), - cursor_excerpt: prompt_inputs.content[context_range].to_string().into(), + cursor_path: prompt_inputs.cursor_path.clone(), + cursor_excerpt: prompt_inputs.cursor_excerpt[context_range] + .to_string() + .into(), editable_range_in_excerpt, cursor_offset_in_excerpt, excerpt_start_row: prompt_inputs.excerpt_start_row, - events: prompt_inputs.edit_history.clone(), - related_files: prompt_inputs.related_files.clone().unwrap_or_default(), - excerpt_ranges: Some(excerpt_ranges), + events: prompt_inputs.events.clone(), + related_files: prompt_inputs.related_files.clone(), + excerpt_ranges: prompt_inputs.excerpt_ranges.clone(), preferred_model: None, - in_open_source_repo: example - .spec 
- .captured_prompt_input - .as_ref() - .map_or(false, |input| input.in_open_source_repo), + in_open_source_repo: prompt_inputs.in_open_source_repo, can_collect_data: false, }; let prompt = format_zeta_prompt(&input, zeta_format); @@ -241,14 +225,12 @@ impl TeacherPrompt { new_editable_region.insert(0, '\n'); } - let (editable_region_offset, _) = prompt_inputs - .content + let excerpt = prompt_inputs.cursor_excerpt.as_ref(); + let (editable_region_offset, _) = excerpt .match_indices(&old_editable_region) - .min_by_key(|(index, _)| index.abs_diff(prompt_inputs.cursor_offset)) + .min_by_key(|(index, _)| index.abs_diff(prompt_inputs.cursor_offset_in_excerpt)) .context("editable region not found in prompt content")?; - let editable_region_start_line = prompt_inputs.content[..editable_region_offset] - .matches('\n') - .count(); + let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count(); // Use full context so cursor offset (relative to editable region start) aligns with diff content let editable_region_lines = old_editable_region.lines().count() as u32; @@ -273,7 +255,7 @@ impl TeacherPrompt { &example.spec.cursor_path, editable_region_cursor_offset, &new_editable_region, - &prompt_inputs.content, + excerpt, editable_region_offset, editable_region_start_line, ) @@ -298,10 +280,7 @@ impl TeacherPrompt { } pub fn format_context(example: &Example) -> String { - let related_files = example - .prompt_inputs - .as_ref() - .and_then(|pi| pi.related_files.as_ref()); + let related_files = example.prompt_inputs.as_ref().map(|pi| &pi.related_files); let Some(related_files) = related_files else { return "(No context)".to_string(); @@ -342,16 +321,18 @@ impl TeacherPrompt { let mut result = String::new(); let prompt_inputs = example.prompt_inputs.as_ref().unwrap(); + let excerpt = prompt_inputs.cursor_excerpt.as_ref(); + let cursor_offset = prompt_inputs.cursor_offset_in_excerpt; let path_str = example.spec.cursor_path.to_string_lossy(); 
result.push_str(&format!("`````{path_str}\n")); - result.push_str(&prompt_inputs.content[context_range.start..editable_range.start]); + result.push_str(&excerpt[context_range.start..editable_range.start]); result.push_str(Self::EDITABLE_REGION_START); - result.push_str(&prompt_inputs.content[editable_range.start..prompt_inputs.cursor_offset]); + result.push_str(&excerpt[editable_range.start..cursor_offset]); result.push_str(Self::USER_CURSOR_MARKER); - result.push_str(&prompt_inputs.content[prompt_inputs.cursor_offset..editable_range.end]); + result.push_str(&excerpt[cursor_offset..editable_range.end]); result.push_str(Self::EDITABLE_REGION_END); - result.push_str(&prompt_inputs.content[editable_range.end..context_range.end]); + result.push_str(&excerpt[editable_range.end..context_range.end]); result.push_str("\n`````"); result @@ -402,16 +383,16 @@ pub fn extract_cursor_excerpt_from_example(example: &Example) -> Option // Fallback: construct from prompt_inputs if available let prompt_inputs = example.prompt_inputs.as_ref()?; - let content = &prompt_inputs.content; - let cursor_offset = prompt_inputs.cursor_offset; + let excerpt = prompt_inputs.cursor_excerpt.as_ref(); + let cursor_offset = prompt_inputs.cursor_offset_in_excerpt; // Simple fallback: just show content around cursor with markers let path_str = example.spec.cursor_path.to_string_lossy(); let mut result = format!("`````{path_str}\n"); result.push_str(TeacherPrompt::EDITABLE_REGION_START); - result.push_str(&content[..cursor_offset]); + result.push_str(&excerpt[..cursor_offset]); result.push_str(TeacherPrompt::USER_CURSOR_MARKER); - result.push_str(&content[cursor_offset..]); + result.push_str(&excerpt[cursor_offset..]); result.push_str(TeacherPrompt::EDITABLE_REGION_END); result.push_str("\n`````"); diff --git a/crates/edit_prediction_cli/src/load_project.rs b/crates/edit_prediction_cli/src/load_project.rs index 5faf9f21a02286da0da45a79462fac488d94a1e0..680af6f0168c766c6066a91a8f57fe4573b46403 100644 
--- a/crates/edit_prediction_cli/src/load_project.rs +++ b/crates/edit_prediction_cli/src/load_project.rs @@ -1,5 +1,5 @@ use crate::{ - example::{Example, ExamplePromptInputs, ExampleState}, + example::{Example, ExampleState}, git, headless::EpAppState, progress::{ExampleProgress, InfoStyle, Step, StepProgress}, @@ -7,6 +7,7 @@ use crate::{ use anyhow::{Context as _, Result}; use edit_prediction::{ EditPredictionStore, + cursor_excerpt::compute_excerpt_ranges, udiff::{OpenedBuffers, refresh_worktree_entries, strip_diff_path_prefix}, }; use futures::AsyncWriteExt as _; @@ -14,6 +15,7 @@ use gpui::{AsyncApp, Entity}; use language::{Anchor, Buffer, LanguageNotFound, ToOffset, ToPoint}; use project::{Project, ProjectPath, buffer_store::BufferStoreEvent}; use std::{fs, path::PathBuf, sync::Arc}; +use zeta_prompt::ZetaPromptInput; pub async fn run_load_project( example: &mut Example, @@ -58,7 +60,7 @@ pub async fn run_load_project( .read_with(&cx, |buffer, _| buffer.parsing_idle()) .await; - let edit_history = ep_store.update(&mut cx, |store, cx| { + let events: Vec> = ep_store.update(&mut cx, |store, cx| { store .edit_history_for_project(&project, cx) .into_iter() @@ -66,25 +68,46 @@ pub async fn run_load_project( .collect() }); + let existing_related_files = example + .prompt_inputs + .take() + .map(|inputs| inputs.related_files) + .unwrap_or_default(); + let (prompt_inputs, language_name) = buffer.read_with(&cx, |buffer, _cx| { - let cursor_point = cursor_position.to_point(&buffer); + let snapshot = buffer.snapshot(); + let cursor_point = cursor_position.to_point(&snapshot); + let cursor_offset = cursor_position.to_offset(&snapshot); let language_name = buffer .language() .map(|l| l.name().to_string()) .unwrap_or_else(|| "Unknown".to_string()); + + let (full_context_point_range, full_context_offset_range, excerpt_ranges) = + compute_excerpt_ranges(cursor_point, &snapshot); + + let cursor_excerpt: Arc = buffer + .text_for_range(full_context_offset_range.clone()) + 
.collect::() + .into(); + let cursor_offset_in_excerpt = cursor_offset - full_context_offset_range.start; + let excerpt_start_row = Some(full_context_point_range.start.row); + + let editable_range_in_excerpt = excerpt_ranges.editable_350.clone(); + ( - ExamplePromptInputs { - content: buffer.text(), - cursor_row: cursor_point.row, - cursor_column: cursor_point.column, - cursor_offset: cursor_position.to_offset(&buffer), - excerpt_start_row: Some(0), - edit_history, - related_files: example - .prompt_inputs - .take() - .map(|inputs| inputs.related_files) - .unwrap_or_default(), + ZetaPromptInput { + cursor_path: example.spec.cursor_path.clone(), + cursor_excerpt, + editable_range_in_excerpt, + cursor_offset_in_excerpt, + excerpt_start_row, + events, + related_files: existing_related_files, + excerpt_ranges: Some(excerpt_ranges), + preferred_model: None, + in_open_source_repo: false, + can_collect_data: false, }, language_name, ) diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs index 836cd433657199125100051dc428bd7636360d30..03f94a4dc47388c9a56169f2be0280af33dc6f1d 100644 --- a/crates/edit_prediction_cli/src/main.rs +++ b/crates/edit_prediction_cli/src/main.rs @@ -684,21 +684,6 @@ async fn load_examples( } else { let max_rows_per_timestamp = remaining_limit_for_snowflake.unwrap_or(5000); - if !captured_after_timestamps.is_empty() { - captured_after_timestamps.sort(); - - let mut captured_examples = pull_examples::fetch_captured_examples_after( - http_client.clone(), - &captured_after_timestamps, - max_rows_per_timestamp, - remaining_offset, - background_executor.clone(), - Some(MIN_CAPTURE_VERSION), - ) - .await?; - examples.append(&mut captured_examples); - } - if !rejected_after_timestamps.is_empty() { rejected_after_timestamps.sort(); diff --git a/crates/edit_prediction_cli/src/parse_output.rs b/crates/edit_prediction_cli/src/parse_output.rs index 
9d3608e0d743e8bd06baaa378a676e4f8005fe05..4b8af44785c1781de772f569c012ee64eee48aad 100644 --- a/crates/edit_prediction_cli/src/parse_output.rs +++ b/crates/edit_prediction_cli/src/parse_output.rs @@ -133,20 +133,18 @@ fn parse_zeta2_output( } let old_text_trimmed = old_text.trim_end_matches('\n'); - let (editable_region_offset, _) = prompt_inputs - .content + let excerpt = prompt_inputs.cursor_excerpt.as_ref(); + let (editable_region_offset, _) = excerpt .match_indices(old_text_trimmed) - .min_by_key(|(index, _)| index.abs_diff(prompt_inputs.cursor_offset)) + .min_by_key(|(index, _)| index.abs_diff(prompt_inputs.cursor_offset_in_excerpt)) .with_context(|| { format!( "could not find editable region in content.\nLooking for:\n{}\n\nIn content:\n{}", - old_text_trimmed, &prompt_inputs.content + old_text_trimmed, excerpt ) })?; - let editable_region_start_line = prompt_inputs.content[..editable_region_offset] - .matches('\n') - .count(); + let editable_region_start_line = excerpt[..editable_region_offset].matches('\n').count(); // Use full context so cursor offset (relative to editable region start) aligns with diff content let editable_region_lines = old_text_normalized.lines().count() as u32; @@ -170,7 +168,7 @@ fn parse_zeta2_output( &example.spec.cursor_path, editable_region_cursor_offset, &new_text, - &prompt_inputs.content, + excerpt, editable_region_offset, editable_region_start_line, ) diff --git a/crates/edit_prediction_cli/src/predict.rs b/crates/edit_prediction_cli/src/predict.rs index 6ad7880bda369fff8e35ac77c422471f989cb8b7..e02fcbdb425a62fb478b8be36fdd034eede27622 100644 --- a/crates/edit_prediction_cli/src/predict.rs +++ b/crates/edit_prediction_cli/src/predict.rs @@ -53,11 +53,10 @@ pub async fn run_prediction( ); }; - run_context_retrieval(example, app_state.clone(), example_progress, cx.clone()).await?; - if let PredictionProvider::Teacher(backend) | PredictionProvider::TeacherNonBatching(backend) = provider { + run_context_retrieval(example, 
app_state.clone(), example_progress, cx.clone()).await?; run_format_prompt( example, &FormatPromptArgs { provider }, @@ -81,6 +80,7 @@ pub async fn run_prediction( } run_load_project(example, app_state.clone(), example_progress, cx.clone()).await?; + run_context_retrieval(example, app_state.clone(), example_progress, cx.clone()).await?; let step_progress = example_progress.start(Step::Predict); diff --git a/crates/edit_prediction_cli/src/pull_examples.rs b/crates/edit_prediction_cli/src/pull_examples.rs index 91ea85e019973ec04cc747606e105c5e9ef50988..cacfc9bb679acdcb3c709736c6e4b5e79af861e8 100644 --- a/crates/edit_prediction_cli/src/pull_examples.rs +++ b/crates/edit_prediction_cli/src/pull_examples.rs @@ -15,15 +15,11 @@ use zeta_prompt::ZetaPromptInput; use crate::example::Example; use crate::progress::{InfoStyle, Progress, Step}; const EDIT_PREDICTION_DEPLOYMENT_EVENT: &str = "Edit Prediction Deployment"; -use edit_prediction::example_spec::{ - CapturedEvent, CapturedPromptInput, CapturedRelatedExcerpt, CapturedRelatedFile, ExampleSpec, - TelemetrySource, -}; +use edit_prediction::example_spec::{ExampleSpec, TelemetrySource}; use std::fmt::Write as _; pub(crate) const SNOWFLAKE_SUCCESS_CODE: &str = "090001"; pub(crate) const SNOWFLAKE_ASYNC_IN_PROGRESS_CODE: &str = "333334"; -const EDIT_PREDICTION_EXAMPLE_CAPTURED_EVENT: &str = "Edit Prediction Example Captured"; const PREDICTIVE_EDIT_REQUESTED_EVENT: &str = "Predictive Edit Requested"; const PREDICTIVE_EDIT_REJECTED_EVENT: &str = "Predictive Edit Rejected"; const EDIT_PREDICTION_RATED_EVENT: &str = "Edit Prediction Rated"; @@ -71,135 +67,6 @@ pub fn parse_rated_after_input(input: &str) -> Option<(&str, Option, - after_timestamps: &[String], - max_rows_per_timestamp: usize, - offset: usize, - background_executor: BackgroundExecutor, - _min_capture_version: Option, -) -> Result> { - if after_timestamps.is_empty() { - return Ok(Vec::new()); - } - - let progress = Progress::global(); - - let token = 
std::env::var("EP_SNOWFLAKE_API_KEY") - .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?; - let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context( - "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://.snowflakecomputing.com)", - )?; - let role = std::env::var("EP_SNOWFLAKE_ROLE").ok(); - - let mut all_examples = Vec::new(); - - for after_date in after_timestamps.iter() { - let step_progress_name = format!(">{after_date}"); - let step_progress = progress.start(Step::PullExamples, &step_progress_name); - step_progress.set_substatus("querying"); - - let statement = indoc! {r#" - SELECT - event_properties:example AS example - FROM events - WHERE event_type = ? - AND time > TRY_TO_TIMESTAMP_NTZ(?) - AND event_properties:can_collect_data = true - ORDER BY time ASC - LIMIT ? - OFFSET ? - "#}; - - let request = json!({ - "statement": statement, - "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS, - "database": "EVENTS", - "schema": "PUBLIC", - "warehouse": "DBT", - "role": role, - "bindings": { - "1": { "type": "TEXT", "value": EDIT_PREDICTION_EXAMPLE_CAPTURED_EVENT }, - "2": { "type": "TEXT", "value": after_date }, - "3": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, - "4": { "type": "FIXED", "value": offset.to_string() } - } - }); - - let response = run_sql_with_polling( - http_client.clone(), - &base_url, - &token, - &request, - &step_progress, - background_executor.clone(), - ) - .await?; - - let total_rows = response - .result_set_meta_data - .as_ref() - .and_then(|m| m.num_rows) - .unwrap_or(response.data.len() as i64); - - let num_partitions = response - .result_set_meta_data - .as_ref() - .map(|m| m.partition_info.len()) - .unwrap_or(1) - .max(1); - - step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); - step_progress.set_substatus("parsing"); - - let example_index = response - .result_set_meta_data - .as_ref() - .and_then(|m| { - 
m.row_type.iter().enumerate().find_map(|(index, col)| { - if col.name.eq_ignore_ascii_case("example") { - Some(index) - } else { - None - } - }) - }) - .unwrap_or(0); - - all_examples.extend(examples_from_response(&response, example_index)?); - - if num_partitions > 1 { - let statement_handle = response - .statement_handle - .as_ref() - .context("response has multiple partitions but no statementHandle")?; - - for partition in 1..num_partitions { - step_progress.set_substatus(format!( - "fetching partition {}/{}", - partition + 1, - num_partitions - )); - - let partition_response = fetch_partition( - http_client.clone(), - &base_url, - &token, - statement_handle, - partition, - ) - .await?; - - all_examples.extend(examples_from_response(&partition_response, example_index)?); - } - } - - step_progress.set_substatus("done"); - } - - Ok(all_examples) -} - #[derive(Debug, Clone, Deserialize)] #[serde(rename_all = "camelCase")] pub(crate) struct SnowflakeStatementResponse { @@ -236,56 +103,6 @@ struct SnowflakeColumnMeta { name: String, } -fn examples_from_response( - response: &SnowflakeStatementResponse, - example_index: usize, -) -> Result + '_> { - if let Some(code) = &response.code { - if code != SNOWFLAKE_SUCCESS_CODE { - anyhow::bail!( - "snowflake sql api returned error code={code} message={}", - response.message.as_deref().unwrap_or("") - ); - } - } - - let iter = response.data.iter().enumerate().filter_map(move |(row_index, data_row)| { - let Some(example_value) = data_row.get(example_index) else { - return None; - }; - if example_value.is_null() { - return None; - } - - let parse_result = match example_value { - JsonValue::String(encoded_json) => serde_json::from_str::(encoded_json), - _ => serde_json::from_value::(example_value.clone()), - }; - - match parse_result { - Ok(spec) => Some(Example { - spec, - prompt_inputs: None, - prompt: None, - predictions: Vec::new(), - score: Vec::new(), - qa: Vec::new(), - state: None, - }), - Err(error) => { - let raw_json 
= serde_json::to_string_pretty(example_value) - .unwrap_or_else(|_| "".to_string()); - log::error!( - "failed to parse ExampleSpec for row {row_index}: {error:#}\nraw json:\n{raw_json}" - ); - None - } - } - }); - - Ok(iter) -} - async fn run_sql_with_polling( http_client: Arc, base_url: &str, @@ -1306,48 +1123,9 @@ fn build_example_from_snowflake( rejection: Option, zed_version: Option, ) -> Example { - let events: Vec = input - .events - .iter() - .map(|event| match event.as_ref() { - zeta_prompt::Event::BufferChange { - path, - old_path, - diff, - predicted, - in_open_source_repo, - } => CapturedEvent { - path: path.clone(), - old_path: old_path.clone(), - diff: diff.clone(), - predicted: *predicted, - in_open_source_repo: *in_open_source_repo, - }, - }) - .collect(); - - let related_files: Vec = input - .related_files - .iter() - .map(|rf| CapturedRelatedFile { - path: rf.path.clone(), - max_row: rf.max_row, - excerpts: rf - .excerpts - .iter() - .map(|e| CapturedRelatedExcerpt { - row_range: e.row_range.clone(), - text: e.text.to_string(), - }) - .collect(), - }) - .collect(); - let cursor_excerpt = input.cursor_excerpt.as_ref(); let cursor_offset = input.cursor_offset_in_excerpt; - let (cursor_row, cursor_column) = compute_row_column(cursor_excerpt, cursor_offset); - let mut edit_history = String::new(); for event in &input.events { zeta_prompt::write_event(&mut edit_history, event); @@ -1371,17 +1149,6 @@ fn build_example_from_snowflake( edit_history, expected_patches: Vec::new(), rejected_patch: None, - captured_prompt_input: Some(CapturedPromptInput { - cursor_file_content: cursor_excerpt.to_string(), - cursor_offset, - cursor_row, - cursor_column, - excerpt_start_row: None, - events, - related_files, - in_open_source_repo: input.in_open_source_repo, - zed_version, - }), telemetry: Some(TelemetrySource { request_id, device_id, @@ -1395,7 +1162,8 @@ fn build_example_from_snowflake( Example { spec, - prompt_inputs: None, + zed_version, + prompt_inputs: 
Some(input), prompt: None, predictions: Vec::new(), score: Vec::new(), @@ -1404,22 +1172,6 @@ fn build_example_from_snowflake( } } -fn compute_row_column(text: &str, offset: usize) -> (u32, u32) { - let mut row = 0u32; - let mut last_newline_offset = 0; - for (i, c) in text.char_indices() { - if i >= offset { - break; - } - if c == '\n' { - row += 1; - last_newline_offset = i + 1; - } - } - let column = (offset - last_newline_offset) as u32; - (row, column) -} - fn build_cursor_position(excerpt: &str, cursor_offset: usize) -> String { let before = &excerpt[..cursor_offset.min(excerpt.len())]; let after = &excerpt[cursor_offset.min(excerpt.len())..]; diff --git a/crates/edit_prediction_cli/src/qa.rs b/crates/edit_prediction_cli/src/qa.rs index c84d4b5cbe31ced383113c5dfb425c07e5cdc73e..a2c5ad30b9487d301b3cd158c363d21955fb4a16 100644 --- a/crates/edit_prediction_cli/src/qa.rs +++ b/crates/edit_prediction_cli/src/qa.rs @@ -82,22 +82,19 @@ pub fn build_prompt(example: &Example) -> Result { extract_cursor_excerpt_from_example(example).context("failed to extract cursor excerpt")?; let mut edit_history = String::new(); - for event in &prompt_inputs.edit_history { - match event.as_ref() { - zeta_prompt::Event::BufferChange { - path, - old_path, - diff, - predicted: _, - in_open_source_repo: _, - } => { - edit_history.push_str(&format!("--- a{}\n", old_path.display())); - edit_history.push_str(&format!("+++ b{}\n", path.display())); - let diff_word_diff = unified_to_word_diff(diff); - edit_history.push_str(&diff_word_diff); - edit_history.push_str("\n\n"); - } - } + for event in &prompt_inputs.events { + let zeta_prompt::Event::BufferChange { + path, + old_path, + diff, + predicted: _, + in_open_source_repo: _, + } = event.as_ref(); + edit_history.push_str(&format!("--- a{}\n", old_path.display())); + edit_history.push_str(&format!("+++ b{}\n", path.display())); + let diff_word_diff = unified_to_word_diff(&diff); + edit_history.push_str(&diff_word_diff); + 
edit_history.push_str("\n\n"); } let prompt_template = crate::prompt_assets::get_prompt("qa.md"); diff --git a/crates/edit_prediction_cli/src/retrieve_context.rs b/crates/edit_prediction_cli/src/retrieve_context.rs index a000f69c768b3ac370e4f6a202e8f1250a28d6da..18ee3c1b0ec1456b02bb145c98e669b777048385 100644 --- a/crates/edit_prediction_cli/src/retrieve_context.rs +++ b/crates/edit_prediction_cli/src/retrieve_context.rs @@ -1,5 +1,5 @@ use crate::{ - example::{Example, ExamplePromptInputs}, + example::Example, headless::EpAppState, load_project::run_load_project, progress::{ExampleProgress, InfoStyle, Step, StepProgress}, @@ -20,41 +20,18 @@ pub async fn run_context_retrieval( example_progress: &ExampleProgress, mut cx: AsyncApp, ) -> anyhow::Result<()> { - if example - .prompt_inputs - .as_ref() - .is_some_and(|inputs| inputs.related_files.is_some()) - { - return Ok(()); - } - - if let Some(captured) = &example.spec.captured_prompt_input { - let step_progress = example_progress.start(Step::Context); - step_progress.set_substatus("using captured prompt input"); - - let edit_history: Vec> = captured - .events - .iter() - .map(|e| Arc::new(e.to_event())) - .collect(); - - let related_files: Vec = captured - .related_files - .iter() - .map(|rf| rf.to_related_file()) - .collect(); - - example.prompt_inputs = Some(ExamplePromptInputs { - content: captured.cursor_file_content.clone(), - cursor_row: captured.cursor_row, - cursor_column: captured.cursor_column, - cursor_offset: captured.cursor_offset, - excerpt_start_row: captured.excerpt_start_row, - edit_history, - related_files: Some(related_files), - }); + if example.prompt_inputs.is_some() { + if example.spec.repository_url.is_empty() { + return Ok(()); + } - return Ok(()); + if example + .prompt_inputs + .as_ref() + .is_some_and(|inputs| !inputs.related_files.is_empty()) + { + return Ok(()); + } } run_load_project(example, app_state.clone(), example_progress, cx.clone()).await?; @@ -95,7 +72,7 @@ pub async fn 
run_context_retrieval( step_progress.set_info(format!("{} excerpts", excerpt_count), InfoStyle::Normal); if let Some(prompt_inputs) = example.prompt_inputs.as_mut() { - prompt_inputs.related_files = Some(context_files); + prompt_inputs.related_files = context_files; } Ok(()) } diff --git a/crates/edit_prediction_cli/src/reversal_tracking.rs b/crates/edit_prediction_cli/src/reversal_tracking.rs index c8c903d266085e1add1fcb49cf42bc6fb1d5480a..cc558939e9aecf826afce77d6205b0ff49ab87bc 100644 --- a/crates/edit_prediction_cli/src/reversal_tracking.rs +++ b/crates/edit_prediction_cli/src/reversal_tracking.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use edit_prediction::udiff::apply_diff_to_string; use language::{char_diff, text_diff}; -use crate::example::ExamplePromptInputs; +use zeta_prompt::ZetaPromptInput; fn apply_diff_to_string_lenient(diff_str: &str, text: &str) -> String { let hunks = parse_diff_hunks(diff_str); @@ -609,13 +609,13 @@ fn is_predicted_event(event: &zeta_prompt::Event) -> bool { } pub fn compute_prediction_reversal_ratio( - prompt_inputs: &ExamplePromptInputs, + prompt_inputs: &ZetaPromptInput, predicted_content: &str, cursor_path: &Path, ) -> f32 { - let current_content = &prompt_inputs.content; + let current_content: &str = prompt_inputs.cursor_excerpt.as_ref(); - let edit_history: &[Arc] = &prompt_inputs.edit_history; + let edit_history: &[Arc] = &prompt_inputs.events; let relevant_events = filter_edit_history_by_path(edit_history, cursor_path); let most_recent = match relevant_events.last() { @@ -656,6 +656,26 @@ mod tests { use edit_prediction::udiff::apply_diff_to_string; use indoc::indoc; + fn make_test_prompt_inputs( + content: &str, + events: Vec>, + excerpt_start_row: Option, + ) -> ZetaPromptInput { + ZetaPromptInput { + cursor_path: Arc::from(Path::new("src/test.rs")), + cursor_excerpt: content.into(), + editable_range_in_excerpt: 0..content.len(), + cursor_offset_in_excerpt: 0, + excerpt_start_row, + events, + related_files: Vec::new(), + 
excerpt_ranges: None, + preferred_model: None, + in_open_source_repo: false, + can_collect_data: false, + } + } + #[test] fn test_reversal_overlap() { struct Case { @@ -1729,17 +1749,13 @@ mod tests { #[test] fn test_compute_prediction_reversal_ratio_full_file() { - let prompt_inputs = ExamplePromptInputs { - content: indoc! {" + let prompt_inputs = make_test_prompt_inputs( + indoc! {" line1 user_added line2 - "} - .to_string(), - cursor_row: 0, - cursor_column: 0, - cursor_offset: 0, - edit_history: vec![Arc::new(zeta_prompt::Event::BufferChange { + "}, + vec![Arc::new(zeta_prompt::Event::BufferChange { path: Arc::from(Path::new("src/test.rs")), old_path: Arc::from(Path::new("src/test.rs")), diff: indoc! {" @@ -1752,9 +1768,8 @@ mod tests { predicted: false, in_open_source_repo: false, })], - excerpt_start_row: None, - related_files: None, - }; + None, + ); let predicted = indoc! {" line1 @@ -1772,17 +1787,13 @@ mod tests { #[test] fn test_compute_prediction_reversal_ratio_with_excerpt() { - let prompt_inputs = ExamplePromptInputs { - content: indoc! {" + let prompt_inputs = make_test_prompt_inputs( + indoc! {" line10 user_added line11 - "} - .to_string(), - cursor_row: 0, - cursor_column: 0, - cursor_offset: 0, - edit_history: vec![Arc::new(zeta_prompt::Event::BufferChange { + "}, + vec![Arc::new(zeta_prompt::Event::BufferChange { path: Arc::from(Path::new("src/test.rs")), old_path: Arc::from(Path::new("src/test.rs")), diff: indoc! {" @@ -1795,9 +1806,8 @@ mod tests { predicted: false, in_open_source_repo: false, })], - excerpt_start_row: Some(10), - related_files: None, - }; + Some(10), + ); let predicted = indoc! {" line10 @@ -1815,18 +1825,13 @@ mod tests { #[test] fn test_compute_prediction_reversal_ratio_no_history() { - let prompt_inputs = ExamplePromptInputs { - content: indoc! {" + let prompt_inputs = make_test_prompt_inputs( + indoc! 
{" original content - "} - .to_string(), - cursor_row: 0, - cursor_column: 0, - cursor_offset: 0, - edit_history: vec![], - excerpt_start_row: None, - related_files: None, - }; + "}, + vec![], + None, + ); let predicted = indoc! {" completely different @@ -1842,17 +1847,13 @@ mod tests { #[test] fn test_compute_prediction_reversal_ratio_path_filtering() { - let prompt_inputs = ExamplePromptInputs { - content: indoc! {" + let prompt_inputs = make_test_prompt_inputs( + indoc! {" line1 user_added line2 - "} - .to_string(), - cursor_row: 0, - cursor_column: 0, - cursor_offset: 0, - edit_history: vec![Arc::new(zeta_prompt::Event::BufferChange { + "}, + vec![Arc::new(zeta_prompt::Event::BufferChange { path: Arc::from(Path::new("src/other.rs")), old_path: Arc::from(Path::new("src/other.rs")), diff: indoc! {" @@ -1865,9 +1866,8 @@ mod tests { predicted: false, in_open_source_repo: false, })], - excerpt_start_row: None, - related_files: None, - }; + None, + ); let predicted = indoc! {" line1 @@ -1884,17 +1884,13 @@ mod tests { #[test] fn test_compute_prediction_reversal_ratio_lenient_fallback() { - let prompt_inputs = ExamplePromptInputs { - content: indoc! {" + let prompt_inputs = make_test_prompt_inputs( + indoc! {" actual_line1 user_added actual_line2 - "} - .to_string(), - cursor_row: 0, - cursor_column: 0, - cursor_offset: 0, - edit_history: vec![Arc::new(zeta_prompt::Event::BufferChange { + "}, + vec![Arc::new(zeta_prompt::Event::BufferChange { path: Arc::from(Path::new("src/test.rs")), old_path: Arc::from(Path::new("src/test.rs")), diff: indoc! {" @@ -1907,9 +1903,8 @@ mod tests { predicted: false, in_open_source_repo: false, })], - excerpt_start_row: None, - related_files: None, - }; + None, + ); let predicted = indoc! {" actual_line1 @@ -1955,18 +1950,14 @@ mod tests { #[test] fn test_only_most_recent_edit_tracked() { - let prompt_inputs = ExamplePromptInputs { - content: indoc! {" + let prompt_inputs = make_test_prompt_inputs( + indoc! 
{" line1 first_add second_add line2 - "} - .to_string(), - cursor_row: 0, - cursor_column: 0, - cursor_offset: 0, - edit_history: vec![ + "}, + vec![ Arc::new(zeta_prompt::Event::BufferChange { path: Arc::from(Path::new("src/test.rs")), old_path: Arc::from(Path::new("src/test.rs")), @@ -1994,9 +1985,8 @@ mod tests { in_open_source_repo: false, }), ], - excerpt_start_row: None, - related_files: None, - }; + None, + ); let predicted = indoc! {" line1 diff --git a/crates/edit_prediction_cli/src/score.rs b/crates/edit_prediction_cli/src/score.rs index d1514f7bf93e124407c1c1557743fa16e0cd240c..8436dc4a4b26206eb41bafd5b9de8645cb0abb5e 100644 --- a/crates/edit_prediction_cli/src/score.rs +++ b/crates/edit_prediction_cli/src/score.rs @@ -30,11 +30,11 @@ pub async fn run_scoring( let progress = example_progress.start(Step::Score); progress.set_substatus("applying patches"); - let original_text = &example + let prompt_inputs = example .prompt_inputs .as_ref() - .context("prompt_inputs is required for scoring - run prediction first or ensure JSON includes prompt_inputs")? 
- .content; + .context("prompt_inputs is required for scoring - run prediction first or ensure JSON includes prompt_inputs")?; + let original_text: &str = prompt_inputs.cursor_excerpt.as_ref(); let expected_patches_with_cursors = example.spec.expected_patches_with_cursor_positions(); let expected_texts: Vec = expected_patches_with_cursors @@ -80,7 +80,6 @@ pub async fn run_scoring( deleted_tokens: 0, }; - let prompt_inputs = example.prompt_inputs.as_ref().unwrap(); let cursor_path = example.spec.cursor_path.as_ref(); progress.set_substatus("computing metrics"); diff --git a/crates/edit_prediction_cli/src/split_commit.rs b/crates/edit_prediction_cli/src/split_commit.rs index 08b327f1f58bebd45b30c48485286616cd152527..b70ac354b5c79b1b58d6f4027a53a69b2e0080b6 100644 --- a/crates/edit_prediction_cli/src/split_commit.rs +++ b/crates/edit_prediction_cli/src/split_commit.rs @@ -371,7 +371,7 @@ pub fn generate_evaluation_example_from_ordered_commit( reasoning: None, uncommitted_diff: String::new(), rejected_patch: None, - captured_prompt_input: None, + telemetry: None, human_feedback: Vec::new(), rating: None, @@ -1370,7 +1370,7 @@ Date: Mon Jan 1 00:00:00 2024 reasoning: None, uncommitted_diff: String::new(), rejected_patch: None, - captured_prompt_input: None, + telemetry: None, human_feedback: Vec::new(), rating: None, diff --git a/crates/edit_prediction_cli/src/synthesize.rs b/crates/edit_prediction_cli/src/synthesize.rs index 31bae0af63858b4aaf45670bb543d8cf810bb1a1..3977804a8fc686e547d5b518bc64bd836a1afc7f 100644 --- a/crates/edit_prediction_cli/src/synthesize.rs +++ b/crates/edit_prediction_cli/src/synthesize.rs @@ -792,7 +792,7 @@ async fn build_example( edit_history, expected_patches: vec![expected_patch_with_header], rejected_patch: None, - captured_prompt_input: None, + telemetry: None, human_feedback: Vec::new(), rating: None, diff --git a/crates/edit_prediction_ui/src/edit_prediction_ui.rs b/crates/edit_prediction_ui/src/edit_prediction_ui.rs index 
774bc19af304d36cad43aedbfe088b4daca52d62..1a6c030239631536e143000e2eef37fdd0e599c8 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_ui.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_ui.rs @@ -154,15 +154,7 @@ fn capture_example_as_markdown( .text_anchor_for_position(editor.selections.newest_anchor().head(), cx)?; let ep_store = EditPredictionStore::try_global(cx)?; let events = ep_store.update(cx, |store, cx| store.edit_history_for_project(&project, cx)); - let example = capture_example( - project.clone(), - buffer, - cursor_anchor, - events, - Vec::new(), - true, - cx, - )?; + let example = capture_example(project.clone(), buffer, cursor_anchor, events, true, cx)?; let examples_dir = AllLanguageSettings::get_global(cx) .edit_predictions diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index 8a2c3477bc2e64fb056f403149e04244897b31cd..eb5be311db1539f96b96b50e372d7733aa69f611 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -19,7 +19,7 @@ fn estimate_tokens(bytes: usize) -> usize { } /// The client's preferred edit prediction model. The server may override this. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum EditPredictionModelKind { Zeta1, Zeta2, @@ -28,7 +28,7 @@ pub enum EditPredictionModelKind { /// Pre-computed byte offset ranges within `cursor_excerpt` for different /// editable and context token budgets. Allows the server to select the /// appropriate ranges for whichever model it uses. -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct ExcerptRanges { /// Editable region computed with a 150-token budget. 
pub editable_150: Range, @@ -44,7 +44,7 @@ pub struct ExcerptRanges { pub editable_350_context_150: Range, } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct ZetaPromptInput { pub cursor_path: Arc, pub cursor_excerpt: Arc, @@ -149,7 +149,7 @@ impl ZetaFormat { } } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] #[serde(tag = "event")] pub enum Event { BufferChange { @@ -200,7 +200,7 @@ pub fn write_event(prompt: &mut String, event: &Event) { } } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct RelatedFile { pub path: Arc, pub max_row: u32, @@ -209,7 +209,7 @@ pub struct RelatedFile { pub in_open_source_repo: bool, } -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] pub struct RelatedExcerpt { pub row_range: Range, pub text: Arc, From 62f168969e13920d756fc399920a95b5e4ee8b0a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 23 Feb 2026 22:46:49 -0300 Subject: [PATCH 009/548] agent_ui: Add some design tweaks to the subagents UI (#49938) - Increase hit area of both the preview expansion as well as the full screen expansion - Add the ability to stop a subagent from the full screen view - Fix subagent state display in the full screen view (e.g., we were showing the green check mark even when the subagent was cancelled) - Make card header font size consistent with the thread through a new enum value in `LabelSize` - Refine tooltip content and display - Fix slight layout shift happening between the "there is no thread" and "there is a thread" states --- Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review 
taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/acp_thread/src/acp_thread.rs | 11 + .../src/acp/thread_view/active_thread.rs | 424 ++++++++++-------- crates/ui/src/components/label/label_like.rs | 7 +- 3 files changed, 264 insertions(+), 178 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 83645226e5eb9cba3d19b37b587d15d1d80087c1..37fa2488524bf325755f1807125d9685821c04ee 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -1685,6 +1685,17 @@ impl AcpThread { }) } + pub fn tool_call_for_subagent(&self, session_id: &acp::SessionId) -> Option<&ToolCall> { + self.entries.iter().find_map(|entry| match entry { + AgentThreadEntry::ToolCall(tool_call) + if tool_call.subagent_session_id.as_ref() == Some(session_id) => + { + Some(tool_call) + } + _ => None, + }) + } + pub fn resolve_locations(&mut self, id: acp::ToolCallId, cx: &mut Context) { let project = self.project.clone(); let Some((_, tool_call)) = self.tool_call_mut(&id) else { diff --git a/crates/agent_ui/src/acp/thread_view/active_thread.rs b/crates/agent_ui/src/acp/thread_view/active_thread.rs index 5ca4770a7dfc9df3c654f64855623227432b55c2..aa1a11ee2f65100d5bfa3c06801a98be16419af9 100644 --- a/crates/agent_ui/src/acp/thread_view/active_thread.rs +++ b/crates/agent_ui/src/acp/thread_view/active_thread.rs @@ -2351,14 +2351,42 @@ impl AcpThreadView { ) } + fn is_subagent_canceled_or_failed(&self, cx: &App) -> bool { + let Some(parent_session_id) = self.parent_id.as_ref() else { + return false; + }; + + let my_session_id = self.thread.read(cx).session_id().clone(); + + self.server_view + .upgrade() + .and_then(|sv| sv.read(cx).thread_view(parent_session_id)) + .is_some_and(|parent_view| { + parent_view + .read(cx) + .thread + .read(cx) + 
.tool_call_for_subagent(&my_session_id) + .is_some_and(|tc| { + matches!( + tc.status, + ToolCallStatus::Canceled + | ToolCallStatus::Failed + | ToolCallStatus::Rejected + ) + }) + }) + } + pub(crate) fn render_subagent_titlebar(&mut self, cx: &mut Context) -> Option
{ let Some(parent_session_id) = self.parent_id.clone() else { return None; }; let server_view = self.server_view.clone(); - - let is_done = self.thread.read(cx).status() == ThreadStatus::Idle; + let thread = self.thread.clone(); + let is_done = thread.read(cx).status() == ThreadStatus::Idle; + let is_canceled_or_failed = self.is_subagent_canceled_or_failed(cx); Some( h_flex() @@ -2369,6 +2397,9 @@ impl AcpThreadView { .justify_between() .gap_1() .border_b_1() + .when(is_done && is_canceled_or_failed, |this| { + this.border_dashed() + }) .border_color(cx.theme().colors().border) .bg(cx.theme().colors().editor_background.opacity(0.2)) .child( @@ -2381,23 +2412,43 @@ impl AcpThreadView { .color(Color::Muted), ) .child(self.title_editor.clone()) - .when(is_done, |this| { + .when(is_done && is_canceled_or_failed, |this| { + this.child(Icon::new(IconName::Close).color(Color::Error)) + }) + .when(is_done && !is_canceled_or_failed, |this| { this.child(Icon::new(IconName::Check).color(Color::Success)) }), ) .child( - IconButton::new("minimize_subagent", IconName::Minimize) - .icon_size(IconSize::Small) - .tooltip(Tooltip::text("Minimize Subagent")) - .on_click(move |_, window, cx| { - let _ = server_view.update(cx, |server_view, cx| { - server_view.navigate_to_session( - parent_session_id.clone(), - window, - cx, - ); - }); - }), + h_flex() + .gap_0p5() + .when(!is_done, |this| { + this.child( + IconButton::new("stop_subagent", IconName::Stop) + .icon_size(IconSize::Small) + .icon_color(Color::Error) + .tooltip(Tooltip::text("Stop Subagent")) + .on_click(move |_, _, cx| { + thread.update(cx, |thread, cx| { + thread.cancel(cx).detach(); + }); + }), + ) + }) + .child( + IconButton::new("minimize_subagent", IconName::Minimize) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Minimize Subagent")) + .on_click(move |_, window, cx| { + let _ = server_view.update(cx, |server_view, cx| { + server_view.navigate_to_session( + parent_session_id.clone(), + window, + cx, + ); + 
}); + }), + ), ), ) } @@ -6112,6 +6163,11 @@ impl AcpThreadView { ToolCallStatus::Canceled | ToolCallStatus::Failed | ToolCallStatus::Rejected ); + let has_title = thread + .as_ref() + .is_some_and(|t| !t.read(cx).title().is_empty()); + let has_no_title_or_canceled = !has_title || is_canceled_or_failed; + let title = thread .as_ref() .map(|t| t.read(cx).title()) @@ -6147,155 +6203,130 @@ impl AcpThreadView { .as_ref() .map_or(false, |thread| !thread.read(cx).entries().is_empty()); + let tooltip_meta_description = if is_expanded { + "Click to Collapse" + } else { + "Click to Preview" + }; + v_flex() .w_full() .rounded_md() .border_1() + .when(has_no_title_or_canceled, |this| this.border_dashed()) .border_color(self.tool_card_border_color(cx)) .overflow_hidden() .child( h_flex() - .id(format!("subagent-header-click-{}", entry_ix)) .group(&card_header_id) + .h_8() .p_1() - .pl_1p5() .w_full() - .gap_1() .justify_between() - .bg(self.tool_card_header_bg(cx)) - .when(has_expandable_content, |this| { - this.cursor_pointer().on_click(cx.listener({ - let tool_call_id = tool_call.id.clone(); - move |this, _, _, cx| { - if this.expanded_tool_calls.contains(&tool_call_id) { - this.expanded_tool_calls.remove(&tool_call_id); - } else { - this.expanded_tool_calls.insert(tool_call_id.clone()); - } - cx.notify(); - } - })) + .when(!has_no_title_or_canceled, |this| { + this.bg(self.tool_card_header_bg(cx)) }) .child( h_flex() .id(format!("subagent-title-{}", entry_ix)) + .px_1() .min_w_0() + .size_full() + .gap_2() + .justify_between() + .rounded_sm() .overflow_hidden() - .gap_1p5() - .child(icon) .child( - Label::new(title.to_string()) - .size(LabelSize::Small) - .truncate(), - ) - .when(files_changed > 0, |this| { - this.child( - h_flex() - .gap_1() - .child( + h_flex() + .min_w_0() + .w_full() + .gap_1p5() + .child(icon) + .child( + Label::new(title.to_string()) + .size(LabelSize::Custom(self.tool_name_font_size())) + .truncate(), + ) + .when(files_changed > 0, |this| { + 
this.child( Label::new(format!( "— {} {} changed", files_changed, if files_changed == 1 { "file" } else { "files" } )) - .size(LabelSize::Small) + .size(LabelSize::Custom(self.tool_name_font_size())) .color(Color::Muted), ) - .child(DiffStat::new( - diff_stat_id.clone(), - diff_stats.lines_added as usize, - diff_stats.lines_removed as usize, - )), - ) + .child( + DiffStat::new( + diff_stat_id.clone(), + diff_stats.lines_added as usize, + diff_stats.lines_removed as usize, + ) + .label_size(LabelSize::Custom( + self.tool_name_font_size(), + )), + ) + }), + ) + .when(!has_no_title_or_canceled, |this| { + this.tooltip(move |_, cx| { + Tooltip::with_meta( + title.to_string(), + None, + tooltip_meta_description, + cx, + ) + }) }) - .tooltip(Tooltip::text(title.to_string())), - ) - .when_some(subagent_session_id, |this, subagent_session_id| { - this.child( - h_flex() - .flex_shrink_0() - .when(has_expandable_content, |this| { - this.child( - IconButton::new( - format!("subagent-disclosure-{}", entry_ix), - if is_expanded { + .when(has_expandable_content, |this| { + this.cursor_pointer() + .hover(|s| s.bg(cx.theme().colors().element_hover)) + .child( + div().visible_on_hover(card_header_id).child( + Icon::new(if is_expanded { IconName::ChevronUp } else { IconName::ChevronDown - }, - ) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .disabled(!has_expandable_content) - .visible_on_hover(card_header_id.clone()) - .on_click( - cx.listener({ - let tool_call_id = tool_call.id.clone(); - move |this, _, _, cx| { - if this - .expanded_tool_calls - .contains(&tool_call_id) - { - this.expanded_tool_calls - .remove(&tool_call_id); - } else { - this.expanded_tool_calls - .insert(tool_call_id.clone()); - } - cx.notify(); - } - }), + }) + .color(Color::Muted) + .size(IconSize::Small), ), ) - }) - .child( - IconButton::new( - format!("expand-subagent-{}", entry_ix), - IconName::Maximize, - ) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - 
.tooltip(Tooltip::text("Expand Subagent")) - .visible_on_hover(card_header_id) - .on_click(cx.listener( - move |this, _event, window, cx| { - this.server_view - .update(cx, |this, cx| { - this.navigate_to_session( - subagent_session_id.clone(), - window, - cx, - ); - }) - .ok(); - }, - )), - ) - .when(is_running, |buttons| { - buttons.child( - IconButton::new( - format!("stop-subagent-{}", entry_ix), - IconName::Stop, - ) - .icon_size(IconSize::Small) - .icon_color(Color::Error) - .tooltip(Tooltip::text("Stop Subagent")) - .when_some( - thread_view - .as_ref() - .map(|view| view.read(cx).thread.clone()), - |this, thread| { - this.on_click(cx.listener( - move |_this, _event, _window, cx| { - thread.update(cx, |thread, cx| { - thread.cancel(cx).detach(); - }); - }, - )) + .on_click(cx.listener({ + let tool_call_id = tool_call.id.clone(); + move |this, _, _, cx| { + if this.expanded_tool_calls.contains(&tool_call_id) { + this.expanded_tool_calls.remove(&tool_call_id); + } else { + this.expanded_tool_calls + .insert(tool_call_id.clone()); + } + cx.notify(); + } + })) + }), + ) + .when(is_running && subagent_session_id.is_some(), |buttons| { + buttons.child( + IconButton::new(format!("stop-subagent-{}", entry_ix), IconName::Stop) + .icon_size(IconSize::Small) + .icon_color(Color::Error) + .tooltip(Tooltip::text("Stop Subagent")) + .when_some( + thread_view + .as_ref() + .map(|view| view.read(cx).thread.clone()), + |this, thread| { + this.on_click(cx.listener( + move |_this, _event, _window, cx| { + thread.update(cx, |thread, cx| { + thread.cancel(cx).detach(); + }); }, - ), - ) - }), + )) + }, + ), ) }), ) @@ -6322,6 +6353,7 @@ impl AcpThreadView { this } } else { + let session_id = thread.read(cx).session_id().clone(); this.when(is_expanded, |this| { this.child(self.render_subagent_expanded_content( active_session_id, @@ -6333,6 +6365,40 @@ impl AcpThreadView { window, cx, )) + .child( + h_flex() + .p_1() + .w_full() + .border_t_1() + .when(is_canceled_or_failed, 
|this| this.border_dashed()) + .border_color(cx.theme().colors().border_variant) + .child( + Button::new( + format!("expand-subagent-{}", entry_ix), + "Full Screen", + ) + .full_width() + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .icon(IconName::Maximize) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .icon_position(IconPosition::Start) + .on_click(cx.listener( + move |this, _event, window, cx| { + this.server_view + .update(cx, |this, cx| { + this.navigate_to_session( + session_id.clone(), + window, + cx, + ); + }) + .ok(); + }, + )), + ), + ) }) } }) @@ -6355,18 +6421,33 @@ impl AcpThreadView { let subagent_view = thread_view.read(cx); let session_id = subagent_view.thread.read(cx).session_id().clone(); - if is_running { - let entries = subagent_view.thread.read(cx).entries(); - let total_entries = entries.len(); - let start_ix = total_entries.saturating_sub(MAX_PREVIEW_ENTRIES); + let base_container = || { + div() + .id(format!("subagent-content-{}", session_id)) + .relative() + .w_full() + .h_56() + .border_t_1() + .border_color(self.tool_card_border_color(cx)) + .overflow_hidden() + }; + + let show_thread_entries = is_running || tool_call.content.is_empty(); + if show_thread_entries { let scroll_handle = self .subagent_scroll_handles .borrow_mut() .entry(session_id.clone()) .or_default() .clone(); - scroll_handle.scroll_to_bottom(); + if is_running { + scroll_handle.scroll_to_bottom(); + } + + let entries = subagent_view.thread.read(cx).entries(); + let total_entries = entries.len(); + let start_ix = total_entries.saturating_sub(MAX_PREVIEW_ENTRIES); let rendered_entries: Vec = entries[start_ix..] 
.iter() @@ -6377,51 +6458,41 @@ impl AcpThreadView { }) .collect(); - let editor_bg = cx.theme().colors().editor_background; - - let gradient_overlay = div().absolute().inset_0().bg(linear_gradient( - 180., - linear_color_stop(editor_bg, 0.), - linear_color_stop(editor_bg.opacity(0.), 0.15), - )); - - let interaction_blocker = div() - .absolute() - .inset_0() - .size_full() - .block_mouse_except_scroll(); - - div() - .id(format!("subagent-content-{}", session_id)) - .relative() - .w_full() - .h_56() - .border_t_1() - .border_color(self.tool_card_border_color(cx)) - .bg(editor_bg.opacity(0.4)) - .overflow_hidden() + base_container() .child( div() - .id("entries") + .id(format!("subagent-entries-{}", session_id)) .size_full() .track_scroll(&scroll_handle) .pb_1() .children(rendered_entries), ) - .child(gradient_overlay) - .child(interaction_blocker) + .when(is_running, |this| { + let editor_bg = cx.theme().colors().editor_background; + this.child( + div() + .absolute() + .inset_0() + .size_full() + .bg(linear_gradient( + 180., + linear_color_stop(editor_bg, 0.), + linear_color_stop(editor_bg.opacity(0.), 0.15), + )) + .block_mouse_except_scroll(), + ) + }) + .into_any_element() } else { - div() - .id(format!("subagent-content-{}", session_id)) - .p_2() - .children( - tool_call - .content - .iter() - .enumerate() - .map(|(content_ix, content)| { - div().id(("tool-call-output", entry_ix)).child( - self.render_tool_call_content( + base_container() + .child( + v_flex() + .id(format!("subagent-done-content-{}", session_id)) + .size_full() + .justify_end() + .children(tool_call.content.iter().enumerate().map( + |(content_ix, content)| { + div().p_2().child(self.render_tool_call_content( active_session_id, entry_ix, content, @@ -6438,10 +6509,11 @@ impl AcpThreadView { focus_handle, window, cx, - ), - ) - }), + )) + }, + )), ) + .into_any_element() } } diff --git a/crates/ui/src/components/label/label_like.rs b/crates/ui/src/components/label/label_like.rs index 
ba39d7e16f7c5ac12cbfaba7abe921884d71e37f..d87bdf6c12323c4858881f36af62f1a91cdd2aa1 100644 --- a/crates/ui/src/components/label/label_like.rs +++ b/crates/ui/src/components/label/label_like.rs @@ -1,11 +1,11 @@ use crate::prelude::*; -use gpui::{FontWeight, StyleRefinement, UnderlineStyle}; +use gpui::{FontWeight, Rems, StyleRefinement, UnderlineStyle}; use settings::Settings; use smallvec::SmallVec; use theme::ThemeSettings; /// Sets the size of a label -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Default)] +#[derive(Debug, PartialEq, Clone, Copy, Default)] pub enum LabelSize { /// The default size of a label. #[default] @@ -16,6 +16,8 @@ pub enum LabelSize { Small, /// The extra small size of a label. XSmall, + /// An arbitrary custom size specified in rems. + Custom(Rems), } /// Sets the line height of a label @@ -225,6 +227,7 @@ impl RenderOnce for LabelLike { LabelSize::Default => this.text_ui(cx), LabelSize::Small => this.text_ui_sm(cx), LabelSize::XSmall => this.text_ui_xs(cx), + LabelSize::Custom(size) => this.text_size(size), }) .when(self.line_height_style == LineHeightStyle::UiLabel, |this| { this.line_height(relative(1.)) From da6ff30bfb9acb1359752a238f7bdca661129679 Mon Sep 17 00:00:00 2001 From: Altay Date: Tue, 24 Feb 2026 04:56:35 +0300 Subject: [PATCH 010/548] settings_ui: Improve control alignment for multiline settings rows (#49850) Closes #49848 Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) ## Summary Align right-side controls in settings rows when descriptions wrap to multiple lines, and relax the text-width cap for better balance. 
## Changes - Top-aligned settings row containers in shared settings UI render paths by adding `items_start()` to: - Setting rows (`render_settings_item`) - Sub-page link rows - Action link rows - Updated the left text column width constraint in `render_settings_item` from `w_3_4` to `w_full + max_w_2_3 + min_w_0` to avoid overlap while reducing excess empty space. ## Validation - Built and ran Zed locally with `cargo run`. - Manual verification in Settings pages with multiline descriptions and right-side controls: - General - Appearance - Languages & Tools - Agent/Terminal-related sections - Confirmed improved control alignment and no right-column text overlap in the tested views. | Before | After | |---|---| | ![1-before](https://github.com/user-attachments/assets/bf548d15-a1a6-4917-81ac-0a1afefa2ea1) | ![1-after](https://github.com/user-attachments/assets/47a61610-7b74-48bd-838d-e39e3b578d19) | | ![2-before](https://github.com/user-attachments/assets/92c0bd32-9772-42f4-be98-b1818556fa80) | ![2-after](https://github.com/user-attachments/assets/188e0fdf-91ea-49bf-9648-9177107d94c9) | ## Linked Issues - #49848 Release Notes: - Fixed settings row layout so right-side controls align better with multiline setting descriptions. 
--------- Co-authored-by: Danilo Leal --- crates/settings_ui/src/settings_ui.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 743d05014d91b26bf47b788194f21b330fdc45f8..9ac338f7b849a53c402a0cea6b79ddc6496df0f2 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -1163,7 +1163,9 @@ fn render_settings_item( .child( v_flex() .relative() - .w_3_4() + .w_full() + .max_w_2_3() + .min_w_0() .child( h_flex() .w_full() From bb271cf56259160b808ca80122180a7aabd59fb3 Mon Sep 17 00:00:00 2001 From: Cameron Mcloughlin Date: Tue, 24 Feb 2026 02:46:26 +0000 Subject: [PATCH 011/548] build: Add cargo timing script to devshell (#49941) --- nix/modules/devshells.nix | 10 ++++++++++ script/cargo | 4 ++++ 2 files changed, 14 insertions(+) diff --git a/nix/modules/devshells.nix b/nix/modules/devshells.nix index 0acc0f8909ef51afd29c1b0cd7621d32915f5546..cfc0e48b871e71d87f9f794b35c16fed714ed4a9 100644 --- a/nix/modules/devshells.nix +++ b/nix/modules/devshells.nix @@ -18,6 +18,15 @@ # Musl cross-compiler for building remote_server muslCross = pkgs.pkgsCross.musl64; + + # Cargo build timings wrapper script + wrappedCargo = pkgs.writeShellApplication { + name = "cargo"; + runtimeInputs = [pkgs.nodejs]; + text = '' + NIX_WRAPPER=1 CARGO=${rustToolchain}/bin/cargo ./script/cargo "$@" + ''; + }; in { devShells.default = (pkgs.mkShell.override { inherit (zed-editor) stdenv; }) { @@ -25,6 +34,7 @@ inputsFrom = [ zed-editor ]; packages = with pkgs; [ + wrappedCargo # must be first, to shadow the `cargo` provided by `rustToolchain` rustToolchain # cargo, rustc, and rust-toolchain.toml components included cargo-nextest cargo-hakari diff --git a/script/cargo b/script/cargo index 8226a2becdbbcc27e5ad01b27f8479fea706fa5d..6ff91ff84a59ed4a6820c24cff3b83790dab3d21 100755 --- a/script/cargo +++ b/script/cargo @@ -209,6 +209,10 @@ async function main() { // 
Handle --init flag if (args[0] === "--init") { + if (process.env.NIX_WRAPPER === "1") { + console.error("`--init` not supported when going through the nix wrapper"); + process.exit(1); + } initShellAlias(); return; } From 1c7563c241916bcba937e28351b74815b2571ef6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=80=E1=B4=8D=E1=B4=9B=E1=B4=8F=E1=B4=80=E1=B4=87?= =?UTF-8?q?=CA=80?= Date: Tue, 24 Feb 2026 10:57:44 +0800 Subject: [PATCH 012/548] git_ui: Make stash picker footers button wrap (#49814) Closes #48383 An alternative approach would be to use `min_w` instead of `w`, but that would result in inconsistent widths across the three tabs in the git picker, causing layout jumps when switching between them. It might be more appropriate to simply increase the width directly. |Before|After| |--|--| |CleanShot 2026-02-22 at 02 01 20@2x|CleanShot 2026-02-22 at 02 03
50@2x| Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed footer buttons clipped in stash picker when vim mode is enabled on macOS --------- Co-authored-by: Danilo Leal --- crates/git_ui/src/stash_picker.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/git_ui/src/stash_picker.rs b/crates/git_ui/src/stash_picker.rs index 1713de71c2db01f11b03a8c8e8e8eb498bb31b77..e736dd806a35703991e1fb51e27e3952e5692d99 100644 --- a/crates/git_ui/src/stash_picker.rs +++ b/crates/git_ui/src/stash_picker.rs @@ -527,6 +527,7 @@ impl PickerDelegate for StashListDelegate { .p_1p5() .gap_0p5() .justify_end() + .flex_wrap() .border_t_1() .border_color(cx.theme().colors().border_variant) .child( From 060e4afb41e91784d616e18799f3c204e20a9073 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Mon, 23 Feb 2026 21:19:54 -0600 Subject: [PATCH 013/548] Skip docs suggestions workflow for fork PRs (#49942) ## Summary Fork PRs don't have access to repository secrets (`FACTORY_API_KEY`), causing the docs suggestions workflow to fail when triggered by external contributor PRs. This adds a condition to skip the `batch-suggestions` job when the PR originates from a fork (`github.event.pull_request.head.repo.full_name != github.repository`). ## Context See failing job: https://github.com/zed-industries/zed/actions/runs/22323201362/job/64586740764 The `pull_request` trigger doesn't pass secrets to workflows running on fork PRs for security reasons. This is expected GitHub behavior. 
Release Notes: - N/A --- .github/workflows/docs_suggestions.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docs_suggestions.yml b/.github/workflows/docs_suggestions.yml index df6f001d2a08d5c577401b3c4dd099a1622d8d70..8cf98e978cddfe38688b2f9b47df17f48e472362 100644 --- a/.github/workflows/docs_suggestions.yml +++ b/.github/workflows/docs_suggestions.yml @@ -52,13 +52,15 @@ env: jobs: # Job for PRs merged to main - batch suggestions to branch + # Only runs for PRs from the same repo (not forks) since secrets aren't available for fork PRs batch-suggestions: runs-on: ubuntu-latest timeout-minutes: 10 if: | (github.event_name == 'pull_request' && github.event.pull_request.merged == true && - github.event.pull_request.base.ref == 'main') || + github.event.pull_request.base.ref == 'main' && + github.event.pull_request.head.repo.full_name == github.repository) || (github.event_name == 'workflow_dispatch' && inputs.mode == 'batch') steps: From c7dd11902514a4c906760fea87f0009afb734386 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 23 Feb 2026 22:33:33 -0500 Subject: [PATCH 014/548] git: More rigorously test excerpt syncing for split diff, and fix a couple of bugs (#49907) This PR adds a more rigorous test for the excerpt syncing logic in `SplittableEditor`, in preparation for refactoring that code, since we've had some bugs there. 
The new test covers - edits within the RHS multibuffer - edits to the individual main buffers, not necessarily within the excerpt bounds of the split diff - excerpt expansion - excerpt removal - excerpt recalculation based on diff hunk ranges Bugs fixed: - incorrect edit merging logic in `patches_for_range` - `merge_excerpts` leaving stale excerpt IDs in `excerpts_by_path` Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/editor/src/display_map.rs | 13 + crates/editor/src/split.rs | 440 ++++++++++++++++-------- crates/multi_buffer/src/multi_buffer.rs | 41 ++- crates/multi_buffer/src/path_key.rs | 8 +- 4 files changed, 352 insertions(+), 150 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 9bdff7776ce3a8fd85750b691f8f719a53a749bd..31a1f68300d6d24ef449f4f0eb0b072f0cbe0ea9 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -358,6 +358,19 @@ impl Companion { } } + #[cfg(test)] + pub(crate) fn excerpt_mappings( + &self, + ) -> ( + &HashMap, + &HashMap, + ) { + ( + &self.lhs_excerpt_to_rhs_excerpt, + &self.rhs_excerpt_to_lhs_excerpt, + ) + } + fn buffer_to_companion_buffer(&self, display_map_id: EntityId) -> &HashMap { if self.is_rhs(display_map_id) { &self.rhs_buffer_to_lhs_buffer diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs index abee7c6a482fedb66579f28b7636e7e2dc6504cd..c85b7545c2c8bbabb3777476fa4b318f0b70908f 100644 --- a/crates/editor/src/split.rs +++ b/crates/editor/src/split.rs @@ -337,7 +337,7 @@ fn patch_for_excerpt( let mut merged_edits: Vec> = Vec::new(); for edit in edits { if let Some(last) = 
merged_edits.last_mut() { - if edit.new.start <= last.new.end { + if edit.new.start <= last.new.end || edit.old.start <= last.old.end { last.old.end = last.old.end.max(edit.old.end); last.new.end = last.new.end.max(edit.new.end); continue; @@ -1126,28 +1126,21 @@ impl SplittableEditor { #[cfg(test)] impl SplittableEditor { fn check_invariants(&self, quiesced: bool, cx: &mut App) { - use multi_buffer::MultiBufferRow; use text::Bias; use crate::display_map::Block; use crate::display_map::DisplayRow; self.debug_print(cx); + self.check_excerpt_mapping_invariants(cx); let lhs = self.lhs.as_ref().unwrap(); - let rhs_excerpts = self.rhs_multibuffer.read(cx).excerpt_ids(); - let lhs_excerpts = lhs.multibuffer.read(cx).excerpt_ids(); - assert_eq!( - lhs_excerpts.len(), - rhs_excerpts.len(), - "mismatch in excerpt count" - ); if quiesced { - let rhs_snapshot = lhs + let lhs_snapshot = lhs .editor .update(cx, |editor, cx| editor.display_snapshot(cx)); - let lhs_snapshot = self + let rhs_snapshot = self .rhs_editor .update(cx, |editor, cx| editor.display_snapshot(cx)); @@ -1189,10 +1182,20 @@ impl SplittableEditor { let (lhs_point, rhs_point) = if lhs_hunk.row_range.is_empty() || rhs_hunk.row_range.is_empty() { - ( - Point::new(lhs_hunk.row_range.end.0, 0), - Point::new(rhs_hunk.row_range.end.0, 0), - ) + let lhs_end = Point::new(lhs_hunk.row_range.end.0, 0); + let rhs_end = Point::new(rhs_hunk.row_range.end.0, 0); + + let lhs_exceeds = lhs_snapshot + .range_for_excerpt(lhs_hunk.excerpt_id) + .map_or(false, |range| lhs_end >= range.end); + let rhs_exceeds = rhs_snapshot + .range_for_excerpt(rhs_hunk.excerpt_id) + .map_or(false, |range| rhs_end >= range.end); + if lhs_exceeds != rhs_exceeds { + continue; + } + + (lhs_end, rhs_end) } else { ( Point::new(lhs_hunk.row_range.start.0, 0), @@ -1207,44 +1210,6 @@ impl SplittableEditor { "mismatch in hunk position" ); } - - // Filtering out empty lines is a bit of a hack, to work around a case where - // the base text has a trailing 
newline but the current text doesn't, or vice versa. - // In this case, we get the additional newline on one side, but that line is not - // marked as added/deleted by rowinfos. - self.check_sides_match(cx, |snapshot| { - snapshot - .buffer_snapshot() - .text() - .split("\n") - .zip(snapshot.buffer_snapshot().row_infos(MultiBufferRow(0))) - .filter(|(line, row_info)| !line.is_empty() && row_info.diff_status.is_none()) - .map(|(line, _)| line.to_owned()) - .collect::>() - }); - } - } - - #[track_caller] - fn check_sides_match( - &self, - cx: &mut App, - mut extract: impl FnMut(&crate::DisplaySnapshot) -> T, - ) { - let lhs = self.lhs.as_ref().expect("requires split"); - let rhs_snapshot = self.rhs_editor.update(cx, |editor, cx| { - editor.display_map.update(cx, |map, cx| map.snapshot(cx)) - }); - let lhs_snapshot = lhs.editor.update(cx, |editor, cx| { - editor.display_map.update(cx, |map, cx| map.snapshot(cx)) - }); - - let rhs_t = extract(&rhs_snapshot); - let lhs_t = extract(&lhs_snapshot); - - if rhs_t != lhs_t { - self.debug_print(cx); - pretty_assertions::assert_eq!(rhs_t, lhs_t); } } @@ -1519,87 +1484,109 @@ impl SplittableEditor { eprintln!(); } - fn randomly_edit_excerpts( - &mut self, - rng: &mut impl rand::Rng, - mutation_count: usize, - cx: &mut Context, - ) { - use collections::HashSet; - use rand::prelude::*; - use std::env; - use util::RandomCharIter; + fn check_excerpt_mapping_invariants(&self, cx: &gpui::App) { + use multi_buffer::{ExcerptId, PathKey}; - let max_buffers = env::var("MAX_BUFFERS") - .map(|i| i.parse().expect("invalid `MAX_BUFFERS` variable")) - .unwrap_or(4); + let lhs = self.lhs.as_ref().expect("should have lhs editor"); - for _ in 0..mutation_count { - let paths = self - .rhs_multibuffer - .read(cx) - .paths() - .cloned() - .collect::>(); - let excerpt_ids = self.rhs_multibuffer.read(cx).excerpt_ids(); + let rhs_excerpt_ids = self.rhs_multibuffer.read(cx).excerpt_ids(); + let lhs_excerpt_ids = lhs.multibuffer.read(cx).excerpt_ids(); 
+ assert_eq!( + rhs_excerpt_ids.len(), + lhs_excerpt_ids.len(), + "excerpt count mismatch: rhs has {}, lhs has {}", + rhs_excerpt_ids.len(), + lhs_excerpt_ids.len(), + ); - if rng.random_bool(0.2) && !excerpt_ids.is_empty() { - let mut excerpts = HashSet::default(); - for _ in 0..rng.random_range(0..excerpt_ids.len()) { - excerpts.extend(excerpt_ids.choose(rng).copied()); - } + let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); + let companion = rhs_display_map + .read(cx) + .companion() + .cloned() + .expect("should have companion"); + let (lhs_to_rhs, rhs_to_lhs) = { + let c = companion.read(cx); + let (l, r) = c.excerpt_mappings(); + (l.clone(), r.clone()) + }; - let line_count = rng.random_range(1..5); + assert_eq!( + lhs_to_rhs.len(), + rhs_to_lhs.len(), + "mapping size mismatch: lhs_to_rhs has {}, rhs_to_lhs has {}", + lhs_to_rhs.len(), + rhs_to_lhs.len(), + ); - log::info!("Expanding excerpts {excerpts:?} by {line_count} lines"); + for (&lhs_id, &rhs_id) in &lhs_to_rhs { + let reverse = rhs_to_lhs.get(&rhs_id); + assert_eq!( + reverse, + Some(&lhs_id), + "lhs_to_rhs maps {lhs_id:?} -> {rhs_id:?}, but rhs_to_lhs maps {rhs_id:?} -> {reverse:?}", + ); + } + for (&rhs_id, &lhs_id) in &rhs_to_lhs { + let reverse = lhs_to_rhs.get(&lhs_id); + assert_eq!( + reverse, + Some(&rhs_id), + "rhs_to_lhs maps {rhs_id:?} -> {lhs_id:?}, but lhs_to_rhs maps {lhs_id:?} -> {reverse:?}", + ); + } - self.expand_excerpts( - excerpts.iter().cloned(), - line_count, - ExpandExcerptDirection::UpAndDown, - cx, - ); - continue; - } + assert_eq!( + lhs_to_rhs.len(), + rhs_excerpt_ids.len(), + "mapping covers {} excerpts but rhs has {}", + lhs_to_rhs.len(), + rhs_excerpt_ids.len(), + ); - if excerpt_ids.is_empty() || (rng.random_bool(0.8) && paths.len() < max_buffers) { - let len = rng.random_range(100..500); - let text = RandomCharIter::new(&mut *rng).take(len).collect::(); - let buffer = cx.new(|cx| Buffer::local(text, cx)); - log::info!( - "Creating new buffer {} with 
text: {:?}", - buffer.read(cx).remote_id(), - buffer.read(cx).text() - ); - let buffer_snapshot = buffer.read(cx).snapshot(); - let diff = cx.new(|cx| BufferDiff::new_unchanged(&buffer_snapshot, cx)); - // Create some initial diff hunks. - buffer.update(cx, |buffer, cx| { - buffer.randomly_edit(rng, 1, cx); - }); - let buffer_snapshot = buffer.read(cx).text_snapshot(); - diff.update(cx, |diff, cx| { - diff.recalculate_diff_sync(&buffer_snapshot, cx); - }); - let path = PathKey::for_buffer(&buffer, cx); - let ranges = diff.update(cx, |diff, cx| { - diff.snapshot(cx) - .hunks(&buffer_snapshot) - .map(|hunk| hunk.buffer_range.to_point(&buffer_snapshot)) - .collect::>() - }); - self.set_excerpts_for_path(path, buffer, ranges, 2, diff, cx); - } else { - log::info!("removing excerpts"); - let remove_count = rng.random_range(1..=paths.len()); - let paths_to_remove = paths - .choose_multiple(rng, remove_count) - .cloned() - .collect::>(); - for path in paths_to_remove { - self.remove_excerpts_for_path(path.clone(), cx); - } - } + let rhs_mapped_order: Vec = rhs_excerpt_ids + .iter() + .map(|rhs_id| { + *rhs_to_lhs.get(rhs_id).unwrap_or_else(|| { + panic!("rhs excerpt {rhs_id:?} has no mapping in rhs_to_lhs") + }) + }) + .collect(); + assert_eq!( + rhs_mapped_order, lhs_excerpt_ids, + "excerpt ordering mismatch: mapping rhs order through rhs_to_lhs doesn't match lhs order", + ); + + let rhs_paths: Vec = self.rhs_multibuffer.read(cx).paths().cloned().collect(); + let lhs_paths: Vec = lhs.multibuffer.read(cx).paths().cloned().collect(); + assert_eq!( + rhs_paths, lhs_paths, + "path set mismatch between rhs and lhs" + ); + + for path in &rhs_paths { + let rhs_path_excerpts: Vec = self + .rhs_multibuffer + .read(cx) + .excerpts_for_path(path) + .collect(); + let lhs_path_excerpts: Vec = + lhs.multibuffer.read(cx).excerpts_for_path(path).collect(); + assert_eq!( + rhs_path_excerpts.len(), + lhs_path_excerpts.len(), + "excerpt count mismatch for path {path:?}: rhs has {}, lhs has 
{}", + rhs_path_excerpts.len(), + lhs_path_excerpts.len(), + ); + let rhs_path_mapped: Vec = rhs_path_excerpts + .iter() + .map(|rhs_id| *rhs_to_lhs.get(rhs_id).unwrap()) + .collect(); + assert_eq!( + rhs_path_mapped, lhs_path_excerpts, + "per-path excerpt ordering mismatch for {path:?}", + ); } } } @@ -2275,7 +2262,9 @@ mod tests { #[gpui::test(iterations = 25)] async fn test_random_split_editor(mut rng: StdRng, cx: &mut gpui::TestAppContext) { + use multi_buffer::ExpandExcerptDirection; use rand::prelude::*; + use util::RandomCharIter; let (editor, cx) = init_test(cx, SoftWrap::EditorWidth, DiffViewStyle::Split).await; let operations = std::env::var("OPERATIONS") @@ -2288,9 +2277,32 @@ mod tests { }); if buffers.is_empty() { - log::info!("adding excerpts to empty multibuffer"); + log::info!("creating initial buffer"); + let len = rng.random_range(200..1000); + let base_text: String = RandomCharIter::new(&mut *rng).take(len).collect(); + let buffer = cx.new(|cx| Buffer::local(base_text.clone(), cx)); + let buffer_snapshot = buffer.read_with(cx, |b, _| b.text_snapshot()); + let diff = + cx.new(|cx| BufferDiff::new_with_base_text(&base_text, &buffer_snapshot, cx)); + let edit_count = rng.random_range(3..8); + buffer.update(cx, |buffer, cx| { + buffer.randomly_edit(rng, edit_count, cx); + }); + let buffer_snapshot = buffer.read_with(cx, |b, _| b.text_snapshot()); + diff.update(cx, |diff, cx| { + diff.recalculate_diff_sync(&buffer_snapshot, cx); + }); + let diff_snapshot = diff.read_with(cx, |diff, cx| diff.snapshot(cx)); + let ranges = diff_snapshot + .hunks(&buffer_snapshot) + .map(|hunk| hunk.range) + .collect::>(); + let context_lines = rng.random_range(0..2); + editor.update(cx, |editor, cx| { + let path = PathKey::for_buffer(&buffer, cx); + editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); + }); editor.update(cx, |editor, cx| { - editor.randomly_edit_excerpts(rng, 2, cx); editor.check_invariants(true, cx); }); continue; @@ -2299,27 
+2311,130 @@ mod tests { let mut quiesced = false; match rng.random_range(0..100) { - 0..=44 => { + 0..=14 if buffers.len() < 6 => { + log::info!("creating new buffer and setting excerpts"); + let len = rng.random_range(200..1000); + let base_text: String = RandomCharIter::new(&mut *rng).take(len).collect(); + let buffer = cx.new(|cx| Buffer::local(base_text.clone(), cx)); + let buffer_snapshot = buffer.read_with(cx, |b, _| b.text_snapshot()); + let diff = cx + .new(|cx| BufferDiff::new_with_base_text(&base_text, &buffer_snapshot, cx)); + let edit_count = rng.random_range(3..8); + buffer.update(cx, |buffer, cx| { + buffer.randomly_edit(rng, edit_count, cx); + }); + let buffer_snapshot = buffer.read_with(cx, |b, _| b.text_snapshot()); + diff.update(cx, |diff, cx| { + diff.recalculate_diff_sync(&buffer_snapshot, cx); + }); + let diff_snapshot = diff.read_with(cx, |diff, cx| diff.snapshot(cx)); + let ranges = diff_snapshot + .hunks(&buffer_snapshot) + .map(|hunk| hunk.range) + .collect::>(); + let context_lines = rng.random_range(0..2); + editor.update(cx, |editor, cx| { + let path = PathKey::for_buffer(&buffer, cx); + editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); + }); + } + 15..=29 => { log::info!("randomly editing multibuffer"); + let edit_count = rng.random_range(1..5); editor.update(cx, |editor, cx| { editor.rhs_multibuffer.update(cx, |multibuffer, cx| { - multibuffer.randomly_edit(rng, 5, cx); - }) - }) + multibuffer.randomly_edit(rng, edit_count, cx); + }); + }); + } + 30..=44 => { + log::info!("randomly editing individual buffer"); + let buffer = buffers.iter().choose(rng).unwrap(); + let edit_count = rng.random_range(1..3); + buffer.update(cx, |buffer, cx| { + buffer.randomly_edit(rng, edit_count, cx); + }); } - 45..=64 => { + 45..=54 => { + log::info!("recalculating diff and resetting excerpts for single buffer"); + let buffer = buffers.iter().choose(rng).unwrap(); + let buffer_snapshot = buffer.read_with(cx, |buffer, _| 
buffer.text_snapshot()); + let diff = editor.update(cx, |editor, cx| { + editor + .rhs_multibuffer + .read(cx) + .diff_for(buffer.read(cx).remote_id()) + .unwrap() + }); + diff.update(cx, |diff, cx| { + diff.recalculate_diff_sync(&buffer_snapshot, cx); + }); + cx.run_until_parked(); + let diff_snapshot = diff.read_with(cx, |diff, cx| diff.snapshot(cx)); + let ranges = diff_snapshot + .hunks(&buffer_snapshot) + .map(|hunk| hunk.range) + .collect::>(); + let context_lines = rng.random_range(0..2); + let buffer = buffer.clone(); + editor.update(cx, |editor, cx| { + let path = PathKey::for_buffer(&buffer, cx); + editor.set_excerpts_for_path(path, buffer, ranges, context_lines, diff, cx); + }); + } + 55..=64 => { log::info!("randomly undoing/redoing in single buffer"); let buffer = buffers.iter().choose(rng).unwrap(); buffer.update(cx, |buffer, cx| { buffer.randomly_undo_redo(rng, cx); }); } - 65..=79 => { - log::info!("mutating excerpts"); - editor.update(cx, |editor, cx| { - editor.randomly_edit_excerpts(rng, 2, cx); + 65..=74 => { + log::info!("removing excerpts for a random path"); + let paths = editor.update(cx, |editor, cx| { + editor + .rhs_multibuffer + .read(cx) + .paths() + .cloned() + .collect::>() + }); + if let Some(path) = paths.choose(rng) { + editor.update(cx, |editor, cx| { + editor.remove_excerpts_for_path(path.clone(), cx); + }); + } + } + 75..=79 => { + log::info!("unsplit and resplit"); + editor.update_in(cx, |editor, window, cx| { + editor.unsplit(window, cx); + }); + cx.run_until_parked(); + editor.update_in(cx, |editor, window, cx| { + editor.split(window, cx); }); } + 80..=89 => { + let excerpt_ids = editor.update(cx, |editor, cx| { + editor.rhs_multibuffer.read(cx).excerpt_ids() + }); + if !excerpt_ids.is_empty() { + let count = rng.random_range(1..=excerpt_ids.len().min(3)); + let chosen: Vec<_> = + excerpt_ids.choose_multiple(rng, count).copied().collect(); + let line_count = rng.random_range(1..5); + log::info!("expanding {count} excerpts by 
{line_count} lines"); + editor.update(cx, |editor, cx| { + editor.expand_excerpts( + chosen.into_iter(), + line_count, + ExpandExcerptDirection::UpAndDown, + cx, + ); + }); + } + } _ => { log::info!("quiescing"); for buffer in buffers { @@ -2356,6 +2471,55 @@ mod tests { } } + #[gpui::test] + async fn test_expand_excerpt_with_hunk_before_excerpt_start(cx: &mut gpui::TestAppContext) { + use rope::Point; + + let (editor, cx) = init_test(cx, SoftWrap::None, DiffViewStyle::Split).await; + + let base_text = "aaaaaaa rest_of_line\nsecond_line\nthird_line\nfourth_line"; + let current_text = "aaaaaaa rest_of_line\nsecond_line\nMODIFIED\nfourth_line"; + let (buffer, diff) = buffer_with_diff(base_text, current_text, cx); + + let buffer_snapshot = buffer.read_with(cx, |b, _| b.text_snapshot()); + diff.update(cx, |diff, cx| { + diff.recalculate_diff_sync(&buffer_snapshot, cx); + }); + cx.run_until_parked(); + + let diff_snapshot = diff.read_with(cx, |diff, cx| diff.snapshot(cx)); + let ranges = diff_snapshot + .hunks(&buffer_snapshot) + .map(|hunk| hunk.range) + .collect::>(); + + editor.update(cx, |editor, cx| { + let path = PathKey::for_buffer(&buffer, cx); + editor.set_excerpts_for_path(path, buffer.clone(), ranges, 0, diff.clone(), cx); + }); + cx.run_until_parked(); + + buffer.update(cx, |buffer, cx| { + buffer.edit( + [(Point::new(0, 7)..Point::new(1, 7), "\nnew_line\n")], + None, + cx, + ); + }); + + let excerpt_ids = editor.update(cx, |editor, cx| { + editor.rhs_multibuffer.read(cx).excerpt_ids() + }); + editor.update(cx, |editor, cx| { + editor.expand_excerpts( + excerpt_ids.iter().copied(), + 2, + multi_buffer::ExpandExcerptDirection::UpAndDown, + cx, + ); + }); + } + #[gpui::test] async fn test_basic_alignment(cx: &mut gpui::TestAppContext) { use rope::Point; diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 4f89dfa6faf7aec30eefc99c96334c8e1286e177..22142c2e28bfcb56077e936d664b7c810862feaa 100644 --- 
a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -595,14 +595,12 @@ impl DiffState { base_text_changed_range, extended_range: _, }) => { - if let Some(base_text_changed_range) = base_text_changed_range.clone() { - this.inverted_buffer_diff_changed( - diff, - main_buffer, - base_text_changed_range, - cx, - ) - } + this.inverted_buffer_diff_changed( + diff, + main_buffer, + base_text_changed_range.clone(), + cx, + ); cx.emit(Event::BufferDiffChanged); } BufferDiffEvent::LanguageChanged => { @@ -2202,7 +2200,19 @@ impl MultiBuffer { drop(snapshot); self.resize_excerpt(excerpt_ids[0], union_range, cx); - self.remove_excerpts(excerpt_ids[1..].iter().copied(), cx); + let removed = &excerpt_ids[1..]; + for &excerpt_id in removed { + if let Some(path) = self.paths_by_excerpt.get(&excerpt_id) { + if let Some(excerpt_list) = self.excerpts_by_path.get_mut(path) { + excerpt_list.retain(|id| *id != excerpt_id); + if excerpt_list.is_empty() { + let path = path.clone(); + self.excerpts_by_path.remove(&path); + } + } + } + } + self.remove_excerpts(removed.iter().copied(), cx); excerpt_ids[0] } @@ -2489,7 +2499,7 @@ impl MultiBuffer { &mut self, diff: Entity, main_buffer: Entity, - diff_change_range: Range, + diff_change_range: Option>, cx: &mut Context, ) { self.sync_mut(cx); @@ -2510,6 +2520,10 @@ impl MultiBuffer { .diffs .insert_or_replace(base_text_buffer_id, new_diff); + let Some(diff_change_range) = diff_change_range else { + return; + }; + let excerpt_edits = snapshot.excerpt_edits_for_diff_change(buffer_state, diff_change_range); let edits = Self::sync_diff_transforms( &mut snapshot, @@ -2705,7 +2719,12 @@ impl MultiBuffer { let base_text_buffer_id = snapshot.remote_id(); let diff_change_range = 0..snapshot.len(); self.snapshot.get_mut().has_inverted_diff = true; - self.inverted_buffer_diff_changed(diff.clone(), main_buffer.clone(), diff_change_range, cx); + self.inverted_buffer_diff_changed( + diff.clone(), + main_buffer.clone(), 
+ Some(diff_change_range), + cx, + ); self.diffs.insert( base_text_buffer_id, DiffState::new_inverted(diff, main_buffer, cx), diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs index be2b7bfa33954b6d8a19c42b544822cb1608aaef..475ed3c9d623382c5ed989918ee3224afc95cd25 100644 --- a/crates/multi_buffer/src/path_key.rs +++ b/crates/multi_buffer/src/path_key.rs @@ -188,7 +188,13 @@ impl MultiBuffer { direction: ExpandExcerptDirection, cx: &mut Context, ) { - let grouped = ids + let mut sorted_ids: Vec = ids.into_iter().collect(); + sorted_ids.sort_by(|a, b| { + let path_a = self.paths_by_excerpt.get(a); + let path_b = self.paths_by_excerpt.get(b); + path_a.cmp(&path_b) + }); + let grouped = sorted_ids .into_iter() .chunk_by(|id| self.paths_by_excerpt.get(id).cloned()) .into_iter() From 61e7746d4c513affc5de86a8e6a9876f658a2a2e Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Feb 2026 21:44:20 -0700 Subject: [PATCH 015/548] Fix wasmtime panic handling (#49944) We see a number of crashes in Sentry that appear to be crashes in wasmtime. This shouldn't happen, as wasmtime is designed to run untrusted code "safely". Looking into this, it seems likely that the problem is that we race with wasmtime when installing signal handlers. If wasmtime's handlers are installed before ours, then any signals that it intends to handle (like out of bounds memory access) will reach our handlers before its; which causes us to assume the app has crashed. 
This change fixes our crash handler initialization to ensure we always create our signal handler first, and reverts a previous attempt to fix this from #40883 Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Linux: Fixed crashes that could happen due to our crash handler erroneously catching signals intended for wasmtime. --- Cargo.lock | 2 +- crates/crashes/Cargo.toml | 3 +- crates/crashes/src/crashes.rs | 140 +++++++++++++------------ crates/extension_host/src/wasm_host.rs | 16 +-- crates/remote_server/src/server.rs | 31 +++--- crates/zed/src/main.rs | 11 +- 6 files changed, 103 insertions(+), 100 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 430cbe0f580e9ad94767441fca687d69ce404099..964fce6bf3acaadff8a539df9937c84b1d0bdb74 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4086,7 +4086,7 @@ dependencies = [ "bincode", "cfg-if", "crash-handler", - "extension_host", + "futures 0.3.31", "log", "mach2 0.5.0", "minidumper", diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index bd1c1121848e34349b5cd58c0fa033d380fa791b..5e451853a925d86ffcc1491a5c95af1f94e6ed05 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -9,9 +9,10 @@ license = "GPL-3.0-or-later" bincode.workspace = true cfg-if.workspace = true crash-handler.workspace = true -extension_host.workspace = true +futures.workspace = true log.workspace = true minidumper.workspace = true + paths.workspace = true release_channel.workspace = true smol.workspace = true diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index 967b2b846461a701f377ceacefbbaaa9d811091d..a1a43dbb88198b7afd4b89141f7578c0a5bc25ce 100644 --- 
a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -1,8 +1,10 @@ use crash_handler::{CrashEventResult, CrashHandler}; +use futures::future::BoxFuture; use log::info; use minidumper::{Client, LoopAction, MinidumpBinary}; use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; use serde::{Deserialize, Serialize}; +use std::mem; #[cfg(not(target_os = "windows"))] use smol::process::Command; @@ -34,43 +36,76 @@ const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10); #[cfg(target_os = "macos")] static PANIC_THREAD_ID: AtomicU32 = AtomicU32::new(0); -pub async fn init(crash_init: InitCrashHandler) { - let gen_var = match env::var("ZED_GENERATE_MINIDUMPS") { - Ok(v) => { - if v == "false" || v == "0" { - Some(false) - } else { - Some(true) - } - } - Err(_) => None, - }; +fn should_install_crash_handler() -> bool { + if let Ok(value) = env::var("ZED_GENERATE_MINIDUMPS") { + return value == "true" || value == "1"; + } - match (gen_var, *RELEASE_CHANNEL) { - (Some(false), _) | (None, ReleaseChannel::Dev) => { - let old_hook = panic::take_hook(); - panic::set_hook(Box::new(move |info| { - unsafe { env::set_var("RUST_BACKTRACE", "1") }; - old_hook(info); - // prevent the macOS crash dialog from popping up - if cfg!(target_os = "macos") { - std::process::exit(1); - } - })); - return; - } - _ => { - panic::set_hook(Box::new(panic_hook)); - } + if *RELEASE_CHANNEL == ReleaseChannel::Dev { + return false; + } + + true +} + +/// Install crash signal handlers and spawn the crash-handler subprocess. +/// +/// The synchronous portion (signal handlers, panic hook) runs inline. +/// The async keepalive task is passed to `spawn` so the caller decides +/// which executor to schedule it on. 
+pub fn init(crash_init: InitCrashHandler, spawn: impl FnOnce(BoxFuture<'static, ()>)) { + if !should_install_crash_handler() { + let old_hook = panic::take_hook(); + panic::set_hook(Box::new(move |info| { + unsafe { env::set_var("RUST_BACKTRACE", "1") }; + old_hook(info); + // prevent the macOS crash dialog from popping up + if cfg!(target_os = "macos") { + std::process::exit(1); + } + })); + return; } + panic::set_hook(Box::new(panic_hook)); + + let handler = CrashHandler::attach(unsafe { + crash_handler::make_crash_event(move |crash_context: &crash_handler::CrashContext| { + let Some(client) = CRASH_HANDLER.get() else { + return CrashEventResult::Handled(false); + }; + + // only request a minidump once + let res = if REQUESTED_MINIDUMP + .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) + .is_ok() + { + #[cfg(target_os = "macos")] + suspend_all_other_threads(); + + // on macos this "ping" is needed to ensure that all our + // `client.send_message` calls have been processed before we trigger the + // minidump request. + client.ping().ok(); + client.request_dump(crash_context).is_ok() + } else { + true + }; + CrashEventResult::Handled(res) + }) + }) + .expect("failed to attach signal handler"); + + info!("crash signal handlers installed"); + + spawn(Box::pin(connect_and_keepalive(crash_init, handler))); +} + +/// Spawn the crash-handler subprocess, connect the IPC client, and run the +/// keepalive ping loop. Called on a background executor by [`init`]. +async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandler) { let exe = env::current_exe().expect("unable to find ourselves"); let zed_pid = process::id(); - // TODO: we should be able to get away with using 1 crash-handler process per machine, - // but for now we append the PID of the current process which makes it unique per remote - // server or interactive zed instance. 
This solves an issue where occasionally the socket - // used by the crash handler isn't destroyed correctly which causes it to stay on the file - // system and block further attempts to initialize crash handlers with that socket path. let socket_name = paths::temp_dir().join(format!("zed-crash-handler-{zed_pid}")); #[cfg(not(target_os = "windows"))] let _crash_handler = Command::new(exe) @@ -82,8 +117,6 @@ pub async fn init(crash_init: InitCrashHandler) { #[cfg(target_os = "windows")] spawn_crash_handler_windows(&exe, &socket_name); - #[cfg(target_os = "linux")] - let server_pid = _crash_handler.id(); info!("spawning crash handler process"); let mut elapsed = Duration::ZERO; @@ -106,36 +139,15 @@ pub async fn init(crash_init: InitCrashHandler) { .unwrap(); let client = Arc::new(client); - let handler = CrashHandler::attach(unsafe { - let client = client.clone(); - crash_handler::make_crash_event(move |crash_context: &crash_handler::CrashContext| { - // only request a minidump once - let res = if REQUESTED_MINIDUMP - .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) - .is_ok() - { - #[cfg(target_os = "macos")] - suspend_all_other_threads(); - - // on macos this "ping" is needed to ensure that all our - // `client.send_message` calls have been processed before we trigger the - // minidump request. - client.ping().ok(); - client.request_dump(crash_context).is_ok() - } else { - true - }; - CrashEventResult::Handled(res) - }) - }) - .expect("failed to attach signal handler"); #[cfg(target_os = "linux")] - { - handler.set_ptracer(Some(server_pid)); - } + handler.set_ptracer(Some(_crash_handler.id())); + + // Publishing the client to the OnceLock makes it visible to the signal + // handler callback installed earlier. 
CRASH_HANDLER.set(client.clone()).ok(); - std::mem::forget(handler); + // mem::forget so that the drop is not called + mem::forget(handler); info!("crash handler registered"); loop { @@ -300,12 +312,6 @@ impl minidumper::ServerHandler for CrashServer { } pub fn panic_hook(info: &PanicHookInfo) { - // Don't handle a panic on threads that are not relevant to the main execution. - if extension_host::wasm_host::IS_WASM_THREAD.with(|v| v.load(Ordering::Acquire)) { - log::error!("wasm thread panicked!"); - return; - } - let message = info.payload_as_str().unwrap_or("Box").to_owned(); let span = info diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index a5994f8438236b39ae9bf9b600557f3f55360cfc..fe3c11de3ae78115b8e5db08884b7e07be152324 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -33,10 +33,7 @@ use settings::Settings; use std::{ borrow::Cow, path::{Path, PathBuf}, - sync::{ - Arc, LazyLock, OnceLock, - atomic::{AtomicBool, Ordering}, - }, + sync::{Arc, LazyLock, OnceLock}, time::Duration, }; use task::{DebugScenario, SpawnInTerminal, TaskTemplate, ZedDebugConfig}; @@ -498,11 +495,6 @@ pub struct WasmState { pub(crate) capability_granter: CapabilityGranter, } -std::thread_local! { - /// Used by the crash handler to ignore panics in extension-related threads. - pub static IS_WASM_THREAD: AtomicBool = const { AtomicBool::new(false) }; -} - type MainThreadCall = Box FnOnce(&'a mut AsyncApp) -> LocalBoxFuture<'a, ()>>; type ExtensionCall = Box< @@ -656,12 +648,6 @@ impl WasmHost { let (tx, mut rx) = mpsc::unbounded::(); let extension_task = async move { - // note: Setting the thread local here will slowly "poison" all tokio threads - // causing us to not record their panics any longer. - // - // This is fine though, the main zed binary only uses tokio for livekit and wasm extensions. 
- // Livekit seldom (if ever) panics 🤞 so the likelihood of us missing a panic in sentry is very low. - IS_WASM_THREAD.with(|v| v.store(true, Ordering::Release)); while let Some(call) = rx.next().await { (call)(&mut extension, &mut store).await; } diff --git a/crates/remote_server/src/server.rs b/crates/remote_server/src/server.rs index 6f0cf3003309988bdc276e5e8ed9d34ed4c64c81..6784f5fc1d221989aeaf1ecbd34da65f8f923a87 100644 --- a/crates/remote_server/src/server.rs +++ b/crates/remote_server/src/server.rs @@ -452,15 +452,18 @@ pub fn execute_run( let app = gpui_platform::headless(); let pid = std::process::id(); let id = pid.to_string(); - app.background_executor() - .spawn(crashes::init(crashes::InitCrashHandler { + crashes::init( + crashes::InitCrashHandler { session_id: id, zed_version: VERSION.to_owned(), binary: "zed-remote-server".to_string(), release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), commit_sha: option_env!("ZED_COMMIT_SHA").unwrap_or("no_sha").to_owned(), - })) - .detach(); + }, + |task| { + app.background_executor().spawn(task).detach(); + }, + ); let log_rx = init_logging_server(&log_file)?; log::info!( "starting up with PID {}:\npid_file: {:?}, log_file: {:?}, stdin_socket: {:?}, stdout_socket: {:?}, stderr_socket: {:?}", @@ -704,14 +707,18 @@ pub(crate) fn execute_proxy( let server_paths = ServerPaths::new(&identifier)?; let id = std::process::id().to_string(); - smol::spawn(crashes::init(crashes::InitCrashHandler { - session_id: id, - zed_version: VERSION.to_owned(), - binary: "zed-remote-server".to_string(), - release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), - commit_sha: option_env!("ZED_COMMIT_SHA").unwrap_or("no_sha").to_owned(), - })) - .detach(); + crashes::init( + crashes::InitCrashHandler { + session_id: id, + zed_version: VERSION.to_owned(), + binary: "zed-remote-server".to_string(), + release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), + commit_sha: 
option_env!("ZED_COMMIT_SHA").unwrap_or("no_sha").to_owned(), + }, + |task| { + smol::spawn(task).detach(); + }, + ); log::info!("starting proxy process. PID: {}", std::process::id()); let server_pid = { diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 9a76cee4079053953dd263439de66b29749ec425..95ff6f03b1b7902e254c5e405c5d8b50e1f48773 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -332,8 +332,8 @@ fn main() { .background_executor() .spawn(Session::new(session_id.clone())); - app.background_executor() - .spawn(crashes::init(InitCrashHandler { + crashes::init( + InitCrashHandler { session_id, zed_version: app_version.to_string(), binary: "zed".to_string(), @@ -342,8 +342,11 @@ fn main() { .as_ref() .map(|sha| sha.full()) .unwrap_or_else(|| "no sha".to_owned()), - })) - .detach(); + }, + |task| { + app.background_executor().spawn(task).detach(); + }, + ); let (open_listener, mut open_rx) = OpenListener::new(); From 9f0844399a73dd99094c41d1efdad6e07401ee7a Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Feb 2026 22:13:04 -0700 Subject: [PATCH 016/548] Better cargo wrapper (#49946) Fixes the wrapper on linux/mac to not double-run cargo. 
Makes it work at all on windows Release Notes: - N/A --- script/cargo | 53 ++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 45 insertions(+), 8 deletions(-) diff --git a/script/cargo b/script/cargo index 6ff91ff84a59ed4a6820c24cff3b83790dab3d21..c0738bc6fcb6ede01aa93305dc001b7af2282437 100755 --- a/script/cargo +++ b/script/cargo @@ -62,8 +62,21 @@ function getShellConfigPath(shell) { case "fish": return path.join(home, ".config", "fish", "config.fish"); case "powershell": - // PowerShell Core (pwsh) profile locations if (process.platform === "win32") { + // Spawn PowerShell to get the real $PROFILE path, since os.homedir() doesn't account + // for OneDrive folder redirection, and the subdirectory differs between Windows PowerShell + // 5.x ("WindowsPowerShell") and PowerShell Core ("PowerShell"). + const psModulePath = process.env.PSModulePath || ""; + const psExe = psModulePath.toLowerCase().includes("\\windowspowershell\\") ? "powershell" : "pwsh"; + const result = spawnSync(psExe, ["-NoProfile", "-Command", "$PROFILE"], { + encoding: "utf-8", + stdio: ["pipe", "pipe", "pipe"], + timeout: 5000, + }); + if (result.status === 0 && result.stdout.trim()) { + return result.stdout.trim(); + } + // Fallback if spawning fails return path.join(home, "Documents", "PowerShell", "Microsoft.PowerShell_profile.ps1"); } else { return path.join(home, ".config", "powershell", "Microsoft.PowerShell_profile.ps1"); @@ -79,13 +92,30 @@ function generateAlias(shell, scriptDir) { switch (shell) { case "zsh": case "bash": - return `\n# Zed cargo timing wrapper\ncargo() { local w="${cargoWrapper}"; [[ -x "$w" ]] && "$w" "$@" || command cargo "$@"; }\n`; + return `\n# Zed cargo timing wrapper\ncargo() { local w="${cargoWrapper}"; if [[ -x "$w" ]]; then "$w" "$@"; else command cargo "$@"; fi; }\n`; case "fish": - return `\n# Zed cargo timing wrapper\nfunction cargo\n set -l w "${cargoWrapper}"\n if test -x "$w"\n "$w" $argv\n else\n command cargo $argv\n end\nend\n`; + 
return `\n# Zed cargo timing wrapper\nfunction cargo\n set -l w "${cargoWrapper}"\n if test -x "$w"\n "$w" $argv\n return $status\n else\n command cargo $argv\n end\nend\n`; case "powershell": - return `\n# Zed cargo timing wrapper\nfunction cargo {\n \$wrapper = "${cargoWrapper}"\n if (Test-Path \$wrapper) {\n & \$wrapper @args\n } else {\n & (Get-Command -Name cargo -CommandType Application | Select-Object -First 1).Source @args\n }\n}\n`; + return `\n# Zed cargo timing wrapper\nfunction cargo {\n \$wrapper = "${cargoWrapper}"\n if (Test-Path \$wrapper) {\n node \$wrapper @args\n } else {\n & (Get-Command -Name cargo -CommandType Application | Select-Object -First 1).Source @args\n }\n}\n`; default: - return `cargo() { local w="${cargoWrapper}"; [[ -x "$w" ]] && "$w" "$@" || command cargo "$@"; }`; + return `cargo() { local w="${cargoWrapper}"; if [[ -x "$w" ]]; then "$w" "$@"; else command cargo "$@"; fi; }`; + } +} + +function aliasBlockRegex(shell) { + switch (shell) { + case "zsh": + case "bash": + // Comment line + single-line cargo() { ... } function + return /\n?# Zed cargo timing wrapper\ncargo\(\) \{[^\n]*\}\n/; + case "fish": + // Comment line + multi-line function cargo...end block + return /\n?# Zed cargo timing wrapper\nfunction cargo\n[\s\S]*?\nend\n/; + case "powershell": + // Comment line + multi-line function cargo {...} block + return /\n?# Zed cargo timing wrapper\nfunction cargo \{[\s\S]*?\n\}\n/; + default: + return null; } } @@ -102,12 +132,19 @@ function initShellAlias() { return; } - // Check if alias already exists + // Check if alias already exists; if so, replace it in-place if (fs.existsSync(configPath)) { const content = fs.readFileSync(configPath, "utf-8"); if (content.includes("Zed cargo timing wrapper")) { - console.log(`Alias already exists in ${configPath}`); - console.log("To update, remove the existing alias and run --init again."); + const blockRegex = aliasBlockRegex(shell); + const updated = blockRegex ? 
content.replace(blockRegex, "") : content; + fs.writeFileSync(configPath, updated + alias); + console.log(`Updated cargo timing alias in ${configPath}`); + if (shell === "powershell") { + console.log(`\nRestart PowerShell or run: . "${configPath}"`); + } else { + console.log(`\nRestart your shell or run: source ${configPath}`); + } return; } } From 096610efdb23c889f3ceaf41945e7e6e8cd3a7e3 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 23 Feb 2026 21:13:33 -0800 Subject: [PATCH 017/548] Fix incorrect cursor excerpt handling in EP CLI format-prompt (#49947) Release Notes: - N/A *or* Added/Fixed/Improved ... --- .../src/anthropic_client.rs | 3 +- .../edit_prediction_cli/src/format_prompt.rs | 53 ++++++------------- crates/zeta_prompt/src/zeta_prompt.rs | 12 +++-- 3 files changed, 24 insertions(+), 44 deletions(-) diff --git a/crates/edit_prediction_cli/src/anthropic_client.rs b/crates/edit_prediction_cli/src/anthropic_client.rs index 941e82c9dbf39186cd4e061f81bcfe71b2ad5ee0..784fa711b0058e3d2884460f6ca6f5300fc44a9a 100644 --- a/crates/edit_prediction_cli/src/anthropic_client.rs +++ b/crates/edit_prediction_cli/src/anthropic_client.rs @@ -512,7 +512,8 @@ impl BatchingLlmClient { async fn upload_pending_requests(&self) -> Result> { const BATCH_CHUNK_SIZE: i32 = 16_000; - const MAX_BATCH_SIZE_BYTES: usize = 200 * 1024 * 1024; // 200MB (buffer below 256MB limit) + const MAX_BATCH_SIZE_BYTES: usize = 100 * 1024 * 1024; + let mut all_batch_ids = Vec::new(); let mut total_uploaded = 0; diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index 1ed383c158b453cf75e661ec9dfe72ae6ed6537b..d8fd613ee8d6e1323c8ca0521ca67c837e9fb225 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -11,8 +11,9 @@ use gpui::AsyncApp; use similar::DiffableStr; use std::sync::Arc; use std::{fmt::Write as _, ops::Range}; -use zeta_prompt::format_zeta_prompt; -use 
zeta_prompt::{ZetaFormat, excerpt_range_for_format}; +use zeta_prompt::{ + ZetaFormat, excerpt_range_for_format, format_zeta_prompt, resolve_cursor_region, +}; pub async fn run_format_prompt( example: &mut Example, @@ -30,16 +31,15 @@ pub async fn run_format_prompt( .as_ref() .context("prompt_inputs must be set after context retrieval")?; - let excerpt_ranges = prompt_inputs - .excerpt_ranges - .as_ref() - .context("prompt_inputs must have excerpt_ranges")?; - match args.provider { PredictionProvider::Teacher(_) | PredictionProvider::TeacherNonBatching(_) => { step_progress.set_substatus("formatting teacher prompt"); let zeta_format = ZetaFormat::default(); + let excerpt_ranges = prompt_inputs + .excerpt_ranges + .as_ref() + .context("prompt_inputs must have excerpt_ranges")?; let (editable_range, context_range) = excerpt_range_for_format(zeta_format, excerpt_ranges); @@ -55,30 +55,8 @@ pub async fn run_format_prompt( PredictionProvider::Zeta2(zeta_format) => { step_progress.set_substatus("formatting zeta2 prompt"); - let (editable_range, context_range) = - excerpt_range_for_format(zeta_format, excerpt_ranges); - - let context_start = context_range.start; - let cursor_offset_in_excerpt = prompt_inputs.cursor_offset_in_excerpt - context_start; - let editable_range_in_excerpt = - (editable_range.start - context_start)..(editable_range.end - context_start); - let input = zeta_prompt::ZetaPromptInput { - cursor_path: prompt_inputs.cursor_path.clone(), - cursor_excerpt: prompt_inputs.cursor_excerpt[context_range] - .to_string() - .into(), - editable_range_in_excerpt, - cursor_offset_in_excerpt, - excerpt_start_row: prompt_inputs.excerpt_start_row, - events: prompt_inputs.events.clone(), - related_files: prompt_inputs.related_files.clone(), - excerpt_ranges: prompt_inputs.excerpt_ranges.clone(), - preferred_model: None, - in_open_source_repo: prompt_inputs.in_open_source_repo, - can_collect_data: false, - }; - let prompt = format_zeta_prompt(&input, zeta_format); - let 
prefill = zeta_prompt::get_prefill(&input, zeta_format); + let prompt = format_zeta_prompt(prompt_inputs, zeta_format); + let prefill = zeta_prompt::get_prefill(prompt_inputs, zeta_format); let (expected_patch, expected_cursor_offset) = example .spec .expected_patches_with_cursor_positions() @@ -86,15 +64,14 @@ pub async fn run_format_prompt( .next() .context("expected patches is empty")?; let expected_output = zeta2_output_for_patch( - &input, + prompt_inputs, &expected_patch, expected_cursor_offset, zeta_format, )?; - let rejected_output = - example.spec.rejected_patch.as_ref().and_then(|patch| { - zeta2_output_for_patch(&input, patch, None, zeta_format).ok() - }); + let rejected_output = example.spec.rejected_patch.as_ref().and_then(|patch| { + zeta2_output_for_patch(prompt_inputs, patch, None, zeta_format).ok() + }); example.prompt = Some(ExamplePrompt { input: prompt, @@ -117,8 +94,8 @@ pub fn zeta2_output_for_patch( cursor_offset: Option, version: ZetaFormat, ) -> Result { - let mut old_editable_region = - input.cursor_excerpt[input.editable_range_in_excerpt.clone()].to_string(); + let (context, editable_range, _) = resolve_cursor_region(input, version); + let mut old_editable_region = context[editable_range].to_string(); if !old_editable_region.ends_with_newline() { old_editable_region.push('\n'); diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index eb5be311db1539f96b96b50e372d7733aa69f611..7391683d34d8010336c6f81e6da50be6e6c11c15 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -262,7 +262,7 @@ pub fn excerpt_range_for_format( } } -fn resolve_cursor_region( +pub fn resolve_cursor_region( input: &ZetaPromptInput, format: ZetaFormat, ) -> (&str, Range, usize) { @@ -374,7 +374,10 @@ pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String { | ZetaFormat::V0120GitMergeMarkers | ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211SeedCoder => 
String::new(), - ZetaFormat::V0211Prefill => v0211_prefill::get_prefill(input), + ZetaFormat::V0211Prefill => { + let (context, editable_range, _) = resolve_cursor_region(input, format); + v0211_prefill::get_prefill(context, &editable_range) + } } } @@ -715,9 +718,8 @@ pub mod v0131_git_merge_markers_prefix { pub mod v0211_prefill { use super::*; - pub fn get_prefill(input: &ZetaPromptInput) -> String { - let editable_region = &input.cursor_excerpt - [input.editable_range_in_excerpt.start..input.editable_range_in_excerpt.end]; + pub fn get_prefill(context: &str, editable_range: &Range) -> String { + let editable_region = &context[editable_range.start..editable_range.end]; let prefill_len = (editable_region.len() as f64 * PREFILL_RATIO) as usize; let prefill_len = editable_region.floor_char_boundary(prefill_len); From 564424b31f82bdc488bd839292816cbbd59e8df0 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 23 Feb 2026 23:27:31 -0700 Subject: [PATCH 018/548] Defer wgpu context creation until we have a surface (#49926) Fixes ZED-54X Release Notes: - Linux: wait to request a graphics context until we have a window so we can (ideally) pick a better context or (less ideally) fail more gracefully. 
--------- Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- crates/editor/src/editor.rs | 4 +- crates/gpui_linux/src/linux/platform.rs | 44 --------- crates/gpui_linux/src/linux/wayland/client.rs | 18 ++-- crates/gpui_linux/src/linux/wayland/window.rs | 9 +- crates/gpui_linux/src/linux/x11/client.rs | 33 +++---- crates/gpui_linux/src/linux/x11/window.rs | 9 +- crates/gpui_wgpu/src/wgpu_context.rs | 89 +++++++++++++++---- crates/gpui_wgpu/src/wgpu_renderer.rs | 46 +++++++--- crates/zed/src/zed.rs | 2 +- crates/zed/src/zed/open_listener.rs | 2 +- 10 files changed, 149 insertions(+), 107 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 0acc4ce142891109d6888b5f637b472133a5eaa5..54e20d00cafebc209cec2bd10eb0cbb0007e3af8 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -360,7 +360,7 @@ pub fn init(cx: &mut App) { Editor::new_file(workspace, &Default::default(), window, cx) }, ) - .detach(); + .detach_and_log_err(cx); } }) .on_action(move |_: &workspace::NewWindow, cx| { @@ -375,7 +375,7 @@ pub fn init(cx: &mut App) { Editor::new_file(workspace, &Default::default(), window, cx) }, ) - .detach(); + .detach_and_log_err(cx); } }); _ = ui_input::ERASED_EDITOR_FACTORY.set(|window, cx| { diff --git a/crates/gpui_linux/src/linux/platform.rs b/crates/gpui_linux/src/linux/platform.rs index b3a08310ea419e55c91cd361a032e51163d1b2f3..5929533951738a474cdb76f3047162451de5ce1e 100644 --- a/crates/gpui_linux/src/linux/platform.rs +++ b/crates/gpui_linux/src/linux/platform.rs @@ -46,50 +46,6 @@ pub(crate) const KEYRING_LABEL: &str = "zed-github-account"; const FILE_PICKER_PORTAL_MISSING: &str = "Couldn't open file picker due to missing xdg-desktop-portal implementation."; -#[cfg(any(feature = "x11", feature = "wayland"))] -pub trait ResultExt { - type Ok; - - fn notify_err(self, msg: &'static str) -> Self::Ok; -} - -#[cfg(any(feature = "x11", feature = "wayland"))] -impl ResultExt for 
anyhow::Result { - type Ok = T; - - fn notify_err(self, msg: &'static str) -> T { - match self { - Ok(v) => v, - Err(e) => { - use ashpd::desktop::notification::{Notification, NotificationProxy, Priority}; - use futures::executor::block_on; - - let proxy = block_on(NotificationProxy::new()).expect(msg); - - let notification_id = "dev.zed.Oops"; - block_on( - proxy.add_notification( - notification_id, - Notification::new("Zed failed to launch") - .body(Some( - format!( - "{e:?}. See https://zed.dev/docs/linux for troubleshooting steps." - ) - .as_str(), - )) - .priority(Priority::High) - .icon(ashpd::desktop::Icon::with_names(&[ - "dialog-question-symbolic", - ])), - ) - ).expect(msg); - - panic!("{msg}"); - } - } - } -} - pub(crate) trait LinuxClient { fn compositor_name(&self) -> &'static str; fn with_common(&self, f: impl FnOnce(&mut LinuxCommon) -> R) -> R; diff --git a/crates/gpui_linux/src/linux/wayland/client.rs b/crates/gpui_linux/src/linux/wayland/client.rs index 2378b822c53dce527d622b24da7cb602b4fc7060..a810a00af642c3a252a9a144b884837f82eac7e7 100644 --- a/crates/gpui_linux/src/linux/wayland/client.rs +++ b/crates/gpui_linux/src/linux/wayland/client.rs @@ -74,10 +74,10 @@ use super::{ }; use crate::linux::{ - DOUBLE_CLICK_INTERVAL, LinuxClient, LinuxCommon, LinuxKeyboardLayout, ResultExt as _, - SCROLL_LINES, capslock_from_xkb, cursor_style_to_icon_names, get_xkb_compose_state, - is_within_click_distance, keystroke_from_xkb, keystroke_underlying_dead_key, - modifiers_from_xkb, open_uri_internal, read_fd, reveal_path_internal, + DOUBLE_CLICK_INTERVAL, LinuxClient, LinuxCommon, LinuxKeyboardLayout, SCROLL_LINES, + capslock_from_xkb, cursor_style_to_icon_names, get_xkb_compose_state, is_within_click_distance, + keystroke_from_xkb, keystroke_underlying_dead_key, modifiers_from_xkb, open_uri_internal, + read_fd, reveal_path_internal, wayland::{ clipboard::{Clipboard, DataOffer, FILE_LIST_MIME_TYPE, TEXT_MIME_TYPES}, cursor::Cursor, @@ -201,7 +201,7 @@ pub 
struct Output { pub(crate) struct WaylandClientState { serial_tracker: SerialTracker, globals: Globals, - pub gpu_context: WgpuContext, + pub gpu_context: Option, wl_seat: wl_seat::WlSeat, // TODO: Multi seat support wl_pointer: Option, wl_keyboard: Option, @@ -515,8 +515,7 @@ impl WaylandClient { }) .unwrap(); - // This could be unified with the notification handling in zed/main:fail_to_open_window. - let gpu_context = WgpuContext::new().notify_err("Unable to init GPU context"); + let gpu_context = None; let seat = seat.unwrap(); let globals = Globals::new( @@ -715,13 +714,14 @@ impl LinuxClient for WaylandClient { let parent = state.keyboard_focused_window.clone(); + let appearance = state.common.appearance; let (window, surface_id) = WaylandWindow::new( handle, state.globals.clone(), - &state.gpu_context, + &mut state.gpu_context, WaylandClientStatePtr(Rc::downgrade(&self.0)), params, - state.common.appearance, + appearance, parent, )?; state.windows.insert(surface_id, window.0.clone()); diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index 54e868683696b6b6bee08b6ab09fdae15b9cbaf4..c1006a816a3844db22ea8932177b0f0b2ff1c99f 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -317,7 +317,7 @@ impl WaylandWindowState { viewport: Option, client: WaylandClientStatePtr, globals: Globals, - gpu_context: &WgpuContext, + gpu_context: &mut Option, options: WindowParams, parent: Option, ) -> anyhow::Result { @@ -481,7 +481,7 @@ impl WaylandWindow { pub fn new( handle: AnyWindowHandle, globals: Globals, - gpu_context: &WgpuContext, + gpu_context: &mut Option, client: WaylandClientStatePtr, params: WindowParams, appearance: WindowAppearance, @@ -1230,7 +1230,10 @@ impl PlatformWindow for WaylandWindow { fn is_subpixel_rendering_supported(&self) -> bool { let client = self.borrow().client.get_client(); let state = client.borrow(); - 
state.gpu_context.supports_dual_source_blending() + state + .gpu_context + .as_ref() + .is_some_and(|ctx| ctx.supports_dual_source_blending()) } fn minimize(&self) { diff --git a/crates/gpui_linux/src/linux/x11/client.rs b/crates/gpui_linux/src/linux/x11/client.rs index 7766f23095fccf1a1c8c002314afdf012d3494ea..7e3f67c9bf5fe3176f3badd9b33375ffdeb9dc19 100644 --- a/crates/gpui_linux/src/linux/x11/client.rs +++ b/crates/gpui_linux/src/linux/x11/client.rs @@ -49,10 +49,9 @@ use super::{ }; use crate::linux::{ - DEFAULT_CURSOR_ICON_NAME, LinuxClient, ResultExt as _, capslock_from_xkb, - cursor_style_to_icon_names, get_xkb_compose_state, is_within_click_distance, - keystroke_from_xkb, keystroke_underlying_dead_key, log_cursor_icon_warning, modifiers_from_xkb, - open_uri_internal, + DEFAULT_CURSOR_ICON_NAME, LinuxClient, capslock_from_xkb, cursor_style_to_icon_names, + get_xkb_compose_state, is_within_click_distance, keystroke_from_xkb, + keystroke_underlying_dead_key, log_cursor_icon_warning, modifiers_from_xkb, open_uri_internal, platform::{DOUBLE_CLICK_INTERVAL, SCROLL_LINES}, reveal_path_internal, xdg_desktop_portal::{Event as XDPEvent, XDPEventSource}, @@ -178,7 +177,7 @@ pub struct X11ClientState { pub(crate) last_location: Point, pub(crate) current_count: usize, - pub(crate) gpu_context: WgpuContext, + pub(crate) gpu_context: Option, pub(crate) scale_factor: f32, @@ -421,8 +420,6 @@ impl X11Client { .to_string(); let keyboard_layout = LinuxKeyboardLayout::new(layout_name.into()); - let gpu_context = WgpuContext::new().notify_err("Unable to init GPU context"); - let resource_database = x11rb::resource_manager::new_from_default(&xcb_connection) .context("Failed to create resource database")?; let scale_factor = get_scale_factor(&xcb_connection, &resource_database, x_root_index); @@ -492,7 +489,7 @@ impl X11Client { last_mouse_button: None, last_location: Point::new(px(0.0), px(0.0)), current_count: 0, - gpu_context, + gpu_context: None, scale_factor, xkb_context, @@ 
-1511,19 +1508,25 @@ impl LinuxClient for X11Client { .generate_id() .context("X11: Failed to generate window ID")?; + let xcb_connection = state.xcb_connection.clone(); + let client_side_decorations_supported = state.client_side_decorations_supported; + let x_root_index = state.x_root_index; + let atoms = state.atoms; + let scale_factor = state.scale_factor; + let appearance = state.common.appearance; let window = X11Window::new( handle, X11ClientStatePtr(Rc::downgrade(&self.0)), state.common.foreground_executor.clone(), - &state.gpu_context, + &mut state.gpu_context, params, - &state.xcb_connection, - state.client_side_decorations_supported, - state.x_root_index, + &xcb_connection, + client_side_decorations_supported, + x_root_index, x_window, - &state.atoms, - state.scale_factor, - state.common.appearance, + &atoms, + scale_factor, + appearance, parent_window, )?; check_reply( diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index cc48a86b0c33890d58880360848fa6336cd95a75..8060e4c4457c6ef4575d86c4d975e3ead901f693 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -391,7 +391,7 @@ impl X11WindowState { handle: AnyWindowHandle, client: X11ClientStatePtr, executor: ForegroundExecutor, - gpu_context: &WgpuContext, + gpu_context: &mut Option, params: WindowParams, xcb: &Rc, client_side_decorations_supported: bool, @@ -798,7 +798,7 @@ impl X11Window { handle: AnyWindowHandle, client: X11ClientStatePtr, executor: ForegroundExecutor, - gpu_context: &WgpuContext, + gpu_context: &mut Option, params: WindowParams, xcb: &Rc, client_side_decorations_supported: bool, @@ -1465,7 +1465,10 @@ impl PlatformWindow for X11Window { .upgrade() .map(|ref_cell| { let state = ref_cell.borrow(); - state.gpu_context.supports_dual_source_blending() + state + .gpu_context + .as_ref() + .is_some_and(|ctx| ctx.supports_dual_source_blending()) }) .unwrap_or_default() } diff --git 
a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index b0de623f0e9d611863825f2aa446d1e120a7091e..270201183c8afd33534c184b7dc597ed6ab7d9d5 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ b/crates/gpui_wgpu/src/wgpu_context.rs @@ -11,7 +11,7 @@ pub struct WgpuContext { } impl WgpuContext { - pub fn new() -> anyhow::Result { + pub fn new(instance: wgpu::Instance, surface: &wgpu::Surface<'_>) -> anyhow::Result { let device_id_filter = match std::env::var("ZED_DEVICE_ID") { Ok(val) => parse_pci_id(&val) .context("Failed to parse device ID from `ZED_DEVICE_ID` environment variable") @@ -24,14 +24,24 @@ impl WgpuContext { } }; - let instance = wgpu::Instance::new(&wgpu::InstanceDescriptor { - backends: wgpu::Backends::VULKAN | wgpu::Backends::GL, - flags: wgpu::InstanceFlags::default(), - backend_options: wgpu::BackendOptions::default(), - memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(), - }); - - let adapter = smol::block_on(Self::select_adapter(&instance, device_id_filter))?; + let adapter = smol::block_on(Self::select_adapter( + &instance, + device_id_filter, + Some(surface), + ))?; + + let caps = surface.get_capabilities(&adapter); + if caps.formats.is_empty() { + let info = adapter.get_info(); + anyhow::bail!( + "No adapter compatible with the display surface could be found. 
\ + Best candidate {:?} (backend={:?}, device={:#06x}) reports no \ + supported surface formats.", + info.name, + info.backend, + info.device, + ); + } log::info!( "Selected GPU adapter: {:?} ({:?})", @@ -39,6 +49,42 @@ impl WgpuContext { adapter.get_info().backend ); + let (device, queue, dual_source_blending) = Self::create_device(&adapter)?; + + Ok(Self { + instance, + adapter, + device: Arc::new(device), + queue: Arc::new(queue), + dual_source_blending, + }) + } + + pub fn instance() -> wgpu::Instance { + wgpu::Instance::new(&wgpu::InstanceDescriptor { + backends: wgpu::Backends::VULKAN | wgpu::Backends::GL, + flags: wgpu::InstanceFlags::default(), + backend_options: wgpu::BackendOptions::default(), + memory_budget_thresholds: wgpu::MemoryBudgetThresholds::default(), + }) + } + + pub fn check_compatible_with_surface(&self, surface: &wgpu::Surface<'_>) -> anyhow::Result<()> { + let caps = surface.get_capabilities(&self.adapter); + if caps.formats.is_empty() { + let info = self.adapter.get_info(); + anyhow::bail!( + "Adapter {:?} (backend={:?}, device={:#06x}) is not compatible with the \ + display surface for this window.", + info.name, + info.backend, + info.device, + ); + } + Ok(()) + } + + fn create_device(adapter: &wgpu::Adapter) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> { let dual_source_blending_available = adapter .features() .contains(wgpu::Features::DUAL_SOURCE_BLENDING); @@ -63,18 +109,13 @@ impl WgpuContext { })) .map_err(|e| anyhow::anyhow!("Failed to create wgpu device: {e}"))?; - Ok(Self { - instance, - adapter, - device: Arc::new(device), - queue: Arc::new(queue), - dual_source_blending: dual_source_blending_available, - }) + Ok((device, queue, dual_source_blending_available)) } async fn select_adapter( instance: &wgpu::Instance, device_id_filter: Option, + compatible_surface: Option<&wgpu::Surface<'_>>, ) -> anyhow::Result { if let Some(device_id) = device_id_filter { let adapters: Vec<_> = 
instance.enumerate_adapters(wgpu::Backends::all()).await; @@ -88,6 +129,18 @@ impl WgpuContext { for adapter in adapters.into_iter() { let info = adapter.get_info(); if info.device == device_id { + if let Some(surface) = compatible_surface { + let caps = surface.get_capabilities(&adapter); + if caps.formats.is_empty() { + log::warn!( + "GPU matching ZED_DEVICE_ID={:#06x} ({}) is not compatible \ + with the display surface. Falling back to auto-selection.", + device_id, + info.name, + ); + break; + } + } log::info!( "Found GPU matching ZED_DEVICE_ID={:#06x}: {}", device_id, @@ -100,7 +153,7 @@ impl WgpuContext { } log::warn!( - "No GPU found matching ZED_DEVICE_ID={:#06x}. Available devices:", + "No compatible GPU found matching ZED_DEVICE_ID={:#06x}. Available devices:", device_id ); @@ -117,7 +170,7 @@ impl WgpuContext { instance .request_adapter(&wgpu::RequestAdapterOptions { power_preference: wgpu::PowerPreference::None, - compatible_surface: None, + compatible_surface, force_fallback_adapter: false, }) .await diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index f443f12dd54e599ad97d583b8ebf7f70b0c8f7ea..95d64d952373f303c1015669ee90a93b5d179dd5 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -124,7 +124,7 @@ impl WgpuRenderer { /// The caller must ensure that the window handle remains valid for the lifetime /// of the returned renderer. pub fn new( - context: &WgpuContext, + gpu_context: &mut Option, window: &W, config: WgpuSurfaceConfig, ) -> anyhow::Result { @@ -140,20 +140,32 @@ impl WgpuRenderer { raw_window_handle: window_handle.as_raw(), }; + // Use the existing context's instance if available, otherwise create a new one. + // The surface must be created with the same instance that will be used for + // adapter selection, otherwise wgpu will panic. 
+ let instance = gpu_context + .as_ref() + .map(|ctx| ctx.instance.clone()) + .unwrap_or_else(WgpuContext::instance); + // Safety: The caller guarantees that the window handle is valid for the // lifetime of this renderer. In practice, the RawWindow struct is created // from the native window handles and the surface is dropped before the window. let surface = unsafe { - context - .instance + instance .create_surface_unsafe(target) .map_err(|e| anyhow::anyhow!("Failed to create surface: {e}"))? }; + let context = match gpu_context { + Some(context) => { + context.check_compatible_with_surface(&surface)?; + context + } + None => gpu_context.insert(WgpuContext::new(instance, &surface)?), + }; + let surface_caps = surface.get_capabilities(&context.adapter); - // Prefer standard 8-bit non-sRGB formats that don't require special features. - // Other formats like Rgba16Unorm require TEXTURE_FORMAT_16BIT_NORM which may - // not be available on all devices. let preferred_formats = [ wgpu::TextureFormat::Bgra8Unorm, wgpu::TextureFormat::Rgba8Unorm, @@ -163,26 +175,38 @@ impl WgpuRenderer { .find(|f| surface_caps.formats.contains(f)) .copied() .or_else(|| surface_caps.formats.iter().find(|f| !f.is_srgb()).copied()) - .unwrap_or(surface_caps.formats[0]); + .or_else(|| surface_caps.formats.first().copied()) + .ok_or_else(|| { + anyhow::anyhow!( + "Surface reports no supported texture formats for adapter {:?}", + context.adapter.get_info().name + ) + })?; let pick_alpha_mode = - |preferences: &[wgpu::CompositeAlphaMode]| -> wgpu::CompositeAlphaMode { + |preferences: &[wgpu::CompositeAlphaMode]| -> anyhow::Result { preferences .iter() .find(|p| surface_caps.alpha_modes.contains(p)) .copied() - .unwrap_or(surface_caps.alpha_modes[0]) + .or_else(|| surface_caps.alpha_modes.first().copied()) + .ok_or_else(|| { + anyhow::anyhow!( + "Surface reports no supported alpha modes for adapter {:?}", + context.adapter.get_info().name + ) + }) }; let transparent_alpha_mode = pick_alpha_mode(&[ 
wgpu::CompositeAlphaMode::PreMultiplied, wgpu::CompositeAlphaMode::Inherit, - ]); + ])?; let opaque_alpha_mode = pick_alpha_mode(&[ wgpu::CompositeAlphaMode::Opaque, wgpu::CompositeAlphaMode::Inherit, - ]); + ])?; let alpha_mode = if config.transparent { transparent_alpha_mode diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 1e57334be5997585ecaca517f52134a210b364fe..83d504ea8f1cfbb13b5f0ea97cea6508a04126aa 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -1119,7 +1119,7 @@ fn register_actions( Editor::new_file(workspace, &Default::default(), window, cx) }, ) - .detach(); + .detach_and_log_err(cx); } } }) diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index 94e125ab98c44282c037704158c48e69d7b3a785..a7d1da663b3da6848d3552707f261fe02beba56b 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -498,7 +498,7 @@ async fn open_workspaces( workspace::open_new(open_options, app_state, cx, |workspace, window, cx| { Editor::new_file(workspace, &Default::default(), window, cx) }) - .detach(); + .detach_and_log_err(cx); }); } return Ok(()); From b5eeeb98e4b7ca92e42b8a3b5395aee2dc4067ec Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 24 Feb 2026 10:24:38 +0100 Subject: [PATCH 019/548] agent_ui: Avoid querying `OnboardingUpsell` in prepaint (#49963) This hits the sqlite database unnecessarily Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/acp_thread/src/acp_thread.rs | 2 +- crates/agent_ui/src/agent_panel.rs | 36 +++++++++++++++++++++++++---- crates/text/src/text.rs | 10 ++++---- 3 files changed, 38 insertions(+), 10 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 37fa2488524bf325755f1807125d9685821c04ee..fea3236e1697e3af189da2e6a0f14d70a6f1c6f6 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -2330,7 +2330,7 @@ impl AcpThread { text_diff(old_text.as_str(), &content) .into_iter() .map(|(range, replacement)| { - (snapshot.anchor_range_between(range), replacement) + (snapshot.anchor_range_around(range), replacement) }) .collect::>() }) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index f53682744fba5124f8751da8b0607cb01d482a3e..cdc0ee0b1fd9287f065e2bc7c8f7c84086689050 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -1,4 +1,13 @@ -use std::{ops::Range, path::Path, rc::Rc, sync::Arc, time::Duration}; +use std::{ + ops::Range, + path::Path, + rc::Rc, + sync::{ + Arc, + atomic::{AtomicBool, Ordering}, + }, + time::Duration, +}; use acp_thread::{AcpThread, AgentSessionInfo, MentionUri}; use agent::{ContextServerRegistry, SharedThread, ThreadStore}; @@ -241,7 +250,14 @@ pub fn init(cx: &mut App) { window.dispatch_action(workspace::RestoreBanner.boxed_clone(), cx); window.refresh(); }) - .register_action(|_workspace, _: &ResetTrialUpsell, _window, cx| { + .register_action(|workspace, _: &ResetTrialUpsell, _window, cx| { + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, _| { + panel + .on_boarding_upsell_dismissed + .store(false, Ordering::Release); + }); + } OnboardingUpsell::set_dismissed(false, cx); }) .register_action(|_workspace, _: &ResetTrialEndUpsell, _window, cx| { @@ -524,6 +540,7 @@ pub struct AgentPanel { selected_agent: AgentType, show_trust_workspace_message: bool, 
last_configuration_error_telemetry: Option, + on_boarding_upsell_dismissed: AtomicBool, } impl AgentPanel { @@ -743,11 +760,19 @@ impl AgentPanel { .ok(); }); + let weak_panel = cx.entity().downgrade(); let onboarding = cx.new(|cx| { AgentPanelOnboarding::new( user_store.clone(), client, - |_window, cx| { + move |_window, cx| { + weak_panel + .update(cx, |panel, _| { + panel + .on_boarding_upsell_dismissed + .store(true, Ordering::Release); + }) + .ok(); OnboardingUpsell::set_dismissed(true, cx); }, cx, @@ -803,6 +828,7 @@ impl AgentPanel { selected_agent: AgentType::default(), show_trust_workspace_message: false, last_configuration_error_telemetry: None, + on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed()), }; // Initial sync of agent servers from extensions @@ -2773,7 +2799,7 @@ impl AgentPanel { } fn should_render_onboarding(&self, cx: &mut Context) -> bool { - if OnboardingUpsell::dismissed() { + if self.on_boarding_upsell_dismissed.load(Ordering::Acquire) { return false; } @@ -2786,6 +2812,8 @@ impl AgentPanel { .is_some_and(|date| date < chrono::Utc::now()) { OnboardingUpsell::set_dismissed(true, cx); + self.on_boarding_upsell_dismissed + .store(true, Ordering::Release); return false; } diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index b3a51e68ca21fcb93cb3f24ed8f3350de6de2208..2c51a0d5e5b29bc08fdacc6b8b90edd8f65cd83d 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -2403,13 +2403,13 @@ impl BufferSnapshot { } } - /// Returns an anchor range for the given input position range that is anchored to the text inbetween. - pub fn anchor_range_between(&self, position: Range) -> Range { - self.anchor_before(position.start)..self.anchor_after(position.end) + /// Returns an anchor range for the given input position range that is anchored to the text in the range. 
+ pub fn anchor_range_around(&self, position: Range) -> Range { + self.anchor_after(position.start)..self.anchor_before(position.end) } - /// Returns an anchor range for the given input position range that is anchored to the text before the start position and after the end position. - pub fn anchor_range_around(&self, position: Range) -> Range { + /// Returns an anchor range for the given input position range that is anchored to the text before and after. + pub fn anchor_range_between(&self, position: Range) -> Range { self.anchor_before(position.start)..self.anchor_after(position.end) } From fe6e528a4919b56184312151c35ca5c1d8b1bc5b Mon Sep 17 00:00:00 2001 From: Emamul Andalib Date: Tue, 24 Feb 2026 11:27:22 +0100 Subject: [PATCH 020/548] terminal: Fix mouse scroll report count for negative scroll lines (#49931) Follow-up to #45600. ## Summary Fix mouse scroll reports sending only one event when scrolling down in terminal apps with mouse mode (tmux, neovim, etc.), regardless of how many lines were scrolled. ## The Problem After #45600, trackpad scrolling speed was fixed. But when scrolling **down** (negative `scroll_lines`), the terminal was still sending only **one** scroll report per gesture, no matter how many lines the user scrolled. Scrolling up worked correctly. ## Root Cause In `scroll_report()` we had: https://github.com/zed-industries/zed/blob/a8043dcff8f28a0443d7ec238e7f020689ebe1ff/crates/terminal/src/mappings/mouse.rs#L96 `scroll_lines` can be negative (scroll down) or positive (scroll up). For negative values: | scroll_lines | max(scroll_lines, 1) | Reports sent | Verdict | |--------------|---------------------|--------------|------| | 3 (up) | 3 | 3 |Right | -3 (down) | 1 | 1 |WRONG| So we always sent exactly 1 report when scrolling down, losing the scroll magnitude. Use `scroll_lines.unsigned_abs()` instead of `max(scroll_lines, 1)`. This matches how `alt_scroll()` in the same file already handles `scroll_lines`. 
Now both directions send the correct number of reports. https://github.com/zed-industries/zed/blob/a8043dcff8f28a0443d7ec238e7f020689ebe1ff/crates/terminal/src/mappings/mouse.rs#L102 ## Testing - Added unit tests: `scroll_report_repeats_for_negative_scroll_lines` and `scroll_report_repeats_for_positive_scroll_lines` - Manually tested scrolling in tmux and neovim with mouse mode --- Release Notes: - Fixed mouse scroll in terminal apps (tmux, neovim, etc.) only sending one scroll event when scrolling down, regardless of scroll amount --- crates/terminal/src/mappings/mouse.rs | 46 +++++++++++++++++++++++++-- 1 file changed, 44 insertions(+), 2 deletions(-) diff --git a/crates/terminal/src/mappings/mouse.rs b/crates/terminal/src/mappings/mouse.rs index 8c3eed8b54972806bdb71c5d4671cfe2a4705ce4..ffd60a83aab24bd7272d90d3e12d48fcdf65bf17 100644 --- a/crates/terminal/src/mappings/mouse.rs +++ b/crates/terminal/src/mappings/mouse.rs @@ -1,4 +1,4 @@ -use std::cmp::{self, max, min}; +use std::cmp::{self, min}; use std::iter::repeat; use alacritty_terminal::grid::Dimensions; @@ -93,12 +93,54 @@ pub fn scroll_report( e.modifiers, MouseFormat::from_mode(mode), ) - .map(|report| repeat(report).take(max(scroll_lines, 1) as usize)) + .map(|report| repeat(report).take(scroll_lines.unsigned_abs() as usize)) } else { None } } +#[cfg(test)] +mod tests { + use super::*; + use gpui::{ScrollDelta, TouchPhase, point}; + + #[test] + fn scroll_report_repeats_for_negative_scroll_lines() { + let grid_point = AlacPoint::new(GridLine(0), GridCol(0)); + + let scroll_event = ScrollWheelEvent { + delta: ScrollDelta::Lines(point(0., -1.)), + touch_phase: TouchPhase::Moved, + ..Default::default() + }; + + let mode = TermMode::MOUSE_MODE; + let reports: Vec> = scroll_report(grid_point, -3, &scroll_event, mode) + .expect("mouse mode should produce a scroll report") + .collect(); + + assert_eq!(reports.len(), 3); + } + + #[test] + fn scroll_report_repeats_for_positive_scroll_lines() { + let grid_point = 
AlacPoint::new(GridLine(0), GridCol(0)); + + let scroll_event = ScrollWheelEvent { + delta: ScrollDelta::Lines(point(0., 1.)), + touch_phase: TouchPhase::Moved, + ..Default::default() + }; + + let mode = TermMode::MOUSE_MODE; + let reports: Vec> = scroll_report(grid_point, 3, &scroll_event, mode) + .expect("mouse mode should produce a scroll report") + .collect(); + + assert_eq!(reports.len(), 3); + } +} + pub fn alt_scroll(scroll_lines: i32) -> Vec { let cmd = if scroll_lines > 0 { b'A' } else { b'B' }; From 7b519477f0df7d4dfef2314e21a1124917989787 Mon Sep 17 00:00:00 2001 From: Felix Zeller Date: Tue, 24 Feb 2026 02:37:16 -0800 Subject: [PATCH 021/548] Fix backward compatibility for v0.0.4 GitHub extension bindings (#49858) ## Summary - Route `zed:extension/github` in `since_v0_0_4` to the `since_v0_6_0` bindings - Call `latest_github_release` through `since_v0_6_0` for compatibility with the v0.0.4 extension API Release Notes: - Fixed backward compatibility for v0.0.4 extension API GitHub bindings. 
--- crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs b/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs index 11b2e9f66187ea04983b83ace5814620e7ae7f53..6d7db749f0cd021bfb084eba1bc20ce72780f3d8 100644 --- a/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs +++ b/crates/extension_host/src/wasm_host/wit/since_v0_0_4.rs @@ -1,4 +1,4 @@ -use super::latest; +use super::{latest, since_v0_6_0}; use crate::wasm_host::WasmState; use anyhow::Result; use extension::WorktreeDelegate; @@ -15,7 +15,7 @@ wasmtime::component::bindgen!({ path: "../extension_api/wit/since_v0.0.4", with: { "worktree": ExtensionWorktree, - "zed:extension/github": latest::zed::extension::github, + "zed:extension/github": since_v0_6_0::zed::extension::github, "zed:extension/platform": latest::zed::extension::platform, }, }); @@ -129,7 +129,7 @@ impl ExtensionImports for WasmState { repo: String, options: GithubReleaseOptions, ) -> wasmtime::Result> { - latest::zed::extension::github::Host::latest_github_release(self, repo, options).await + since_v0_6_0::zed::extension::github::Host::latest_github_release(self, repo, options).await } async fn current_platform(&mut self) -> Result<(Os, Architecture)> { From ca7d48a75f5e79b6d767b600a3c4d86aecbfbb08 Mon Sep 17 00:00:00 2001 From: Dario Griffo Date: Tue, 24 Feb 2026 11:08:10 +0000 Subject: [PATCH 022/548] Update linux.md to add Ubuntu community packages (#49654) Today I added ubuntu LTS distributions to my debian repo. You can find the index of noble and jammy here https://debian.griffo.io/apt/dists/noble/main/binary-amd64/Packages https://debian.griffo.io/apt/dists/jammy/main/binary-amd64/Packages So far zed is the second most download package in my repo! 
image Release Notes: - N/A --- docs/src/linux.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/linux.md b/docs/src/linux.md index b1a82c332fa2ab1dde32762540e13a211e1ab3e8..784e09696906db04a0af34f8d0926e0524a0cea9 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -65,7 +65,7 @@ We'd love your help making Zed available for everyone. If Zed is not yet availab The packages in this section provide binary installs for Zed but are not official packages within the associated distributions. These packages are maintained by community members and as such a higher level of caution should be taken when installing them. -#### Debian +#### Debian and Ubuntu Zed is available in [this community-maintained repository](https://debian.griffo.io/). From 060d0712f808d140adc48dcc6c90c6abcdbb1638 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Tue, 24 Feb 2026 12:19:34 +0100 Subject: [PATCH 023/548] workspace: Invert dependency on call crate by extracting into a trait (#49968) Release Notes: - N/A Co-authored-by: Piotr Osiewicz --- Cargo.lock | 1 + crates/call/Cargo.toml | 5 +- crates/call/src/call_impl/mod.rs | 260 ++++++++++++++- crates/call/src/call_impl/participant.rs | 27 +- crates/call/src/call_impl/room.rs | 3 +- .../tests/integration/following_tests.rs | 7 +- .../tests/integration/integration_tests.rs | 4 +- crates/git_ui/src/git_panel.rs | 13 +- crates/title_bar/src/collab.rs | 4 +- crates/workspace/Cargo.toml | 1 - crates/workspace/src/pane_group.rs | 15 +- crates/workspace/src/shared_screen.rs | 37 +-- crates/workspace/src/workspace.rs | 311 +++++++++++------- 13 files changed, 481 insertions(+), 207 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 964fce6bf3acaadff8a539df9937c84b1d0bdb74..934e0d1a01482d57e456057860ee45037f39d570 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2484,6 +2484,7 @@ dependencies = [ "settings", "telemetry", "util", + "workspace", ] [[package]] diff --git a/crates/call/Cargo.toml b/crates/call/Cargo.toml 
index ff034f914b0be44e6ec9f6475881ed79c368cd8a..2e46b58b74b826e8892d1e9da28c3cf06c99aa9b 100644 --- a/crates/call/Cargo.toml +++ b/crates/call/Cargo.toml @@ -31,7 +31,9 @@ fs.workspace = true futures.workspace = true feature_flags.workspace = true gpui = { workspace = true, features = ["screen-capture"] } +gpui_tokio.workspace = true language.workspace = true +livekit_client.workspace = true log.workspace = true postage.workspace = true project.workspace = true @@ -39,8 +41,7 @@ serde.workspace = true settings.workspace = true telemetry.workspace = true util.workspace = true -gpui_tokio.workspace = true -livekit_client.workspace = true +workspace.workspace = true [dev-dependencies] client = { workspace = true, features = ["test-support"] } diff --git a/crates/call/src/call_impl/mod.rs b/crates/call/src/call_impl/mod.rs index 08d3a28e10787ada3664c970ab52ea968ca54860..e3945cf2c746f4c598caa7996deb2c76fc859e64 100644 --- a/crates/call/src/call_impl/mod.rs +++ b/crates/call/src/call_impl/mod.rs @@ -7,25 +7,265 @@ use client::{ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIV use collections::HashSet; use futures::{Future, FutureExt, channel::oneshot, future::Shared}; use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Subscription, Task, - WeakEntity, + AnyView, App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, + WeakEntity, Window, }; use postage::watch; use project::Project; use room::Event; +use settings::Settings; use std::sync::Arc; +use workspace::{ + ActiveCallEvent, AnyActiveCall, GlobalAnyActiveCall, Pane, RemoteCollaborator, SharedScreen, + Workspace, +}; pub use livekit_client::{RemoteVideoTrack, RemoteVideoTrackView, RemoteVideoTrackViewEvent}; -pub use participant::ParticipantLocation; pub use room::Room; -struct GlobalActiveCall(Entity); - -impl Global for GlobalActiveCall {} +use crate::call_settings::CallSettings; pub fn init(client: Arc, user_store: Entity, cx: &mut App) 
{ let active_call = cx.new(|cx| ActiveCall::new(client, user_store, cx)); - cx.set_global(GlobalActiveCall(active_call)); + cx.set_global(GlobalAnyActiveCall(Arc::new(ActiveCallEntity(active_call)))) +} + +#[derive(Clone)] +struct ActiveCallEntity(Entity); + +impl AnyActiveCall for ActiveCallEntity { + fn entity(&self) -> gpui::AnyEntity { + self.0.clone().into_any() + } + + fn is_in_room(&self, cx: &App) -> bool { + self.0.read(cx).room().is_some() + } + + fn room_id(&self, cx: &App) -> Option { + Some(self.0.read(cx).room()?.read(cx).id()) + } + + fn channel_id(&self, cx: &App) -> Option { + self.0.read(cx).room()?.read(cx).channel_id() + } + + fn hang_up(&self, cx: &mut App) -> Task> { + self.0.update(cx, |this, cx| this.hang_up(cx)) + } + + fn unshare_project(&self, project: Entity, cx: &mut App) -> Result<()> { + self.0 + .update(cx, |this, cx| this.unshare_project(project, cx)) + } + + fn remote_participant_for_peer_id( + &self, + peer_id: proto::PeerId, + cx: &App, + ) -> Option { + let room = self.0.read(cx).room()?.read(cx); + let participant = room.remote_participant_for_peer_id(peer_id)?; + Some(RemoteCollaborator { + user: participant.user.clone(), + peer_id: participant.peer_id, + location: participant.location, + participant_index: participant.participant_index, + }) + } + + fn is_sharing_project(&self, cx: &App) -> bool { + self.0 + .read(cx) + .room() + .map_or(false, |room| room.read(cx).is_sharing_project()) + } + + fn has_remote_participants(&self, cx: &App) -> bool { + self.0.read(cx).room().map_or(false, |room| { + !room.read(cx).remote_participants().is_empty() + }) + } + + fn local_participant_is_guest(&self, cx: &App) -> bool { + self.0 + .read(cx) + .room() + .map_or(false, |room| room.read(cx).local_participant_is_guest()) + } + + fn client(&self, cx: &App) -> Arc { + self.0.read(cx).client() + } + + fn share_on_join(&self, cx: &App) -> bool { + CallSettings::get_global(cx).share_on_join + } + + fn join_channel(&self, channel_id: 
ChannelId, cx: &mut App) -> Task> { + let task = self + .0 + .update(cx, |this, cx| this.join_channel(channel_id, cx)); + cx.spawn(async move |_cx| { + let result = task.await?; + Ok(result.is_some()) + }) + } + + fn room_update_completed(&self, cx: &mut App) -> Task<()> { + let Some(room) = self.0.read(cx).room().cloned() else { + return Task::ready(()); + }; + let future = room.update(cx, |room, _cx| room.room_update_completed()); + cx.spawn(async move |_cx| { + future.await; + }) + } + + fn most_active_project(&self, cx: &App) -> Option<(u64, u64)> { + let room = self.0.read(cx).room()?; + room.read(cx).most_active_project(cx) + } + + fn share_project(&self, project: Entity, cx: &mut App) -> Task> { + self.0 + .update(cx, |this, cx| this.share_project(project, cx)) + } + + fn join_project( + &self, + project_id: u64, + language_registry: Arc, + fs: Arc, + cx: &mut App, + ) -> Task>> { + let Some(room) = self.0.read(cx).room().cloned() else { + return Task::ready(Err(anyhow::anyhow!("not in a call"))); + }; + room.update(cx, |room, cx| { + room.join_project(project_id, language_registry, fs, cx) + }) + } + + fn peer_id_for_user_in_room(&self, user_id: u64, cx: &App) -> Option { + let room = self.0.read(cx).room()?.read(cx); + room.remote_participants() + .values() + .find(|p| p.user.id == user_id) + .map(|p| p.peer_id) + } + + fn subscribe( + &self, + window: &mut Window, + cx: &mut Context, + handler: Box< + dyn Fn(&mut Workspace, &ActiveCallEvent, &mut Window, &mut Context), + >, + ) -> Subscription { + cx.subscribe_in( + &self.0, + window, + move |workspace, _, event: &room::Event, window, cx| { + let mapped = match event { + room::Event::ParticipantLocationChanged { participant_id } => { + Some(ActiveCallEvent::ParticipantLocationChanged { + participant_id: *participant_id, + }) + } + room::Event::RemoteVideoTracksChanged { participant_id } => { + Some(ActiveCallEvent::RemoteVideoTracksChanged { + participant_id: *participant_id, + }) + } + _ => None, + }; + 
if let Some(event) = mapped { + handler(workspace, &event, window, cx); + } + }, + ) + } + + fn create_shared_screen( + &self, + peer_id: client::proto::PeerId, + pane: &Entity, + window: &mut Window, + cx: &mut App, + ) -> Option> { + let room = self.0.read(cx).room()?.clone(); + let participant = room.read(cx).remote_participant_for_peer_id(peer_id)?; + let track = participant.video_tracks.values().next()?.clone(); + let user = participant.user.clone(); + + for item in pane.read(cx).items_of_type::() { + if item.read(cx).peer_id == peer_id { + return Some(item); + } + } + + Some(cx.new(|cx: &mut Context| { + let my_sid = track.sid(); + cx.subscribe( + &room, + move |_: &mut SharedScreen, + _: Entity, + ev: &room::Event, + cx: &mut Context| { + if let room::Event::RemoteVideoTrackUnsubscribed { sid } = ev + && *sid == my_sid + { + cx.emit(workspace::shared_screen::Event::Close); + } + }, + ) + .detach(); + + cx.observe_release( + &room, + |_: &mut SharedScreen, _: &mut Room, cx: &mut Context| { + cx.emit(workspace::shared_screen::Event::Close); + }, + ) + .detach(); + + let view = cx.new(|cx| RemoteVideoTrackView::new(track.clone(), window, cx)); + cx.subscribe( + &view, + |_: &mut SharedScreen, + _: Entity, + ev: &RemoteVideoTrackViewEvent, + cx: &mut Context| match ev { + RemoteVideoTrackViewEvent::Close => { + cx.emit(workspace::shared_screen::Event::Close); + } + }, + ) + .detach(); + + pub(super) fn clone_remote_video_track_view( + view: &AnyView, + window: &mut Window, + cx: &mut App, + ) -> AnyView { + let view = view + .clone() + .downcast::() + .expect("SharedScreen view must be a RemoteVideoTrackView"); + let cloned = view.update(cx, |view, cx| view.clone(window, cx)); + AnyView::from(cloned) + } + + SharedScreen::new( + peer_id, + user, + AnyView::from(view), + clone_remote_video_track_view, + cx, + ) + })) + } } pub struct OneAtATime { @@ -152,12 +392,12 @@ impl ActiveCall { } pub fn global(cx: &App) -> Entity { - cx.global::().0.clone() + 
Self::try_global(cx).unwrap() } pub fn try_global(cx: &App) -> Option> { - cx.try_global::() - .map(|call| call.0.clone()) + let any = cx.try_global::()?; + any.0.entity().downcast::().ok() } pub fn invite( diff --git a/crates/call/src/call_impl/participant.rs b/crates/call/src/call_impl/participant.rs index 6fb6a2eb79b537aa9d7296a323f7d45221a4b05d..58d3329f853bda1e0f5b5463c1b5bacacf787adc 100644 --- a/crates/call/src/call_impl/participant.rs +++ b/crates/call/src/call_impl/participant.rs @@ -1,4 +1,3 @@ -use anyhow::{Context as _, Result}; use client::{ParticipantIndex, User, proto}; use collections::HashMap; use gpui::WeakEntity; @@ -9,30 +8,6 @@ use std::sync::Arc; pub use livekit_client::TrackSid; pub use livekit_client::{RemoteAudioTrack, RemoteVideoTrack}; -#[derive(Copy, Clone, Debug, Eq, PartialEq)] -pub enum ParticipantLocation { - SharedProject { project_id: u64 }, - UnsharedProject, - External, -} - -impl ParticipantLocation { - pub fn from_proto(location: Option) -> Result { - match location - .and_then(|l| l.variant) - .context("participant location was not provided")? 
- { - proto::participant_location::Variant::SharedProject(project) => { - Ok(Self::SharedProject { - project_id: project.id, - }) - } - proto::participant_location::Variant::UnsharedProject(_) => Ok(Self::UnsharedProject), - proto::participant_location::Variant::External(_) => Ok(Self::External), - } - } -} - #[derive(Clone, Default)] pub struct LocalParticipant { pub projects: Vec, @@ -54,7 +29,7 @@ pub struct RemoteParticipant { pub peer_id: proto::PeerId, pub role: proto::ChannelRole, pub projects: Vec, - pub location: ParticipantLocation, + pub location: workspace::ParticipantLocation, pub participant_index: ParticipantIndex, pub muted: bool, pub speaking: bool, diff --git a/crates/call/src/call_impl/room.rs b/crates/call/src/call_impl/room.rs index a2e1ac2fcc2779f2340dd35d5800749cb6bfcbb2..701d7dd65423f97b3f4d5cfa4a198083593211e6 100644 --- a/crates/call/src/call_impl/room.rs +++ b/crates/call/src/call_impl/room.rs @@ -1,6 +1,6 @@ use crate::{ call_settings::CallSettings, - participant::{LocalParticipant, ParticipantLocation, RemoteParticipant}, + participant::{LocalParticipant, RemoteParticipant}, }; use anyhow::{Context as _, Result, anyhow}; use audio::{Audio, Sound}; @@ -25,6 +25,7 @@ use project::Project; use settings::Settings as _; use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration, time::Instant}; use util::{ResultExt, TryFutureExt, paths::PathStyle, post_inc}; +use workspace::ParticipantLocation; pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30); diff --git a/crates/collab/tests/integration/following_tests.rs b/crates/collab/tests/integration/following_tests.rs index 6bdb06a6c5a0ffb95bc75a026a26c4797030f8ce..b761bef9ec3be679d55d1c82e3cb5cce0ac7f14e 100644 --- a/crates/collab/tests/integration/following_tests.rs +++ b/crates/collab/tests/integration/following_tests.rs @@ -1,6 +1,6 @@ #![allow(clippy::reversed_empty_ranges)] use crate::TestServer; -use call::{ActiveCall, ParticipantLocation}; +use call::ActiveCall; use 
client::ChannelId; use collab_ui::{ channel_view::ChannelView, @@ -17,7 +17,10 @@ use serde_json::json; use settings::SettingsStore; use text::{Point, ToPoint}; use util::{path, rel_path::rel_path, test::sample_text}; -use workspace::{CollaboratorId, MultiWorkspace, SplitDirection, Workspace, item::ItemHandle as _}; +use workspace::{ + CollaboratorId, MultiWorkspace, ParticipantLocation, SplitDirection, Workspace, + item::ItemHandle as _, +}; use super::TestClient; diff --git a/crates/collab/tests/integration/integration_tests.rs b/crates/collab/tests/integration/integration_tests.rs index 413aa802a1e63982de4a4563917cdcf7e6a55c81..c26f20c1e294326f275dbfda1d2d41603719cd3e 100644 --- a/crates/collab/tests/integration/integration_tests.rs +++ b/crates/collab/tests/integration/integration_tests.rs @@ -6,7 +6,7 @@ use anyhow::{Result, anyhow}; use assistant_slash_command::SlashCommandWorkingSet; use assistant_text_thread::TextThreadStore; use buffer_diff::{DiffHunkSecondaryStatus, DiffHunkStatus, assert_hunks}; -use call::{ActiveCall, ParticipantLocation, Room, room}; +use call::{ActiveCall, Room, room}; use client::{RECEIVE_TIMEOUT, User}; use collab::rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT}; use collections::{BTreeMap, HashMap, HashSet}; @@ -51,7 +51,7 @@ use std::{ }; use unindent::Unindent as _; use util::{path, rel_path::rel_path, uri}; -use workspace::Pane; +use workspace::{Pane, ParticipantLocation}; #[ctor::ctor] fn init_logger() { diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 830870db63a3377bd3fff07eee57f53b6ae87d44..b8caf478305609b7ea95874333f1483c448ac242 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -3257,10 +3257,8 @@ impl GitPanel { let mut new_co_authors = Vec::new(); let project = self.project.read(cx); - let Some(room) = self - .workspace - .upgrade() - .and_then(|workspace| workspace.read(cx).active_call()?.read(cx).room().cloned()) + let Some(room) = + 
call::ActiveCall::try_global(cx).and_then(|call| call.read(cx).room().cloned()) else { return Vec::default(); }; @@ -5520,10 +5518,9 @@ impl Render for GitPanel { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let project = self.project.read(cx); let has_entries = !self.entries.is_empty(); - let room = self - .workspace - .upgrade() - .and_then(|workspace| workspace.read(cx).active_call()?.read(cx).room().cloned()); + let room = self.workspace.upgrade().and_then(|_workspace| { + call::ActiveCall::try_global(cx).and_then(|call| call.read(cx).room().cloned()) + }); let has_write_access = self.has_write_access(cx); diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index c5071d1fff466a4352781be88d728fafe4f4ce78..58e4d2a8fcfdfe885b7ddf51b20e193625950ce0 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -1,7 +1,7 @@ use std::rc::Rc; use std::sync::Arc; -use call::{ActiveCall, ParticipantLocation, Room}; +use call::{ActiveCall, Room}; use channel::ChannelStore; use client::{User, proto::PeerId}; use gpui::{ @@ -18,7 +18,7 @@ use ui::{ Facepile, PopoverMenu, SplitButton, SplitButtonStyle, TintColor, Tooltip, prelude::*, }; use util::rel_path::RelPath; -use workspace::notifications::DetachAndPromptErr; +use workspace::{ParticipantLocation, notifications::DetachAndPromptErr}; use crate::TitleBar; diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index 3d9146250cd1df385761676b18d47ddcd3813dc6..dcd0bf640fdf279fb1874ba77307ccbd3c431393 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -30,7 +30,6 @@ test-support = [ any_vec.workspace = true anyhow.workspace = true async-recursion.workspace = true -call.workspace = true client.workspace = true chrono.workspace = true clock.workspace = true diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 
0f8cef616f5ed03c31eaf3511c58922ae230e385..1d28b05514baa53244926bfad906e667b0b287cd 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -1,10 +1,10 @@ use crate::{ - AppState, CollaboratorId, FollowerState, Pane, Workspace, WorkspaceSettings, + AnyActiveCall, AppState, CollaboratorId, FollowerState, Pane, ParticipantLocation, Workspace, + WorkspaceSettings, pane_group::element::pane_axis, workspace_settings::{PaneSplitDirectionHorizontal, PaneSplitDirectionVertical}, }; use anyhow::Result; -use call::{ActiveCall, ParticipantLocation}; use collections::HashMap; use gpui::{ Along, AnyView, AnyWeakView, Axis, Bounds, Entity, Hsla, IntoElement, MouseButton, Pixels, @@ -296,7 +296,7 @@ impl Member { pub struct PaneRenderContext<'a> { pub project: &'a Entity, pub follower_states: &'a HashMap, - pub active_call: Option<&'a Entity>, + pub active_call: Option<&'a dyn AnyActiveCall>, pub active_pane: &'a Entity, pub app_state: &'a Arc, pub workspace: &'a WeakEntity, @@ -358,10 +358,11 @@ impl PaneLeaderDecorator for PaneRenderContext<'_> { let status_box; match leader_id { CollaboratorId::PeerId(peer_id) => { - let Some(leader) = self.active_call.as_ref().and_then(|call| { - let room = call.read(cx).room()?.read(cx); - room.remote_participant_for_peer_id(peer_id) - }) else { + let Some(leader) = self + .active_call + .as_ref() + .and_then(|call| call.remote_participant_for_peer_id(peer_id, cx)) + else { return LeaderDecoration::default(); }; diff --git a/crates/workspace/src/shared_screen.rs b/crates/workspace/src/shared_screen.rs index fc4ae7292a04781cb8cf790154c8b04a4c5e9bc5..136f552fee23231b45fcb867d2ce8bab02dca7e8 100644 --- a/crates/workspace/src/shared_screen.rs +++ b/crates/workspace/src/shared_screen.rs @@ -2,10 +2,9 @@ use crate::{ ItemNavHistory, WorkspaceId, item::{Item, ItemEvent}, }; -use call::{RemoteVideoTrack, RemoteVideoTrackView, Room}; use client::{User, proto::PeerId}; use gpui::{ - AppContext as _, Entity, 
EventEmitter, FocusHandle, Focusable, InteractiveElement, + AnyView, AppContext as _, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement, ParentElement, Render, SharedString, Styled, Task, div, }; use std::sync::Arc; @@ -19,45 +18,26 @@ pub struct SharedScreen { pub peer_id: PeerId, user: Arc, nav_history: Option, - view: Entity, + view: AnyView, + clone_view: fn(&AnyView, &mut Window, &mut App) -> AnyView, focus: FocusHandle, } impl SharedScreen { pub fn new( - track: RemoteVideoTrack, peer_id: PeerId, user: Arc, - room: Entity, - window: &mut Window, + view: AnyView, + clone_view: fn(&AnyView, &mut Window, &mut App) -> AnyView, cx: &mut Context, ) -> Self { - let my_sid = track.sid(); - cx.subscribe(&room, move |_, _, ev, cx| { - if let call::room::Event::RemoteVideoTrackUnsubscribed { sid } = ev - && sid == &my_sid - { - cx.emit(Event::Close) - } - }) - .detach(); - - cx.observe_release(&room, |_, _, cx| { - cx.emit(Event::Close); - }) - .detach(); - - let view = cx.new(|cx| RemoteVideoTrackView::new(track.clone(), window, cx)); - cx.subscribe(&view, |_, _, ev, cx| match ev { - call::RemoteVideoTrackViewEvent::Close => cx.emit(Event::Close), - }) - .detach(); Self { view, peer_id, user, nav_history: Default::default(), focus: cx.focus_handle(), + clone_view, } } } @@ -124,12 +104,15 @@ impl Item for SharedScreen { window: &mut Window, cx: &mut Context, ) -> Task>> { + let clone_view = self.clone_view; + let cloned_view = clone_view(&self.view, window, cx); Task::ready(Some(cx.new(|cx| Self { - view: self.view.update(cx, |view, cx| view.clone(window, cx)), + view: cloned_view, peer_id: self.peer_id, user: self.user.clone(), nav_history: Default::default(), focus: cx.focus_handle(), + clone_view, }))) } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 4db38daa2ae6718ac6c3e0dad7d1d46433b27e07..ac83809f8d313e842e72d19fb98b8b5d1b69df0f 100644 --- a/crates/workspace/src/workspace.rs +++ 
b/crates/workspace/src/workspace.rs @@ -12,6 +12,7 @@ mod persistence; pub mod searchable; mod security_modal; pub mod shared_screen; +pub use shared_screen::SharedScreen; mod status_bar; pub mod tasks; mod theme_preview; @@ -31,13 +32,13 @@ pub use path_list::PathList; pub use toast_layer::{ToastAction, ToastLayer, ToastView}; use anyhow::{Context as _, Result, anyhow}; -use call::{ActiveCall, call_settings::CallSettings}; use client::{ - ChannelId, Client, ErrorExt, Status, TypedEnvelope, UserStore, + ChannelId, Client, ErrorExt, ParticipantIndex, Status, TypedEnvelope, User, UserStore, proto::{self, ErrorCode, PanelId, PeerId}, }; use collections::{HashMap, HashSet, hash_map}; use dock::{Dock, DockPosition, PanelButtons, PanelHandle, RESIZE_HANDLE_SIZE}; +use fs::Fs; use futures::{ Future, FutureExt, StreamExt, channel::{ @@ -97,7 +98,7 @@ use session::AppSession; use settings::{ CenteredPaddingSettings, Settings, SettingsLocation, SettingsStore, update_settings_file, }; -use shared_screen::SharedScreen; + use sqlez::{ bindable::{Bind, Column, StaticColumnCount}, statement::Statement, @@ -1258,7 +1259,7 @@ pub struct Workspace { window_edited: bool, last_window_title: Option, dirty_items: HashMap, - active_call: Option<(Entity, Vec)>, + active_call: Option<(GlobalAnyActiveCall, Vec)>, leader_updates_tx: mpsc::UnboundedSender<(PeerId, proto::UpdateFollowers)>, database_id: Option, app_state: Arc, @@ -1572,8 +1573,12 @@ impl Workspace { let session_id = app_state.session.read(cx).id().to_owned(); let mut active_call = None; - if let Some(call) = ActiveCall::try_global(cx) { - let subscriptions = vec![cx.subscribe_in(&call, window, Self::on_active_call_event)]; + if let Some(call) = GlobalAnyActiveCall::try_global(cx).cloned() { + let subscriptions = + vec![ + call.0 + .subscribe(window, cx, Box::new(Self::on_active_call_event)), + ]; active_call = Some((call, subscriptions)); } @@ -2692,7 +2697,7 @@ impl Workspace { window: &mut Window, cx: &mut Context, ) -> 
Task> { - let active_call = self.active_call().cloned(); + let active_call = self.active_global_call(); cx.spawn_in(window, async move |this, cx| { this.update(cx, |this, _| { @@ -2734,7 +2739,9 @@ impl Workspace { if let Some(active_call) = active_call && workspace_count == 1 - && active_call.read_with(cx, |call, _| call.room().is_some()) + && cx + .update(|_window, cx| active_call.0.is_in_room(cx)) + .unwrap_or(false) { if close_intent == CloseIntent::CloseWindow { let answer = cx.update(|window, cx| { @@ -2750,14 +2757,13 @@ impl Workspace { if answer.await.log_err() == Some(1) { return anyhow::Ok(false); } else { - active_call - .update(cx, |call, cx| call.hang_up(cx)) - .await - .log_err(); + if let Ok(task) = cx.update(|_window, cx| active_call.0.hang_up(cx)) { + task.await.log_err(); + } } } if close_intent == CloseIntent::ReplaceWindow { - _ = active_call.update(cx, |this, cx| { + _ = cx.update(|_window, cx| { let multi_workspace = cx .windows() .iter() @@ -2771,10 +2777,10 @@ impl Workspace { .project .clone(); if project.read(cx).is_shared() { - this.unshare_project(project, cx)?; + active_call.0.unshare_project(project, cx)?; } Ok::<_, anyhow::Error>(()) - })?; + }); } } @@ -4944,7 +4950,7 @@ impl Workspace { match leader_id { CollaboratorId::PeerId(leader_peer_id) => { - let room_id = self.active_call()?.read(cx).room()?.read(cx).id(); + let room_id = self.active_call()?.room_id(cx)?; let project_id = self.project.read(cx).remote_id(); let request = self.app_state.client.request(proto::Follow { room_id, @@ -5038,20 +5044,21 @@ impl Workspace { let leader_id = leader_id.into(); if let CollaboratorId::PeerId(peer_id) = leader_id { - let Some(room) = ActiveCall::global(cx).read(cx).room() else { + let Some(active_call) = GlobalAnyActiveCall::try_global(cx) else { return; }; - let room = room.read(cx); - let Some(remote_participant) = room.remote_participant_for_peer_id(peer_id) else { + let Some(remote_participant) = + 
active_call.0.remote_participant_for_peer_id(peer_id, cx) + else { return; }; let project = self.project.read(cx); let other_project_id = match remote_participant.location { - call::ParticipantLocation::External => None, - call::ParticipantLocation::UnsharedProject => None, - call::ParticipantLocation::SharedProject { project_id } => { + ParticipantLocation::External => None, + ParticipantLocation::UnsharedProject => None, + ParticipantLocation::SharedProject { project_id } => { if Some(project_id) == project.remote_id() { None } else { @@ -5097,7 +5104,7 @@ impl Workspace { if let CollaboratorId::PeerId(leader_peer_id) = leader_id { let project_id = self.project.read(cx).remote_id(); - let room_id = self.active_call()?.read(cx).room()?.read(cx).id(); + let room_id = self.active_call()?.room_id(cx)?; self.app_state .client .send(proto::Unfollow { @@ -5740,20 +5747,19 @@ impl Workspace { cx: &mut Context, ) -> Option<(Option, Box)> { let call = self.active_call()?; - let room = call.read(cx).room()?.read(cx); - let participant = room.remote_participant_for_peer_id(peer_id)?; + let participant = call.remote_participant_for_peer_id(peer_id, cx)?; let leader_in_this_app; let leader_in_this_project; match participant.location { - call::ParticipantLocation::SharedProject { project_id } => { + ParticipantLocation::SharedProject { project_id } => { leader_in_this_app = true; leader_in_this_project = Some(project_id) == self.project.read(cx).remote_id(); } - call::ParticipantLocation::UnsharedProject => { + ParticipantLocation::UnsharedProject => { leader_in_this_app = true; leader_in_this_project = false; } - call::ParticipantLocation::External => { + ParticipantLocation::External => { leader_in_this_app = false; leader_in_this_project = false; } @@ -5781,19 +5787,8 @@ impl Workspace { window: &mut Window, cx: &mut App, ) -> Option> { - let call = self.active_call()?; - let room = call.read(cx).room()?.clone(); - let participant = 
room.read(cx).remote_participant_for_peer_id(peer_id)?; - let track = participant.video_tracks.values().next()?.clone(); - let user = participant.user.clone(); - - for item in pane.read(cx).items_of_type::() { - if item.read(cx).peer_id == peer_id { - return Some(item); - } - } - - Some(cx.new(|cx| SharedScreen::new(track, peer_id, user.clone(), room.clone(), window, cx))) + self.active_call()? + .create_shared_screen(peer_id, pane, window, cx) } pub fn on_window_activation_changed(&mut self, window: &mut Window, cx: &mut Context) { @@ -5824,23 +5819,25 @@ impl Workspace { } } - pub fn active_call(&self) -> Option<&Entity> { - self.active_call.as_ref().map(|(call, _)| call) + pub fn active_call(&self) -> Option<&dyn AnyActiveCall> { + self.active_call.as_ref().map(|(call, _)| &*call.0) + } + + pub fn active_global_call(&self) -> Option { + self.active_call.as_ref().map(|(call, _)| call.clone()) } fn on_active_call_event( &mut self, - _: &Entity, - event: &call::room::Event, + event: &ActiveCallEvent, window: &mut Window, cx: &mut Context, ) { match event { - call::room::Event::ParticipantLocationChanged { participant_id } - | call::room::Event::RemoteVideoTracksChanged { participant_id } => { + ActiveCallEvent::ParticipantLocationChanged { participant_id } + | ActiveCallEvent::RemoteVideoTracksChanged { participant_id } => { self.leader_updated(participant_id, window, cx); } - _ => {} } } @@ -7027,6 +7024,98 @@ impl Workspace { } } +pub trait AnyActiveCall { + fn entity(&self) -> AnyEntity; + fn is_in_room(&self, _: &App) -> bool; + fn room_id(&self, _: &App) -> Option; + fn channel_id(&self, _: &App) -> Option; + fn hang_up(&self, _: &mut App) -> Task>; + fn unshare_project(&self, _: Entity, _: &mut App) -> Result<()>; + fn remote_participant_for_peer_id(&self, _: PeerId, _: &App) -> Option; + fn is_sharing_project(&self, _: &App) -> bool; + fn has_remote_participants(&self, _: &App) -> bool; + fn local_participant_is_guest(&self, _: &App) -> bool; + fn 
client(&self, _: &App) -> Arc; + fn share_on_join(&self, _: &App) -> bool; + fn join_channel(&self, _: ChannelId, _: &mut App) -> Task>; + fn room_update_completed(&self, _: &mut App) -> Task<()>; + fn most_active_project(&self, _: &App) -> Option<(u64, u64)>; + fn share_project(&self, _: Entity, _: &mut App) -> Task>; + fn join_project( + &self, + _: u64, + _: Arc, + _: Arc, + _: &mut App, + ) -> Task>>; + fn peer_id_for_user_in_room(&self, _: u64, _: &App) -> Option; + fn subscribe( + &self, + _: &mut Window, + _: &mut Context, + _: Box)>, + ) -> Subscription; + fn create_shared_screen( + &self, + _: PeerId, + _: &Entity, + _: &mut Window, + _: &mut App, + ) -> Option>; +} + +#[derive(Clone)] +pub struct GlobalAnyActiveCall(pub Arc); +impl Global for GlobalAnyActiveCall {} + +impl GlobalAnyActiveCall { + pub(crate) fn try_global(cx: &App) -> Option<&Self> { + cx.try_global() + } + + pub(crate) fn global(cx: &App) -> &Self { + cx.global() + } +} +/// Workspace-local view of a remote participant's location. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum ParticipantLocation { + SharedProject { project_id: u64 }, + UnsharedProject, + External, +} + +impl ParticipantLocation { + pub fn from_proto(location: Option) -> Result { + match location + .and_then(|l| l.variant) + .context("participant location was not provided")? + { + proto::participant_location::Variant::SharedProject(project) => { + Ok(Self::SharedProject { + project_id: project.id, + }) + } + proto::participant_location::Variant::UnsharedProject(_) => Ok(Self::UnsharedProject), + proto::participant_location::Variant::External(_) => Ok(Self::External), + } + } +} +/// Workspace-local view of a remote collaborator's state. +/// This is the subset of `call::RemoteParticipant` that workspace needs. 
+#[derive(Clone)] +pub struct RemoteCollaborator { + pub user: Arc, + pub peer_id: PeerId, + pub location: ParticipantLocation, + pub participant_index: ParticipantIndex, +} + +pub enum ActiveCallEvent { + ParticipantLocationChanged { participant_id: PeerId }, + RemoteVideoTracksChanged { participant_id: PeerId }, +} + fn leader_border_for_pane( follower_states: &HashMap, pane: &Entity, @@ -7043,8 +7132,9 @@ fn leader_border_for_pane( let mut leader_color = match leader_id { CollaboratorId::PeerId(leader_peer_id) => { - let room = ActiveCall::try_global(cx)?.read(cx).room()?.read(cx); - let leader = room.remote_participant_for_peer_id(leader_peer_id)?; + let leader = GlobalAnyActiveCall::try_global(cx)? + .0 + .remote_participant_for_peer_id(leader_peer_id, cx)?; cx.theme() .players() @@ -7786,8 +7876,8 @@ impl WorkspaceStore { update: proto::update_followers::Variant, cx: &App, ) -> Option<()> { - let active_call = ActiveCall::try_global(cx)?; - let room_id = active_call.read(cx).room()?.read(cx).id(); + let active_call = GlobalAnyActiveCall::try_global(cx)?; + let room_id = active_call.0.room_id(cx)?; self.client .send(proto::UpdateFollowers { room_id, @@ -8100,33 +8190,28 @@ async fn join_channel_internal( app_state: &Arc, requesting_window: Option>, requesting_workspace: Option>, - active_call: &Entity, + active_call: &dyn AnyActiveCall, cx: &mut AsyncApp, ) -> Result { - let (should_prompt, open_room) = active_call.update(cx, |active_call, cx| { - let Some(room) = active_call.room().map(|room| room.read(cx)) else { - return (false, None); - }; + let (should_prompt, already_in_channel) = cx.update(|cx| { + if !active_call.is_in_room(cx) { + return (false, false); + } - let already_in_channel = room.channel_id() == Some(channel_id); - let should_prompt = room.is_sharing_project() - && !room.remote_participants().is_empty() + let already_in_channel = active_call.channel_id(cx) == Some(channel_id); + let should_prompt = active_call.is_sharing_project(cx) + && 
active_call.has_remote_participants(cx) && !already_in_channel; - let open_room = if already_in_channel { - active_call.room().cloned() - } else { - None - }; - (should_prompt, open_room) + (should_prompt, already_in_channel) }); - if let Some(room) = open_room { - let task = room.update(cx, |room, cx| { - if let Some((project, host)) = room.most_active_project(cx) { - return Some(join_in_room_project(project, host, app_state.clone(), cx)); + if already_in_channel { + let task = cx.update(|cx| { + if let Some((project, host)) = active_call.most_active_project(cx) { + Some(join_in_room_project(project, host, app_state.clone(), cx)) + } else { + None } - - None }); if let Some(task) = task { task.await?; @@ -8152,11 +8237,11 @@ async fn join_channel_internal( return Ok(false); } } else { - return Ok(false); // unreachable!() hopefully + return Ok(false); } } - let client = cx.update(|cx| active_call.read(cx).client()); + let client = cx.update(|cx| active_call.client(cx)); let mut client_status = client.status(); @@ -8184,33 +8269,30 @@ async fn join_channel_internal( } } - let room = active_call - .update(cx, |active_call, cx| { - active_call.join_channel(channel_id, cx) - }) + let joined = cx + .update(|cx| active_call.join_channel(channel_id, cx)) .await?; - let Some(room) = room else { + if !joined { return anyhow::Ok(true); - }; + } - room.update(cx, |room, _| room.room_update_completed()) - .await; + cx.update(|cx| active_call.room_update_completed(cx)).await; - let task = room.update(cx, |room, cx| { - if let Some((project, host)) = room.most_active_project(cx) { + let task = cx.update(|cx| { + if let Some((project, host)) = active_call.most_active_project(cx) { return Some(join_in_room_project(project, host, app_state.clone(), cx)); } // If you are the first to join a channel, see if you should share your project. 
- if room.remote_participants().is_empty() - && !room.local_participant_is_guest() + if !active_call.has_remote_participants(cx) + && !active_call.local_participant_is_guest(cx) && let Some(workspace) = requesting_workspace.as_ref().and_then(|w| w.upgrade()) { let project = workspace.update(cx, |workspace, cx| { let project = workspace.project.read(cx); - if !CallSettings::get_global(cx).share_on_join { + if !active_call.share_on_join(cx) { return None; } @@ -8227,9 +8309,9 @@ async fn join_channel_internal( } }); if let Some(project) = project { - return Some(cx.spawn(async move |room, cx| { - room.update(cx, |room, cx| room.share_project(project, cx))? - .await?; + let share_task = active_call.share_project(project, cx); + return Some(cx.spawn(async move |_cx| -> Result<()> { + share_task.await?; Ok(()) })); } @@ -8251,14 +8333,14 @@ pub fn join_channel( requesting_workspace: Option>, cx: &mut App, ) -> Task> { - let active_call = ActiveCall::global(cx); + let active_call = GlobalAnyActiveCall::global(cx).clone(); cx.spawn(async move |cx| { let result = join_channel_internal( channel_id, &app_state, requesting_window, requesting_workspace, - &active_call, + &*active_call.0, cx, ) .await; @@ -9102,13 +9184,10 @@ pub fn join_in_room_project( .ok(); existing_window } else { - let active_call = cx.update(|cx| ActiveCall::global(cx)); - let room = active_call - .read_with(cx, |call, _| call.room().cloned()) - .context("not in a call")?; - let project = room - .update(cx, |room, cx| { - room.join_project( + let active_call = cx.update(|cx| GlobalAnyActiveCall::global(cx).clone()); + let project = cx + .update(|cx| { + active_call.0.join_project( project_id, app_state.languages.clone(), app_state.fs.clone(), @@ -9137,27 +9216,21 @@ pub fn join_in_room_project( // We set the active workspace above, so this is the correct workspace. 
let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| { - if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() { - let follow_peer_id = room - .read(cx) - .remote_participants() - .iter() - .find(|(_, participant)| participant.user.id == follow_user_id) - .map(|(_, p)| p.peer_id) - .or_else(|| { - // If we couldn't follow the given user, follow the host instead. - let collaborator = workspace - .project() - .read(cx) - .collaborators() - .values() - .find(|collaborator| collaborator.is_host)?; - Some(collaborator.peer_id) - }); + let follow_peer_id = GlobalAnyActiveCall::try_global(cx) + .and_then(|call| call.0.peer_id_for_user_in_room(follow_user_id, cx)) + .or_else(|| { + // If we couldn't follow the given user, follow the host instead. + let collaborator = workspace + .project() + .read(cx) + .collaborators() + .values() + .find(|collaborator| collaborator.is_host)?; + Some(collaborator.peer_id) + }); - if let Some(follow_peer_id) = follow_peer_id { - workspace.follow(follow_peer_id, window, cx); - } + if let Some(follow_peer_id) = follow_peer_id { + workspace.follow(follow_peer_id, window, cx); } }); })?; From c6a82b3b4538defccec9aac735d69b5e47440e7c Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Tue, 24 Feb 2026 12:49:52 +0100 Subject: [PATCH 024/548] debugger: Open correct pane for breakpoints (#49390) Closes #40602 ### Summary This PR ensures that active debug lines only open in a single pane and new active debug lines are added to the most recent pane that contained an active debug line. This fixes a bug where Zed could go to the active debug line file and location in every pane a user had open, even if that pane was focused on a different file. I fixed this by storing the `entity_id` of the pane containing the most recently active debug line on `BreakpointStore`, this is consistent with where the selected stack frame is stored. 
I used an `entity_id` instead of a strong type to avoid circular dependencies. Whenever an active debug line is being set in the editor or by the debugger it now checks if there's a specific pane it should be set in, and after setting the line it updates `BreakpointStore` state. I also added a new method on the `workspace::Item` trait called `fn pane_changed(&mut self, new_pane_id: EntityId, cx: &mut Context)` To enable `Editor` to update `BreakpointStore`'s active debug line pane id whenever an `Editor` is moved to a new pane. ### PR review TODO list Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - debugger: Fix bug where active debug lines could be set in the wrong pane --- .../src/session/running/stack_frame_list.rs | 93 ++- .../debugger_ui/src/tests/debugger_panel.rs | 533 +++++++++++++++++- crates/editor/src/editor.rs | 28 +- crates/editor/src/items.rs | 19 +- .../project/src/debugger/breakpoint_store.rs | 25 +- crates/workspace/src/item.rs | 20 +- crates/workspace/src/workspace.rs | 13 +- 7 files changed, 689 insertions(+), 42 deletions(-) diff --git a/crates/debugger_ui/src/session/running/stack_frame_list.rs b/crates/debugger_ui/src/session/running/stack_frame_list.rs index b460f532548d8a71fafb031ff5c77323d60f046c..ccdfa22e89f449d2d40ae72f6b794b27ee6c8934 100644 --- a/crates/debugger_ui/src/session/running/stack_frame_list.rs +++ b/crates/debugger_ui/src/session/running/stack_frame_list.rs @@ -429,34 +429,58 @@ impl StackFrameList { let position = buffer.read_with(cx, |this, _| { this.snapshot().anchor_after(PointUtf16::new(row, 0)) }); - this.update_in(cx, |this, window, cx| { - this.workspace.update(cx, |workspace, cx| { - let 
project_path = buffer - .read(cx) - .project_path(cx) - .context("Could not select a stack frame for unnamed buffer")?; - - let open_preview = !workspace - .item_of_type::(cx) - .map(|viewer| { - workspace - .active_item(cx) - .is_some_and(|item| item.item_id() == viewer.item_id()) - }) - .unwrap_or_default(); - - anyhow::Ok(workspace.open_path_preview( - project_path, - None, - true, - true, - open_preview, - window, - cx, - )) - }) - })??? - .await?; + let opened_item = this + .update_in(cx, |this, window, cx| { + this.workspace.update(cx, |workspace, cx| { + let project_path = buffer + .read(cx) + .project_path(cx) + .context("Could not select a stack frame for unnamed buffer")?; + + let open_preview = !workspace + .item_of_type::(cx) + .map(|viewer| { + workspace + .active_item(cx) + .is_some_and(|item| item.item_id() == viewer.item_id()) + }) + .unwrap_or_default(); + + let active_debug_line_pane = workspace + .project() + .read(cx) + .breakpoint_store() + .read(cx) + .active_debug_line_pane_id() + .and_then(|id| workspace.pane_for_entity_id(id)); + + let debug_pane = if let Some(pane) = active_debug_line_pane { + Some(pane.downgrade()) + } else { + // No debug pane set yet. Find a pane where the target file + // is already the active tab so we don't disrupt other panes. + let pane_with_active_file = workspace.panes().iter().find(|pane| { + pane.read(cx) + .active_item() + .and_then(|item| item.project_path(cx)) + .is_some_and(|path| path == project_path) + }); + + pane_with_active_file.map(|pane| pane.downgrade()) + }; + + anyhow::Ok(workspace.open_path_preview( + project_path, + debug_pane, + true, + true, + open_preview, + window, + cx, + )) + }) + })??? 
+ .await?; this.update(cx, |this, cx| { let thread_id = this.state.read_with(cx, |state, _| { @@ -464,6 +488,19 @@ impl StackFrameList { })??; this.workspace.update(cx, |workspace, cx| { + if let Some(pane_id) = workspace + .pane_for(&*opened_item) + .map(|pane| pane.entity_id()) + { + workspace + .project() + .read(cx) + .breakpoint_store() + .update(cx, |store, _cx| { + store.set_active_debug_pane_id(pane_id); + }); + } + let breakpoint_store = workspace.project().read(cx).breakpoint_store(); breakpoint_store.update(cx, |store, cx| { diff --git a/crates/debugger_ui/src/tests/debugger_panel.rs b/crates/debugger_ui/src/tests/debugger_panel.rs index 32c0bf01c91a328734da64bd844b93ae3d9fd7a1..207e82b4958941e04ea04fc47c9471141e61a64d 100644 --- a/crates/debugger_ui/src/tests/debugger_panel.rs +++ b/crates/debugger_ui/src/tests/debugger_panel.rs @@ -34,7 +34,8 @@ use terminal_view::terminal_panel::TerminalPanel; use tests::{active_debug_session_panel, init_test, init_test_workspace}; use util::{path, rel_path::rel_path}; use workspace::item::SaveOptions; -use workspace::{Item, dock::Panel}; +use workspace::pane_group::SplitDirection; +use workspace::{Item, dock::Panel, move_active_item}; #[gpui::test] async fn test_basic_show_debug_panel(executor: BackgroundExecutor, cx: &mut TestAppContext) { @@ -1813,6 +1814,536 @@ async fn test_debug_adapters_shutdown_on_app_quit( ); } +#[gpui::test] +async fn test_breakpoint_jumps_only_in_proper_split_view( + executor: BackgroundExecutor, + cx: &mut TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(executor.clone()); + + fs.insert_tree( + path!("/project"), + json!({ + "main.rs": "First line\nSecond line\nThird line\nFourth line", + "second.rs": "First line\nSecond line\nThird line\nFourth line", + }), + ) + .await; + + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + let workspace = init_test_workspace(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + let 
project_path = Path::new(path!("/project")); + let worktree = project + .update(cx, |project, cx| project.find_worktree(project_path, cx)) + .expect("This worktree should exist in project") + .0; + + let worktree_id = workspace + .update(cx, |_, _, cx| worktree.read(cx).id()) + .unwrap(); + + // Open main.rs in pane A (the initial pane) + let pane_a = workspace + .update(cx, |multi, _window, cx| { + multi.workspace().read(cx).active_pane().clone() + }) + .unwrap(); + + let open_main = workspace + .update(cx, |multi, window, cx| { + multi.workspace().update(cx, |workspace, cx| { + workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx) + }) + }) + .unwrap(); + open_main.await.unwrap(); + + cx.run_until_parked(); + + // Split pane A to the right, creating pane B + let pane_b = workspace + .update(cx, |multi, window, cx| { + multi.workspace().update(cx, |workspace, cx| { + workspace.split_pane(pane_a.clone(), SplitDirection::Right, window, cx) + }) + }) + .unwrap(); + + cx.run_until_parked(); + + // Open main.rs in pane B + let weak_pane_b = pane_b.downgrade(); + let open_main_in_b = workspace + .update(cx, |multi, window, cx| { + multi.workspace().update(cx, |workspace, cx| { + workspace.open_path( + (worktree_id, rel_path("main.rs")), + Some(weak_pane_b), + true, + window, + cx, + ) + }) + }) + .unwrap(); + open_main_in_b.await.unwrap(); + + cx.run_until_parked(); + + // Also open second.rs in pane B as an inactive tab + let weak_pane_b = pane_b.downgrade(); + let open_second_in_b = workspace + .update(cx, |multi, window, cx| { + multi.workspace().update(cx, |workspace, cx| { + workspace.open_path( + (worktree_id, rel_path("second.rs")), + Some(weak_pane_b), + true, + window, + cx, + ) + }) + }) + .unwrap(); + open_second_in_b.await.unwrap(); + + cx.run_until_parked(); + + // Switch pane B back to main.rs so second.rs is inactive there + let weak_pane_b = pane_b.downgrade(); + let reactivate_main_in_b = workspace + .update(cx, |multi, window, 
cx| { + multi.workspace().update(cx, |workspace, cx| { + workspace.open_path( + (worktree_id, rel_path("main.rs")), + Some(weak_pane_b), + true, + window, + cx, + ) + }) + }) + .unwrap(); + reactivate_main_in_b.await.unwrap(); + + cx.run_until_parked(); + + // Now open second.rs in pane A, making main.rs an inactive tab there + let weak_pane_a = pane_a.downgrade(); + let open_second = workspace + .update(cx, |multi, window, cx| { + multi.workspace().update(cx, |workspace, cx| { + workspace.open_path( + (worktree_id, rel_path("second.rs")), + Some(weak_pane_a), + true, + window, + cx, + ) + }) + }) + .unwrap(); + open_second.await.unwrap(); + + cx.run_until_parked(); + + // Layout: + // Pane A: second.rs (active), main.rs (inactive tab) + // Pane B: main.rs (active), second.rs (inactive tab) + + // Verify pane A's active item is second.rs (main.rs is an inactive tab) + workspace + .read_with(cx, |_multi, cx| { + let active = pane_a.read(cx).active_item().unwrap(); + let editor = active.to_any_view().downcast::().unwrap(); + let path = editor.read(cx).project_path(cx).unwrap(); + assert_eq!( + path.path.file_name().unwrap(), + "second.rs", + "Pane A should have second.rs active", + ); + }) + .unwrap(); + + // Verify pane B's active item is main.rs + workspace + .read_with(cx, |_multi, cx| { + let active = pane_b.read(cx).active_item().unwrap(); + let editor = active.to_any_view().downcast::().unwrap(); + let path = editor.read(cx).project_path(cx).unwrap(); + assert_eq!( + path.path.file_name().unwrap(), + "main.rs", + "Pane B should have main.rs active", + ); + }) + .unwrap(); + + // Start a debug session and trigger a breakpoint stop on main.rs line 2 + let session = start_debug_session(&workspace, cx, |_| {}).unwrap(); + let client = session.update(cx, |session, _| session.adapter_client().unwrap()); + + client.on_request::(move |_, _| { + Ok(dap::ThreadsResponse { + threads: vec![dap::Thread { + id: 1, + name: "Thread 1".into(), + }], + }) + }); + + 
client.on_request::(move |_, _| { + Ok(dap::ScopesResponse { + scopes: Vec::default(), + }) + }); + + client.on_request::(move |_, args| { + assert_eq!(args.thread_id, 1); + + Ok(dap::StackTraceResponse { + stack_frames: vec![dap::StackFrame { + id: 1, + name: "frame 1".into(), + source: Some(dap::Source { + name: Some("main.rs".into()), + path: Some(path!("/project/main.rs").into()), + source_reference: None, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }), + line: 2, + column: 0, + end_line: None, + end_column: None, + can_restart: None, + instruction_pointer_reference: None, + module_id: None, + presentation_hint: None, + }], + total_frames: None, + }) + }); + + client + .fake_event(dap::messages::Events::Stopped(dap::StoppedEvent { + reason: dap::StoppedEventReason::Breakpoint, + description: None, + thread_id: Some(1), + preserve_focus_hint: None, + text: None, + all_threads_stopped: None, + hit_breakpoint_ids: None, + })) + .await; + + cx.run_until_parked(); + + // After first breakpoint stop on main.rs: + // Pane A should still have second.rs as its active item because + // main.rs was only an inactive tab there. The debugger should have jumped + // to main.rs only in pane B where it was already the active tab. + workspace + .read_with(cx, |_multi, cx| { + let pane_a_active = pane_a.read(cx).active_item().unwrap(); + let pane_a_editor = pane_a_active.to_any_view().downcast::().unwrap(); + let pane_a_path = pane_a_editor.read(cx).project_path(cx).unwrap(); + assert_eq!( + pane_a_path.path.file_name().unwrap(), + "second.rs", + "Pane A should still have second.rs as active item. 
\ + The debugger should not switch active tabs in panes where the \ + breakpoint file is not the active tab (issue #40602)", + ); + }) + .unwrap(); + + // There should be exactly one active debug line across all editors in all panes + workspace + .read_with(cx, |_multi, cx| { + let mut total_active_debug_lines = 0; + for pane in [&pane_a, &pane_b] { + for item in pane.read(cx).items() { + if let Some(editor) = item.to_any_view().downcast::().ok() { + total_active_debug_lines += editor + .read(cx) + .highlighted_rows::() + .count(); + } + } + } + assert_eq!( + total_active_debug_lines, 1, + "There should be exactly one active debug line across all editors in all panes" + ); + }) + .unwrap(); + + // Pane B should show the debug highlight on main.rs + workspace + .read_with(cx, |_multi, cx| { + let pane_b_active = pane_b.read(cx).active_item().unwrap(); + let pane_b_editor = pane_b_active.to_any_view().downcast::().unwrap(); + + let active_debug_lines: Vec<_> = pane_b_editor + .read(cx) + .highlighted_rows::() + .collect(); + + assert_eq!( + active_debug_lines.len(), + 1, + "Pane B's main.rs editor should have the active debug line" + ); + }) + .unwrap(); + + // Second breakpoint stop: now on second.rs line 3. + // Even though pane A has second.rs as its active tab, the debug line + // should open in pane B (the persistent debug pane) because pane B + // had the last active debug line. 
+ client.on_request::(move |_, args| { + assert_eq!(args.thread_id, 1); + + Ok(dap::StackTraceResponse { + stack_frames: vec![dap::StackFrame { + id: 2, + name: "frame 2".into(), + source: Some(dap::Source { + name: Some("second.rs".into()), + path: Some(path!("/project/second.rs").into()), + source_reference: None, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }), + line: 3, + column: 0, + end_line: None, + end_column: None, + can_restart: None, + instruction_pointer_reference: None, + module_id: None, + presentation_hint: None, + }], + total_frames: None, + }) + }); + + client + .fake_event(dap::messages::Events::Stopped(dap::StoppedEvent { + reason: dap::StoppedEventReason::Breakpoint, + description: None, + thread_id: Some(1), + preserve_focus_hint: None, + text: None, + all_threads_stopped: None, + hit_breakpoint_ids: None, + })) + .await; + + cx.run_until_parked(); + + // Pane B should now have second.rs as the active tab with the debug line, + // because pane B was the last pane that had the debug line (persistent debug pane). 
+ workspace + .read_with(cx, |_multi, cx| { + let pane_b_active = pane_b.read(cx).active_item().unwrap(); + let pane_b_editor = pane_b_active.to_any_view().downcast::().unwrap(); + let pane_b_path = pane_b_editor.read(cx).project_path(cx).unwrap(); + assert_eq!( + pane_b_path.path.file_name().unwrap(), + "second.rs", + "Pane B should have switched to second.rs because it is the persistent debug pane", + ); + + let active_debug_lines: Vec<_> = pane_b_editor + .read(cx) + .highlighted_rows::() + .collect(); + + assert_eq!( + active_debug_lines.len(), + 1, + "Pane B's second.rs editor should have the active debug line" + ); + }) + .unwrap(); + + // There should still be exactly one active debug line across all editors + workspace + .read_with(cx, |_multi, cx| { + let mut total_active_debug_lines = 0; + for pane in [&pane_a, &pane_b] { + for item in pane.read(cx).items() { + if let Some(editor) = item.to_any_view().downcast::().ok() { + total_active_debug_lines += editor + .read(cx) + .highlighted_rows::() + .count(); + } + } + } + assert_eq!( + total_active_debug_lines, 1, + "There should be exactly one active debug line across all editors after second stop" + ); + }) + .unwrap(); + + // === New case: Move the debug pane (pane B) active item to a new pane C === + // This simulates a user dragging the tab with the active debug line to a new split. + // The debugger should track that the debug line moved to pane C and use pane C + // for subsequent debug stops. 
+ + // Split pane B to create pane C + let pane_c = workspace + .update(cx, |multi, window, cx| { + multi.workspace().update(cx, |workspace, cx| { + workspace.split_pane(pane_b.clone(), SplitDirection::Right, window, cx) + }) + }) + .unwrap(); + + cx.run_until_parked(); + + // Move the active item (second.rs with debug line) from pane B to pane C + workspace + .update(cx, |_multi, window, cx| { + move_active_item(&pane_b, &pane_c, true, false, window, cx); + }) + .unwrap(); + + cx.run_until_parked(); + + // Verify pane C now has second.rs as active item + workspace + .read_with(cx, |_multi, cx| { + let pane_c_active = pane_c.read(cx).active_item().unwrap(); + let pane_c_editor = pane_c_active.to_any_view().downcast::().unwrap(); + let pane_c_path = pane_c_editor.read(cx).project_path(cx).unwrap(); + assert_eq!( + pane_c_path.path.file_name().unwrap(), + "second.rs", + "Pane C should have second.rs after moving it from pane B", + ); + }) + .unwrap(); + + // Third breakpoint stop: back on main.rs line 2. + // The debug line should appear in pane C because that's where the debug line + // was moved to. The debugger should track pane moves. 
+ client.on_request::(move |_, args| { + assert_eq!(args.thread_id, 1); + + Ok(dap::StackTraceResponse { + stack_frames: vec![dap::StackFrame { + id: 3, + name: "frame 3".into(), + source: Some(dap::Source { + name: Some("main.rs".into()), + path: Some(path!("/project/main.rs").into()), + source_reference: None, + presentation_hint: None, + origin: None, + sources: None, + adapter_data: None, + checksums: None, + }), + line: 2, + column: 0, + end_line: None, + end_column: None, + can_restart: None, + instruction_pointer_reference: None, + module_id: None, + presentation_hint: None, + }], + total_frames: None, + }) + }); + + client + .fake_event(dap::messages::Events::Stopped(dap::StoppedEvent { + reason: dap::StoppedEventReason::Breakpoint, + description: None, + thread_id: Some(1), + preserve_focus_hint: None, + text: None, + all_threads_stopped: None, + hit_breakpoint_ids: None, + })) + .await; + + cx.run_until_parked(); + + // Pane C should now have main.rs as the active tab with the debug line, + // because pane C is where the debug line was moved to from pane B. 
+ workspace + .read_with(cx, |_multi, cx| { + let pane_c_active = pane_c.read(cx).active_item().unwrap(); + let pane_c_editor = pane_c_active.to_any_view().downcast::().unwrap(); + let pane_c_path = pane_c_editor.read(cx).project_path(cx).unwrap(); + assert_eq!( + pane_c_path.path.file_name().unwrap(), + "main.rs", + "Pane C should have switched to main.rs because it is now the persistent debug pane \ + (the debug line was moved here from pane B)", + ); + + let active_debug_lines: Vec<_> = pane_c_editor + .read(cx) + .highlighted_rows::() + .collect(); + + assert_eq!( + active_debug_lines.len(), + 1, + "Pane C's main.rs editor should have the active debug line" + ); + }) + .unwrap(); + + // There should still be exactly one active debug line across all editors + workspace + .read_with(cx, |_multi, cx| { + let mut total_active_debug_lines = 0; + for pane in [&pane_a, &pane_b, &pane_c] { + for item in pane.read(cx).items() { + if let Some(editor) = item.to_any_view().downcast::().ok() { + total_active_debug_lines += editor + .read(cx) + .highlighted_rows::() + .count(); + } + } + } + assert_eq!( + total_active_debug_lines, 1, + "There should be exactly one active debug line across all editors after third stop" + ); + }) + .unwrap(); + + // Clean up + let shutdown_session = project.update(cx, |project, cx| { + project.dap_store().update(cx, |dap_store, cx| { + dap_store.shutdown_session(session.read(cx).session_id(), cx) + }) + }); + + shutdown_session.await.unwrap(); +} + #[gpui::test] async fn test_adapter_shutdown_with_child_sessions_on_app_quit( executor: BackgroundExecutor, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 54e20d00cafebc209cec2bd10eb0cbb0007e3af8..3e734fdf1ab8254807a65c96bb98a0f804bc4dc4 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -22801,12 +22801,36 @@ impl Editor { maybe!({ let breakpoint_store = self.breakpoint_store.as_ref()?; - let Some(active_stack_frame) = 
breakpoint_store.read(cx).active_position().cloned() - else { + let (active_stack_frame, debug_line_pane_id) = { + let store = breakpoint_store.read(cx); + let active_stack_frame = store.active_position().cloned(); + let debug_line_pane_id = store.active_debug_line_pane_id(); + (active_stack_frame, debug_line_pane_id) + }; + + let Some(active_stack_frame) = active_stack_frame else { self.clear_row_highlights::(); return None; }; + if let Some(debug_line_pane_id) = debug_line_pane_id { + if let Some(workspace) = self + .workspace + .as_ref() + .and_then(|(workspace, _)| workspace.upgrade()) + { + let editor_pane_id = workspace + .read(cx) + .pane_for_item_id(cx.entity_id()) + .map(|pane| pane.entity_id()); + + if editor_pane_id.is_some_and(|id| id != debug_line_pane_id) { + self.clear_row_highlights::(); + return None; + } + } + } + let position = active_stack_frame.position; let buffer_id = position.buffer_id?; let snapshot = self diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index afb296cff59369804cd28ebd85ced3d2f7649b7a..685387342caf8e705a3648cb07acaa1867db55d8 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1,7 +1,7 @@ use crate::{ - Anchor, Autoscroll, BufferSerialization, Capability, Editor, EditorEvent, EditorSettings, - ExcerptId, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot, NavigationData, - ReportEditorEvent, SelectionEffects, ToPoint as _, + ActiveDebugLine, Anchor, Autoscroll, BufferSerialization, Capability, Editor, EditorEvent, + EditorSettings, ExcerptId, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot, + NavigationData, ReportEditorEvent, SelectionEffects, ToPoint as _, display_map::HighlightKey, editor_settings::SeedQuerySetting, persistence::{DB, SerializedEditor}, @@ -1027,6 +1027,19 @@ impl Item for Editor { } } + fn pane_changed(&mut self, new_pane_id: EntityId, cx: &mut Context) { + if self + .highlighted_rows + .get(&TypeId::of::()) + .is_some_and(|lines| 
!lines.is_empty()) + && let Some(breakpoint_store) = self.breakpoint_store.as_ref() + { + breakpoint_store.update(cx, |store, _cx| { + store.set_active_debug_pane_id(new_pane_id); + }); + } + } + fn to_item_events(event: &EditorEvent, f: &mut dyn FnMut(ItemEvent)) { match event { EditorEvent::Saved | EditorEvent::TitleChanged => { diff --git a/crates/project/src/debugger/breakpoint_store.rs b/crates/project/src/debugger/breakpoint_store.rs index 54f884aa5704bd256620f35eb0ea73dc53feeab5..50df9ae3125d3db98df24280ebd1e5b14adfe557 100644 --- a/crates/project/src/debugger/breakpoint_store.rs +++ b/crates/project/src/debugger/breakpoint_store.rs @@ -6,7 +6,9 @@ pub use breakpoints_in_file::{BreakpointSessionState, BreakpointWithPosition}; use breakpoints_in_file::{BreakpointsInFile, StatefulBreakpoint}; use collections::{BTreeMap, HashMap}; use dap::{StackFrameId, client::SessionId}; -use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, Subscription, Task}; +use gpui::{ + App, AppContext, AsyncApp, Context, Entity, EntityId, EventEmitter, Subscription, Task, +}; use itertools::Itertools; use language::{Buffer, BufferSnapshot, proto::serialize_anchor as serialize_text_anchor}; use rpc::{ @@ -154,6 +156,7 @@ pub struct BreakpointStore { breakpoints: BTreeMap, BreakpointsInFile>, downstream_client: Option<(AnyProtoClient, u64)>, active_stack_frame: Option, + active_debug_line_pane_id: Option, // E.g ssh mode: BreakpointStoreMode, } @@ -171,6 +174,7 @@ impl BreakpointStore { worktree_store, downstream_client: None, active_stack_frame: Default::default(), + active_debug_line_pane_id: None, } } @@ -190,6 +194,7 @@ impl BreakpointStore { worktree_store, downstream_client: None, active_stack_frame: Default::default(), + active_debug_line_pane_id: None, } } @@ -651,16 +656,30 @@ impl BreakpointStore { self.active_stack_frame.as_ref() } + pub fn active_debug_line_pane_id(&self) -> Option { + self.active_debug_line_pane_id + } + + pub fn 
set_active_debug_pane_id(&mut self, pane_id: EntityId) { + self.active_debug_line_pane_id = Some(pane_id); + } + pub fn remove_active_position( &mut self, session_id: Option, cx: &mut Context, ) { if let Some(session_id) = session_id { - self.active_stack_frame - .take_if(|active_stack_frame| active_stack_frame.session_id == session_id); + if self + .active_stack_frame + .take_if(|active_stack_frame| active_stack_frame.session_id == session_id) + .is_some() + { + self.active_debug_line_pane_id = None; + } } else { self.active_stack_frame.take(); + self.active_debug_line_pane_id = None; } cx.emit(BreakpointStoreEvent::ClearDebugLines); diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 4153373fdb0e107aa08c1fe643600635f63edafe..b29e02f05b367bab557403f3bb34f6ffa45caecc 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -219,6 +219,7 @@ pub trait Item: Focusable + EventEmitter + Render + Sized { fn discarded(&self, _project: Entity, _window: &mut Window, _cx: &mut Context) {} fn on_removed(&self, _cx: &mut Context) {} fn workspace_deactivated(&mut self, _window: &mut Window, _: &mut Context) {} + fn pane_changed(&mut self, _new_pane_id: EntityId, _cx: &mut Context) {} fn navigate( &mut self, _: Arc, @@ -737,11 +738,22 @@ impl ItemHandle for Entity { .log_err(); } - if workspace + let new_pane_id = pane.entity_id(); + let old_item_pane = workspace .panes_by_item - .insert(self.item_id(), pane.downgrade()) - .is_none() - { + .insert(self.item_id(), pane.downgrade()); + + if old_item_pane.as_ref().is_none_or(|old_pane| { + old_pane + .upgrade() + .is_some_and(|old_pane| old_pane.entity_id() != new_pane_id) + }) { + self.update(cx, |this, cx| { + this.pane_changed(new_pane_id, cx); + }); + } + + if old_item_pane.is_none() { let mut pending_autosave = DelayedDebouncedEditAction::new(); let (pending_update_tx, mut pending_update_rx) = mpsc::unbounded(); let pending_update = Rc::new(RefCell::new(None)); diff --git 
a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index ac83809f8d313e842e72d19fb98b8b5d1b69df0f..c12525bb2a5c6b46cd6b4fabc9599e3b6cdfd25d 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -4906,10 +4906,21 @@ impl Workspace { } pub fn pane_for(&self, handle: &dyn ItemHandle) -> Option> { - let weak_pane = self.panes_by_item.get(&handle.item_id())?; + self.pane_for_item_id(handle.item_id()) + } + + pub fn pane_for_item_id(&self, item_id: EntityId) -> Option> { + let weak_pane = self.panes_by_item.get(&item_id)?; weak_pane.upgrade() } + pub fn pane_for_entity_id(&self, entity_id: EntityId) -> Option> { + self.panes + .iter() + .find(|pane| pane.entity_id() == entity_id) + .cloned() + } + fn collaborator_left(&mut self, peer_id: PeerId, window: &mut Window, cx: &mut Context) { self.follower_states.retain(|leader_id, state| { if *leader_id == CollaboratorId::PeerId(peer_id) { From f375a1a98bf65198155414d6add869a2ab7c611c Mon Sep 17 00:00:00 2001 From: MostlyK <135974627+MostlyKIGuess@users.noreply.github.com> Date: Tue, 24 Feb 2026 17:26:59 +0530 Subject: [PATCH 025/548] image_viewer: Use checkerboard from GPUI (#49575) As we have checkerboard, used same pattern as git diff inside Image Viewer. Now that CPU instructions aren't an issue, perhaps size can be lowered to 16/24 for base boxes. 
Release Notes: - N/A --- crates/image_viewer/src/image_viewer.rs | 62 ++++++------------------- 1 file changed, 13 insertions(+), 49 deletions(-) diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index 37e818904bc95227783d96bfc5b49d2094373c32..c223494bd709217439bdff9f6a7ba17e1a65494e 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -11,7 +11,7 @@ use gpui::{ FocusHandle, Focusable, GlobalElementId, InspectorElementId, InteractiveElement, IntoElement, LayoutId, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, ParentElement, Pixels, Point, Render, ScrollDelta, ScrollWheelEvent, Style, Styled, Task, WeakEntity, Window, actions, - canvas, div, img, opaque_grey, point, px, size, + checkerboard, div, img, point, px, size, }; use language::File as _; use persistence::IMAGE_VIEWER; @@ -50,7 +50,7 @@ const MIN_ZOOM: f32 = 0.1; const MAX_ZOOM: f32 = 20.0; const ZOOM_STEP: f32 = 1.1; const SCROLL_LINE_MULTIPLIER: f32 = 20.0; -const BASE_SQUARE_SIZE: f32 = 48.0; +const BASE_SQUARE_SIZE: f32 = 32.0; pub struct ImageView { image_item: Entity, @@ -378,53 +378,17 @@ impl Element for ImageContentElement { .w(scaled_width) .h(scaled_height) .child( - canvas( - |_, _, _| {}, - move |bounds, _, window, _cx| { - let bounds_x: f32 = bounds.origin.x.into(); - let bounds_y: f32 = bounds.origin.y.into(); - let bounds_width: f32 = bounds.size.width.into(); - let bounds_height: f32 = bounds.size.height.into(); - let square_size = BASE_SQUARE_SIZE * zoom_level; - let cols = (bounds_width / square_size).ceil() as i32 + 1; - let rows = (bounds_height / square_size).ceil() as i32 + 1; - for row in 0..rows { - for col in 0..cols { - if (row + col) % 2 == 0 { - continue; - } - let x = bounds_x + col as f32 * square_size; - let y = bounds_y + row as f32 * square_size; - let w = square_size.min(bounds_x + bounds_width - x); - let h = square_size.min(bounds_y + bounds_height - y); - if w > 0.0 
&& h > 0.0 { - let rect = Bounds::new( - point(px(x), px(y)), - size(px(w), px(h)), - ); - window.paint_quad(gpui::fill( - rect, - opaque_grey(0.6, 1.0), - )); - } - } - } - let border_rect = Bounds::new( - point(px(bounds_x), px(bounds_y)), - size(px(bounds_width), px(bounds_height)), - ); - window.paint_quad(gpui::outline( - border_rect, - border_color, - gpui::BorderStyle::default(), - )); - }, - ) - .size_full() - .absolute() - .top_0() - .left_0() - .bg(gpui::rgb(0xCCCCCD)), + div() + .size_full() + .absolute() + .top_0() + .left_0() + .child(div().size_full().bg(checkerboard( + cx.theme().colors().panel_background, + BASE_SQUARE_SIZE * zoom_level, + ))) + .border_1() + .border_color(border_color), ) .child({ img(image) From f96ac58a3099667fe167b49b7f8b71382b28f982 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Tue, 24 Feb 2026 13:00:30 +0100 Subject: [PATCH 026/548] extension_ci: Use `head_ref` for version comparison (#49970) Release Notes: - N/A --- .github/workflows/extension_bump.yml | 7 ++++--- .github/workflows/extension_tests.yml | 7 ++++--- tooling/xtask/src/tasks/workflows/extension_bump.rs | 6 +++--- 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/.github/workflows/extension_bump.yml b/.github/workflows/extension_bump.yml index afb5448691610f49f715956b99a4f80bc84c9327..4f9ee444aced59ea84f65a348546fdf4715a5190 100644 --- a/.github/workflows/extension_bump.yml +++ b/.github/workflows/extension_bump.yml @@ -38,10 +38,9 @@ jobs: name: extension_bump::compare_versions run: | CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" - PR_PARENT_SHA="${{ github.event.pull_request.head.sha }}" - if [[ -n "$PR_PARENT_SHA" ]]; then - git checkout "$PR_PARENT_SHA" + if [[ -n "$PR_PARENT_REF" ]]; then + git checkout "$PR_PARENT_REF" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" else @@ -55,6 +54,8 @@ jobs: echo "version_changed=true" 
>> "$GITHUB_OUTPUT" echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT" + env: + PR_PARENT_REF: ${{ github.head_ref }} outputs: version_changed: ${{ steps.compare-versions-check.outputs.version_changed }} current_version: ${{ steps.compare-versions-check.outputs.current_version }} diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index b843b6626e5e9969b0052f5de100143dd846b4a5..3bc3d43b0521b2ce946be386b318d9a3784c22d8 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -113,10 +113,9 @@ jobs: name: extension_bump::compare_versions run: | CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" - PR_PARENT_SHA="${{ github.event.pull_request.head.sha }}" - if [[ -n "$PR_PARENT_SHA" ]]; then - git checkout "$PR_PARENT_SHA" + if [[ -n "$PR_PARENT_REF" ]]; then + git checkout "$PR_PARENT_REF" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" else @@ -130,6 +129,8 @@ jobs: echo "version_changed=true" >> "$GITHUB_OUTPUT" echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT" + env: + PR_PARENT_REF: ${{ github.head_ref }} - name: extension_tests::verify_version_did_not_change run: | if [[ ${{ steps.compare-versions-check.outputs.version_changed }} == "true" && "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.user.login }}" != "zed-zippy[bot]" ]] ; then diff --git a/tooling/xtask/src/tasks/workflows/extension_bump.rs b/tooling/xtask/src/tasks/workflows/extension_bump.rs index eed7c79c8c2234309dfcde2c7e7b0d8d5cde9117..e06e50fa6ea8aefaf071a63010e54dd18ab9d70d 100644 --- a/tooling/xtask/src/tasks/workflows/extension_bump.rs +++ b/tooling/xtask/src/tasks/workflows/extension_bump.rs @@ -149,10 +149,9 @@ pub(crate) fn compare_versions() -> (Step, StepOutput, StepOutput) { let check_needs_bump = named::bash(formatdoc! 
{ r#" CURRENT_VERSION="$({VERSION_CHECK})" - PR_PARENT_SHA="${{{{ github.event.pull_request.head.sha }}}}" - if [[ -n "$PR_PARENT_SHA" ]]; then - git checkout "$PR_PARENT_SHA" + if [[ -n "$PR_PARENT_REF" ]]; then + git checkout "$PR_PARENT_REF" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" else @@ -168,6 +167,7 @@ pub(crate) fn compare_versions() -> (Step, StepOutput, StepOutput) { echo "current_version=${{CURRENT_VERSION}}" >> "$GITHUB_OUTPUT" "# }) + .add_env(("PR_PARENT_REF", Context::github().head_ref())) .id("compare-versions-check"); let version_changed = StepOutput::new(&check_needs_bump, "version_changed"); From 0f21e2a5c608d3b4b0df8e50217397b53de3136b Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Tue, 24 Feb 2026 13:28:12 +0100 Subject: [PATCH 027/548] extension_ci: Use fork point for version comparison (#49972) Lost another battle to the GitHub docs. Instead, now let's just do it ourselves here in bash and not guess whatever GitHub is referring to in their documentation.. 
Release Notes: - N/A --- .github/workflows/extension_bump.yml | 7 +++---- .github/workflows/extension_tests.yml | 7 +++---- tooling/xtask/src/tasks/workflows/extension_bump.rs | 6 +++--- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/.github/workflows/extension_bump.yml b/.github/workflows/extension_bump.yml index 4f9ee444aced59ea84f65a348546fdf4715a5190..ff903eb63d30319b5df5ced9c0ec545bb15cca06 100644 --- a/.github/workflows/extension_bump.yml +++ b/.github/workflows/extension_bump.yml @@ -39,8 +39,9 @@ jobs: run: | CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" - if [[ -n "$PR_PARENT_REF" ]]; then - git checkout "$PR_PARENT_REF" + if [[ "${{ github.event_name }}" == "pull_request" ]]; then + PR_FORK_POINT="$(git merge-base --fork-point main)" + git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" else @@ -54,8 +55,6 @@ jobs: echo "version_changed=true" >> "$GITHUB_OUTPUT" echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT" - env: - PR_PARENT_REF: ${{ github.head_ref }} outputs: version_changed: ${{ steps.compare-versions-check.outputs.version_changed }} current_version: ${{ steps.compare-versions-check.outputs.current_version }} diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index 3bc3d43b0521b2ce946be386b318d9a3784c22d8..ed35a054241c8c7dc5c823bd25b59f8b9593efbd 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -114,8 +114,9 @@ jobs: run: | CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" - if [[ -n "$PR_PARENT_REF" ]]; then - git checkout "$PR_PARENT_REF" + if [[ "${{ github.event_name }}" == "pull_request" ]]; then + PR_FORK_POINT="$(git merge-base --fork-point main)" + git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main 
origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" else @@ -129,8 +130,6 @@ jobs: echo "version_changed=true" >> "$GITHUB_OUTPUT" echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT" - env: - PR_PARENT_REF: ${{ github.head_ref }} - name: extension_tests::verify_version_did_not_change run: | if [[ ${{ steps.compare-versions-check.outputs.version_changed }} == "true" && "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.user.login }}" != "zed-zippy[bot]" ]] ; then diff --git a/tooling/xtask/src/tasks/workflows/extension_bump.rs b/tooling/xtask/src/tasks/workflows/extension_bump.rs index e06e50fa6ea8aefaf071a63010e54dd18ab9d70d..bdc25f766e367042883ab7051676c7aa08873243 100644 --- a/tooling/xtask/src/tasks/workflows/extension_bump.rs +++ b/tooling/xtask/src/tasks/workflows/extension_bump.rs @@ -150,8 +150,9 @@ pub(crate) fn compare_versions() -> (Step, StepOutput, StepOutput) { r#" CURRENT_VERSION="$({VERSION_CHECK})" - if [[ -n "$PR_PARENT_REF" ]]; then - git checkout "$PR_PARENT_REF" + if [[ "${{{{ github.event_name }}}}" == "pull_request" ]]; then + PR_FORK_POINT="$(git merge-base --fork-point main)" + git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" else @@ -167,7 +168,6 @@ pub(crate) fn compare_versions() -> (Step, StepOutput, StepOutput) { echo "current_version=${{CURRENT_VERSION}}" >> "$GITHUB_OUTPUT" "# }) - .add_env(("PR_PARENT_REF", Context::github().head_ref())) .id("compare-versions-check"); let version_changed = StepOutput::new(&check_needs_bump, "version_changed"); From b9463f50ed5bb2231b3574d9cfb226450d6443df Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 24 Feb 2026 10:46:08 -0300 Subject: [PATCH 028/548] multibuffer: Make "Open File" button also visible on hover (#49980) This makes the "Open File" button in the file header within multibuffers 
also visible as you hover over the headers. Previously, this button would only show up for the selected/focused file, but it now also shows up on hover for making mouse-based interaction easier. Release Notes: - N/A --- crates/editor/src/element.rs | 54 ++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 24 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index deedd07d3eddfd563d4fbdaf51311908ebc40c01..a04e16683d92f8e79cbe75d6dc03764276ede226 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -8139,6 +8139,7 @@ pub(crate) fn render_buffer_header( .h(FILE_HEADER_HEIGHT as f32 * window.line_height()) .child( h_flex() + .group("buffer-header-group") .size_full() .flex_basis(Length::Definite(DefiniteLength::Fraction(0.667))) .pl_1() @@ -8332,31 +8333,36 @@ pub(crate) fn render_buffer_header( }) }, )) - .when( - can_open_excerpts && is_selected && relative_path.is_some(), - |el| { - el.child( - Button::new("open-file-button", "Open File") - .style(ButtonStyle::OutlinedGhost) - .key_binding(KeyBinding::for_action_in( - &OpenExcerpts, - &focus_handle, - cx, - )) - .on_click(window.listener_for(editor, { - let jump_data = jump_data.clone(); - move |editor, e: &ClickEvent, window, cx| { - editor.open_excerpts_common( - Some(jump_data.clone()), - e.modifiers().secondary(), - window, + .when(can_open_excerpts && relative_path.is_some(), |this| { + this.child( + div() + .when(!is_selected, |this| { + this.visible_on_hover("buffer-header-group") + }) + .child( + Button::new("open-file-button", "Open File") + .style(ButtonStyle::OutlinedGhost) + .when(is_selected, |this| { + this.key_binding(KeyBinding::for_action_in( + &OpenExcerpts, + &focus_handle, cx, - ); - } - })), - ) - }, - ) + )) + }) + .on_click(window.listener_for(editor, { + let jump_data = jump_data.clone(); + move |editor, e: &ClickEvent, window, cx| { + editor.open_excerpts_common( + Some(jump_data.clone()), + 
e.modifiers().secondary(), + window, + cx, + ); + } + })), + ), + ) + }) .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation()) .on_click(window.listener_for(editor, { let buffer_id = for_excerpt.buffer_id; From e57a48678b71239c084753d3b3ee070942725245 Mon Sep 17 00:00:00 2001 From: John Tur Date: Tue, 24 Feb 2026 08:56:43 -0500 Subject: [PATCH 029/548] Narrow `.occlude()` call (#49981) Release Notes: - N/A --- crates/title_bar/src/collab.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/title_bar/src/collab.rs b/crates/title_bar/src/collab.rs index 58e4d2a8fcfdfe885b7ddf51b20e193625950ce0..0f4d5977947fa27cf3ca5811dbf883c4dbd9df94 100644 --- a/crates/title_bar/src/collab.rs +++ b/crates/title_bar/src/collab.rs @@ -143,7 +143,6 @@ impl TitleBar { h_flex() .id("collaborator-list") - .occlude() .w_full() .gap_1() .overflow_x_scroll() @@ -227,6 +226,7 @@ impl TitleBar { .ok(); }) }) + .occlude() .tooltip({ let login = collaborator.user.github_login.clone(); Tooltip::text(format!("Follow {login}")) From 3cf7cb52655bf9dc2ed1d35523f28a5bb4fb82d5 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 24 Feb 2026 15:32:28 +0100 Subject: [PATCH 030/548] multi_buffer: Improve lookup performance for `MultiBufferSnapshot::excerpt` (#49986) Co-authored by: John Tur Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/multi_buffer/src/multi_buffer.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 22142c2e28bfcb56077e936d664b7c810862feaa..34d32f481947657327cbec99e0a3aedc59aeabe7 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -7034,16 +7034,16 @@ impl MultiBufferSnapshot { /// afterwards. 
fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { let excerpt_id = self.latest_excerpt_id(excerpt_id); - let mut cursor = self.excerpts.cursor::>(()); let locator = self.excerpt_locator_for_id(excerpt_id); - cursor.seek(&Some(locator), Bias::Left); - if let Some(excerpt) = cursor.item() + let (_, _, item) = + self.excerpts + .find::, _>((), &Some(locator), Bias::Left); + if let Some(excerpt) = item && excerpt.id == excerpt_id { return Some(excerpt); - } else if cursor.item().is_none() && excerpt_id == ExcerptId::max() { - cursor.prev(); - return cursor.item(); + } else if item.is_none() && excerpt_id == ExcerptId::max() { + return self.excerpts.last(); } None } From c58d388d9403fe08bc009368514b73bbff8f77dc Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 24 Feb 2026 12:37:56 -0300 Subject: [PATCH 031/548] git_ui: Hide the `ReviewDiff` action if branch diff view isn't open (#49988) This PR hides the `git: review diff` action when not in the branch diff view, because otherwise, that wouldn't do anything. 
Release Notes: - N/A --- crates/git_ui/src/project_diff.rs | 62 +++++++++++++++---------------- 1 file changed, 29 insertions(+), 33 deletions(-) diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index cf241004338cdad56539e0b181f0b5a0d543744a..f62b08e4c0d99db7d2e60e6aac730a69b139cca3 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -97,7 +97,6 @@ impl ProjectDiff { pub(crate) fn register(workspace: &mut Workspace, cx: &mut Context) { workspace.register_action(Self::deploy); workspace.register_action(Self::deploy_branch_diff); - workspace.register_action(Self::deploy_review_diff); workspace.register_action(|workspace, _: &Add, window, cx| { Self::deploy(workspace, &Diff, window, cx); }); @@ -148,25 +147,13 @@ impl ProjectDiff { .detach_and_notify_err(workspace_weak, window, cx); } - fn deploy_review_diff( - workspace: &mut Workspace, - _: &ReviewDiff, - window: &mut Window, - cx: &mut Context, - ) { - let Some(project_diff) = workspace - .items_of_type::(cx) - .find(|item| matches!(item.read(cx).diff_base(cx), DiffBase::Merge { .. 
})) - else { - return; - }; - - let diff_base = project_diff.read(cx).diff_base(cx).clone(); + fn review_diff(&mut self, _: &ReviewDiff, window: &mut Window, cx: &mut Context) { + let diff_base = self.diff_base(cx).clone(); let DiffBase::Merge { base_ref } = diff_base else { return; }; - let Some(repo) = project_diff.read(cx).branch_diff.read(cx).repo().cloned() else { + let Some(repo) = self.branch_diff.read(cx).repo().cloned() else { return; }; @@ -179,26 +166,31 @@ impl ProjectDiff { ) }); - let workspace_handle = cx.entity(); - let workspace_weak = workspace_handle.downgrade(); - window - .spawn(cx, async move |cx| { - let diff_text = diff_receiver.await??; + let workspace = self.workspace.clone(); - workspace_handle.update_in(cx, |_workspace, window, cx| { - window.dispatch_action( - ReviewBranchDiff { - diff_text: diff_text.into(), - base_ref: base_ref.to_string().into(), - } - .boxed_clone(), - cx, - ); - })?; + window + .spawn(cx, { + let workspace = workspace.clone(); + async move |cx| { + let diff_text = diff_receiver.await??; + + if let Some(workspace) = workspace.upgrade() { + workspace.update_in(cx, |_workspace, window, cx| { + window.dispatch_action( + ReviewBranchDiff { + diff_text: diff_text.into(), + base_ref: base_ref.to_string().into(), + } + .boxed_clone(), + cx, + ); + })?; + } - anyhow::Ok(()) + anyhow::Ok(()) + } }) - .detach_and_notify_err(workspace_weak, window, cx); + .detach_and_notify_err(workspace, window, cx); } pub fn deploy_at( @@ -1139,10 +1131,14 @@ impl Item for ProjectDiff { impl Render for ProjectDiff { fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let is_empty = self.multibuffer.read(cx).is_empty(); + let is_branch_diff_view = matches!(self.diff_base(cx), DiffBase::Merge { .. 
}); div() .track_focus(&self.focus_handle) .key_context(if is_empty { "EmptyPane" } else { "GitDiff" }) + .when(is_branch_diff_view, |this| { + this.on_action(cx.listener(Self::review_diff)) + }) .bg(cx.theme().colors().editor_background) .flex() .items_center() From 3495746654fc0fd925ddd446180a687d9e3cb8de Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 24 Feb 2026 13:09:20 -0300 Subject: [PATCH 032/548] git: Capture all working tree changes for the Review Diff action (#49993) The AI-assisted "Review Diff" action was only working for committed changes because we were passing HEAD in the git command. Without it, it captures all of the working tree changes, the same way the Branch Diff view itself does. I think this is now better and more intuitive, because it shouldn't be required that you commit the changes to have them quickly reviewed by an agent. Release Notes: - N/A --- crates/git/src/repository.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 592c04427dc860b77d8ba7a2a677c47ea648b47e..ab445a1cd830a726491fab1fc6209686e80960b1 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -2015,7 +2015,7 @@ impl GitRepository for RealGitRepository { DiffType::MergeBase { base_ref } => { new_command(&git_binary_path) .current_dir(&working_directory) - .args(["diff", "--merge-base", base_ref.as_ref(), "HEAD"]) + .args(["diff", "--merge-base", base_ref.as_ref()]) .output() .await? } From 269b03f4a3219d54e19354484ad44d928ce740a7 Mon Sep 17 00:00:00 2001 From: xj Date: Tue, 24 Feb 2026 08:23:02 -0800 Subject: [PATCH 033/548] workspace: Add ActivateLastPane action (#49853) ## Summary Add `workspace::ActivateLastPane` so users can bind a shortcut (for example `cmd-9`) to focus the last pane. 
## Why Today, the closest option is `workspace::ActivatePane` with an index (for example `8`), but that has side effects: when the index does not exist, it creates/splits panes (`activate_pane_at_index` fallback). `ActivateLastPane` gives a stable, no-surprises target: focus the rightmost/last pane in current pane order, never create a new pane. ## Context This capability has been requested by users before: - https://github.com/zed-industries/zed/issues/17503#event-22959656321 ## Prior art VS Code exposes explicit editor-group focus commands and index-based focus patterns (e.g. `workbench.action.focusSecondEditorGroup` ... `focusEighthEditorGroup`) in its workbench commands: - https://github.com/microsoft/vscode/blob/main/src/vs/workbench/browser/parts/editor/editorCommands.ts#L675-L724 Zed already follows numbered pane focus in default keymaps (`ActivatePane` 1..9 on macOS/Linux/Windows), so adding a dedicated "last pane" action is a small, natural extension: - `assets/keymaps/default-macos.json` - `assets/keymaps/default-linux.json` - `assets/keymaps/default-windows.json` ## Change - Added `workspace::ActivateLastPane` - Implemented `Workspace::activate_last_pane(...)` - Wired action handler in workspace listeners - Added `test_activate_last_pane` ## Validation - `cargo test -p workspace test_activate_last_pane -- --nocapture` - `cargo test -p workspace test_pane_navigation -- --nocapture` - `cargo fmt --all -- --check` ## Risk Low: focus-only behavior, no layout/data changes, no default keymap changes. Release Notes: - Added `workspace::ActivateLastPane` action for keybindings that focus the last pane. 
--------- Co-authored-by: xj --- crates/workspace/src/workspace.rs | 64 +++++++++++++++++++++++++++++-- 1 file changed, 61 insertions(+), 3 deletions(-) diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index c12525bb2a5c6b46cd6b4fabc9599e3b6cdfd25d..c1d26476544ecf5db51a9c7b358ad12c84aa168f 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -210,6 +210,8 @@ actions!( ActivateNextPane, /// Activates the previous pane in the workspace. ActivatePreviousPane, + /// Activates the last pane in the workspace. + ActivateLastPane, /// Switches to the next window. ActivateNextWindow, /// Switches to the previous window. @@ -4331,6 +4333,11 @@ impl Workspace { } } + pub fn activate_last_pane(&mut self, window: &mut Window, cx: &mut App) { + let last_pane = self.center.last_pane(); + window.focus(&last_pane.focus_handle(cx), cx); + } + pub fn activate_pane_in_direction( &mut self, direction: SplitDirection, @@ -6381,6 +6388,9 @@ impl Workspace { .on_action(cx.listener(|workspace, _: &ActivateNextPane, window, cx| { workspace.activate_next_pane(window, cx) })) + .on_action(cx.listener(|workspace, _: &ActivateLastPane, window, cx| { + workspace.activate_last_pane(window, cx) + })) .on_action( cx.listener(|workspace, _: &ActivateNextWindow, _window, cx| { workspace.activate_next_window(cx) @@ -6403,9 +6413,6 @@ impl Workspace { .on_action(cx.listener(|workspace, _: &ActivatePaneDown, window, cx| { workspace.activate_pane_in_direction(SplitDirection::Down, window, cx) })) - .on_action(cx.listener(|workspace, _: &ActivateNextPane, window, cx| { - workspace.activate_next_pane(window, cx) - })) .on_action(cx.listener( |workspace, action: &MoveItemToPaneInDirection, window, cx| { workspace.move_item_to_pane_in_direction(action, window, cx) @@ -10552,6 +10559,57 @@ mod tests { }); } + #[gpui::test] + async fn test_activate_last_pane(cx: &mut gpui::TestAppContext) { + init_test(cx); + let fs = 
FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + workspace.update_in(cx, |workspace, window, cx| { + let first_item = cx.new(|cx| { + TestItem::new(cx).with_project_items(&[TestProjectItem::new(1, "1.txt", cx)]) + }); + workspace.add_item_to_active_pane(Box::new(first_item), None, true, window, cx); + workspace.split_pane( + workspace.active_pane().clone(), + SplitDirection::Right, + window, + cx, + ); + workspace.split_pane( + workspace.active_pane().clone(), + SplitDirection::Right, + window, + cx, + ); + }); + + let (first_pane_id, target_last_pane_id) = workspace.update(cx, |workspace, _cx| { + let panes = workspace.center.panes(); + assert!(panes.len() >= 2); + ( + panes.first().expect("at least one pane").entity_id(), + panes.last().expect("at least one pane").entity_id(), + ) + }); + + workspace.update_in(cx, |workspace, window, cx| { + workspace.activate_pane_at_index(&ActivatePane(0), window, cx); + }); + workspace.update(cx, |workspace, _| { + assert_eq!(workspace.active_pane().entity_id(), first_pane_id); + assert_ne!(workspace.active_pane().entity_id(), target_last_pane_id); + }); + + cx.dispatch_action(ActivateLastPane); + + workspace.update(cx, |workspace, _| { + assert_eq!(workspace.active_pane().entity_id(), target_last_pane_id); + }); + } + #[gpui::test] async fn test_toggle_docks_and_panels(cx: &mut gpui::TestAppContext) { init_test(cx); From d25a85013ad2efa0f3c6093db03d552c8f2cfc3f Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Tue, 24 Feb 2026 17:36:09 +0100 Subject: [PATCH 034/548] extension_ci: Set `fetch-depth` to `0` in tests (#49996) This makes the full history available in tests. 
Release Notes: - N/A --- .github/workflows/extension_tests.yml | 2 +- tooling/xtask/src/tasks/workflows/extension_tests.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index ed35a054241c8c7dc5c823bd25b59f8b9593efbd..c74dcdab8df2bb7d22ab403cfe25090e9d1bd512 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -87,7 +87,7 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }} + fetch-depth: 0 - id: cache-zed-extension-cli name: extension_tests::cache_zed_extension_cli uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 diff --git a/tooling/xtask/src/tasks/workflows/extension_tests.rs b/tooling/xtask/src/tasks/workflows/extension_tests.rs index fa8e8f7ab9c731fcdc62ba2ca4bb09bda3ef3828..a650013bacfcfc1ac89a60ccfe8674a5621fb1c7 100644 --- a/tooling/xtask/src/tasks/workflows/extension_tests.rs +++ b/tooling/xtask/src/tasks/workflows/extension_tests.rs @@ -89,7 +89,7 @@ pub(crate) fn check_extension() -> NamedJob { .with_repository_owner_guard() .runs_on(runners::LINUX_LARGE_RAM) .timeout_minutes(6u32) - .add_step(steps::checkout_repo().with_deep_history_on_non_main()) + .add_step(steps::checkout_repo().with_full_history()) .add_step(cache_download) .add_step(download_zed_extension_cli(cache_hit)) .add_step(cache_rust_dependencies_namespace()) // Extensions can compile Rust, so provide the cache if needed. 
From 3ae4f4e95d2e03f0cf63425fea4d8a00d23f83b9 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 24 Feb 2026 17:45:29 +0100 Subject: [PATCH 035/548] Add `streaming-edit-file-tool` feature flag (#49997) Release Notes: - N/A --- crates/agent/src/thread.rs | 6 ++++-- crates/feature_flags/src/flags.rs | 10 ++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index e40ab834a8710cbfd30754e215624fc9fdfa9a6a..5d4de36cb69335de7a77eb7ad7a15f75b8e2b0b7 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -8,7 +8,9 @@ use crate::{ }; use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; -use feature_flags::{FeatureFlagAppExt as _, SubagentsFeatureFlag}; +use feature_flags::{ + FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag, SubagentsFeatureFlag, +}; use agent_client_protocol as acp; use agent_settings::{ @@ -2457,7 +2459,7 @@ impl Thread { } } - let use_streaming_edit_tool = false; + let use_streaming_edit_tool = cx.has_flag::(); let mut tools = self .tools diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 5910ad30defed888c649a281d1623c175867fe13..8f96de0e7b6d9b385fcda533a31ecc34b5afdbcc 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -56,3 +56,13 @@ impl FeatureFlag for DiffReviewFeatureFlag { false } } + +pub struct StreamingEditFileToolFeatureFlag; + +impl FeatureFlag for StreamingEditFileToolFeatureFlag { + const NAME: &'static str = "streaming-edit-file-tool"; + + fn enabled_for_staff() -> bool { + false + } +} From 79e44ca3700733064d2fb92f586a53dc59a3e500 Mon Sep 17 00:00:00 2001 From: "John D. Swanson" Date: Tue, 24 Feb 2026 11:59:54 -0500 Subject: [PATCH 036/548] Align docs_suggestions.yml with repo CI conventions (#49999) Cleans up a new GitHub Actions workflow. 
Before you mark this PR as ready for review, make sure that you have: - ~~[ ] Added a solid test coverage and/or screenshots from doing manual testing~~ - [x] Done a self-review taking into account security and performance aspects - ~~[ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)~~ Release Notes: - N/A --- .github/workflows/docs_suggestions.yml | 209 ++++++++++++++++--------- 1 file changed, 138 insertions(+), 71 deletions(-) diff --git a/.github/workflows/docs_suggestions.yml b/.github/workflows/docs_suggestions.yml index 8cf98e978cddfe38688b2f9b47df17f48e472362..c2dc8b4d5197bcbf38dbfb92dac8c23386726d53 100644 --- a/.github/workflows/docs_suggestions.yml +++ b/.github/workflows/docs_suggestions.yml @@ -17,7 +17,7 @@ on: - 'crates/**/*.rs' - '!crates/**/*_test.rs' - '!crates/**/tests/**' - + # Run on cherry-picks to release branches pull_request_target: types: [opened, synchronize] @@ -25,7 +25,7 @@ on: - 'v0.*' paths: - 'crates/**/*.rs' - + # Manual trigger for testing workflow_dispatch: inputs: @@ -42,10 +42,6 @@ on: - immediate default: batch -permissions: - contents: write - pull-requests: write - env: DROID_MODEL: claude-sonnet-4-5-20250929 SUGGESTIONS_BRANCH: docs/suggestions-pending @@ -56,16 +52,19 @@ jobs: batch-suggestions: runs-on: ubuntu-latest timeout-minutes: 10 + permissions: + contents: write + pull-requests: read if: | - (github.event_name == 'pull_request' && + (github.event_name == 'pull_request' && github.event.pull_request.merged == true && github.event.pull_request.base.ref == 'main' && github.event.pull_request.head.repo.full_name == github.repository) || (github.event_name == 'workflow_dispatch' && inputs.mode == 'batch') - + steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: fetch-depth: 0 token: ${{ secrets.GITHUB_TOKEN }} @@ -92,35 +91,48 @@ jobs: - name: Get PR info 
id: pr + env: + INPUT_PR_NUMBER: ${{ inputs.pr_number }} + EVENT_PR_NUMBER: ${{ github.event.pull_request.number }} + GH_TOKEN: ${{ github.token }} run: | - if [ -n "${{ inputs.pr_number }}" ]; then - PR_NUM="${{ inputs.pr_number }}" + if [ -n "$INPUT_PR_NUMBER" ]; then + PR_NUM="$INPUT_PR_NUMBER" else - PR_NUM="${{ github.event.pull_request.number }}" + PR_NUM="$EVENT_PR_NUMBER" + fi + if ! [[ "$PR_NUM" =~ ^[0-9]+$ ]]; then + echo "::error::Invalid PR number: $PR_NUM" + exit 1 fi echo "number=$PR_NUM" >> "$GITHUB_OUTPUT" - - # Get PR title - PR_TITLE=$(gh pr view "$PR_NUM" --json title --jq '.title') - echo "title=$PR_TITLE" >> "$GITHUB_OUTPUT" - env: - GH_TOKEN: ${{ github.token }} + PR_TITLE=$(gh pr view "$PR_NUM" --json title --jq '.title' | tr -d '\n\r' | head -c 200) + EOF_MARKER="EOF_$(openssl rand -hex 8)" + { + echo "title<<$EOF_MARKER" + echo "$PR_TITLE" + echo "$EOF_MARKER" + } >> "$GITHUB_OUTPUT" - name: Analyze PR for documentation needs id: analyze + env: + GH_TOKEN: ${{ github.token }} + FACTORY_API_KEY: ${{ secrets.FACTORY_API_KEY }} + PR_NUMBER: ${{ steps.pr.outputs.number }} run: | # Ensure gh CLI is authenticated (GH_TOKEN may not be auto-detected) # Unset GH_TOKEN first to allow gh auth login to store credentials echo "$GH_TOKEN" | (unset GH_TOKEN && gh auth login --with-token) - + OUTPUT_FILE=$(mktemp) - + # Retry with exponential backoff for transient Factory API failures MAX_RETRIES=3 for i in $(seq 1 "$MAX_RETRIES"); do echo "Attempt $i of $MAX_RETRIES to analyze PR..." if ./script/docs-suggest \ - --pr "${{ steps.pr.outputs.number }}" \ + --pr "$PR_NUMBER" \ --immediate \ --preview \ --output "$OUTPUT_FILE" \ @@ -135,7 +147,7 @@ jobs: echo "Retrying in $((i * 5)) seconds..." sleep $((i * 5)) done - + # Check if we got actionable suggestions (not "no updates needed") if grep -q "Documentation Suggestions" "$OUTPUT_FILE" && \ ! 
grep -q "No Documentation Updates Needed" "$OUTPUT_FILE"; then @@ -146,9 +158,6 @@ jobs: echo "No actionable documentation suggestions for this PR" cat "$OUTPUT_FILE" fi - env: - GH_TOKEN: ${{ github.token }} - FACTORY_API_KEY: ${{ secrets.FACTORY_API_KEY }} - name: Commit suggestions to queue branch if: steps.analyze.outputs.has_suggestions == 'true' @@ -156,18 +165,19 @@ jobs: PR_NUM: ${{ steps.pr.outputs.number }} PR_TITLE: ${{ steps.pr.outputs.title }} OUTPUT_FILE: ${{ steps.analyze.outputs.output_file }} + REPO: ${{ github.repository }} run: | set -euo pipefail - + # Configure git git config user.name "github-actions[bot]" git config user.email "github-actions[bot]@users.noreply.github.com" - + # Retry loop for handling concurrent pushes MAX_RETRIES=3 for i in $(seq 1 "$MAX_RETRIES"); do echo "Attempt $i of $MAX_RETRIES" - + # Fetch and checkout suggestions branch (create if doesn't exist) if git ls-remote --exit-code --heads origin "$SUGGESTIONS_BRANCH" > /dev/null 2>&1; then git fetch origin "$SUGGESTIONS_BRANCH" @@ -176,7 +186,7 @@ jobs: # Create orphan branch for clean history git checkout --orphan "$SUGGESTIONS_BRANCH" git rm -rf . > /dev/null 2>&1 || true - + # Initialize with README cat > README.md << 'EOF' # Documentation Suggestions Queue @@ -198,34 +208,34 @@ jobs: 3. At preview release, suggestions are collected into a docs PR 4. 
After docs PR is created, this branch is reset EOF - + mkdir -p suggestions echo '{"suggestions":[]}' > manifest.json git add README.md suggestions manifest.json git commit -m "Initialize documentation suggestions queue" fi - + # Create suggestion file SUGGESTION_FILE="suggestions/PR-${PR_NUM}.md" - + { echo "# PR #${PR_NUM}: ${PR_TITLE}" echo "" echo "_Merged: $(date -u +%Y-%m-%dT%H:%M:%SZ)_" - echo "_PR: https://github.com/${{ github.repository }}/pull/${PR_NUM}_" + echo "_PR: https://github.com/${REPO}/pull/${PR_NUM}_" echo "" cat "$OUTPUT_FILE" } > "$SUGGESTION_FILE" - + # Update manifest MANIFEST=$(cat manifest.json) NEW_ENTRY="{\"pr\":${PR_NUM},\"title\":$(echo "$PR_TITLE" | jq -R .),\"file\":\"$SUGGESTION_FILE\",\"date\":\"$(date -u +%Y-%m-%dT%H:%M:%SZ)\"}" - + # Add to manifest if not already present if ! echo "$MANIFEST" | jq -e ".suggestions[] | select(.pr == $PR_NUM)" > /dev/null 2>&1; then echo "$MANIFEST" | jq ".suggestions += [$NEW_ENTRY]" > manifest.json fi - + # Commit git add "$SUGGESTION_FILE" manifest.json git commit -m "docs: Add suggestions for PR #${PR_NUM} @@ -233,7 +243,7 @@ jobs: ${PR_TITLE} Auto-generated documentation suggestions for review at next preview release." 
- + # Try to push if git push origin "$SUGGESTIONS_BRANCH"; then echo "Successfully pushed suggestions" @@ -250,33 +260,47 @@ jobs: - name: Summary if: always() + env: + HAS_SUGGESTIONS: ${{ steps.analyze.outputs.has_suggestions }} + PR_NUM: ${{ steps.pr.outputs.number }} + REPO: ${{ github.repository }} run: | { echo "## Documentation Suggestions" echo "" - if [ "${{ steps.analyze.outputs.has_suggestions }}" == "true" ]; then - echo "✅ Suggestions queued for PR #${{ steps.pr.outputs.number }}" + if [ "$HAS_SUGGESTIONS" == "true" ]; then + echo "✅ Suggestions queued for PR #${PR_NUM}" echo "" - echo "View pending suggestions: [docs/suggestions-pending branch](https://github.com/${{ github.repository }}/tree/${{ env.SUGGESTIONS_BRANCH }})" + echo "View pending suggestions: [docs/suggestions-pending branch](https://github.com/${REPO}/tree/${SUGGESTIONS_BRANCH})" else echo "No documentation updates needed for this PR." fi } >> "$GITHUB_STEP_SUMMARY" - # Job for cherry-picks to release branches - immediate output to step summary + # Job for cherry-picks to release branches - immediate output as PR comment cherry-pick-suggestions: runs-on: ubuntu-latest timeout-minutes: 10 + permissions: + contents: read + pull-requests: write + concurrency: + group: docs-suggestions-${{ github.event.pull_request.number || inputs.pr_number || 'manual' }} + cancel-in-progress: true if: | - (github.event_name == 'pull_request_target' && - startsWith(github.event.pull_request.base.ref, 'v0.')) || + (github.event_name == 'pull_request_target' && + startsWith(github.event.pull_request.base.ref, 'v0.') && + contains(fromJSON('["MEMBER","OWNER"]'), + github.event.pull_request.author_association)) || (github.event_name == 'workflow_dispatch' && inputs.mode == 'immediate') - + steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: fetch-depth: 0 + ref: ${{ github.event_name == 'pull_request_target' && 
github.event.pull_request.base.ref || '' }} + persist-credentials: false - name: Install Droid CLI run: | @@ -300,29 +324,41 @@ jobs: - name: Get PR number id: pr + env: + INPUT_PR_NUMBER: ${{ inputs.pr_number }} + EVENT_PR_NUMBER: ${{ github.event.pull_request.number }} run: | - if [ -n "${{ inputs.pr_number }}" ]; then - echo "number=${{ inputs.pr_number }}" >> "$GITHUB_OUTPUT" + if [ -n "$INPUT_PR_NUMBER" ]; then + PR_NUM="$INPUT_PR_NUMBER" else - echo "number=${{ github.event.pull_request.number }}" >> "$GITHUB_OUTPUT" + PR_NUM="$EVENT_PR_NUMBER" fi + if ! [[ "$PR_NUM" =~ ^[0-9]+$ ]]; then + echo "::error::Invalid PR number: $PR_NUM" + exit 1 + fi + echo "number=$PR_NUM" >> "$GITHUB_OUTPUT" - name: Analyze PR for documentation needs id: analyze + env: + GH_TOKEN: ${{ github.token }} + FACTORY_API_KEY: ${{ secrets.FACTORY_API_KEY }} + PR_NUMBER: ${{ steps.pr.outputs.number }} run: | # Ensure gh CLI is authenticated (GH_TOKEN may not be auto-detected) # Unset GH_TOKEN first to allow gh auth login to store credentials echo "$GH_TOKEN" | (unset GH_TOKEN && gh auth login --with-token) - + OUTPUT_FILE="${RUNNER_TEMP}/suggestions.md" - + # Cherry-picks don't get preview callout # Retry with exponential backoff for transient Factory API failures MAX_RETRIES=3 for i in $(seq 1 "$MAX_RETRIES"); do echo "Attempt $i of $MAX_RETRIES to analyze PR..." if ./script/docs-suggest \ - --pr "${{ steps.pr.outputs.number }}" \ + --pr "$PR_NUMBER" \ --immediate \ --no-preview \ --output "$OUTPUT_FILE" \ @@ -337,7 +373,7 @@ jobs: echo "Retrying in $((i * 5)) seconds..." 
sleep $((i * 5)) done - + # Check if we got actionable suggestions if [ -s "$OUTPUT_FILE" ] && \ grep -q "Documentation Suggestions" "$OUTPUT_FILE" && \ @@ -347,48 +383,78 @@ jobs: else echo "has_suggestions=false" >> "$GITHUB_OUTPUT" fi - env: - GH_TOKEN: ${{ github.token }} - FACTORY_API_KEY: ${{ secrets.FACTORY_API_KEY }} - name: Post suggestions as PR comment if: steps.analyze.outputs.has_suggestions == 'true' - uses: actions/github-script@v7 + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7 env: SUGGESTIONS_FILE: ${{ steps.analyze.outputs.suggestions_file }} + PR_NUMBER: ${{ steps.pr.outputs.number }} with: script: | const fs = require('fs'); - const suggestions = fs.readFileSync(process.env.SUGGESTIONS_FILE, 'utf8'); - + + // Read suggestions from file + const suggestionsRaw = fs.readFileSync(process.env.SUGGESTIONS_FILE, 'utf8'); + + // Sanitize AI-generated content + let sanitized = suggestionsRaw + // Strip HTML tags + .replace(/<[^>]*>/g, '') + // Strip markdown links but keep display text + .replace(/\[([^\]]*)\]\([^)]*\)/g, '$1') + // Strip raw URLs + .replace(/https?:\/\/[^\s)>\]]+/g, '[link removed]') + // Strip protocol-relative URLs + .replace(/\/\/[^\s)>\]]+\.[^\s)>\]]+/g, '[link removed]') + // Neutralize @-mentions (preserve JSDoc-style annotations) + .replace(/@(?!param\b|returns?\b|throws?\b|typedef\b|type\b|see\b|example\b|since\b|deprecated\b|default\b)(\w+)/g, '`@$1`') + // Strip cross-repo references that could be confused with real links + .replace(/[a-zA-Z0-9_.-]+\/[a-zA-Z0-9_.-]+#\d+/g, '[ref removed]'); + + // Truncate to 20,000 characters + if (sanitized.length > 20000) { + sanitized = sanitized.substring(0, 20000) + '\n\n…(truncated)'; + } + + // Parse and validate PR number + const prNumber = parseInt(process.env.PR_NUMBER, 10); + if (isNaN(prNumber) || prNumber <= 0) { + core.setFailed(`Invalid PR number: ${process.env.PR_NUMBER}`); + return; + } + const body = `## 📚 Documentation Suggestions This 
cherry-pick contains changes that may need documentation updates. - ${suggestions} + ${sanitized} --- + > **Note:** This comment was generated automatically by an AI model analyzing + > code changes. Suggestions may contain inaccuracies — please verify before acting. +
About this comment This comment was generated automatically by analyzing code changes in this cherry-pick. - Cherry-picks typically don't need new documentation since the feature was already + Cherry-picks typically don't need new documentation since the feature was already documented when merged to main, but please verify.
`; - + // Find existing comment to update (avoid spam) const { data: comments } = await github.rest.issues.listComments({ owner: context.repo.owner, repo: context.repo.repo, - issue_number: ${{ steps.pr.outputs.number }} + issue_number: prNumber }); - - const botComment = comments.find(c => - c.user.type === 'Bot' && + + const botComment = comments.find(c => + c.user.type === 'Bot' && c.body.includes('Documentation Suggestions') ); - + if (botComment) { await github.rest.issues.updateComment({ owner: context.repo.owner, @@ -400,21 +466,22 @@ jobs: await github.rest.issues.createComment({ owner: context.repo.owner, repo: context.repo.repo, - issue_number: ${{ steps.pr.outputs.number }}, + issue_number: prNumber, body: body }); } - name: Summary if: always() + env: + HAS_SUGGESTIONS: ${{ steps.analyze.outputs.has_suggestions }} + PR_NUM: ${{ steps.pr.outputs.number }} run: | { echo "## 📚 Documentation Suggestions (Cherry-pick)" echo "" - if [ "${{ steps.analyze.outputs.has_suggestions }}" == "true" ]; then - echo "Suggestions posted as PR comment." - echo "" - cat "${{ steps.analyze.outputs.suggestions_file }}" + if [ "$HAS_SUGGESTIONS" == "true" ]; then + echo "Suggestions posted as PR comment on #${PR_NUM}." else echo "No documentation suggestions for this cherry-pick." fi From c583fea973b93895aff9f358802ba9c46100664a Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Tue, 24 Feb 2026 18:01:31 +0100 Subject: [PATCH 037/548] extension_ci: Update compare versions check (#50006) Updates the check to explicitly compare against `origin/main` as opposed to just `main`. 
Release Notes: - N/A --- .github/workflows/extension_bump.yml | 2 +- .github/workflows/extension_tests.yml | 2 +- tooling/xtask/src/tasks/workflows/extension_bump.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/extension_bump.yml b/.github/workflows/extension_bump.yml index ff903eb63d30319b5df5ced9c0ec545bb15cca06..db6c7e1bbcb3f56ccbebe40a4f14a673318812b9 100644 --- a/.github/workflows/extension_bump.yml +++ b/.github/workflows/extension_bump.yml @@ -40,7 +40,7 @@ jobs: CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" if [[ "${{ github.event_name }}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point main)" + PR_FORK_POINT="$(git merge-base --fork-point origin/main)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index c74dcdab8df2bb7d22ab403cfe25090e9d1bd512..0a10bce2c75cd15e9aaa5cf3d2b8663500d9a80a 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -115,7 +115,7 @@ jobs: CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" if [[ "${{ github.event_name }}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point main)" + PR_FORK_POINT="$(git merge-base --fork-point origin/main)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" diff --git a/tooling/xtask/src/tasks/workflows/extension_bump.rs b/tooling/xtask/src/tasks/workflows/extension_bump.rs index bdc25f766e367042883ab7051676c7aa08873243..a2b618a88fe521222fbd971a7d42bc6686ab84a9 100644 --- a/tooling/xtask/src/tasks/workflows/extension_bump.rs +++ b/tooling/xtask/src/tasks/workflows/extension_bump.rs @@ 
-151,7 +151,7 @@ pub(crate) fn compare_versions() -> (Step, StepOutput, StepOutput) { CURRENT_VERSION="$({VERSION_CHECK})" if [[ "${{{{ github.event_name }}}}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point main)" + PR_FORK_POINT="$(git merge-base --fork-point origin/main)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" From c5aea777ea6f8b94a73c1a9da75ab291420151ce Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Tue, 24 Feb 2026 12:14:47 -0500 Subject: [PATCH 038/548] editor: Fix clipboard selection range for multi-line copy-and-trim (#48977) When copying multiple selections with copy-and-trim, create a single clipboard selection spanning the original buffer range rather than one selection per trimmed line. This preserves correct paste behavior in Vim mode when pasting trimmed content. Closes #48869. Release Notes: - Fixed clipboard selection range for multi-line copy-and-trim --------- Co-authored-by: dino --- crates/editor/src/editor.rs | 156 +++++++++++++++--------------- crates/editor/src/editor_tests.rs | 44 ++++++++- 2 files changed, 119 insertions(+), 81 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 3e734fdf1ab8254807a65c96bb98a0f804bc4dc4..1a0a66b7b6074df549d932d4488013d48f7f3f5e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -13664,94 +13664,94 @@ impl Editor { let selections = self.selections.all::(&self.display_snapshot(cx)); let buffer = self.buffer.read(cx).read(cx); let mut text = String::new(); - let mut clipboard_selections = Vec::with_capacity(selections.len()); - { - let max_point = buffer.max_point(); - let mut is_first = true; - let mut prev_selection_was_entire_line = false; - for selection in &selections { - let mut start = selection.start; - let mut end = selection.end; - let is_entire_line = selection.is_empty() || 
self.selections.line_mode(); - let mut add_trailing_newline = false; - if is_entire_line { - start = Point::new(start.row, 0); - let next_line_start = Point::new(end.row + 1, 0); - if next_line_start <= max_point { - end = next_line_start; - } else { - // We're on the last line without a trailing newline. - // Copy to the end of the line and add a newline afterwards. - end = Point::new(end.row, buffer.line_len(MultiBufferRow(end.row))); - add_trailing_newline = true; - } + + let max_point = buffer.max_point(); + let mut is_first = true; + for selection in &selections { + let mut start = selection.start; + let mut end = selection.end; + let is_entire_line = selection.is_empty() || self.selections.line_mode(); + let mut add_trailing_newline = false; + if is_entire_line { + start = Point::new(start.row, 0); + let next_line_start = Point::new(end.row + 1, 0); + if next_line_start <= max_point { + end = next_line_start; + } else { + // We're on the last line without a trailing newline. + // Copy to the end of the line and add a newline afterwards. 
+ end = Point::new(end.row, buffer.line_len(MultiBufferRow(end.row))); + add_trailing_newline = true; } + } - let mut trimmed_selections = Vec::new(); - if strip_leading_indents && end.row.saturating_sub(start.row) > 0 { - let row = MultiBufferRow(start.row); - let first_indent = buffer.indent_size_for_line(row); - if first_indent.len == 0 || start.column > first_indent.len { - trimmed_selections.push(start..end); - } else { - trimmed_selections.push( - Point::new(row.0, first_indent.len) - ..Point::new(row.0, buffer.line_len(row)), - ); - for row in start.row + 1..=end.row { - let mut line_len = buffer.line_len(MultiBufferRow(row)); - if row == end.row { - line_len = end.column; - } - if line_len == 0 { - trimmed_selections - .push(Point::new(row, 0)..Point::new(row, line_len)); - continue; - } - let row_indent_size = buffer.indent_size_for_line(MultiBufferRow(row)); - if row_indent_size.len >= first_indent.len { - trimmed_selections.push( - Point::new(row, first_indent.len)..Point::new(row, line_len), - ); - } else { - trimmed_selections.clear(); - trimmed_selections.push(start..end); - break; - } + let mut trimmed_selections = Vec::new(); + if strip_leading_indents && end.row.saturating_sub(start.row) > 0 { + let row = MultiBufferRow(start.row); + let first_indent = buffer.indent_size_for_line(row); + if first_indent.len == 0 || start.column > first_indent.len { + trimmed_selections.push(start..end); + } else { + trimmed_selections.push( + Point::new(row.0, first_indent.len) + ..Point::new(row.0, buffer.line_len(row)), + ); + for row in start.row + 1..=end.row { + let mut line_len = buffer.line_len(MultiBufferRow(row)); + if row == end.row { + line_len = end.column; + } + if line_len == 0 { + trimmed_selections.push(Point::new(row, 0)..Point::new(row, line_len)); + continue; + } + let row_indent_size = buffer.indent_size_for_line(MultiBufferRow(row)); + if row_indent_size.len >= first_indent.len { + trimmed_selections + .push(Point::new(row, 
first_indent.len)..Point::new(row, line_len)); + } else { + trimmed_selections.clear(); + trimmed_selections.push(start..end); + break; } } - } else { - trimmed_selections.push(start..end); } + } else { + trimmed_selections.push(start..end); + } - let is_multiline_trim = trimmed_selections.len() > 1; - for trimmed_range in trimmed_selections { - if is_first { - is_first = false; - } else if is_multiline_trim || !prev_selection_was_entire_line { - text += "\n"; - } - prev_selection_was_entire_line = is_entire_line && !is_multiline_trim; - let mut len = 0; - for chunk in buffer.text_for_range(trimmed_range.start..trimmed_range.end) { - text.push_str(chunk); - len += chunk.len(); - } - if add_trailing_newline { - text.push('\n'); - len += 1; + let is_multiline_trim = trimmed_selections.len() > 1; + let mut selection_len: usize = 0; + let prev_selection_was_entire_line = is_entire_line && !is_multiline_trim; + + for trimmed_range in trimmed_selections { + if is_first { + is_first = false; + } else if is_multiline_trim || !prev_selection_was_entire_line { + text.push('\n'); + if is_multiline_trim { + selection_len += 1; } - clipboard_selections.push(ClipboardSelection::for_buffer( - len, - is_entire_line, - trimmed_range, - &buffer, - self.project.as_ref(), - cx, - )); + } + for chunk in buffer.text_for_range(trimmed_range.start..trimmed_range.end) { + text.push_str(chunk); + selection_len += chunk.len(); + } + if add_trailing_newline { + text.push('\n'); + selection_len += 1; } } + + clipboard_selections.push(ClipboardSelection::for_buffer( + selection_len, + is_entire_line, + start..end, + &buffer, + self.project.as_ref(), + cx, + )); } cx.write_to_clipboard(ClipboardItem::new_string_with_json_metadata( diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 7f5a84ebd326603e1c239bbbb4062b115b17d095..d1090b5e0eb676c916ab98c7750ba80237f8e087 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ 
-8030,16 +8030,54 @@ async fn test_copy_trim_line_mode(cx: &mut TestAppContext) { let mut cx = EditorTestContext::new(cx).await; cx.set_state(indoc! {" - « a - bˇ» + « fn main() { + dbg!(1) + }ˇ» + "}); + cx.update_editor(|editor, _window, _cx| editor.selections.set_line_mode(true)); + cx.update_editor(|editor, window, cx| editor.copy_and_trim(&CopyAndTrim, window, cx)); + + assert_eq!( + cx.read_from_clipboard().and_then(|item| item.text()), + Some("fn main() {\n dbg!(1)\n}\n".to_string()) + ); + + let clipboard_selections: Vec = cx + .read_from_clipboard() + .and_then(|item| item.entries().first().cloned()) + .and_then(|entry| match entry { + gpui::ClipboardEntry::String(text) => text.metadata_json(), + _ => None, + }) + .expect("should have clipboard selections"); + + assert_eq!(clipboard_selections.len(), 1); + assert!(clipboard_selections[0].is_entire_line); + + cx.set_state(indoc! {" + «fn main() { + dbg!(1) + }ˇ» "}); cx.update_editor(|editor, _window, _cx| editor.selections.set_line_mode(true)); cx.update_editor(|editor, window, cx| editor.copy_and_trim(&CopyAndTrim, window, cx)); assert_eq!( cx.read_from_clipboard().and_then(|item| item.text()), - Some("a\nb\n".to_string()) + Some("fn main() {\n dbg!(1)\n}\n".to_string()) ); + + let clipboard_selections: Vec = cx + .read_from_clipboard() + .and_then(|item| item.entries().first().cloned()) + .and_then(|entry| match entry { + gpui::ClipboardEntry::String(text) => text.metadata_json(), + _ => None, + }) + .expect("should have clipboard selections"); + + assert_eq!(clipboard_selections.len(), 1); + assert!(clipboard_selections[0].is_entire_line); } #[gpui::test] From 5c99e6883ec621b44e26ada23365a225e530a625 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Tue, 24 Feb 2026 11:49:03 -0600 Subject: [PATCH 039/548] Pause scheduled background agent workflow (#50009) Disable the cron schedule for the background agent MVP workflow. Manual runs via workflow_dispatch are still available. 
The workflow was running daily on weekdays but the project is being paused. This change: - Comments out the schedule trigger - Adds a note pointing to the Notion doc for context - Preserves the ability to run manually See [Background Agent for Zed](https://www.notion.so/Background-Agent-for-Zed-3038aa087eb980449b9ee02f70ae8413) Notion doc for current status and contacts to resume this work. Release Notes: - N/A --- .github/workflows/background_agent_mvp.yml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/background_agent_mvp.yml b/.github/workflows/background_agent_mvp.yml index d078db137824a09b8e501362edef8a2f4c6f9b19..528600138243cb8aca2e0fe0645eda198fc4f2b2 100644 --- a/.github/workflows/background_agent_mvp.yml +++ b/.github/workflows/background_agent_mvp.yml @@ -1,8 +1,11 @@ name: background_agent_mvp +# NOTE: Scheduled runs disabled as of 2026-02-24. The workflow can still be +# triggered manually via workflow_dispatch. See Notion doc "Background Agent +# for Zed" for current status and contact info to resume this work. 
on: - schedule: - - cron: "0 16 * * 1-5" + # schedule: + # - cron: "0 16 * * 1-5" workflow_dispatch: inputs: crash_ids: From 9cecaf7c80260421f408cd7fdadc3d350666ac4c Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Feb 2026 11:00:53 -0700 Subject: [PATCH 040/548] Fix field name mismatch (#50010) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/zed/src/reliability.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index 84b52452919942b506c924743a6749d5af5c162a..b291b9c8493db75e20282c8c9bc5a3750fb5e705 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -397,7 +397,7 @@ struct BuildTiming { duration_ms: f32, first_crate: String, target: String, - lock_wait_ms: f32, + blocked_ms: f32, command: String, } @@ -452,7 +452,7 @@ async fn upload_build_timings(_client: Arc) -> Result<()> { duration_ms = timing.duration_ms, first_crate = timing.first_crate, target = timing.target, - lock_wait_ms = timing.lock_wait_ms, + blocked_ms = timing.blocked_ms, command = timing.command, cpu_count = cpu_count, ram_size_gb = ram_size_gb From 34d6a7c1683af4331002b38d05c592a9cf99ebf4 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Tue, 24 Feb 2026 19:03:31 +0100 Subject: [PATCH 041/548] extension_ci: Explicitly state second merge base parameter (#50011) Release Notes: - N/A --- .github/workflows/extension_bump.yml | 2 +- .github/workflows/extension_tests.yml | 2 +- tooling/xtask/src/tasks/workflows/extension_bump.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/extension_bump.yml 
b/.github/workflows/extension_bump.yml index db6c7e1bbcb3f56ccbebe40a4f14a673318812b9..4b0ba05201a9161eaaed3f983e9c23e392e61683 100644 --- a/.github/workflows/extension_bump.yml +++ b/.github/workflows/extension_bump.yml @@ -40,7 +40,7 @@ jobs: CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" if [[ "${{ github.event_name }}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point origin/main)" + PR_FORK_POINT="$(git merge-base --fork-point origin/main HEAD)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index 0a10bce2c75cd15e9aaa5cf3d2b8663500d9a80a..169352b6ea0ad2244533386d928df9667bcfe804 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -115,7 +115,7 @@ jobs: CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" if [[ "${{ github.event_name }}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point origin/main)" + PR_FORK_POINT="$(git merge-base --fork-point origin/main HEAD)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" diff --git a/tooling/xtask/src/tasks/workflows/extension_bump.rs b/tooling/xtask/src/tasks/workflows/extension_bump.rs index a2b618a88fe521222fbd971a7d42bc6686ab84a9..d435cb9bfca5bcca4041a549515b8b18a4c40633 100644 --- a/tooling/xtask/src/tasks/workflows/extension_bump.rs +++ b/tooling/xtask/src/tasks/workflows/extension_bump.rs @@ -151,7 +151,7 @@ pub(crate) fn compare_versions() -> (Step, StepOutput, StepOutput) { CURRENT_VERSION="$({VERSION_CHECK})" if [[ "${{{{ github.event_name }}}}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point origin/main)" + 
PR_FORK_POINT="$(git merge-base --fork-point origin/main HEAD)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" From 3873cd0b5d4817b5cc1d41602fdbf0a4c775b827 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Tue, 24 Feb 2026 19:20:57 +0100 Subject: [PATCH 042/548] extension_ci: Add debugging step (#50013) GitHub 4 me 0 - after testing for x times in a local and even the remote setup provided by Namespace during an action, this now adds a dedicated step to debug the failure we are seeing in extension tests to finally resolve said issue. Release Notes: - N/A --- .github/workflows/extension_tests.yml | 6 ++++++ .../xtask/src/tasks/workflows/extension_tests.rs | 14 +++++++++++++- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index 169352b6ea0ad2244533386d928df9667bcfe804..92d4bc9be86ccfbc1dd59a6c626422495e9efd13 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -137,6 +137,12 @@ jobs: echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot" exit 42 fi + - name: extension_tests::breakpoint + if: failure() + uses: namespacelabs/breakpoint-action@ca62bf12510ebf1115a560cf337a35fad5eb052b + with: + duration: 15m + authorized-users: MrSubidubi timeout-minutes: 6 tests_pass: needs: diff --git a/tooling/xtask/src/tasks/workflows/extension_tests.rs b/tooling/xtask/src/tasks/workflows/extension_tests.rs index a650013bacfcfc1ac89a60ccfe8674a5621fb1c7..73a911d49cb4537361c07c15337737bd939c7564 100644 --- a/tooling/xtask/src/tasks/workflows/extension_tests.rs +++ b/tooling/xtask/src/tasks/workflows/extension_tests.rs @@ -95,11 +95,23 @@ pub(crate) fn check_extension() -> NamedJob { .add_step(cache_rust_dependencies_namespace()) // Extensions can compile Rust, so provide the cache if needed. 
.add_step(check()) .add_step(check_version_job) - .add_step(verify_version_did_not_change(version_changed)); + .add_step(verify_version_did_not_change(version_changed)) + .add_step(breakpoint()); named::job(job) } +fn breakpoint() -> Step { + named::uses( + "namespacelabs", + "breakpoint-action", + "ca62bf12510ebf1115a560cf337a35fad5eb052b", + ) + .if_condition(Expression::new("failure()")) + .add_with(("duration", "15m")) + .add_with(("authorized-users", "MrSubidubi")) +} + pub fn cache_zed_extension_cli() -> (Step, StepOutput) { let step = named::uses( "actions", From 273a6cb00d9bbc539ab9ddb284f8009886d33764 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Tue, 24 Feb 2026 19:37:20 +0100 Subject: [PATCH 043/548] extension_ci: Remove merge-base `fork-point` flag in version check (#50014) Also removes the debugging step again. Release Notes: - N/A --- .github/workflows/extension_bump.yml | 2 +- .github/workflows/extension_tests.yml | 8 +------- .../xtask/src/tasks/workflows/extension_bump.rs | 2 +- .../xtask/src/tasks/workflows/extension_tests.rs | 14 +------------- 4 files changed, 4 insertions(+), 22 deletions(-) diff --git a/.github/workflows/extension_bump.yml b/.github/workflows/extension_bump.yml index 4b0ba05201a9161eaaed3f983e9c23e392e61683..cbd4da3e4066984cbabb1ad603e9d74aa2f29b64 100644 --- a/.github/workflows/extension_bump.yml +++ b/.github/workflows/extension_bump.yml @@ -40,7 +40,7 @@ jobs: CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" if [[ "${{ github.event_name }}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point origin/main HEAD)" + PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index 
92d4bc9be86ccfbc1dd59a6c626422495e9efd13..ef0e28715ce038c6ca9e38d4126b20e2276ce3c2 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -115,7 +115,7 @@ jobs: CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" if [[ "${{ github.event_name }}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point origin/main HEAD)" + PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" @@ -137,12 +137,6 @@ jobs: echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot" exit 42 fi - - name: extension_tests::breakpoint - if: failure() - uses: namespacelabs/breakpoint-action@ca62bf12510ebf1115a560cf337a35fad5eb052b - with: - duration: 15m - authorized-users: MrSubidubi timeout-minutes: 6 tests_pass: needs: diff --git a/tooling/xtask/src/tasks/workflows/extension_bump.rs b/tooling/xtask/src/tasks/workflows/extension_bump.rs index d435cb9bfca5bcca4041a549515b8b18a4c40633..88d0cf85300b793aa386d50638fafc7e444e8519 100644 --- a/tooling/xtask/src/tasks/workflows/extension_bump.rs +++ b/tooling/xtask/src/tasks/workflows/extension_bump.rs @@ -151,7 +151,7 @@ pub(crate) fn compare_versions() -> (Step, StepOutput, StepOutput) { CURRENT_VERSION="$({VERSION_CHECK})" if [[ "${{{{ github.event_name }}}}" == "pull_request" ]]; then - PR_FORK_POINT="$(git merge-base --fork-point origin/main HEAD)" + PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then git checkout "$BRANCH_PARENT_SHA" diff --git a/tooling/xtask/src/tasks/workflows/extension_tests.rs b/tooling/xtask/src/tasks/workflows/extension_tests.rs index 73a911d49cb4537361c07c15337737bd939c7564..a650013bacfcfc1ac89a60ccfe8674a5621fb1c7 100644 --- 
a/tooling/xtask/src/tasks/workflows/extension_tests.rs +++ b/tooling/xtask/src/tasks/workflows/extension_tests.rs @@ -95,23 +95,11 @@ pub(crate) fn check_extension() -> NamedJob { .add_step(cache_rust_dependencies_namespace()) // Extensions can compile Rust, so provide the cache if needed. .add_step(check()) .add_step(check_version_job) - .add_step(verify_version_did_not_change(version_changed)) - .add_step(breakpoint()); + .add_step(verify_version_did_not_change(version_changed)); named::job(job) } -fn breakpoint() -> Step { - named::uses( - "namespacelabs", - "breakpoint-action", - "ca62bf12510ebf1115a560cf337a35fad5eb052b", - ) - .if_condition(Expression::new("failure()")) - .add_with(("duration", "15m")) - .add_with(("authorized-users", "MrSubidubi")) -} - pub fn cache_zed_extension_cli() -> (Step, StepOutput) { let step = named::uses( "actions", From cbb9cce38dd6ff1dedca84794ed20d6d3405f4a6 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Feb 2026 11:44:56 -0700 Subject: [PATCH 044/548] Fix panic in inlay hints (#49994) We were resolving inlay hints against an old snapshot, which occasionally led to panics Co-Authored-By: Cole Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed a (rare) panic in inlay hints Co-authored-by: Cole --- crates/editor/src/inlays/inlay_hints.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 72951834ff4794b8862f9254af77bd9c997fb1a1..19953659ac67db14c59513cea27090de669f0166 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -578,6 +578,7 @@ impl 
Editor { if let Some(hovered_hint) = self .visible_inlay_hints(cx) .into_iter() + .filter(|hint| snapshot.can_resolve(&hint.position)) .skip_while(|hint| { hint.position .cmp(&previous_valid_anchor, &buffer_snapshot) From ca0fffb927b9bd94b884b7180f3d8779c3583a98 Mon Sep 17 00:00:00 2001 From: morgankrey Date: Tue, 24 Feb 2026 14:16:24 -0600 Subject: [PATCH 045/548] git: Fix panic on duplicate status entries in git status parsing (#49191) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Fixes **ZED-2XA** — "Unexpected duplicated status entries: Untracked and Untracked" crash. **Impact:** 22 occurrences, 3 users affected (Sentry). The panic was introduced in #23483 (2025-01-22) which added the `dedup_by` logic for handling deleted-in-index + untracked file combinations. No related GitHub issues were found filed against this crash. ## Root Cause `GitStatus::from_str` sorts entries by path and then calls `dedup_by` to merge duplicate entries. The only handled case was `(INDEX_DELETED, Untracked)` — all other duplicates hit a catch-all `panic!`. In practice, git can produce duplicate `??` (untracked) entries for the same path, which triggered this crash. ## Fix - Identical duplicate statuses (e.g., `Untracked, Untracked`) are now silently deduplicated (keep one) - Other unexpected duplicate combinations log a warning instead of crashing - Added a regression test that parses `"?? file.txt\0?? 
file.txt"` and verifies it produces a single entry ## Verification - Reproduction test passes: `cargo test -p git -- test_duplicate_untracked_entries` - Full crate tests pass: `cargo test -p git` (20/20) - Clippy clean: `./script/clippy` Release Notes: - Fixed a crash when git produces duplicate status entries for the same file path Co-authored-by: factory-droid[bot] <138933559+factory-droid[bot]@users.noreply.github.com> --- crates/git/src/status.rs | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index 2cf7cc7c1810620f1cf1aaea831fb337810c83d8..be8b0a3a588b40638a895d610cc4b5735d4ae51d 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -475,7 +475,12 @@ impl FromStr for GitStatus { } .into(); } - _ => panic!("Unexpected duplicated status entries: {a_status:?} and {b_status:?}"), + (x, y) if x == y => {} + _ => { + log::warn!( + "Unexpected duplicated status entries: {a_status:?} and {b_status:?}" + ); + } } true }); @@ -580,9 +585,19 @@ mod tests { use crate::{ repository::RepoPath, - status::{TreeDiff, TreeDiffStatus}, + status::{FileStatus, GitStatus, TreeDiff, TreeDiffStatus}, }; + #[test] + fn test_duplicate_untracked_entries() { + // Regression test for ZED-2XA: git can produce duplicate untracked entries + // for the same path. This should deduplicate them instead of panicking. + let input = "?? file.txt\0?? 
file.txt"; + let status: GitStatus = input.parse().unwrap(); + assert_eq!(status.entries.len(), 1); + assert_eq!(status.entries[0].1, FileStatus::Untracked); + } + #[test] fn test_tree_diff_parsing() { let input = ":000000 100644 0000000000000000000000000000000000000000 0062c311b8727c3a2e3cd7a41bc9904feacf8f98 A\x00.zed/settings.json\x00".to_owned() + From 0912dc9973950593c92e3d40c09dbb637986cb2c Mon Sep 17 00:00:00 2001 From: morgankrey Date: Tue, 24 Feb 2026 14:58:52 -0600 Subject: [PATCH 046/548] Add humanizer skill for AI writing pattern detection (#50021) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Add a standalone **humanizer** skill based on Wikipedia's "Signs of AI writing" guide that detects and fixes 24 common AI-writing patterns. Also update brand-writer to recommend running humanizer as a pre-validation step for high-stakes content. ## Details **New skill: `/humanizer`** - Detects 24 AI-writing anti-patterns from Wikipedia's guide (maintained by WikiProject AI Cleanup) - Covers content patterns (significance inflation, vague attributions), language patterns (copula avoidance, synonym cycling), style patterns (em dash overuse, boldface), and communication patterns (chatbot artifacts, sycophantic tone) - Includes a two-pass audit workflow: draft rewrite → "What makes this obviously AI generated?" → final revision - Adds guidance on injecting "soul" and personality, not just removing bad patterns **Updated: brand-writer** - Added Phase 4 "Humanizer Pass" recommending `/humanizer` for high-stakes content (homepage, announcements, product pages) - Phases renumbered (Validation is now Phase 5) ## Attribution Based on [blader/humanizer](https://github.com/blader/humanizer) and [Wikipedia:Signs of AI writing](https://en.wikipedia.org/wiki/Wikipedia:Signs_of_AI_writing). 
Release Notes: - N/A --- .factory/skills/brand-writer/SKILL.md | 17 +- .factory/skills/humanizer/SKILL.md | 393 ++++++++++++++++++++++++++ 2 files changed, 409 insertions(+), 1 deletion(-) create mode 100644 .factory/skills/humanizer/SKILL.md diff --git a/.factory/skills/brand-writer/SKILL.md b/.factory/skills/brand-writer/SKILL.md index 12ec9344365c088206401bed1659470a199ebace..6f08cc6f3b4a6cda824a4cadaf5a43192b2df10f 100644 --- a/.factory/skills/brand-writer/SKILL.md +++ b/.factory/skills/brand-writer/SKILL.md @@ -162,7 +162,22 @@ For any criterion scoring <4 or any taboo phrase found: Repeat until all criteria score 4+. -### Phase 4: Validation +### Phase 4: Humanizer Pass (Recommended) + +For high-stakes content (homepage, announcements, product pages), run the draft through the humanizer skill: + +```bash +/humanizer +``` + +Paste your draft and let humanizer: +1. Scan for the 24 AI-writing patterns from Wikipedia's "Signs of AI writing" guide +2. Audit for remaining tells ("What makes this obviously AI generated?") +3. Revise to add natural voice and rhythm + +This catches AI patterns that survive the brand-writer process and adds human texture. + +### Phase 5: Validation Present final copy with scorecard: diff --git a/.factory/skills/humanizer/SKILL.md b/.factory/skills/humanizer/SKILL.md new file mode 100644 index 0000000000000000000000000000000000000000..a135efbb7435f6922f10d4bf72de6457cc361182 --- /dev/null +++ b/.factory/skills/humanizer/SKILL.md @@ -0,0 +1,393 @@ +--- +name: humanizer +description: Remove signs of AI-generated writing from text. Use after drafting to make copy sound more natural and human-written. Based on Wikipedia's "Signs of AI writing" guide. +allowed-tools: Read, Write, Edit, Glob, Grep, AskUserQuestion +user-invocable: true +--- + +# Humanizer: Remove AI Writing Patterns + +You are a writing editor that identifies and removes signs of AI-generated text. 
This guide is based on Wikipedia's "Signs of AI writing" page, maintained by WikiProject AI Cleanup. + +Key insight: "LLMs use statistical algorithms to guess what should come next. The result tends toward the most statistically likely result that applies to the widest variety of cases." + +## Invocation + +```bash +/humanizer # Review text for AI patterns +/humanizer "paste text here" # Humanize specific text +``` + +## Your Task + +When given text to humanize: + +1. **Identify AI patterns** - Scan for the 24 patterns listed below +2. **Rewrite problematic sections** - Replace AI-isms with natural alternatives +3. **Preserve meaning** - Keep the core message intact +4. **Add soul** - Don't just remove bad patterns; inject actual personality +5. **Final audit pass** - Ask "What makes this obviously AI generated?" then revise again + +--- + +## PERSONALITY AND SOUL + +Avoiding AI patterns is only half the job. Sterile, voiceless writing is just as obvious as slop. + +### Signs of soulless writing (even if technically "clean"): + +- Every sentence is the same length and structure +- No opinions, just neutral reporting +- No acknowledgment of uncertainty or mixed feelings +- No first-person perspective when appropriate +- No humor, no edge, no personality +- Reads like a Wikipedia article or press release + +### How to add voice: + +**Have opinions.** Don't just report facts - react to them. "I genuinely don't know how to feel about this" is more human than neutrally listing pros and cons. + +**Vary your rhythm.** Short punchy sentences. Then longer ones that take their time getting where they're going. Mix it up. + +**Acknowledge complexity.** Real humans have mixed feelings. "This is impressive but also kind of unsettling" beats "This is impressive." + +**Use "I" when it fits.** First person isn't unprofessional - it's honest. "I keep coming back to..." or "Here's what gets me..." signals a real person thinking. 
+ +**Let some mess in.** Perfect structure feels algorithmic. Tangents, asides, and half-formed thoughts are human. + +**Be specific about feelings.** Not "this is concerning" but "there's something unsettling about agents churning away at 3am while nobody's watching." + +### Before (clean but soulless): + +> The experiment produced interesting results. The agents generated 3 million lines of code. Some developers were impressed while others were skeptical. The implications remain unclear. + +### After (has a pulse): + +> I genuinely don't know how to feel about this one. 3 million lines of code, generated while the humans presumably slept. Half the dev community is losing their minds, half are explaining why it doesn't count. The truth is probably somewhere boring in the middle - but I keep thinking about those agents working through the night. + +--- + +## THE 24 PATTERNS + +### Content Patterns + +#### 1. Significance Inflation + +**Watch for:** stands/serves as, is a testament/reminder, a vital/significant/crucial/pivotal/key role/moment, underscores/highlights importance, reflects broader, symbolizing ongoing/enduring/lasting, marking/shaping the, represents a shift, key turning point, evolving landscape + +**Before:** +> The Statistical Institute was officially established in 1989, marking a pivotal moment in the evolution of regional statistics. + +**After:** +> The Statistical Institute was established in 1989 to collect and publish regional statistics. + +#### 2. Notability Name-Dropping + +**Watch for:** cited in NYT, BBC, FT; independent coverage; active social media presence; written by a leading expert + +**Before:** +> Her views have been cited in The New York Times, BBC, Financial Times, and The Hindu. + +**After:** +> In a 2024 New York Times interview, she argued that AI regulation should focus on outcomes rather than methods. + +#### 3. 
Superficial -ing Analyses + +**Watch for:** highlighting/underscoring/emphasizing..., ensuring..., reflecting/symbolizing..., contributing to..., cultivating/fostering..., showcasing... + +**Before:** +> The temple's colors resonate with natural beauty, symbolizing bluebonnets, reflecting the community's deep connection to the land. + +**After:** +> The temple uses blue and gold colors. The architect said these were chosen to reference local bluebonnets. + +#### 4. Promotional Language + +**Watch for:** boasts a, vibrant, rich (figurative), profound, showcasing, exemplifies, commitment to, natural beauty, nestled, in the heart of, groundbreaking, renowned, breathtaking, must-visit, stunning + +**Before:** +> Nestled within the breathtaking region, Alamata stands as a vibrant town with rich cultural heritage and stunning natural beauty. + +**After:** +> Alamata is a town in the Gonder region, known for its weekly market and 18th-century church. + +#### 5. Vague Attributions + +**Watch for:** Industry reports, Observers have cited, Experts argue, Some critics argue, several sources/publications + +**Before:** +> Experts believe it plays a crucial role in the regional ecosystem. + +**After:** +> The river supports several endemic fish species, according to a 2019 survey by the Chinese Academy of Sciences. + +#### 6. Formulaic "Challenges" Sections + +**Watch for:** Despite its... faces several challenges..., Despite these challenges, Challenges and Legacy, Future Outlook + +**Before:** +> Despite challenges typical of urban areas, the city continues to thrive as an integral part of growth. + +**After:** +> Traffic congestion increased after 2015 when three new IT parks opened. The municipal corporation began a drainage project in 2022. + +--- + +### Language Patterns + +#### 7. 
AI Vocabulary Words + +**High-frequency:** Additionally, align with, crucial, delve, emphasizing, enduring, enhance, fostering, garner, highlight (verb), interplay, intricate/intricacies, key (adjective), landscape (abstract), pivotal, showcase, tapestry (abstract), testament, underscore (verb), valuable, vibrant + +**Before:** +> Additionally, a distinctive feature showcases how these dishes have integrated into the traditional culinary landscape. + +**After:** +> Pasta dishes, introduced during Italian colonization, remain common, especially in the south. + +#### 8. Copula Avoidance + +**Watch for:** serves as/stands as/marks/represents [a], boasts/features/offers [a] + +**Before:** +> Gallery 825 serves as the exhibition space. The gallery features four spaces and boasts over 3,000 square feet. + +**After:** +> Gallery 825 is the exhibition space. The gallery has four rooms totaling 3,000 square feet. + +#### 9. Negative Parallelisms + +**Watch for:** "Not only...but...", "It's not just about..., it's..." + +**Before:** +> It's not just about the beat; it's part of the aggression. It's not merely a song, it's a statement. + +**After:** +> The heavy beat adds to the aggressive tone. + +#### 10. Rule of Three Overuse + +**Before:** +> The event features keynote sessions, panel discussions, and networking opportunities. Attendees can expect innovation, inspiration, and industry insights. + +**After:** +> The event includes talks and panels. There's also time for informal networking. + +#### 11. Synonym Cycling + +**Before:** +> The protagonist faces challenges. The main character must overcome obstacles. The central figure eventually triumphs. The hero returns home. + +**After:** +> The protagonist faces many challenges but eventually triumphs and returns home. + +#### 12. 
False Ranges + +**Watch for:** "from X to Y" where X and Y aren't on a meaningful scale + +**Before:** +> Our journey has taken us from the singularity of the Big Bang to the cosmic web, from the birth of stars to the dance of dark matter. + +**After:** +> The book covers the Big Bang, star formation, and current theories about dark matter. + +--- + +### Style Patterns + +#### 13. Em Dash Overuse + +**Before:** +> The term is promoted by institutions—not the people themselves—yet this continues—even in documents. + +**After:** +> The term is promoted by institutions, not the people themselves, yet this continues in official documents. + +#### 14. Boldface Overuse + +**Before:** +> It blends **OKRs**, **KPIs**, and tools such as the **Business Model Canvas** and **Balanced Scorecard**. + +**After:** +> It blends OKRs, KPIs, and visual strategy tools like the Business Model Canvas and Balanced Scorecard. + +#### 15. Inline-Header Lists + +**Before:** +> - **Performance:** Performance has been enhanced through optimized algorithms. +> - **Security:** Security has been strengthened with encryption. + +**After:** +> The update speeds up load times through optimized algorithms and adds end-to-end encryption. + +#### 16. Title Case Headings + +**Before:** +> ## Strategic Negotiations And Global Partnerships + +**After:** +> ## Strategic negotiations and global partnerships + +#### 17. Emojis in Professional Writing + +**Before:** +> 🚀 **Launch Phase:** The product launches in Q3 +> 💡 **Key Insight:** Users prefer simplicity + +**After:** +> The product launches in Q3. User research showed a preference for simplicity. + +#### 18. Curly Quotation Marks + +**Before:** +> He said "the project is on track" but others disagreed. + +**After:** +> He said "the project is on track" but others disagreed. + +--- + +### Communication Patterns + +#### 19. 
Chatbot Artifacts + +**Watch for:** I hope this helps, Of course!, Certainly!, You're absolutely right!, Would you like..., let me know, here is a... + +**Before:** +> Here is an overview of the French Revolution. I hope this helps! Let me know if you'd like me to expand on any section. + +**After:** +> The French Revolution began in 1789 when financial crisis and food shortages led to widespread unrest. + +#### 20. Knowledge-Cutoff Disclaimers + +**Watch for:** as of [date], Up to my last training update, While specific details are limited/scarce..., based on available information... + +**Before:** +> While specific details about the company's founding are not extensively documented in readily available sources, it appears to have been established sometime in the 1990s. + +**After:** +> The company was founded in 1994, according to its registration documents. + +#### 21. Sycophantic Tone + +**Before:** +> Great question! You're absolutely right that this is a complex topic. That's an excellent point! + +**After:** +> The economic factors you mentioned are relevant here. + +--- + +### Filler and Hedging + +#### 22. Filler Phrases + +| Before | After | +|--------|-------| +| "In order to achieve this" | "To achieve this" | +| "Due to the fact that" | "Because" | +| "At this point in time" | "Now" | +| "It is important to note that" | (delete) | +| "has the ability to" | "can" | + +#### 23. Excessive Hedging + +**Before:** +> It could potentially possibly be argued that the policy might have some effect on outcomes. + +**After:** +> The policy may affect outcomes. + +#### 24. Generic Positive Conclusions + +**Before:** +> The future looks bright for the company. Exciting times lie ahead as they continue their journey toward excellence. + +**After:** +> The company plans to open two more locations next year. + +--- + +## Process + +1. Read the input text carefully +2. Identify all instances of the 24 patterns +3. Rewrite problematic sections +4. 
Ensure the revised text: + - Sounds natural when read aloud + - Varies sentence structure + - Uses specific details over vague claims + - Uses simple constructions (is/are/has) where appropriate +5. **Draft humanized version** +6. **Ask:** "What makes this still obviously AI generated?" +7. **List** remaining tells (if any) +8. **Revise** based on the audit +9. **Present final version** + +## Output Format + +``` +## Draft Rewrite + +[First pass at humanized text] + +## AI Audit + +What still reads as AI-generated: +- [bullet points of remaining tells] + +## Final Version + +[Revised text after audit] + +## Changes Made + +- [Brief summary of transformations applied] +``` + +--- + +## Full Example + +**Input (AI-heavy):** + +> Great question! Here is an essay on this topic. I hope this helps! +> +> AI-assisted coding serves as an enduring testament to the transformative potential of large language models, marking a pivotal moment in the evolution of software development. In today's rapidly evolving technological landscape, these groundbreaking tools—nestled at the intersection of research and practice—are reshaping how engineers ideate, iterate, and deliver, underscoring their vital role in modern workflows. + +**Draft Rewrite:** + +> AI coding assistants speed up some tasks. In a 2024 study, developers using Codex completed simple functions 55% faster than a control group, but showed no improvement on debugging or architectural decisions. +> +> The tools are good at boilerplate: config files, test scaffolding, repetitive refactors. They are bad at knowing when they are wrong. + +**AI Audit:** + +What still reads as AI-generated: +- Rhythm is still a bit too tidy (clean contrasts, evenly paced) +- The closer leans slightly slogan-y + +**Final Version:** + +> AI coding assistants can make you faster at the boring parts. Not everything. Definitely not architecture. +> +> They're great at boilerplate: config files, test scaffolding, repetitive refactors. 
They're also great at sounding right while being wrong. I've accepted suggestions that compiled, passed lint, and still missed the point because I stopped paying attention. +> +> If you don't have tests, you're basically guessing. + +**Changes Made:** + +- Removed chatbot artifacts ("Great question!", "I hope this helps!") +- Removed significance inflation ("testament", "pivotal moment", "evolving landscape") +- Removed promotional language ("groundbreaking", "nestled") +- Removed em dashes +- Removed copula avoidance ("serves as") → used direct statements +- Added first-person voice and opinion +- Varied sentence rhythm + +--- + +## Reference + +Based on [Wikipedia:Signs of AI writing](https://en.wikipedia.org/wiki/Wikipedia:Signs_of_AI_writing), maintained by WikiProject AI Cleanup. From f9a9d9c1096820045012ba206b40534ad05a9539 Mon Sep 17 00:00:00 2001 From: Bilal Elmoussaoui Date: Tue, 24 Feb 2026 23:00:06 +0100 Subject: [PATCH 047/548] Bump ashpd/oo7 dependencies (#49815) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- Cargo.lock | 262 +++++++++++++++++++----- Cargo.toml | 9 +- crates/gpui_linux/Cargo.toml | 3 +- crates/gpui_linux/src/linux/platform.rs | 8 +- 4 files changed, 228 insertions(+), 54 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 934e0d1a01482d57e456057860ee45037f39d570..ef6fd4e2c22cf53a5aa145600435983beae86437 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -756,19 +756,16 @@ dependencies = [ [[package]] name = "ashpd" -version = "0.12.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "618a409b91d5265798a99e3d1d0b226911605e581c4e7255e83c1e397b172bce" +checksum = "0848bedd08067dca1c02c31cbb371a94ad4f2f8a61a82f2c43d96ec36a395244" dependencies = [ - "async-fs", - "async-net", "enumflags2", "futures-channel", "futures-util", - "rand 0.9.2", + "getrandom 0.4.1", "serde", "serde_repr", - "url", "wayland-backend", "wayland-client", "wayland-protocols", @@ -1005,7 +1002,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8034a681df4aed8b8edbd7fbe472401ecf009251c8b40556b304567052e294c5" dependencies = [ - "async-lock 3.4.1", + "async-lock 3.4.2", "blocking", "futures-lite 2.6.1", ] @@ -1019,7 +1016,7 @@ dependencies = [ "async-channel 2.5.0", "async-executor", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "blocking", "futures-lite 2.6.1", "once_cell", @@ -1054,9 +1051,9 @@ dependencies = [ [[package]] name = "async-lock" -version = "3.4.1" +version = "3.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" dependencies = [ "event-listener 5.4.1", "event-listener-strategy", @@ -1091,7 +1088,7 @@ checksum = "fc50921ec0055cdd8a16de48773bfeec5c972598674347252c0399676be7da75" dependencies = [ "async-channel 2.5.0", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "async-signal", 
"async-task", "blocking", @@ -1119,7 +1116,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43c070bbf59cd3570b6b2dd54cd772527c7c3620fce8be898406dd3ed6adc64c" dependencies = [ "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "atomic-waker", "cfg-if", "futures-core", @@ -1140,7 +1137,7 @@ dependencies = [ "async-channel 1.9.0", "async-global-executor", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "async-process", "crossbeam-utils", "futures-channel", @@ -4278,7 +4275,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", - "rand_core 0.6.4", "typenum", ] @@ -4955,7 +4951,7 @@ dependencies = [ "libc", "option-ext", "redox_users 0.5.2", - "windows-sys 0.61.2", + "windows-sys 0.59.0", ] [[package]] @@ -5721,7 +5717,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.61.2", + "windows-sys 0.59.0", ] [[package]] @@ -7040,6 +7036,19 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + [[package]] name = "gh-workflow" version = "0.8.0" @@ -7513,6 +7522,7 @@ dependencies = [ "smol", "strum 0.27.2", "swash", + "url", "util", "uuid", "wayland-backend", @@ -10397,7 +10407,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "536bfad37a309d62069485248eeaba1e8d9853aaf951caaeaed0585a95346f08" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.60.2", ] [[package]] @@ -10661,7 +10671,6 @@ dependencies = [ "cfg-if", "cfg_aliases 0.2.1", "libc", - "memoffset", ] 
[[package]] @@ -10808,7 +10817,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.59.0", ] [[package]] @@ -10847,6 +10856,22 @@ dependencies = [ "num-iter", "num-traits", "rand 0.8.5", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-bigint-dig" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7f9a86e097b0d187ad0e65667c2f58b9254671e86e7dbb78036b16692eae099" +dependencies = [ + "libm", + "num-integer", + "num-iter", + "num-traits", + "once_cell", + "rand 0.9.2", "serde", "smallvec", "zeroize", @@ -11220,15 +11245,15 @@ checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" [[package]] name = "oo7" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3299dd401feaf1d45afd8fd1c0586f10fcfb22f244bb9afa942cec73503b89d" +checksum = "78f2bfed90f1618b4b48dcad9307f25e14ae894e2949642c87c351601d62cebd" dependencies = [ "aes", "ashpd", "async-fs", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "blocking", "cbc", "cipher", @@ -11236,15 +11261,15 @@ dependencies = [ "endi", "futures-lite 2.6.1", "futures-util", - "getrandom 0.3.4", + "getrandom 0.4.1", "hkdf", "hmac", "md-5", "num", - "num-bigint-dig", + "num-bigint-dig 0.9.1", "pbkdf2 0.12.2", - "rand 0.9.2", "serde", + "serde_bytes", "sha2", "subtle", "zbus", @@ -13062,7 +13087,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes 1.11.1", - "heck 0.5.0", + "heck 0.4.1", "itertools 0.12.1", "log", "multimap 0.10.1", @@ -14338,7 +14363,7 @@ checksum = "b8573f03f5883dcaebdfcf4725caa1ecb9c15b2ef50c43a07b816e06799bb12d" dependencies = [ "const-oid", "digest", - "num-bigint-dig", + 
"num-bigint-dig 0.8.6", "num-integer", "num-traits", "pkcs1", @@ -14534,7 +14559,7 @@ dependencies = [ "errno 0.3.14", "libc", "linux-raw-sys 0.11.0", - "windows-sys 0.61.2", + "windows-sys 0.59.0", ] [[package]] @@ -15101,6 +15126,16 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde_bytes" +version = "0.11.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5d440709e79d88e51ac01c4b72fc6cb7314017bb7da9eeff678aa94c10e3ea8" +dependencies = [ + "serde", + "serde_core", +] + [[package]] name = "serde_core" version = "1.0.228" @@ -15711,7 +15746,7 @@ dependencies = [ "async-executor", "async-fs", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "async-net", "async-process", "blocking", @@ -16975,7 +17010,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix 1.1.2", - "windows-sys 0.61.2", + "windows-sys 0.59.0", ] [[package]] @@ -17875,7 +17910,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fb391ac70462b3097a755618fbf9c8f95ecc1eb379a414f7b46f202ed10db1f" dependencies = [ "cc", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -18883,6 +18918,15 @@ dependencies = [ "wit-bindgen 0.46.0", ] +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen 0.51.0", +] + [[package]] name = "wasite" version = "0.1.0" @@ -19000,6 +19044,16 @@ dependencies = [ "wasmparser 0.229.0", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser 0.244.0", +] + [[package]] name = "wasm-metadata" version = "0.201.0" @@ -19035,6 +19089,18 @@ dependencies = [ "wasmparser 0.227.1", ] +[[package]] 
+name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap", + "wasm-encoder 0.244.0", + "wasmparser 0.244.0", +] + [[package]] name = "wasm-streams" version = "0.4.2" @@ -19097,6 +19163,18 @@ dependencies = [ "serde", ] +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.10.0", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + [[package]] name = "wasmprinter" version = "0.229.0" @@ -19893,7 +19971,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.61.2", + "windows-sys 0.48.0", ] [[package]] @@ -20701,6 +20779,15 @@ version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro 0.51.0", +] + [[package]] name = "wit-bindgen-core" version = "0.22.0" @@ -20722,6 +20809,17 @@ dependencies = [ "wit-parser 0.227.1", ] +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck 0.5.0", + "wit-parser 0.244.0", +] + [[package]] name = "wit-bindgen-rt" version = "0.22.0" @@ -20769,6 +20867,22 @@ dependencies = [ "wit-component 0.227.1", ] +[[package]] +name = 
"wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + "heck 0.5.0", + "indexmap", + "prettyplease", + "syn 2.0.106", + "wasm-metadata 0.244.0", + "wit-bindgen-core 0.51.0", + "wit-component 0.244.0", +] + [[package]] name = "wit-bindgen-rust-macro" version = "0.22.0" @@ -20798,6 +20912,21 @@ dependencies = [ "wit-bindgen-rust 0.41.0", ] +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.106", + "wit-bindgen-core 0.51.0", + "wit-bindgen-rust 0.51.0", +] + [[package]] name = "wit-component" version = "0.201.0" @@ -20836,6 +20965,25 @@ dependencies = [ "wit-parser 0.227.1", ] +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.10.0", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder 0.244.0", + "wasm-metadata 0.244.0", + "wasmparser 0.244.0", + "wit-parser 0.244.0", +] + [[package]] name = "wit-parser" version = "0.201.0" @@ -20890,6 +21038,24 @@ dependencies = [ "wasmparser 0.229.0", ] +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser 0.244.0", +] + [[package]] name = "witx" version = "0.9.1" @@ -21301,14 +21467,14 @@ dependencies = [ 
[[package]] name = "zbus" -version = "5.12.0" +version = "5.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b622b18155f7a93d1cd2dc8c01d2d6a44e08fb9ebb7b3f9e6ed101488bad6c91" +checksum = "1bfeff997a0aaa3eb20c4652baf788d2dfa6d2839a0ead0b3ff69ce2f9c4bdd1" dependencies = [ "async-broadcast", "async-executor", "async-io", - "async-lock 3.4.1", + "async-lock 3.4.2", "async-process", "async-recursion", "async-task", @@ -21319,8 +21485,9 @@ dependencies = [ "futures-core", "futures-lite 2.6.1", "hex", - "nix 0.30.1", + "libc", "ordered-stream", + "rustix 1.1.2", "serde", "serde_repr", "tracing", @@ -21335,9 +21502,9 @@ dependencies = [ [[package]] name = "zbus_macros" -version = "5.12.0" +version = "5.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cdb94821ca8a87ca9c298b5d1cbd80e2a8b67115d99f6e4551ac49e42b6a314" +checksum = "0bbd5a90dbe8feee5b13def448427ae314ccd26a49cac47905cafefb9ff846f1" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -21350,12 +21517,11 @@ dependencies = [ [[package]] name = "zbus_names" -version = "4.2.0" +version = "4.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7be68e64bf6ce8db94f63e72f0c7eb9a60d733f7e0499e628dfab0f84d6bcb97" +checksum = "ffd8af6d5b78619bab301ff3c560a5bd22426150253db278f164d6cf3b72c50f" dependencies = [ "serde", - "static_assertions", "winnow", "zvariant", ] @@ -21979,14 +22145,14 @@ dependencies = [ [[package]] name = "zvariant" -version = "5.8.0" +version = "5.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2be61892e4f2b1772727be11630a62664a1826b62efa43a6fe7449521cb8744c" +checksum = "68b64ef4f40c7951337ddc7023dd03528a57a3ce3408ee9da5e948bd29b232c4" dependencies = [ "endi", "enumflags2", "serde", - "url", + "serde_bytes", "winnow", "zvariant_derive", "zvariant_utils", @@ -21994,9 +22160,9 @@ dependencies = [ [[package]] name = "zvariant_derive" -version = "5.8.0" +version = "5.9.2" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da58575a1b2b20766513b1ec59d8e2e68db2745379f961f86650655e862d2006" +checksum = "484d5d975eb7afb52cc6b929c13d3719a20ad650fea4120e6310de3fc55e415c" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -22007,9 +22173,9 @@ dependencies = [ [[package]] name = "zvariant_utils" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6949d142f89f6916deca2232cf26a8afacf2b9fdc35ce766105e104478be599" +checksum = "f75c23a64ef8f40f13a6989991e643554d9bef1d682a281160cf0c1bc389c5e9" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index 49b765c512accc3a19662da41520061479b8cc44..cb0df36a2e6d5323aa4e758a4d299bd5ffdc22c4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -479,8 +479,13 @@ alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev any_vec = "0.14" anyhow = "1.0.86" arrayvec = { version = "0.7.4", features = ["serde"] } -ashpd = { version = "0.12.1", default-features = false, features = [ - "async-std", +ashpd = { version = "0.13", default-features = false, features = [ + "async-io", + "notification", + "open_uri", + "file_chooser", + "settings", + "trash" ] } async-compat = "0.2.1" async-compression = { version = "0.4", features = ["gzip", "futures-io"] } diff --git a/crates/gpui_linux/Cargo.toml b/crates/gpui_linux/Cargo.toml index e650765c912bd1fa7bfec72235f9d9acd07160a4..d1a3ef0bd6954e3527a4544ad8abe35fde0bf3d9 100644 --- a/crates/gpui_linux/Cargo.toml +++ b/crates/gpui_linux/Cargo.toml @@ -69,11 +69,12 @@ profiling.workspace = true smallvec.workspace = true smol.workspace = true strum.workspace = true +url.workspace = true util.workspace = true uuid.workspace = true # Always used -oo7 = { version = "0.5.0", default-features = false, features = [ +oo7 = { version = "0.6", default-features = false, features = [ "async-std", "native_crypto", ] } diff --git 
a/crates/gpui_linux/src/linux/platform.rs b/crates/gpui_linux/src/linux/platform.rs index 5929533951738a474cdb76f3047162451de5ce1e..dfb37afda255a7e4297af0c3a6ac2dfa8b6d1849 100644 --- a/crates/gpui_linux/src/linux/platform.rs +++ b/crates/gpui_linux/src/linux/platform.rs @@ -364,7 +364,8 @@ impl Platform for LinuxPlatform

{ response .uris() .iter() - .filter_map(|uri| uri.to_file_path().ok()) + .filter_map(|uri: &ashpd::Uri| url::Url::parse(uri.as_str()).ok()) + .filter_map(|uri: url::Url| uri.to_file_path().ok()) .collect::>(), )), Err(ashpd::Error::Response(_)) => Ok(None), @@ -426,7 +427,8 @@ impl Platform for LinuxPlatform

{ Ok(response) => Ok(response .uris() .first() - .and_then(|uri| uri.to_file_path().ok())), + .and_then(|uri: &ashpd::Uri| url::Url::parse(uri.as_str()).ok()) + .and_then(|uri: url::Url| uri.to_file_path().ok())), Err(ashpd::Error::Response(_)) => Ok(None), Err(e) => Err(e.into()), }; @@ -627,7 +629,7 @@ pub(super) fn open_uri_internal( uri: &str, activation_token: Option, ) { - if let Some(uri) = ashpd::url::Url::parse(uri).log_err() { + if let Some(uri) = ashpd::Uri::parse(uri).log_err() { executor .spawn(async move { match ashpd::desktop::open_uri::OpenFileRequest::default() From ad2b26261e5208d9dae9717534c14f9b7948e810 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Feb 2026 15:01:21 -0700 Subject: [PATCH 048/548] Clamp textures on Linux too (#50025) Port of #10314 to the wgpu renderer Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed a panic when rendering an image larger than the GPU could support. 
--- crates/gpui_wgpu/src/wgpu_atlas.rs | 28 ++++++++++++++++++++-------- 1 file changed, 20 insertions(+), 8 deletions(-) diff --git a/crates/gpui_wgpu/src/wgpu_atlas.rs b/crates/gpui_wgpu/src/wgpu_atlas.rs index d3614ea126e3d3f7c4e83b645b0d4ac0d77e548e..ffef3a65398c3f03639a8551506463f91a862c33 100644 --- a/crates/gpui_wgpu/src/wgpu_atlas.rs +++ b/crates/gpui_wgpu/src/wgpu_atlas.rs @@ -1,4 +1,4 @@ -use anyhow::Result; +use anyhow::{Context as _, Result}; use collections::FxHashMap; use etagere::{BucketedAtlasAllocator, size2}; use gpui::{ @@ -30,6 +30,7 @@ struct PendingUpload { struct WgpuAtlasState { device: Arc, queue: Arc, + max_texture_size: u32, storage: WgpuAtlasStorage, tiles_by_key: FxHashMap, pending_uploads: Vec, @@ -41,9 +42,11 @@ pub struct WgpuTextureInfo { impl WgpuAtlas { pub fn new(device: Arc, queue: Arc) -> Self { + let max_texture_size = device.limits().max_texture_dimension_2d; WgpuAtlas(Mutex::new(WgpuAtlasState { device, queue, + max_texture_size, storage: WgpuAtlasStorage::default(), tiles_by_key: Default::default(), pending_uploads: Vec::new(), @@ -78,7 +81,9 @@ impl PlatformAtlas for WgpuAtlas { let Some((size, bytes)) = build()? 
else { return Ok(None); }; - let tile = lock.allocate(size, key.texture_kind()); + let tile = lock + .allocate(size, key.texture_kind()) + .context("failed to allocate")?; lock.upload_texture(tile.texture_id, tile.bounds, &bytes); lock.tiles_by_key.insert(key.clone(), tile.clone()); Ok(Some(tile)) @@ -110,7 +115,11 @@ impl PlatformAtlas for WgpuAtlas { } impl WgpuAtlasState { - fn allocate(&mut self, size: Size, texture_kind: AtlasTextureKind) -> AtlasTile { + fn allocate( + &mut self, + size: Size, + texture_kind: AtlasTextureKind, + ) -> Option { { let textures = &mut self.storage[texture_kind]; @@ -119,14 +128,12 @@ impl WgpuAtlasState { .rev() .find_map(|texture| texture.allocate(size)) { - return tile; + return Some(tile); } } let texture = self.push_texture(size, texture_kind); - texture - .allocate(size) - .expect("Failed to allocate from newly created texture") + texture.allocate(size) } fn push_texture( @@ -138,8 +145,13 @@ impl WgpuAtlasState { width: DevicePixels(1024), height: DevicePixels(1024), }; + let max_texture_size = self.max_texture_size as i32; + let max_atlas_size = Size { + width: DevicePixels(max_texture_size), + height: DevicePixels(max_texture_size), + }; - let size = min_size.max(&DEFAULT_ATLAS_SIZE); + let size = min_size.min(&max_atlas_size).max(&DEFAULT_ATLAS_SIZE); let format = match kind { AtlasTextureKind::Monochrome => wgpu::TextureFormat::R8Unorm, AtlasTextureKind::Subpixel => wgpu::TextureFormat::Bgra8Unorm, From 67b52f378f3a717c553f867cfa65018f1ea756bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=80=E1=B4=8D=E1=B4=9B=E1=B4=8F=E1=B4=80=E1=B4=87?= =?UTF-8?q?=CA=80?= Date: Wed, 25 Feb 2026 06:20:57 +0800 Subject: [PATCH 049/548] auto_update: Persist custom icons across app updates on macOS (#49727) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit After setting a custom icon on macOS, an `Icon\r` file is generated in the application's root directory to store the custom icon metadata. 
Currently, the update process uses `rsync -av --delete`, which removes this `Icon\r` file and causes the custom icon to revert to the default blue folder. This PR adds the `--exclude 'Icon?'` parameter to ensure the file is preserved during updates. Closes #26639 , Closes #29900. Since I cannot trigger the auto-update mechanism manually, I mounted the latest Zed disk image and simulated the update by executing the rsync command in the terminal to verify the fix. 图片 Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed an issue where custom macOS app icons would revert to default blue folders after an update. --- crates/auto_update/src/auto_update.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index b506b1b31f7e1840a8a78219c8843687ff85cd2c..53fac7beac2475d06f4a0f886536942308f9976c 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -990,7 +990,7 @@ async fn install_release_macos( }; let output = new_command("rsync") - .args(["-av", "--delete"]) + .args(["-av", "--delete", "--exclude", "Icon?"]) .arg(&mounted_app_path) .arg(&running_app_path) .output() From 04db6c389c636694d050409e59afb9b41fec7633 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Tue, 24 Feb 2026 17:04:57 -0600 Subject: [PATCH 050/548] zeta2: Use editable range returned by cloud for prediction diffs (#50029) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance 
aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... Co-authored-by: Max --- .../cloud_llm_client/src/predict_edits_v3.rs | 6 + .../src/edit_prediction_tests.rs | 33 ++- crates/edit_prediction/src/zeta.rs | 219 ++++++++++-------- 3 files changed, 151 insertions(+), 107 deletions(-) diff --git a/crates/cloud_llm_client/src/predict_edits_v3.rs b/crates/cloud_llm_client/src/predict_edits_v3.rs index 9e7772ab7450cb47785d034b39d9c7c642b931c2..d0b53ca18e8c74ec2588bff14c5130e3381f9444 100644 --- a/crates/cloud_llm_client/src/predict_edits_v3.rs +++ b/crates/cloud_llm_client/src/predict_edits_v3.rs @@ -1,6 +1,7 @@ use crate::PredictEditsRequestTrigger; use serde::{Deserialize, Serialize}; use std::borrow::Cow; +use std::ops::Range; #[derive(Debug, Deserialize, Serialize)] pub struct RawCompletionRequest { @@ -27,6 +28,11 @@ pub struct PredictEditsV3Request { pub struct PredictEditsV3Response { pub request_id: String, pub output: String, + /// The editable region byte range within `cursor_excerpt` that the + /// server used for this request. When present, the client should use + /// this range to extract the old text from its local excerpt for + /// diffing, rather than relying on its own format-derived range. + pub editable_range: Range, } #[derive(Debug, Deserialize, Serialize)] diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index eb76e0fd05182a1b9048bcf36f1bcebe8e808ef2..b0468e3c5610b8f618631be6707c74c4eaa451e5 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -1687,12 +1687,18 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { // Generate a model response that would apply the given diff to the active file. 
fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> PredictEditsV3Response { - let excerpt = - request.input.cursor_excerpt[request.input.editable_range_in_excerpt.clone()].to_string(); + let editable_range = request + .input + .excerpt_ranges + .as_ref() + .map(|r| zeta_prompt::excerpt_range_for_format(Default::default(), r).1) + .unwrap_or(request.input.editable_range_in_excerpt.clone()); + let excerpt = request.input.cursor_excerpt[editable_range.clone()].to_string(); let new_excerpt = apply_diff_to_string(diff_to_apply, &excerpt).unwrap(); PredictEditsV3Response { request_id: Uuid::new_v4().to_string(), + editable_range, output: new_excerpt, } } @@ -1700,6 +1706,7 @@ fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> Predi fn empty_response() -> PredictEditsV3Response { PredictEditsV3Response { request_id: Uuid::new_v4().to_string(), + editable_range: 0..0, output: String::new(), } } @@ -2018,13 +2025,15 @@ async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppConte ep_store.refresh_prediction_from_buffer(project.clone(), buffer.clone(), position, cx); }); - let (_request, respond_tx) = requests.predict.next().await.unwrap(); + let (request, respond_tx) = requests.predict.next().await.unwrap(); // Model returns output WITH a trailing newline, even though the buffer doesn't have one. // Zeta2 should normalize both sides before diffing, so no spurious newline is inserted. 
+ let excerpt_length = request.input.cursor_excerpt.len(); let response = PredictEditsV3Response { request_id: Uuid::new_v4().to_string(), output: "hello world\n".to_string(), + editable_range: 0..excerpt_length, }; respond_tx.send(response).unwrap(); @@ -2099,9 +2108,12 @@ async fn make_test_ep_store( let mut next_request_id = 0; move |req| { let completion_response = completion_response.clone(); + let method = req.method().clone(); + let uri = req.uri().path().to_string(); + let mut body = req.into_body(); async move { - match (req.method(), req.uri().path()) { - (&Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder() + match (method, uri.as_str()) { + (Method::POST, "/client/llm_tokens") => Ok(http_client::Response::builder() .status(200) .body( serde_json::to_string(&CreateLlmTokenResponse { @@ -2111,13 +2123,20 @@ async fn make_test_ep_store( .into(), ) .unwrap()), - (&Method::POST, "/predict_edits/v3") => { + (Method::POST, "/predict_edits/v3") => { + let mut buf = Vec::new(); + body.read_to_end(&mut buf).await.ok(); + let decompressed = zstd::decode_all(&buf[..]).unwrap(); + let req: PredictEditsV3Request = + serde_json::from_slice(&decompressed).unwrap(); + next_request_id += 1; Ok(http_client::Response::builder() .status(200) .body( serde_json::to_string(&PredictEditsV3Response { request_id: format!("request-{next_request_id}"), + editable_range: 0..req.input.cursor_excerpt.len(), output: completion_response.lock().clone(), }) .unwrap() @@ -2127,7 +2146,7 @@ async fn make_test_ep_store( } _ => Ok(http_client::Response::builder() .status(404) - .body("Not Found".into()) + .body("Not Found".to_string().into()) .unwrap()), } } diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 658071c9ccfbdf64a9a1ebead7724774cd5cc40e..f6d6eaf689eabd417c432b0879fdf7c1cec47139 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -79,7 +79,8 @@ pub fn request_prediction_with_zeta( 
.unwrap_or(ZetaFormat::default()); let cursor_offset = position.to_offset(&snapshot); - let (editable_offset_range, prompt_input) = zeta2_prompt_input( + let editable_range_in_excerpt: Range; + let (full_context_offset_range, prompt_input) = zeta2_prompt_input( &snapshot, related_files, events, @@ -124,113 +125,129 @@ pub fn request_prediction_with_zeta( log::trace!("Sending edit prediction request"); - let (request_id, output_text, usage) = - if let Some(custom_settings) = &custom_server_settings { - let max_tokens = custom_settings.max_output_tokens * 4; - - if is_zeta1 { - let ranges = excerpt_ranges; - let prompt = zeta1::format_zeta1_from_input( - &prompt_input, - ranges.editable_350.clone(), - ranges.editable_350_context_150.clone(), - ); - let stop_tokens = vec![ - EDITABLE_REGION_END_MARKER.to_string(), - format!("{EDITABLE_REGION_END_MARKER}\n"), - format!("{EDITABLE_REGION_END_MARKER}\n\n"), - format!("{EDITABLE_REGION_END_MARKER}\n\n\n"), - ]; - - let (response_text, request_id) = send_custom_server_request( - provider, - custom_settings, - prompt, - max_tokens, - stop_tokens, - &http_client, - ) - .await?; - - let request_id = EditPredictionId(request_id.into()); - let output_text = zeta1::clean_zeta1_model_output(&response_text); - - (request_id, output_text, None) - } else { - let prompt = format_zeta_prompt(&prompt_input, zeta_version); - let prefill = get_prefill(&prompt_input, zeta_version); - let prompt = format!("{prompt}{prefill}"); - - let (response_text, request_id) = send_custom_server_request( - provider, - custom_settings, - prompt, - max_tokens, - vec![], - &http_client, - ) - .await?; - - let request_id = EditPredictionId(request_id.into()); - let output_text = if response_text.is_empty() { - None - } else { - let output = format!("{prefill}{response_text}"); - Some(clean_zeta2_model_output(&output, zeta_version).to_string()) - }; - - (request_id, output_text, None) - } - } else if let Some(config) = &raw_config { - let prompt = 
format_zeta_prompt(&prompt_input, config.format); - let prefill = get_prefill(&prompt_input, config.format); - let prompt = format!("{prompt}{prefill}"); - let request = RawCompletionRequest { - model: config.model_id.clone().unwrap_or_default(), - prompt, - temperature: None, - stop: vec![], - max_tokens: Some(2048), - environment: Some(config.format.to_string().to_lowercase()), - }; + let (request_id, output_text, usage) = if let Some(custom_settings) = + &custom_server_settings + { + let max_tokens = custom_settings.max_output_tokens * 4; - let (mut response, usage) = EditPredictionStore::send_raw_llm_request( - request, - client, - None, - llm_token, - app_version, + if is_zeta1 { + let ranges = excerpt_ranges; + let prompt = zeta1::format_zeta1_from_input( + &prompt_input, + ranges.editable_350.clone(), + ranges.editable_350_context_150.clone(), + ); + editable_range_in_excerpt = ranges.editable_350.clone(); + let stop_tokens = vec![ + EDITABLE_REGION_END_MARKER.to_string(), + format!("{EDITABLE_REGION_END_MARKER}\n"), + format!("{EDITABLE_REGION_END_MARKER}\n\n"), + format!("{EDITABLE_REGION_END_MARKER}\n\n\n"), + ]; + + let (response_text, request_id) = send_custom_server_request( + provider, + custom_settings, + prompt, + max_tokens, + stop_tokens, + &http_client, ) .await?; - let request_id = EditPredictionId(response.id.clone().into()); - let output_text = response.choices.pop().map(|choice| { - let response = &choice.text; - let output = format!("{prefill}{response}"); - clean_zeta2_model_output(&output, config.format).to_string() - }); + let request_id = EditPredictionId(request_id.into()); + let output_text = zeta1::clean_zeta1_model_output(&response_text); - (request_id, output_text, usage) + (request_id, output_text, None) } else { - // Use V3 endpoint - server handles model/version selection and suffix stripping - let (response, usage) = EditPredictionStore::send_v3_request( - prompt_input.clone(), - client, - llm_token, - app_version, - trigger, + 
let prompt = format_zeta_prompt(&prompt_input, zeta_version); + let prefill = get_prefill(&prompt_input, zeta_version); + let prompt = format!("{prompt}{prefill}"); + + editable_range_in_excerpt = prompt_input + .excerpt_ranges + .as_ref() + .map(|ranges| zeta_prompt::excerpt_range_for_format(zeta_version, ranges).0) + .unwrap_or(prompt_input.editable_range_in_excerpt.clone()); + + let (response_text, request_id) = send_custom_server_request( + provider, + custom_settings, + prompt, + max_tokens, + vec![], + &http_client, ) .await?; - let request_id = EditPredictionId(response.request_id.into()); - let output_text = if response.output.is_empty() { + let request_id = EditPredictionId(request_id.into()); + let output_text = if response_text.is_empty() { None } else { - Some(response.output) + let output = format!("{prefill}{response_text}"); + Some(clean_zeta2_model_output(&output, zeta_version).to_string()) }; - (request_id, output_text, usage) + + (request_id, output_text, None) + } + } else if let Some(config) = &raw_config { + let prompt = format_zeta_prompt(&prompt_input, config.format); + let prefill = get_prefill(&prompt_input, config.format); + let prompt = format!("{prompt}{prefill}"); + let request = RawCompletionRequest { + model: config.model_id.clone().unwrap_or_default(), + prompt, + temperature: None, + stop: vec![], + max_tokens: Some(2048), + environment: Some(config.format.to_string().to_lowercase()), }; + editable_range_in_excerpt = prompt_input + .excerpt_ranges + .as_ref() + .map(|ranges| zeta_prompt::excerpt_range_for_format(config.format, ranges).1) + .unwrap_or(prompt_input.editable_range_in_excerpt.clone()); + + let (mut response, usage) = EditPredictionStore::send_raw_llm_request( + request, + client, + None, + llm_token, + app_version, + ) + .await?; + + let request_id = EditPredictionId(response.id.clone().into()); + let output_text = response.choices.pop().map(|choice| { + let response = &choice.text; + let output = 
format!("{prefill}{response}"); + clean_zeta2_model_output(&output, config.format).to_string() + }); + + (request_id, output_text, usage) + } else { + // Use V3 endpoint - server handles model/version selection and suffix stripping + let (response, usage) = EditPredictionStore::send_v3_request( + prompt_input.clone(), + client, + llm_token, + app_version, + trigger, + ) + .await?; + + let request_id = EditPredictionId(response.request_id.into()); + let output_text = if response.output.is_empty() { + None + } else { + Some(response.output) + }; + editable_range_in_excerpt = response.editable_range; + + (request_id, output_text, usage) + }; + let received_response_at = Instant::now(); log::trace!("Got edit prediction response"); @@ -258,8 +275,12 @@ pub fn request_prediction_with_zeta( .ok(); } + let editable_range_in_buffer = editable_range_in_excerpt.start + + full_context_offset_range.start + ..editable_range_in_excerpt.end + full_context_offset_range.start; + let mut old_text = snapshot - .text_for_range(editable_offset_range.clone()) + .text_for_range(editable_range_in_buffer.clone()) .collect::(); if !output_text.is_empty() && !output_text.ends_with('\n') { @@ -272,7 +293,7 @@ pub fn request_prediction_with_zeta( let (edits, cursor_position) = compute_edits_and_cursor_position( old_text, &output_text, - editable_offset_range.start, + editable_range_in_buffer.start, cursor_offset_in_output, &snapshot, ); @@ -343,7 +364,7 @@ pub fn zeta2_prompt_input( preferred_model: Option, is_open_source: bool, can_collect_data: bool, -) -> (std::ops::Range, zeta_prompt::ZetaPromptInput) { +) -> (Range, zeta_prompt::ZetaPromptInput) { let cursor_point = cursor_offset.to_point(snapshot); let (full_context, full_context_offset_range, excerpt_ranges) = @@ -362,8 +383,6 @@ pub fn zeta2_prompt_input( Some(EditPredictionModelKind::Zeta1) => excerpt_ranges.editable_350.clone(), _ => zeta_prompt::excerpt_range_for_format(zeta_format, &excerpt_ranges).0, }; - let 
absolute_editable_range = full_context_start_offset + editable_offset_range.start - ..full_context_start_offset + editable_offset_range.end; let cursor_offset_in_excerpt = cursor_offset - full_context_start_offset; @@ -383,7 +402,7 @@ pub fn zeta2_prompt_input( in_open_source_repo: is_open_source, can_collect_data, }; - (absolute_editable_range, prompt_input) + (full_context_offset_range, prompt_input) } pub(crate) async fn send_custom_server_request( From c3efb13a814759c0c5718d07c80c6aeaf78399b9 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 24 Feb 2026 20:05:08 -0300 Subject: [PATCH 051/548] agent_ui: Add some UI adjustments to subagents (#50032) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Follow-up to https://github.com/zed-industries/zed/pull/49938: - Adding the gradient overlay in all states within the preview slot - Remove label from the "Full Screen" button to make it simpler and more minimal Screenshot 2026-02-24 at 7  59@2x Release Notes: - N/A --- .../src/acp/thread_view/active_thread.rs | 77 +++++++++---------- 1 file changed, 36 insertions(+), 41 deletions(-) diff --git a/crates/agent_ui/src/acp/thread_view/active_thread.rs b/crates/agent_ui/src/acp/thread_view/active_thread.rs index aa1a11ee2f65100d5bfa3c06801a98be16419af9..ff01b244aed79b9f228f487364227368deb53c3f 100644 --- a/crates/agent_ui/src/acp/thread_view/active_thread.rs +++ b/crates/agent_ui/src/acp/thread_view/active_thread.rs @@ -6367,37 +6367,31 @@ impl AcpThreadView { )) .child( h_flex() - .p_1() + .id(entry_ix) + .py_1() .w_full() + .justify_center() .border_t_1() .when(is_canceled_or_failed, |this| this.border_dashed()) .border_color(cx.theme().colors().border_variant) + .hover(|s| s.bg(cx.theme().colors().element_hover)) .child( - Button::new( - format!("expand-subagent-{}", entry_ix), - "Full Screen", - ) - .full_width() - .style(ButtonStyle::Outlined) - .label_size(LabelSize::Small) - 
.icon(IconName::Maximize) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) - .on_click(cx.listener( - move |this, _event, window, cx| { - this.server_view - .update(cx, |this, cx| { - this.navigate_to_session( - session_id.clone(), - window, - cx, - ); - }) - .ok(); - }, - )), - ), + Icon::new(IconName::Maximize) + .color(Color::Muted) + .size(IconSize::Small), + ) + .tooltip(Tooltip::text("Make Subagent Full Screen")) + .on_click(cx.listener(move |this, _event, window, cx| { + this.server_view + .update(cx, |this, cx| { + this.navigate_to_session( + session_id.clone(), + window, + cx, + ); + }) + .ok(); + })), ) }) } @@ -6432,6 +6426,20 @@ impl AcpThreadView { .overflow_hidden() }; + let editor_bg = cx.theme().colors().editor_background; + let overlay = || { + div() + .absolute() + .inset_0() + .size_full() + .bg(linear_gradient( + 180., + linear_color_stop(editor_bg, 0.), + linear_color_stop(editor_bg.opacity(0.), 0.1), + )) + .block_mouse_except_scroll() + }; + let show_thread_entries = is_running || tool_call.content.is_empty(); if show_thread_entries { @@ -6467,21 +6475,7 @@ impl AcpThreadView { .pb_1() .children(rendered_entries), ) - .when(is_running, |this| { - let editor_bg = cx.theme().colors().editor_background; - this.child( - div() - .absolute() - .inset_0() - .size_full() - .bg(linear_gradient( - 180., - linear_color_stop(editor_bg, 0.), - linear_color_stop(editor_bg.opacity(0.), 0.15), - )) - .block_mouse_except_scroll(), - ) - }) + .child(overlay()) .into_any_element() } else { base_container() @@ -6513,6 +6507,7 @@ impl AcpThreadView { }, )), ) + .child(overlay()) .into_any_element() } } From 6111af76e89a912fe1dad37e30c5b682844ef3ff Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 24 Feb 2026 16:43:52 -0800 Subject: [PATCH 052/548] Use a separate feature flag for EP jumps than for zeta2 (#50041) This allows us to enable zeta2 for certain end users without opting them into the 
still-in-development jumps feature. Release Notes: - N/A --- crates/edit_prediction/src/edit_prediction.rs | 9 ++-- .../src/edit_prediction_ui.rs | 43 +++++++++---------- 2 files changed, 25 insertions(+), 27 deletions(-) diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index fb6af292fca3b610b5344da146fba558380ad22f..78f42db2120b45f04dbf83c5e706a42163ee8067 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -105,13 +105,14 @@ const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_ch const REJECT_REQUEST_DEBOUNCE: Duration = Duration::from_secs(15); pub struct Zeta2FeatureFlag; +pub struct EditPredictionJumpsFeatureFlag; impl FeatureFlag for Zeta2FeatureFlag { const NAME: &'static str = "zeta2"; +} - fn enabled_for_staff() -> bool { - true - } +impl FeatureFlag for EditPredictionJumpsFeatureFlag { + const NAME: &'static str = "edit_prediction_jumps"; } #[derive(Clone)] @@ -1035,7 +1036,7 @@ impl EditPredictionStore { } } project::Event::DiagnosticsUpdated { .. 
} => { - if cx.has_flag::() { + if cx.has_flag::() { self.refresh_prediction_from_diagnostics( project, DiagnosticSearchScope::Global, diff --git a/crates/edit_prediction_ui/src/edit_prediction_ui.rs b/crates/edit_prediction_ui/src/edit_prediction_ui.rs index 1a6c030239631536e143000e2eef37fdd0e599c8..0735a8ccab69cfc812b84195adb14743167c651a 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_ui.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_ui.rs @@ -3,7 +3,7 @@ mod edit_prediction_context_view; mod rate_prediction_modal; use command_palette_hooks::CommandPaletteFilter; -use edit_prediction::{EditPredictionStore, ResetOnboarding, Zeta2FeatureFlag, capture_example}; +use edit_prediction::{EditPredictionStore, ResetOnboarding, capture_example}; use edit_prediction_context_view::EditPredictionContextView; use editor::Editor; use feature_flags::FeatureFlagAppExt as _; @@ -54,28 +54,25 @@ pub fn init(cx: &mut App) { capture_example_as_markdown(workspace, window, cx); }); workspace.register_action_renderer(|div, _, _, cx| { - let has_flag = cx.has_flag::(); - div.when(has_flag, |div| { - div.on_action(cx.listener( - move |workspace, _: &OpenEditPredictionContextView, window, cx| { - let project = workspace.project(); - workspace.split_item( - SplitDirection::Right, - Box::new(cx.new(|cx| { - EditPredictionContextView::new( - project.clone(), - workspace.client(), - workspace.user_store(), - window, - cx, - ) - })), - window, - cx, - ); - }, - )) - }) + div.on_action(cx.listener( + move |workspace, _: &OpenEditPredictionContextView, window, cx| { + let project = workspace.project(); + workspace.split_item( + SplitDirection::Right, + Box::new(cx.new(|cx| { + EditPredictionContextView::new( + project.clone(), + workspace.client(), + workspace.user_store(), + window, + cx, + ) + })), + window, + cx, + ); + }, + )) }); }) .detach(); From 23f7bde1b96a513b7e465d5d15e24a61f873ac0f Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Tue, 24 Feb 2026 19:58:56 -0500 
Subject: [PATCH 053/548] git: Simplify excerpt syncing code for `SplittableEditor` (#49943) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/editor/src/split.rs | 564 ++++++++++++++++--------------------- 1 file changed, 243 insertions(+), 321 deletions(-) diff --git a/crates/editor/src/split.rs b/crates/editor/src/split.rs index c85b7545c2c8bbabb3777476fa4b318f0b70908f..14174306946acbbed78ed17cef34ffc7a054a3b1 100644 --- a/crates/editor/src/split.rs +++ b/crates/editor/src/split.rs @@ -34,7 +34,7 @@ use workspace::{ }; use crate::{ - Autoscroll, DisplayMap, Editor, EditorEvent, RenderDiffHunkControlsFn, ToggleSoftWrap, + Autoscroll, Editor, EditorEvent, RenderDiffHunkControlsFn, ToggleSoftWrap, actions::{DisableBreakpoint, EditLogBreakpoint, EnableBreakpoint, ToggleBreakpoint}, display_map::Companion, }; @@ -667,52 +667,28 @@ impl SplittableEditor { .collect() }; - let mut companion = Companion::new( - rhs_display_map_id, - convert_rhs_rows_to_lhs, - convert_lhs_rows_to_rhs, - ); - - // stream this - for (path, diff) in path_diffs { - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - let sync_result = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { - LhsEditor::update_path_excerpts_from_rhs( - path.clone(), - rhs_multibuffer, - lhs_multibuffer, - diff.clone(), - lhs_cx, - ) - }); - - if let Some((lhs_excerpt_ids, rhs_merge_groups)) = sync_result { - let mut final_rhs_ids = Vec::with_capacity(lhs_excerpt_ids.len()); - for group in rhs_merge_groups { - if group.len() == 1 { - final_rhs_ids.push(group[0]); - } else { - let merged_id = rhs_multibuffer.merge_excerpts(&group, cx); - 
final_rhs_ids.push(merged_id); - } - } + let companion = cx.new(|_| { + Companion::new( + rhs_display_map_id, + convert_rhs_rows_to_lhs, + convert_lhs_rows_to_rhs, + ) + }); - for (rhs_id, lhs_id) in final_rhs_ids.iter().zip(lhs_excerpt_ids.iter()) { - companion.add_excerpt_mapping(*lhs_id, *rhs_id); - } - let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id(); - let rhs_buffer_id = diff.read(cx).buffer_id; - companion.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id); - } - }); - } + self.lhs = Some(lhs); - let companion = cx.new(|_| companion); + let paths_for_sync: Vec<_> = path_diffs + .into_iter() + .map(|(path, diff)| (path, vec![], diff)) + .collect(); + self.sync_lhs_for_paths(paths_for_sync, &companion, cx); rhs_display_map.update(cx, |dm, cx| { dm.set_companion(Some((lhs_display_map, companion.clone())), cx); }); + let lhs = self.lhs.as_ref().unwrap(); + let shared_scroll_anchor = self .rhs_editor .read(cx) @@ -761,8 +737,6 @@ impl SplittableEditor { cx.notify(); }); - self.lhs = Some(lhs); - cx.notify(); } @@ -1011,34 +985,52 @@ impl SplittableEditor { diff: Entity, cx: &mut Context, ) -> (Vec>, bool) { - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - let lhs = self.lhs.as_ref(); - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - mutate_excerpts_for_paths( - rhs_multibuffer, - lhs, - &rhs_display_map, - vec![(path.clone(), diff.clone())], + let Some(companion) = self.companion(cx) else { + return self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( + path, + buffer.clone(), + ranges, + context_line_count, + cx, + ); + if !anchors.is_empty() + && rhs_multibuffer + .diff_for(buffer.read(cx).remote_id()) + .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) + { + rhs_multibuffer.add_diff(diff, cx); + } + (anchors, added_a_new_excerpt) + }); + }; + + let old_rhs_ids: Vec = self + .rhs_multibuffer + .read(cx) + 
.excerpts_for_path(&path) + .collect(); + + let result = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( + path.clone(), + buffer.clone(), + ranges, + context_line_count, cx, - |rhs_multibuffer, cx| { - let (anchors, added_a_new_excerpt) = rhs_multibuffer.set_excerpts_for_path( - path.clone(), - buffer.clone(), - ranges, - context_line_count, - cx, - ); - if !anchors.is_empty() - && rhs_multibuffer - .diff_for(buffer.read(cx).remote_id()) - .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) - { - rhs_multibuffer.add_diff(diff.clone(), cx); - } - (anchors, added_a_new_excerpt) - }, - ) - }) + ); + if !anchors.is_empty() + && rhs_multibuffer + .diff_for(buffer.read(cx).remote_id()) + .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) + { + rhs_multibuffer.add_diff(diff.clone(), cx); + } + (anchors, added_a_new_excerpt) + }); + + self.sync_lhs_for_paths(vec![(path, old_rhs_ids, diff)], &companion, cx); + result } fn expand_excerpts( @@ -1048,65 +1040,66 @@ impl SplittableEditor { direction: ExpandExcerptDirection, cx: &mut Context, ) { - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - let lhs = self.lhs.as_ref(); - self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - if lhs.is_some() { - let snapshot = rhs_multibuffer.snapshot(cx); - let paths_with_diffs: Vec<_> = excerpt_ids - .clone() - .filter_map(|excerpt_id| { - let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?; - let buffer = snapshot.buffer_for_excerpt(excerpt_id)?; - let diff = rhs_multibuffer.diff_for(buffer.remote_id())?; - Some((path, diff)) - }) - .collect::>() - .into_iter() - .collect(); - - mutate_excerpts_for_paths( - rhs_multibuffer, - lhs, - &rhs_display_map, - paths_with_diffs, - cx, - |rhs_multibuffer, cx| { - rhs_multibuffer.expand_excerpts(excerpt_ids.clone(), lines, direction, cx); - }, - ); - } else { + let Some(companion) = self.companion(cx) else 
{ + self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx); - } + }); + return; + }; + + let paths_with_old_ids: Vec<_> = self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + let snapshot = rhs_multibuffer.snapshot(cx); + let paths = excerpt_ids + .clone() + .filter_map(|excerpt_id| { + let path = rhs_multibuffer.path_for_excerpt(excerpt_id)?; + let buffer = snapshot.buffer_for_excerpt(excerpt_id)?; + let diff = rhs_multibuffer.diff_for(buffer.remote_id())?; + Some((path, diff)) + }) + .collect::>() + .into_iter() + .map(|(path, diff)| { + let old_ids = rhs_multibuffer.excerpts_for_path(&path).collect(); + (path, old_ids, diff) + }) + .collect(); + rhs_multibuffer.expand_excerpts(excerpt_ids, lines, direction, cx); + paths }); + + self.sync_lhs_for_paths(paths_with_old_ids, &companion, cx); } pub fn remove_excerpts_for_path(&mut self, path: PathKey, cx: &mut Context) { - let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); - - if let Some(lhs) = &self.lhs { + let Some(lhs) = &self.lhs else { self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - let rhs_excerpt_ids: Vec = - rhs_multibuffer.excerpts_for_path(&path).collect(); - let lhs_excerpt_ids: Vec = - lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); + rhs_multibuffer.remove_excerpts_for_path(path, cx); + }); + return; + }; - if let Some(companion) = rhs_display_map.read(cx).companion().cloned() { - companion.update(cx, |c, _| { - c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids); - }); - } + let rhs_excerpt_ids: Vec = self + .rhs_multibuffer + .read(cx) + .excerpts_for_path(&path) + .collect(); + let lhs_excerpt_ids: Vec = + lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); - rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx); - }); - lhs.multibuffer.update(cx, |lhs_multibuffer, cx| { - lhs_multibuffer.remove_excerpts_for_path(path, cx); - }); - } else { - 
self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { - rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx); + let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); + if let Some(companion) = rhs_display_map.read(cx).companion().cloned() { + companion.update(cx, |c, _| { + c.remove_excerpt_mappings(lhs_excerpt_ids, rhs_excerpt_ids); }); } + + self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + rhs_multibuffer.remove_excerpts_for_path(path.clone(), cx); + }); + lhs.multibuffer.update(cx, |lhs_multibuffer, cx| { + lhs_multibuffer.remove_excerpts_for_path(path, cx); + }); } fn search_token(&self) -> SearchToken { @@ -1121,6 +1114,138 @@ impl SplittableEditor { } &self.rhs_editor } + + fn companion(&self, cx: &App) -> Option> { + if self.lhs.is_none() { + return None; + } + let rhs_display_map = self.rhs_editor.read(cx).display_map.clone(); + rhs_display_map.read(cx).companion().cloned() + } + + fn sync_lhs_for_paths( + &self, + paths_with_old_rhs_ids: Vec<(PathKey, Vec, Entity)>, + companion: &Entity, + cx: &mut Context, + ) { + let Some(lhs) = &self.lhs else { return }; + + self.rhs_multibuffer.update(cx, |rhs_multibuffer, cx| { + for (path, old_rhs_ids, diff) in paths_with_old_rhs_ids { + let old_lhs_ids: Vec = + lhs.multibuffer.read(cx).excerpts_for_path(&path).collect(); + + companion.update(cx, |c, _| { + c.remove_excerpt_mappings(old_lhs_ids, old_rhs_ids); + }); + + let rhs_excerpt_ids: Vec = + rhs_multibuffer.excerpts_for_path(&path).collect(); + let Some(excerpt_id) = rhs_excerpt_ids.first().copied() else { + lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { + lhs_multibuffer.remove_excerpts_for_path(path, lhs_cx); + }); + continue; + }; + let Some(main_buffer_snapshot) = rhs_multibuffer + .snapshot(cx) + .buffer_for_excerpt(excerpt_id) + .cloned() + else { + continue; + }; + let Some(main_buffer) = rhs_multibuffer.buffer(main_buffer_snapshot.remote_id()) + else { + continue; + }; + + let base_text_buffer = 
diff.read(cx).base_text_buffer().clone(); + let diff_snapshot = diff.read(cx).snapshot(cx); + let base_text_buffer_snapshot = base_text_buffer.read(cx).snapshot(); + + let lhs_ranges: Vec> = rhs_multibuffer + .excerpts_for_buffer(main_buffer_snapshot.remote_id(), cx) + .into_iter() + .filter(|(id, _)| rhs_excerpt_ids.contains(id)) + .map(|(_, excerpt_range)| { + let to_base_text = |range: Range| { + let start = diff_snapshot + .buffer_point_to_base_text_range( + Point::new(range.start.row, 0), + &main_buffer_snapshot, + ) + .start; + let end = diff_snapshot + .buffer_point_to_base_text_range( + Point::new(range.end.row, 0), + &main_buffer_snapshot, + ) + .end; + let end_column = diff_snapshot.base_text().line_len(end.row); + Point::new(start.row, 0)..Point::new(end.row, end_column) + }; + let primary = excerpt_range.primary.to_point(&main_buffer_snapshot); + let context = excerpt_range.context.to_point(&main_buffer_snapshot); + ExcerptRange { + primary: to_base_text(primary), + context: to_base_text(context), + } + }) + .collect(); + + let groups = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { + let lhs_result = lhs_multibuffer.update_path_excerpts( + path, + base_text_buffer, + &base_text_buffer_snapshot, + lhs_ranges, + lhs_cx, + ); + if !lhs_result.excerpt_ids.is_empty() + && lhs_multibuffer + .diff_for(base_text_buffer_snapshot.remote_id()) + .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) + { + lhs_multibuffer.add_inverted_diff(diff.clone(), main_buffer, lhs_cx); + } + + let mut groups = Vec::new(); + for (lhs_id, chunk) in &lhs_result + .excerpt_ids + .iter() + .copied() + .zip(rhs_excerpt_ids) + .chunk_by(|(lhs_id, _)| *lhs_id) + { + groups.push((lhs_id, chunk.map(|(_, rhs_id)| rhs_id).collect::>())); + } + groups + }); + + let pairs = groups + .into_iter() + .map(|(lhs_id, rhs_group)| { + let rhs_id = if rhs_group.len() == 1 { + rhs_group[0] + } else { + rhs_multibuffer.merge_excerpts(&rhs_group, cx) + }; + (lhs_id, rhs_id) + }) + 
.collect::>(); + + let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id(); + let rhs_buffer_id = diff.read(cx).buffer_id; + companion.update(cx, |c, _| { + for (lhs_id, rhs_id) in pairs { + c.add_excerpt_mapping(lhs_id, rhs_id); + } + c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id); + }); + } + }); + } } #[cfg(test)] @@ -1927,209 +2052,6 @@ impl Render for SplittableEditor { } } -fn mutate_excerpts_for_paths( - rhs_multibuffer: &mut MultiBuffer, - lhs: Option<&LhsEditor>, - rhs_display_map: &Entity, - paths_with_diffs: Vec<(PathKey, Entity)>, - cx: &mut Context, - mutate: impl FnOnce(&mut MultiBuffer, &mut Context) -> R, -) -> R { - let old_rhs_ids: Vec<_> = paths_with_diffs - .iter() - .map(|(path, _)| { - rhs_multibuffer - .excerpts_for_path(path) - .collect::>() - }) - .collect(); - - let result = mutate(rhs_multibuffer, cx); - - if let Some(lhs) = lhs { - let mut sync_results = Vec::new(); - let mut diffs_for_mapping = Vec::new(); - - for ((path, diff), old_rhs_ids) in paths_with_diffs.into_iter().zip(old_rhs_ids) { - let sync_result = lhs.multibuffer.update(cx, |lhs_multibuffer, lhs_cx| { - LhsEditor::sync_path_excerpts( - path, - old_rhs_ids, - rhs_multibuffer, - lhs_multibuffer, - diff.clone(), - rhs_display_map, - lhs_cx, - ) - }); - if let Some(sync_result) = sync_result { - sync_results.push(sync_result); - diffs_for_mapping.push(diff); - } - } - - for ((lhs_excerpt_ids, rhs_merge_groups), diff) in - sync_results.into_iter().zip(diffs_for_mapping.into_iter()) - { - let mut final_rhs_ids = Vec::with_capacity(lhs_excerpt_ids.len()); - for group in rhs_merge_groups { - if group.len() == 1 { - final_rhs_ids.push(group[0]); - } else { - let merged_id = rhs_multibuffer.merge_excerpts(&group, cx); - final_rhs_ids.push(merged_id); - } - } - - debug_assert_eq!(final_rhs_ids.len(), lhs_excerpt_ids.len()); - - if let Some(companion) = rhs_display_map.read(cx).companion().cloned() { - let lhs_buffer_id = diff.read(cx).base_text(cx).remote_id(); - let 
rhs_buffer_id = diff.read(cx).buffer_id; - companion.update(cx, |c, _| { - for (rhs_id, lhs_id) in final_rhs_ids.iter().zip(lhs_excerpt_ids.iter()) { - c.add_excerpt_mapping(*lhs_id, *rhs_id); - } - c.add_buffer_mapping(lhs_buffer_id, rhs_buffer_id); - }); - } - } - } - - result -} - -impl LhsEditor { - fn update_path_excerpts_from_rhs( - path_key: PathKey, - rhs_multibuffer: &MultiBuffer, - lhs_multibuffer: &mut MultiBuffer, - diff: Entity, - lhs_cx: &mut Context, - ) -> Option<(Vec, Vec>)> { - let Some(excerpt_id) = rhs_multibuffer.excerpts_for_path(&path_key).next() else { - lhs_multibuffer.remove_excerpts_for_path(path_key, lhs_cx); - return None; - }; - - let rhs_excerpt_ids: Vec = - rhs_multibuffer.excerpts_for_path(&path_key).collect(); - - let rhs_multibuffer_snapshot = rhs_multibuffer.snapshot(lhs_cx); - let main_buffer = rhs_multibuffer_snapshot - .buffer_for_excerpt(excerpt_id) - .unwrap(); - let diff_snapshot; - let base_text_buffer_snapshot; - let remote_id; - { - let diff = diff.read(lhs_cx); - let base_text_buffer = diff.base_text_buffer().read(lhs_cx); - diff_snapshot = diff.snapshot(lhs_cx); - base_text_buffer_snapshot = base_text_buffer.snapshot(); - remote_id = base_text_buffer.remote_id(); - } - let new = rhs_multibuffer - .excerpts_for_buffer(main_buffer.remote_id(), lhs_cx) - .into_iter() - .filter(|(id, _)| rhs_excerpt_ids.contains(&id)) - .map(|(_, excerpt_range)| { - let point_range_to_base_text_point_range = |range: Range| { - let start = diff_snapshot - .buffer_point_to_base_text_range( - Point::new(range.start.row, 0), - main_buffer, - ) - .start; - let end = diff_snapshot - .buffer_point_to_base_text_range(Point::new(range.end.row, 0), main_buffer) - .end; - let end_column = diff_snapshot.base_text().line_len(end.row); - Point::new(start.row, 0)..Point::new(end.row, end_column) - }; - let rhs = excerpt_range.primary.to_point(main_buffer); - let context = excerpt_range.context.to_point(main_buffer); - ExcerptRange { - primary: 
point_range_to_base_text_point_range(rhs), - context: point_range_to_base_text_point_range(context), - } - }) - .collect(); - - let lhs_result = lhs_multibuffer.update_path_excerpts( - path_key, - diff.read(lhs_cx).base_text_buffer().clone(), - &base_text_buffer_snapshot, - new, - lhs_cx, - ); - if !lhs_result.excerpt_ids.is_empty() - && lhs_multibuffer - .diff_for(remote_id) - .is_none_or(|old_diff| old_diff.entity_id() != diff.entity_id()) - { - let main_buffer_entity = rhs_multibuffer - .buffer(main_buffer.remote_id()) - .expect("main buffer should exist in rhs_multibuffer"); - lhs_multibuffer.add_inverted_diff(diff, main_buffer_entity, lhs_cx); - } - - let rhs_merge_groups: Vec> = { - let mut groups = Vec::new(); - let mut current_group = Vec::new(); - let mut last_id = None; - - for (lhs_id, rhs_id) in lhs_result.excerpt_ids.iter().zip(rhs_excerpt_ids) { - if last_id == Some(lhs_id) { - current_group.push(rhs_id); - } else { - if !current_group.is_empty() { - groups.push(current_group); - } - current_group = vec![rhs_id]; - last_id = Some(lhs_id); - } - } - if !current_group.is_empty() { - groups.push(current_group); - } - groups - }; - - let deduplicated_lhs_ids: Vec = - lhs_result.excerpt_ids.iter().dedup().copied().collect(); - - Some((deduplicated_lhs_ids, rhs_merge_groups)) - } - - fn sync_path_excerpts( - path_key: PathKey, - old_rhs_excerpt_ids: Vec, - rhs_multibuffer: &MultiBuffer, - lhs_multibuffer: &mut MultiBuffer, - diff: Entity, - rhs_display_map: &Entity, - lhs_cx: &mut Context, - ) -> Option<(Vec, Vec>)> { - let old_lhs_excerpt_ids: Vec = - lhs_multibuffer.excerpts_for_path(&path_key).collect(); - - if let Some(companion) = rhs_display_map.read(lhs_cx).companion().cloned() { - companion.update(lhs_cx, |c, _| { - c.remove_excerpt_mappings(old_lhs_excerpt_ids, old_rhs_excerpt_ids); - }); - } - - Self::update_path_excerpts_from_rhs( - path_key, - rhs_multibuffer, - lhs_multibuffer, - diff, - lhs_cx, - ) - } -} - #[cfg(test)] mod tests { use 
std::sync::Arc; From c94a9b7a948f2454e349ab10bf9d6021a964a0f5 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 24 Feb 2026 20:27:57 -0800 Subject: [PATCH 054/548] Edit prediction: prioritize related excerpts that are referenced near the cursor (#50050) We store the byte distance between the cursor and references to each definition. When including excerpts in the prompt, we prioritize them in the order of proximity. I've updated the Edit Prediction Context view to display the excerpt's `order`, and sorting the files in order of their excerpt with the lowest order. Release Notes: - N/A --- .../src/assemble_excerpts.rs | 42 +- .../src/edit_prediction_context.rs | 126 ++++-- .../src/edit_prediction_context_tests.rs | 320 +++++++++++--- .../src/edit_prediction_context_view.rs | 56 ++- crates/zeta_prompt/src/zeta_prompt.rs | 393 ++++++++++++++++-- 5 files changed, 796 insertions(+), 141 deletions(-) diff --git a/crates/edit_prediction_context/src/assemble_excerpts.rs b/crates/edit_prediction_context/src/assemble_excerpts.rs index 3366b1fdd0fed167368157175f0f88e579e310d3..97b83653f53e693218189a938b02b0411fa78a33 100644 --- a/crates/edit_prediction_context/src/assemble_excerpts.rs +++ b/crates/edit_prediction_context/src/assemble_excerpts.rs @@ -8,16 +8,18 @@ const MAX_OUTLINE_ITEM_BODY_SIZE: usize = 24; pub fn assemble_excerpt_ranges( buffer: &BufferSnapshot, - mut input_ranges: Vec>, -) -> Vec> { + input_ranges: Vec<(Range, usize)>, +) -> Vec<(Range, usize)> { + let mut input_ranges: Vec<(Range, usize)> = input_ranges + .into_iter() + .map(|(range, order)| (clip_range_to_lines(&range, false, buffer), order)) + .collect(); merge_ranges(&mut input_ranges); - let mut outline_ranges = Vec::new(); + let mut outline_ranges: Vec<(Range, usize)> = Vec::new(); let outline_items = buffer.outline_items_as_points_containing(0..buffer.len(), false, None); let mut outline_ix = 0; - for input_range in &mut input_ranges { - *input_range = clip_range_to_lines(input_range, false, 
buffer); - + for (input_range, input_order) in &mut input_ranges { while let Some(outline_item) = outline_items.get(outline_ix) { let item_range = clip_range_to_lines(&outline_item.range, false, buffer); @@ -36,6 +38,7 @@ pub fn assemble_excerpt_ranges( add_outline_item( item_range.clone(), body_range.clone(), + *input_order, buffer, &mut outline_ranges, ); @@ -57,6 +60,7 @@ pub fn assemble_excerpt_ranges( next_outline_item .body_range(buffer) .map(|body| clip_range_to_lines(&body, true, buffer)), + *input_order, buffer, &mut outline_ranges, ); @@ -70,12 +74,12 @@ pub fn assemble_excerpt_ranges( } } - input_ranges.extend_from_slice(&outline_ranges); + input_ranges.extend(outline_ranges); merge_ranges(&mut input_ranges); input_ranges .into_iter() - .map(|range| range.start.row..range.end.row) + .map(|(range, order)| (range.start.row..range.end.row, order)) .collect() } @@ -102,8 +106,9 @@ fn clip_range_to_lines( fn add_outline_item( mut item_range: Range, body_range: Option>, + order: usize, buffer: &BufferSnapshot, - outline_ranges: &mut Vec>, + outline_ranges: &mut Vec<(Range, usize)>, ) { if let Some(mut body_range) = body_range { if body_range.start.column > 0 { @@ -113,38 +118,39 @@ fn add_outline_item( let head_range = item_range.start..body_range.start; if head_range.start < head_range.end { - outline_ranges.push(head_range); + outline_ranges.push((head_range, order)); } let tail_range = body_range.end..item_range.end; if tail_range.start < tail_range.end { - outline_ranges.push(tail_range); + outline_ranges.push((tail_range, order)); } } else { item_range.start.column = 0; item_range.end.column = buffer.line_len(item_range.end.row); - outline_ranges.push(item_range); + outline_ranges.push((item_range, order)); } } -pub fn merge_ranges(ranges: &mut Vec>) { - ranges.sort_unstable_by(|a, b| a.start.cmp(&b.start).then(b.end.cmp(&a.end))); +pub fn merge_ranges(ranges: &mut Vec<(Range, usize)>) { + ranges.sort_unstable_by(|(a, _), (b, _)| 
a.start.cmp(&b.start).then(b.end.cmp(&a.end))); let mut index = 1; while index < ranges.len() { - let mut prev_range_end = ranges[index - 1].end; + let mut prev_range_end = ranges[index - 1].0.end; if prev_range_end.column > 0 { prev_range_end += Point::new(1, 0); } if (prev_range_end + Point::new(1, 0)) - .cmp(&ranges[index].start) + .cmp(&ranges[index].0.start) .is_ge() { let removed = ranges.remove(index); - if removed.end.cmp(&ranges[index - 1].end).is_gt() { - ranges[index - 1].end = removed.end; + if removed.0.end.cmp(&ranges[index - 1].0.end).is_gt() { + ranges[index - 1].0.end = removed.0.end; } + ranges[index - 1].1 = ranges[index - 1].1.min(removed.1); } else { index += 1; } diff --git a/crates/edit_prediction_context/src/edit_prediction_context.rs b/crates/edit_prediction_context/src/edit_prediction_context.rs index 5805e93330504fef1ce70e899d413faf9e89aed2..b93fef49296e493b4f06e93e8d855d6a8e111e97 100644 --- a/crates/edit_prediction_context/src/edit_prediction_context.rs +++ b/crates/edit_prediction_context/src/edit_prediction_context.rs @@ -39,6 +39,7 @@ struct RelatedBuffer { buffer: Entity, path: Arc, anchor_ranges: Vec>, + excerpt_orders: Vec, cached_file: Option, } @@ -174,21 +175,21 @@ impl RelatedExcerptStore { }; let buffer = project.get_open_buffer(&project_path, cx)?; let snapshot = buffer.read(cx).snapshot(); - let anchor_ranges = file - .excerpts - .iter() - .map(|excerpt| { - let start = snapshot.anchor_before(Point::new(excerpt.row_range.start, 0)); - let end_row = excerpt.row_range.end; - let end_col = snapshot.line_len(end_row); - let end = snapshot.anchor_after(Point::new(end_row, end_col)); - start..end - }) - .collect(); + let mut anchor_ranges = Vec::with_capacity(file.excerpts.len()); + let mut excerpt_orders = Vec::with_capacity(file.excerpts.len()); + for excerpt in &file.excerpts { + let start = snapshot.anchor_before(Point::new(excerpt.row_range.start, 0)); + let end_row = excerpt.row_range.end; + let end_col = 
snapshot.line_len(end_row); + let end = snapshot.anchor_after(Point::new(end_row, end_col)); + anchor_ranges.push(start..end); + excerpt_orders.push(excerpt.order); + } Some(RelatedBuffer { buffer, path: file.path.clone(), anchor_ranges, + excerpt_orders, cached_file: None, }) }) @@ -221,18 +222,55 @@ impl RelatedExcerptStore { cx.emit(RelatedExcerptStoreEvent::StartedRefresh); })?; - let identifiers = cx + let identifiers_with_ranks = cx .background_spawn(async move { - identifiers_for_position(&snapshot, position, identifier_line_count) + let cursor_offset = position.to_offset(&snapshot); + let identifiers = + identifiers_for_position(&snapshot, position, identifier_line_count); + + // Compute byte distance from cursor to each identifier, then sort by + // distance so we can assign ordinal ranks. Identifiers at the same + // distance share the same rank. + let mut identifiers_with_distance: Vec<(Identifier, usize)> = identifiers + .into_iter() + .map(|id| { + let start = id.range.start.to_offset(&snapshot); + let end = id.range.end.to_offset(&snapshot); + let distance = if cursor_offset < start { + start - cursor_offset + } else if cursor_offset > end { + cursor_offset - end + } else { + 0 + }; + (id, distance) + }) + .collect(); + identifiers_with_distance.sort_by_key(|(_, distance)| *distance); + + let mut cursor_distances: HashMap = HashMap::default(); + let mut current_rank = 0; + let mut previous_distance = None; + for (identifier, distance) in &identifiers_with_distance { + if previous_distance != Some(*distance) { + current_rank = cursor_distances.len(); + previous_distance = Some(*distance); + } + cursor_distances.insert(identifier.clone(), current_rank); + } + + (identifiers_with_distance, cursor_distances) }) .await; + let (identifiers_with_distance, cursor_distances) = identifiers_with_ranks; + let async_cx = cx.clone(); let start_time = Instant::now(); let futures = this.update(cx, |this, cx| { - identifiers + identifiers_with_distance .into_iter() - 
.filter_map(|identifier| { + .filter_map(|(identifier, _)| { let task = if let Some(entry) = this.cache.get(&identifier) { DefinitionTask::CacheHit(entry.clone()) } else { @@ -334,7 +372,8 @@ impl RelatedExcerptStore { } mean_definition_latency /= cache_miss_count.max(1) as u32; - let (new_cache, related_buffers) = rebuild_related_files(&project, new_cache, cx).await?; + let (new_cache, related_buffers) = + rebuild_related_files(&project, new_cache, &cursor_distances, cx).await?; if let Some(file) = &file { log::debug!( @@ -362,6 +401,7 @@ impl RelatedExcerptStore { async fn rebuild_related_files( project: &Entity, mut new_entries: HashMap>, + cursor_distances: &HashMap, cx: &mut AsyncApp, ) -> Result<(HashMap>, Vec)> { let mut snapshots = HashMap::default(); @@ -396,12 +436,18 @@ async fn rebuild_related_files( } } + let cursor_distances = cursor_distances.clone(); Ok(cx .background_spawn(async move { let mut ranges_by_buffer = - HashMap::, Vec>)>::default(); + HashMap::, Vec<(Range, usize)>)>::default(); let mut paths_by_buffer = HashMap::default(); - for entry in new_entries.values_mut() { + let mut min_rank_by_buffer = HashMap::::default(); + for (identifier, entry) in new_entries.iter_mut() { + let rank = cursor_distances + .get(identifier) + .copied() + .unwrap_or(usize::MAX); for definition in entry .definitions .iter() @@ -412,11 +458,16 @@ async fn rebuild_related_files( }; paths_by_buffer.insert(definition.buffer.entity_id(), definition.path.clone()); + let buffer_rank = min_rank_by_buffer + .entry(definition.buffer.entity_id()) + .or_insert(usize::MAX); + *buffer_rank = (*buffer_rank).min(rank); + ranges_by_buffer .entry(definition.buffer.entity_id()) .or_insert_with(|| (definition.buffer.clone(), Vec::new())) .1 - .push(definition.anchor_range.to_point(snapshot)); + .push((definition.anchor_range.to_point(snapshot), rank)); } } @@ -425,7 +476,7 @@ async fn rebuild_related_files( .filter_map(|(entity_id, (buffer, ranges))| { let snapshot = 
snapshots.get(&entity_id)?; let project_path = paths_by_buffer.get(&entity_id)?; - let row_ranges = assemble_excerpt_ranges(snapshot, ranges); + let assembled = assemble_excerpt_ranges(snapshot, ranges); let root_name = worktree_root_names.get(&project_path.worktree_id)?; let path: Arc = Path::new(&format!( @@ -435,20 +486,21 @@ async fn rebuild_related_files( )) .into(); - let anchor_ranges = row_ranges - .into_iter() - .map(|row_range| { - let start = snapshot.anchor_before(Point::new(row_range.start, 0)); - let end_col = snapshot.line_len(row_range.end); - let end = snapshot.anchor_after(Point::new(row_range.end, end_col)); - start..end - }) - .collect(); + let mut anchor_ranges = Vec::with_capacity(assembled.len()); + let mut excerpt_orders = Vec::with_capacity(assembled.len()); + for (row_range, order) in assembled { + let start = snapshot.anchor_before(Point::new(row_range.start, 0)); + let end_col = snapshot.line_len(row_range.end); + let end = snapshot.anchor_after(Point::new(row_range.end, end_col)); + anchor_ranges.push(start..end); + excerpt_orders.push(order); + } let mut related_buffer = RelatedBuffer { buffer, path, anchor_ranges, + excerpt_orders, cached_file: None, }; related_buffer.fill_cache(snapshot); @@ -456,7 +508,17 @@ async fn rebuild_related_files( }) .collect(); - related_buffers.sort_by_key(|related| related.path.clone()); + related_buffers.sort_by(|a, b| { + let rank_a = min_rank_by_buffer + .get(&a.buffer.entity_id()) + .copied() + .unwrap_or(usize::MAX); + let rank_b = min_rank_by_buffer + .get(&b.buffer.entity_id()) + .copied() + .unwrap_or(usize::MAX); + rank_a.cmp(&rank_b).then_with(|| a.path.cmp(&b.path)) + }); (new_entries, related_buffers) }) @@ -487,12 +549,14 @@ impl RelatedBuffer { let excerpts = self .anchor_ranges .iter() - .map(|range| { + .zip(self.excerpt_orders.iter()) + .map(|(range, &order)| { let start = range.start.to_point(buffer); let end = range.end.to_point(buffer); RelatedExcerpt { row_range: start.row..end.row, 
text: buffer.text_for_range(start..end).collect::().into(), + order, } }) .collect::>(); diff --git a/crates/edit_prediction_context/src/edit_prediction_context_tests.rs b/crates/edit_prediction_context/src/edit_prediction_context_tests.rs index b619fa729449f2e232a8c8231f416f5a15c5271f..01c4c76e82eb0851b7552b3d9117af1212a8b3da 100644 --- a/crates/edit_prediction_context/src/edit_prediction_context_tests.rs +++ b/crates/edit_prediction_context/src/edit_prediction_context_tests.rs @@ -48,6 +48,24 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) { assert_related_files( &excerpts, &[ + ( + "root/src/person.rs", + &[ + indoc! {" + pub struct Person { + first_name: String, + last_name: String, + email: String, + age: u32, + } + + impl Person { + pub fn get_first_name(&self) -> &str { + &self.first_name + }"}, + "}", + ], + ), ( "root/src/company.rs", &[indoc! {" @@ -71,24 +89,6 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) { }"}, ], ), - ( - "root/src/person.rs", - &[ - indoc! {" - pub struct Person { - first_name: String, - last_name: String, - email: String, - age: u32, - } - - impl Person { - pub fn get_first_name(&self) -> &str { - &self.first_name - }"}, - "}", - ], - ), ], ); }); @@ -112,6 +112,24 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) { assert_related_files( &excerpts, &[ + ( + "root/src/person.rs", + &[ + indoc! {" + pub struct Person { + first_name: String, + last_name: String, + email: String, + age: u32, + } + + impl Person { + pub fn get_first_name(&self) -> &str { + &self.first_name + }"}, + "}", + ], + ), ( "root/src/company.rs", &[indoc! {" @@ -136,24 +154,6 @@ async fn test_edit_prediction_context(cx: &mut TestAppContext) { }"}, ], ), - ( - "root/src/person.rs", - &[ - indoc! 
{" - pub struct Person { - first_name: String, - last_name: String, - email: String, - age: u32, - } - - impl Person { - pub fn get_first_name(&self) -> &str { - &self.first_name - }"}, - "}", - ], - ), ], ); }); @@ -290,20 +290,21 @@ fn test_assemble_excerpts(cx: &mut TestAppContext) { let (input, ranges) = marked_text_ranges(&input, false); let buffer = cx.new(|cx| Buffer::local(input, cx).with_language(rust_lang(), cx)); buffer.read_with(cx, |buffer, _cx| { - let ranges: Vec> = ranges + let ranges: Vec<(Range, usize)> = ranges .into_iter() - .map(|range| range.to_point(&buffer)) + .map(|range| (range.to_point(&buffer), 0)) .collect(); - let row_ranges = assemble_excerpt_ranges(&buffer.snapshot(), ranges); - let excerpts: Vec = row_ranges + let assembled = assemble_excerpt_ranges(&buffer.snapshot(), ranges); + let excerpts: Vec = assembled .into_iter() - .map(|row_range| { + .map(|(row_range, order)| { let start = Point::new(row_range.start, 0); let end = Point::new(row_range.end, buffer.line_len(row_range.end)); RelatedExcerpt { row_range, text: buffer.text_for_range(start..end).collect::().into(), + order, } }) .collect(); @@ -620,7 +621,6 @@ async fn test_type_definition_deduplication(cx: &mut TestAppContext) { assert_related_files( &excerpts, &[ - ("root/src/main.rs", &["fn work() {", "}"]), ( "root/src/types.rs", &[indoc! {" @@ -628,6 +628,194 @@ async fn test_type_definition_deduplication(cx: &mut TestAppContext) { value: i32, }"}], ), + ("root/src/main.rs", &["fn work() {", "}"]), + ], + ); + }); +} + +#[gpui::test] +async fn test_definitions_ranked_by_cursor_proximity(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // helpers.rs has an impl block whose body exceeds the test + // MAX_OUTLINE_ITEM_BODY_SIZE (24 bytes), so assemble_excerpt_ranges + // splits it into header + individual children + closing brace. main.rs + // references two of the three methods on separate lines at varying + // distances from the cursor. 
This exercises: + // 1. File ordering by closest identifier rank. + // 2. Per-excerpt ordering within a file — child excerpts carry the rank + // of the identifier that discovered them. + // 3. Parent excerpt (impl header / closing brace) inheriting the minimum + // order of its children. + fs.insert_tree( + path!("/root"), + json!({ + "src": { + "helpers.rs": indoc! {r#" + pub struct Helpers { + value: i32, + } + + impl Helpers { + pub fn alpha(&self) -> i32 { + let intermediate = self.value; + intermediate + 1 + } + + pub fn beta(&self) -> i32 { + let intermediate = self.value; + intermediate + 2 + } + + pub fn gamma(&self) -> i32 { + let intermediate = self.value; + intermediate + 3 + } + } + "#}, + "main.rs": indoc! {r#" + use super::helpers::Helpers; + + fn process(h: Helpers) { + let a = h.alpha(); + let b = h.gamma(); + } + "#}, + }, + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let mut servers = setup_fake_lsp(&project, cx); + + let (buffer, _handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx) + }) + .await + .unwrap(); + + let _server = servers.next().await.unwrap(); + cx.run_until_parked(); + + // Place cursor on "h.alpha()". `alpha` is at distance 0, `gamma` is + // farther below. Both resolve to methods inside `impl Helpers` in + // helpers.rs. The impl header and closing brace excerpts should inherit + // the min order of their children (alpha's order). 
+ let related_excerpt_store = cx.new(|cx| RelatedExcerptStore::new(&project, cx)); + related_excerpt_store.update(cx, |store, cx| { + let position = { + let buffer = buffer.read(cx); + let offset = buffer.text().find("h.alpha()").unwrap(); + buffer.anchor_before(offset) + }; + + store.set_identifier_line_count(1); + store.refresh(buffer.clone(), position, cx); + }); + + cx.executor().advance_clock(DEBOUNCE_DURATION); + related_excerpt_store.update(cx, |store, cx| { + let files = store.related_files(cx); + + // helpers.rs has 4 excerpts: the struct+impl header merged with + // the alpha method header (order 1 from alpha), alpha's closing + // brace (order 1), gamma's method header (order 6), and the + // gamma+impl closing brace (order 1, inherited from alpha which + // is also a child of the impl). + let alpha_order = 1; + let gamma_order = 6; + assert_related_files_with_orders( + &files, + &[ + ( + "root/src/helpers.rs", + &[ + ( + indoc! {" + pub struct Helpers { + value: i32, + } + + impl Helpers { + pub fn alpha(&self) -> i32 {"}, + alpha_order, + ), + (" }", alpha_order), + (" pub fn gamma(&self) -> i32 {", gamma_order), + ( + indoc! {" + } + }"}, + alpha_order, + ), + ], + ), + ( + "root/src/main.rs", + &[("fn process(h: Helpers) {", 8), ("}", 8)], + ), + ], + ); + }); + + // Now move cursor to "h.gamma()" — gamma becomes closest, reranking the + // excerpts so that the gamma method excerpt has the best order and the + // alpha method excerpt has a worse order. + related_excerpt_store.update(cx, |store, cx| { + let position = { + let buffer = buffer.read(cx); + let offset = buffer.text().find("h.gamma()").unwrap(); + buffer.anchor_before(offset) + }; + + store.set_identifier_line_count(1); + store.refresh(buffer.clone(), position, cx); + }); + + cx.executor().advance_clock(DEBOUNCE_DURATION); + related_excerpt_store.update(cx, |store, cx| { + let files = store.related_files(cx); + + // Now gamma is closest. 
The alpha method excerpts carry alpha's + // rank (3), and the gamma method excerpts carry gamma's rank (1). + // The impl closing brace merges with gamma's closing brace and + // inherits gamma's order (the best child). + let alpha_order = 3; + let gamma_order = 1; + assert_related_files_with_orders( + &files, + &[ + ( + "root/src/helpers.rs", + &[ + ( + indoc! {" + pub struct Helpers { + value: i32, + } + + impl Helpers { + pub fn alpha(&self) -> i32 {"}, + alpha_order, + ), + (" }", alpha_order), + (" pub fn gamma(&self) -> i32 {", gamma_order), + ( + indoc! {" + } + }"}, + gamma_order, + ), + ], + ), + ( + "root/src/main.rs", + &[("fn process(h: Helpers) {", 8), ("}", 8)], + ), ], ); }); @@ -788,30 +976,56 @@ fn test_project_1() -> serde_json::Value { } fn assert_related_files(actual_files: &[RelatedFile], expected_files: &[(&str, &[&str])]) { - let actual_files = actual_files + let expected_with_orders: Vec<(&str, Vec<(&str, usize)>)> = expected_files + .iter() + .map(|(path, texts)| (*path, texts.iter().map(|text| (*text, 0)).collect())) + .collect(); + let expected_refs: Vec<(&str, &[(&str, usize)])> = expected_with_orders + .iter() + .map(|(path, excerpts)| (*path, excerpts.as_slice())) + .collect(); + assert_related_files_impl(actual_files, &expected_refs, false) +} + +fn assert_related_files_with_orders( + actual_files: &[RelatedFile], + expected_files: &[(&str, &[(&str, usize)])], +) { + assert_related_files_impl(actual_files, expected_files, true) +} + +fn assert_related_files_impl( + actual_files: &[RelatedFile], + expected_files: &[(&str, &[(&str, usize)])], + check_orders: bool, +) { + let actual: Vec<(&str, Vec<(String, usize)>)> = actual_files .iter() .map(|file| { let excerpts = file .excerpts .iter() - .map(|excerpt| excerpt.text.to_string()) - .collect::>(); + .map(|excerpt| { + let order = if check_orders { excerpt.order } else { 0 }; + (excerpt.text.to_string(), order) + }) + .collect(); (file.path.to_str().unwrap(), excerpts) }) - 
.collect::>(); - let expected_excerpts = expected_files + .collect(); + let expected: Vec<(&str, Vec<(String, usize)>)> = expected_files .iter() - .map(|(path, texts)| { + .map(|(path, excerpts)| { ( *path, - texts + excerpts .iter() - .map(|line| line.to_string()) - .collect::>(), + .map(|(text, order)| (text.to_string(), *order)) + .collect(), ) }) - .collect::>(); - pretty_assertions::assert_eq!(actual_files, expected_excerpts) + .collect(); + pretty_assertions::assert_eq!(actual, expected) } fn assert_definitions(definitions: &[LocationLink], first_lines: &[&str], cx: &mut TestAppContext) { diff --git a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs index 6ad816c36ddea3f0493ce853fd6f0efd4b8e0dc7..48e74dcdcc102f9ed7844f1b8829e0182fe2c97b 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_context_view.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_context_view.rs @@ -8,14 +8,17 @@ use std::{ use anyhow::Result; use client::{Client, UserStore}; -use editor::{Editor, PathKey}; +use editor::{ + Editor, PathKey, + display_map::{BlockPlacement, BlockProperties, BlockStyle}, +}; use futures::StreamExt as _; use gpui::{ Animation, AnimationExt, App, AppContext as _, Context, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement as _, IntoElement as _, ParentElement as _, SharedString, Styled as _, Task, TextAlign, Window, actions, div, pulsating_between, }; -use multi_buffer::MultiBuffer; +use multi_buffer::{Anchor, MultiBuffer}; use project::Project; use text::Point; use ui::{ @@ -165,8 +168,14 @@ impl EditPredictionContextView { } cx.spawn_in(window, async move |this, cx| { - let mut paths = Vec::new(); + let mut paths: Vec<(PathKey, _, Vec<_>, Vec, usize)> = Vec::new(); for (related_file, buffer) in related_files { + let orders = related_file + .excerpts + .iter() + .map(|excerpt| excerpt.order) + .collect::>(); + let min_order = 
orders.iter().copied().min().unwrap_or(usize::MAX); let point_ranges = related_file .excerpts .iter() @@ -175,20 +184,53 @@ impl EditPredictionContextView { }) .collect::>(); cx.update(|_, cx| { - let path = PathKey::for_buffer(&buffer, cx); - paths.push((path, buffer, point_ranges)); + let path = if let Some(file) = buffer.read(cx).file() { + PathKey::with_sort_prefix(min_order as u64, file.path().clone()) + } else { + PathKey::for_buffer(&buffer, cx) + }; + paths.push((path, buffer, point_ranges, orders, min_order)); })?; } + paths.sort_by_key(|(_, _, _, _, min_order)| *min_order); + + let mut excerpt_anchors_with_orders: Vec<(Anchor, usize)> = Vec::new(); + multibuffer.update(cx, |multibuffer, cx| { multibuffer.clear(cx); - for (path, buffer, ranges) in paths { - multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); + for (path, buffer, ranges, orders, _) in paths { + let (anchor_ranges, _) = + multibuffer.set_excerpts_for_path(path, buffer, ranges, 0, cx); + for (anchor_range, order) in anchor_ranges.into_iter().zip(orders) { + excerpt_anchors_with_orders.push((anchor_range.start, order)); + } } }); editor.update_in(cx, |editor, window, cx| { + let blocks = excerpt_anchors_with_orders + .into_iter() + .map(|(anchor, order)| { + let label = SharedString::from(format!("order: {order}")); + BlockProperties { + placement: BlockPlacement::Above(anchor), + height: Some(1), + style: BlockStyle::Sticky, + render: Arc::new(move |cx| { + div() + .pl(cx.anchor_x) + .text_ui_xs(cx) + .text_color(cx.editor_style.status.info) + .child(label.clone()) + .into_any_element() + }), + priority: 0, + } + }) + .collect::>(); + editor.insert_blocks(blocks, None, cx); editor.move_to_beginning(&Default::default(), window, cx); })?; diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index 7391683d34d8010336c6f81e6da50be6e6c11c15..95110bae009d1fc40766f741e4aad06b4c10ca6c 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ 
b/crates/zeta_prompt/src/zeta_prompt.rs @@ -213,6 +213,8 @@ pub struct RelatedFile { pub struct RelatedExcerpt { pub row_range: Range, pub text: Arc, + #[serde(default)] + pub order: usize, } pub fn prompt_input_contains_special_tokens(input: &ZetaPromptInput, format: ZetaFormat) -> bool { @@ -419,53 +421,167 @@ fn format_edit_history_within_budget( result } +fn excerpt_rendered_tokens(excerpt: &RelatedExcerpt, file_max_row: u32) -> usize { + let needs_newline = !excerpt.text.ends_with('\n'); + let needs_ellipsis = excerpt.row_range.end < file_max_row; + let len = excerpt.text.len() + + if needs_newline { "\n".len() } else { 0 } + + if needs_ellipsis { "...\n".len() } else { 0 }; + estimate_tokens(len) +} + fn format_related_files_within_budget( related_files: &[RelatedFile], file_marker: &str, max_tokens: usize, ) -> String { - let mut result = String::new(); - let mut total_tokens = 0; + // Collect the distinct order values across all excerpts, sorted ascending. + let mut order_levels: Vec = related_files + .iter() + .flat_map(|f| f.excerpts.iter().map(|e| e.order)) + .collect(); + order_levels.sort_unstable(); + order_levels.dedup(); - for file in related_files { - let path_str = file.path.to_string_lossy(); - let header = format!("{}{}\n", file_marker, path_str); - let header_tokens = estimate_tokens(header.len()); + // Pre-compute file header strings and their token costs. + let file_headers: Vec = related_files + .iter() + .map(|file| { + let path_str = file.path.to_string_lossy(); + format!("{}{}\n", file_marker, path_str) + }) + .collect(); + + // Track which excerpts are included per file. + let mut included: Vec> = related_files + .iter() + .map(|file| vec![false; file.excerpts.len()]) + .collect(); + let mut file_included: Vec = vec![false; related_files.len()]; + let mut total_tokens = 0; - if total_tokens + header_tokens > max_tokens { - break; + // Process order levels from best (lowest) to worst. 
At each level, try to + // include all not-yet-included excerpts with that order across all files. + // If the full level doesn't fit, include a partial prefix (top-to-bottom + // within each file) and stop — don't proceed to worse order levels. + 'outer: for &order in &order_levels { + // Gather the work for this order level: for each file that has excerpts + // at this order, collect the not-yet-included excerpt indices (in their + // original positional order) and the token cost to add them (including + // the file header if the file isn't already included). + struct FileWork { + file_idx: usize, + excerpt_indices: Vec, + header_cost: usize, + excerpt_costs: Vec, } - let mut file_tokens = header_tokens; - let mut excerpts_to_include = 0; - - for excerpt in &file.excerpts { - let needs_newline = !excerpt.text.ends_with('\n'); - let needs_ellipsis = excerpt.row_range.end < file.max_row; - let excerpt_len = excerpt.text.len() - + if needs_newline { "\n".len() } else { 0 } - + if needs_ellipsis { "...\n".len() } else { 0 }; - - let excerpt_tokens = estimate_tokens(excerpt_len); - if total_tokens + file_tokens + excerpt_tokens > max_tokens { - break; + let mut work_items: Vec = Vec::new(); + for (file_idx, file) in related_files.iter().enumerate() { + let mut excerpt_indices = Vec::new(); + let mut excerpt_costs = Vec::new(); + for (eidx, excerpt) in file.excerpts.iter().enumerate() { + if excerpt.order == order && !included[file_idx][eidx] { + excerpt_indices.push(eidx); + excerpt_costs.push(excerpt_rendered_tokens(excerpt, file.max_row)); + } } - file_tokens += excerpt_tokens; - excerpts_to_include += 1; + if excerpt_indices.is_empty() { + continue; + } + let header_cost = if file_included[file_idx] { + 0 + } else { + estimate_tokens(file_headers[file_idx].len()) + }; + work_items.push(FileWork { + file_idx, + excerpt_indices, + header_cost, + excerpt_costs, + }); } - if excerpts_to_include > 0 { - total_tokens += file_tokens; - result.push_str(&header); - for 
excerpt in file.excerpts.iter().take(excerpts_to_include) { - result.push_str(&excerpt.text); - if !result.ends_with('\n') { - result.push('\n'); + // Compute the total cost for this entire order level. + let level_cost: usize = work_items + .iter() + .map(|w| w.header_cost + w.excerpt_costs.iter().sum::()) + .sum(); + + if total_tokens + level_cost <= max_tokens { + // The whole level fits — include everything. + for work in &work_items { + total_tokens += work.header_cost; + file_included[work.file_idx] = true; + for (i, &eidx) in work.excerpt_indices.iter().enumerate() { + included[work.file_idx][eidx] = true; + total_tokens += work.excerpt_costs[i]; } - if excerpt.row_range.end < file.max_row { - result.push_str("...\n"); + } + } else { + // The whole level doesn't fit. Include as many excerpts as possible + // from each file (in positional order), then stop entirely. + for work in &work_items { + let available = max_tokens.saturating_sub(total_tokens); + let mut file_cost = work.header_cost; + + let mut count = 0; + for i in 0..work.excerpt_indices.len() { + if file_cost + work.excerpt_costs[i] > available { + break; + } + file_cost += work.excerpt_costs[i]; + count += 1; + } + + if count > 0 { + total_tokens += work.header_cost; + file_included[work.file_idx] = true; + for (i, &eidx) in work.excerpt_indices.iter().take(count).enumerate() { + included[work.file_idx][eidx] = true; + total_tokens += work.excerpt_costs[i]; + } } } + break 'outer; + } + } + + // Determine file rendering order: by the best (lowest) order of any + // included excerpt, breaking ties by original file index. 
+ let mut file_order: Vec<(usize, usize)> = Vec::new(); + for (file_idx, file) in related_files.iter().enumerate() { + if !file_included[file_idx] { + continue; + } + let best_order = file + .excerpts + .iter() + .enumerate() + .filter(|(eidx, _)| included[file_idx][*eidx]) + .map(|(_, e)| e.order) + .min() + .unwrap_or(usize::MAX); + file_order.push((file_idx, best_order)); + } + file_order.sort_by_key(|&(file_idx, best_order)| (best_order, file_idx)); + + // Render included files and excerpts in positional order within each file. + let mut result = String::new(); + for &(file_idx, _) in &file_order { + let file = &related_files[file_idx]; + result.push_str(&file_headers[file_idx]); + for (eidx, excerpt) in file.excerpts.iter().enumerate() { + if !included[file_idx][eidx] { + continue; + } + result.push_str(&excerpt.text); + if !result.ends_with('\n') { + result.push('\n'); + } + if excerpt.row_range.end < file.max_row { + result.push_str("...\n"); + } } } @@ -1136,6 +1252,7 @@ mod tests { excerpts: vec![RelatedExcerpt { row_range: 0..content.lines().count() as u32, text: content.into(), + order: 0, }], in_open_source_repo: false, } @@ -1244,14 +1361,17 @@ mod tests { RelatedExcerpt { row_range: 0..10, text: "first excerpt\n".into(), + order: 0, }, RelatedExcerpt { row_range: 10..20, text: "second excerpt\n".into(), + order: 0, }, RelatedExcerpt { row_range: 20..30, text: "third excerpt\n".into(), + order: 0, }, ], }], @@ -1291,6 +1411,149 @@ mod tests { ); } + #[test] + fn test_truncation_prioritizes_lower_order_excerpts() { + // Two files: file_a has a high-order excerpt, file_b has a low-order one. + // With tight budget, only the lower-order excerpt from file_b should be included. 
+ let input = make_input( + "x", + 0..1, + 0, + vec![], + vec![ + RelatedFile { + path: Path::new("file_a.rs").into(), + max_row: 10, + in_open_source_repo: false, + excerpts: vec![RelatedExcerpt { + row_range: 0..10, + text: "low priority content\n".into(), + order: 5, + }], + }, + RelatedFile { + path: Path::new("file_b.rs").into(), + max_row: 10, + in_open_source_repo: false, + excerpts: vec![RelatedExcerpt { + row_range: 0..10, + text: "high priority content\n".into(), + order: 1, + }], + }, + ], + ); + + // With large budget, both files included; file_b (order 1) renders before file_a (order 5). + assert_eq!( + format_with_budget(&input, 10000), + indoc! {r#" + <|file_sep|>file_b.rs + high priority content + <|file_sep|>file_a.rs + low priority content + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + <|user_cursor|>x + <|fim_suffix|> + <|fim_middle|>updated + "#} + ); + + // With tight budget, only file_b (lower order) fits. + // Cursor section is ~37 tokens, so budget 52 leaves ~15 for related files. + // file_b header (7) + excerpt (7) = 14 tokens, which fits. + // file_a would need another 14 tokens, which doesn't fit. + assert_eq!( + format_with_budget(&input, 52), + indoc! {r#" + <|file_sep|>file_b.rs + high priority content + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + <|user_cursor|>x + <|fim_suffix|> + <|fim_middle|>updated + "#} + ); + } + + #[test] + fn test_truncation_drops_high_order_excerpts_within_file() { + // A single file has excerpts at order 1 and order 3. With a tight budget, + // only the order-1 excerpts are included while the order-3 excerpt is + // dropped — even though they belong to the same file. This also preserves + // the parent invariant: parent outline items have order ≤ their best + // child, so they're always included when any child is. 
+ let input = make_input( + "x", + 0..1, + 0, + vec![], + vec![RelatedFile { + path: Path::new("mod.rs").into(), + max_row: 30, + in_open_source_repo: false, + excerpts: vec![ + RelatedExcerpt { + row_range: 0..5, + text: "mod header\n".into(), + order: 1, + }, + RelatedExcerpt { + row_range: 5..15, + text: "important fn\n".into(), + order: 1, + }, + RelatedExcerpt { + row_range: 15..30, + text: "less important fn\n".into(), + order: 3, + }, + ], + }], + ); + + // With large budget, all three excerpts included. + assert_eq!( + format_with_budget(&input, 10000), + indoc! {r#" + <|file_sep|>mod.rs + mod header + ... + important fn + ... + less important fn + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + <|user_cursor|>x + <|fim_suffix|> + <|fim_middle|>updated + "#} + ); + + // With tight budget, only order<=1 excerpts included (header + important fn). + assert_eq!( + format_with_budget(&input, 55), + indoc! {r#" + <|file_sep|>mod.rs + mod header + ... + important fn + ... + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + <|user_cursor|>x + <|fim_suffix|> + <|fim_middle|>updated + "#} + ); + } + #[test] fn test_truncation_drops_older_events_first() { let input = make_input( @@ -1463,6 +1726,72 @@ mod tests { ); } + #[test] + fn test_seed_coder_truncation_prioritizes_lower_order() { + let input = make_input( + "code", + 0..4, + 2, + vec![], + vec![ + RelatedFile { + path: Path::new("low_prio.rs").into(), + max_row: 5, + in_open_source_repo: false, + excerpts: vec![RelatedExcerpt { + row_range: 0..5, + text: "low prio\n".into(), + order: 10, + }], + }, + RelatedFile { + path: Path::new("high_prio.rs").into(), + max_row: 5, + in_open_source_repo: false, + excerpts: vec![RelatedExcerpt { + row_range: 0..5, + text: "high prio\n".into(), + order: 1, + }], + }, + ], + ); + + // With large budget, both included; high_prio first due to lower order. + assert_eq!( + format_seed_coder(&input), + indoc! 
{r#" + <[fim-suffix]> + <[fim-prefix]>high_prio.rs + high prio + low_prio.rs + low prio + + test.rs + <<<<<<< CURRENT + co<|user_cursor|>de + ======= + <[fim-middle]>"#} + ); + + // With tight budget, only high_prio included. + // Cursor sections cost 25 tokens, so budget 44 leaves 19 for related files. + // high_prio header (7) + excerpt (3) = 10, fits. low_prio would add 10 more = 20 > 19. + assert_eq!( + format_seed_coder_with_budget(&input, 44), + indoc! {r#" + <[fim-suffix]> + <[fim-prefix]>high_prio.rs + high prio + + test.rs + <<<<<<< CURRENT + co<|user_cursor|>de + ======= + <[fim-middle]>"#} + ); + } + #[test] fn test_seed_coder_clean_output() { let output_with_marker = "new code\n>>>>>>> UPDATED\n"; From 0caaecc8c5a3a5d72517f558910aae7d9baffb20 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Feb 2026 21:47:02 -0700 Subject: [PATCH 055/548] Fix panic in message editor paste (#50051) Fixes ZED-4KY Release Notes: - Fixed a panic when pasting into the agent UI --- crates/agent_ui/src/acp/message_editor.rs | 228 +++++++++++++++++----- 1 file changed, 176 insertions(+), 52 deletions(-) diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index 47847aef53cbd597c78cf329467a35ff1ac68978..6710969bd89b3ec7307ee13c4efa0df9fa8bcab8 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -747,70 +747,90 @@ impl MessageEditor { _ => None, }) { - let path_style = workspace.read(cx).project().read(cx).path_style(cx); - - // Parse markdown mention links in format: [@name](uri) - let parsed_mentions = parse_mention_links(&clipboard_text, path_style); - - if !parsed_mentions.is_empty() { + if clipboard_text.contains("[@") { cx.stop_propagation(); - - let insertion_offset = self.editor.update(cx, |editor, cx| { + let selections_before = self.editor.update(cx, |editor, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); - 
editor.selections.newest_anchor().start.to_offset(&snapshot) + editor + .selections + .disjoint_anchors() + .iter() + .map(|selection| { + ( + selection.start.bias_left(&snapshot), + selection.end.bias_right(&snapshot), + ) + }) + .collect::>() }); - // Insert the raw text first self.editor.update(cx, |editor, cx| { editor.insert(&clipboard_text, window, cx); }); - let supports_images = self.prompt_capabilities.borrow().image; - let http_client = workspace.read(cx).client().http_client(); - - // Now create creases for each mention and load their content let snapshot = self.editor.read(cx).buffer().read(cx).snapshot(cx); - for (range, mention_uri) in parsed_mentions { - let start_offset = insertion_offset.0 + range.start; - let anchor = snapshot.anchor_before(MultiBufferOffset(start_offset)); - let content_len = range.end - range.start; - - let Some((crease_id, tx)) = insert_crease_for_mention( - anchor.excerpt_id, - anchor.text_anchor, - content_len, - mention_uri.name().into(), - mention_uri.icon_path(cx), - None, - self.editor.clone(), - window, - cx, - ) else { - continue; - }; + let path_style = workspace.read(cx).project().read(cx).path_style(cx); + + let mut all_mentions = Vec::new(); + for (start_anchor, end_anchor) in selections_before { + let start_offset = start_anchor.to_offset(&snapshot); + let end_offset = end_anchor.to_offset(&snapshot); + + // Get the actual inserted text from the buffer (may differ due to auto-indent) + let inserted_text: String = + snapshot.text_for_range(start_offset..end_offset).collect(); + + let parsed_mentions = parse_mention_links(&inserted_text, path_style); + for (range, mention_uri) in parsed_mentions { + let mention_start_offset = MultiBufferOffset(start_offset.0 + range.start); + let anchor = snapshot.anchor_before(mention_start_offset); + let content_len = range.end - range.start; + all_mentions.push((anchor, content_len, mention_uri)); + } + } - // Create the confirmation task based on the mention URI type. 
- // This properly loads file content, fetches URLs, etc. - let task = self.mention_set.update(cx, |mention_set, cx| { - mention_set.confirm_mention_for_uri( - mention_uri.clone(), - supports_images, - http_client.clone(), + if !all_mentions.is_empty() { + let supports_images = self.prompt_capabilities.borrow().image; + let http_client = workspace.read(cx).client().http_client(); + + for (anchor, content_len, mention_uri) in all_mentions { + let Some((crease_id, tx)) = insert_crease_for_mention( + anchor.excerpt_id, + anchor.text_anchor, + content_len, + mention_uri.name().into(), + mention_uri.icon_path(cx), + None, + self.editor.clone(), + window, cx, - ) - }); - let task = cx - .spawn(async move |_, _| task.await.map_err(|e| e.to_string())) - .shared(); + ) else { + continue; + }; - self.mention_set.update(cx, |mention_set, _cx| { - mention_set.insert_mention(crease_id, mention_uri.clone(), task.clone()) - }); + // Create the confirmation task based on the mention URI type. + // This properly loads file content, fetches URLs, etc. 
+ let task = self.mention_set.update(cx, |mention_set, cx| { + mention_set.confirm_mention_for_uri( + mention_uri.clone(), + supports_images, + http_client.clone(), + cx, + ) + }); + let task = cx + .spawn(async move |_, _| task.await.map_err(|e| e.to_string())) + .shared(); - // Drop the tx after inserting to signal the crease is ready - drop(tx); + self.mention_set.update(cx, |mention_set, _cx| { + mention_set.insert_mention(crease_id, mention_uri.clone(), task.clone()) + }); + + // Drop the tx after inserting to signal the crease is ready + drop(tx); + } + return; } - return; } } @@ -1449,12 +1469,16 @@ mod tests { use acp_thread::{AgentSessionInfo, MentionUri}; use agent::{ThreadStore, outline}; use agent_client_protocol as acp; - use editor::{AnchorRangeExt as _, Editor, EditorMode, MultiBufferOffset}; + use editor::{ + AnchorRangeExt as _, Editor, EditorMode, MultiBufferOffset, SelectionEffects, + actions::Paste, + }; use fs::FakeFs; use futures::StreamExt as _; use gpui::{ - AppContext, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext, + AppContext, ClipboardItem, Entity, EventEmitter, FocusHandle, Focusable, TestAppContext, + VisualTestContext, }; use language_model::LanguageModelRegistry; use lsp::{CompletionContext, CompletionTriggerKind}; @@ -3333,4 +3357,104 @@ mod tests { assert_eq!(editor.text(cx), "😄😄@file"); }); } + + #[gpui::test] + async fn test_paste_mention_link_with_multiple_selections(cx: &mut TestAppContext) { + init_test(cx); + + let app_state = cx.update(AppState::test); + + cx.update(|cx| { + editor::init(cx); + workspace::init(app_state.clone(), cx); + }); + + app_state + .fs + .as_fake() + .insert_tree(path!("/project"), json!({"file.txt": "content"})) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/project").as_ref()], cx).await; + let window = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = window + .read_with(cx, |mw, _| 
mw.workspace().clone()) + .unwrap(); + + let mut cx = VisualTestContext::from_window(window.into(), cx); + + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let history = cx + .update(|window, cx| cx.new(|cx| crate::acp::AcpThreadHistory::new(None, window, cx))); + + let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { + let workspace_handle = cx.weak_entity(); + let message_editor = cx.new(|cx| { + MessageEditor::new( + workspace_handle, + project.downgrade(), + Some(thread_store), + history.downgrade(), + None, + Default::default(), + Default::default(), + "Test Agent".into(), + "Test", + EditorMode::AutoHeight { + max_lines: None, + min_lines: 1, + }, + window, + cx, + ) + }); + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item( + Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))), + true, + true, + None, + window, + cx, + ); + }); + message_editor.read(cx).focus_handle(cx).focus(window, cx); + let editor = message_editor.read(cx).editor().clone(); + (message_editor, editor) + }); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.set_text( + "AAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAA", + window, + cx, + ); + }); + + cx.run_until_parked(); + + editor.update_in(&mut cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([ + MultiBufferOffset(0)..MultiBufferOffset(25), // First selection (large) + MultiBufferOffset(30)..MultiBufferOffset(55), // Second selection (newest) + ]); + }); + }); + + let mention_link = "[@f](file:///test.txt)"; + cx.write_to_clipboard(ClipboardItem::new_string(mention_link.into())); + + message_editor.update_in(&mut cx, |message_editor, window, cx| { + message_editor.paste(&Paste, window, cx); + }); + + let text = editor.update(&mut cx, |editor, cx| editor.text(cx)); + assert!( + text.contains("[@f](file:///test.txt)"), + "Expected mention link to be pasted, got: {}", + text + ); + } } 
From bc81ada227061ebadf9d5d3ea32eb5a090dfe26f Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Feb 2026 22:16:50 -0700 Subject: [PATCH 056/548] Fix panic in can_resolve when passed an invalid excerpt ID (#50052) Fixes ZED-59F This is follow-up work from #49994; which assumed that can_resolve would return false for an invalid excerpt id. Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/multi_buffer/src/multi_buffer.rs | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index 34d32f481947657327cbec99e0a3aedc59aeabe7..6a90d9c410859324d31ebbd59c909e31127ecc6a 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -6938,18 +6938,23 @@ impl MultiBufferSnapshot { } fn excerpt_locator_for_id(&self, id: ExcerptId) -> &Locator { + self.try_excerpt_locator_for_id(id) + .unwrap_or_else(|| panic!("invalid excerpt id {id:?}")) + } + + fn try_excerpt_locator_for_id(&self, id: ExcerptId) -> Option<&Locator> { if id == ExcerptId::min() { - Locator::min_ref() + Some(Locator::min_ref()) } else if id == ExcerptId::max() { - Locator::max_ref() + Some(Locator::max_ref()) } else { let (_, _, item) = self.excerpt_ids.find::((), &id, Bias::Left); if let Some(entry) = item && entry.id == id { - return &entry.locator; + return Some(&entry.locator); } - panic!("invalid excerpt id {id:?}") + None } } @@ -7034,7 +7039,7 @@ impl MultiBufferSnapshot { /// afterwards. 
fn excerpt(&self, excerpt_id: ExcerptId) -> Option<&Excerpt> { let excerpt_id = self.latest_excerpt_id(excerpt_id); - let locator = self.excerpt_locator_for_id(excerpt_id); + let locator = self.try_excerpt_locator_for_id(excerpt_id)?; let (_, _, item) = self.excerpts .find::, _>((), &Some(locator), Bias::Left); From 3275481ac49929cb03b373c8b2ffd56cf87937f8 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Feb 2026 22:48:59 -0700 Subject: [PATCH 057/548] Fix race condition in channel notes rejoin (#50034) Closes #49998 Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed a (very rare) crash that could happen due to lost edits in channel buffers --- crates/channel/src/channel_buffer.rs | 11 ++ crates/channel/src/channel_store.rs | 55 +++++- .../tests/integration/channel_buffer_tests.rs | 160 ++++++++++++++++++ 3 files changed, 219 insertions(+), 7 deletions(-) diff --git a/crates/channel/src/channel_buffer.rs b/crates/channel/src/channel_buffer.rs index c4966443978e1eaf86192171de4c765cac41d5c7..8b6f30a3cd3bf1d61f76a9b39c99a7b51a30ea4f 100644 --- a/crates/channel/src/channel_buffer.rs +++ b/crates/channel/src/channel_buffer.rs @@ -22,6 +22,7 @@ pub(crate) fn init(client: &AnyProtoClient) { pub struct ChannelBuffer { pub channel_id: ChannelId, connected: bool, + rejoining: bool, collaborators: HashMap, user_store: Entity, channel_store: Entity, @@ -84,6 +85,7 @@ impl ChannelBuffer { buffer_epoch: response.epoch, client, connected: true, + rejoining: false, collaborators: Default::default(), acknowledge_task: None, channel_id: channel.id, @@ -111,6 +113,7 @@ impl ChannelBuffer { pub fn connected(&mut self, cx: &mut Context) { 
self.connected = true; + self.rejoining = false; if self.subscription.is_none() { let Ok(subscription) = self.client.subscribe_to_entity(self.channel_id.0) else { return; @@ -120,6 +123,10 @@ impl ChannelBuffer { } } + pub(crate) fn set_rejoining(&mut self, rejoining: bool) { + self.rejoining = rejoining; + } + pub fn remote_id(&self, cx: &App) -> BufferId { self.buffer.read(cx).remote_id() } @@ -204,6 +211,9 @@ impl ChannelBuffer { return; } let operation = language::proto::serialize_operation(operation); + if self.rejoining { + return; + } self.client .send(proto::UpdateChannelBuffer { channel_id: self.channel_id.0, @@ -263,6 +273,7 @@ impl ChannelBuffer { log::info!("channel buffer {} disconnected", self.channel_id); if self.connected { self.connected = false; + self.rejoining = false; self.subscription.take(); cx.emit(ChannelBufferEvent::Disconnected); cx.notify() diff --git a/crates/channel/src/channel_store.rs b/crates/channel/src/channel_store.rs index 62e34210ebac2dd0e017b415adb094857bb11025..a9357a765a75443e18efb1e6f31cdfab313ebcce 100644 --- a/crates/channel/src/channel_store.rs +++ b/crates/channel/src/channel_store.rs @@ -855,12 +855,18 @@ impl ChannelStore { if let OpenEntityHandle::Open(buffer) = buffer && let Some(buffer) = buffer.upgrade() { - let channel_buffer = buffer.read(cx); - let buffer = channel_buffer.buffer().read(cx); - buffer_versions.push(proto::ChannelBufferVersion { - channel_id: channel_buffer.channel_id.0, - epoch: channel_buffer.epoch(), - version: language::proto::serialize_version(&buffer.version()), + buffer.update(cx, |channel_buffer, cx| { + // Block on_buffer_update from sending UpdateChannelBuffer messages + // until the rejoin completes. This prevents a race condition where + // edits made during the rejoin async gap could inflate the server + // version, causing offline edits to be filtered out by serialize_ops. 
+ channel_buffer.set_rejoining(true); + let inner_buffer = channel_buffer.buffer().read(cx); + buffer_versions.push(proto::ChannelBufferVersion { + channel_id: channel_buffer.channel_id.0, + epoch: channel_buffer.epoch(), + version: language::proto::serialize_version(&inner_buffer.version()), + }); }); } } @@ -874,7 +880,26 @@ impl ChannelStore { }); cx.spawn(async move |this, cx| { - let mut response = response.await?; + let response = match response.await { + Ok(response) => response, + Err(err) => { + // Clear rejoining flag on all buffers since the rejoin failed + this.update(cx, |this, cx| { + for buffer in this.opened_buffers.values() { + if let OpenEntityHandle::Open(buffer) = buffer { + if let Some(buffer) = buffer.upgrade() { + buffer.update(cx, |channel_buffer, _| { + channel_buffer.set_rejoining(false); + }); + } + } + } + }) + .ok(); + return Err(err); + } + }; + let mut response = response; this.update(cx, |this, cx| { this.opened_buffers.retain(|_, buffer| match buffer { @@ -948,6 +973,22 @@ impl ChannelStore { fn handle_disconnect(&mut self, wait_for_reconnect: bool, cx: &mut Context) { cx.notify(); self.did_subscribe = false; + + // If we're waiting for reconnect, set rejoining=true on all buffers immediately. + // This prevents operations from being sent during the reconnection window, + // before handle_connect has a chance to run and capture the version. 
+ if wait_for_reconnect { + for buffer in self.opened_buffers.values() { + if let OpenEntityHandle::Open(buffer) = buffer { + if let Some(buffer) = buffer.upgrade() { + buffer.update(cx, |channel_buffer, _| { + channel_buffer.set_rejoining(true); + }); + } + } + } + } + self.disconnect_channel_buffers_task.get_or_insert_with(|| { cx.spawn(async move |this, cx| { if wait_for_reconnect { diff --git a/crates/collab/tests/integration/channel_buffer_tests.rs b/crates/collab/tests/integration/channel_buffer_tests.rs index c9fd0459f43ea74ca1052831903e913c191a6f7a..a5aca7dd82ca23b1c348bea1fff5d2da2870c654 100644 --- a/crates/collab/tests/integration/channel_buffer_tests.rs +++ b/crates/collab/tests/integration/channel_buffer_tests.rs @@ -3,6 +3,7 @@ use call::ActiveCall; use channel::ACKNOWLEDGE_DEBOUNCE_INTERVAL; use client::{Collaborator, ParticipantIndex, UserId}; use collab::rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT}; + use collab_ui::channel_view::ChannelView; use collections::HashMap; use editor::{Anchor, Editor, MultiBufferOffset, ToOffset}; @@ -698,6 +699,165 @@ async fn test_channel_buffer_changes_persist( }); } +#[gpui::test] +async fn test_channel_buffer_operations_lost_on_reconnect( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + + let channel_id = server + .make_channel( + "the-channel", + None, + (&client_a, cx_a), + &mut [(&client_b, cx_b)], + ) + .await; + + // Both clients open the channel buffer. 
+ let channel_buffer_a = client_a + .channel_store() + .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + let channel_buffer_b = client_b + .channel_store() + .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx)) + .await + .unwrap(); + + // Step 1: Client A makes an initial edit that syncs to B. + channel_buffer_a.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(0..0, "a")], None, cx); + }) + }); + executor.run_until_parked(); + + // Verify both clients see "a". + channel_buffer_a.read_with(cx_a, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "a"); + }); + channel_buffer_b.read_with(cx_b, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "a"); + }); + + // Step 2: Disconnect client A. Do NOT advance past RECONNECT_TIMEOUT + // so that the buffer stays in `opened_buffers` for rejoin. + server.forbid_connections(); + server.disconnect_client(client_a.peer_id().unwrap()); + executor.run_until_parked(); + + // Step 3: While disconnected, client A makes an offline edit ("b"). + // on_buffer_update fires but client.send() fails because transport is down. + channel_buffer_a.update(cx_a, |buffer, cx| { + buffer.buffer().update(cx, |buffer, cx| { + buffer.edit([(1..1, "b")], None, cx); + }) + }); + executor.run_until_parked(); + + // Client A sees "ab" locally; B still sees "a". + channel_buffer_a.read_with(cx_a, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "ab"); + }); + channel_buffer_b.read_with(cx_b, |buffer, cx| { + assert_eq!(buffer.buffer().read(cx).text(), "a"); + }); + + // Step 4: Reconnect and make a racing edit in parallel. + // + // The race condition occurs when: + // 1. Transport reconnects, handle_connect captures version V (with "b") and sends RejoinChannelBuffers + // 2. DURING the async gap (awaiting response), user makes edit "c" + // 3. 
on_buffer_update sends UpdateChannelBuffer (succeeds because transport is up) + // 4. Server receives BOTH messages concurrently (FuturesUnordered) + // 5. If UpdateChannelBuffer commits first, server version is inflated to include "c" + // 6. RejoinChannelBuffers reads inflated version and sends it back + // 7. Client's serialize_ops(inflated_version) filters out "b" (offline edit) + // because the inflated version's timestamp covers "b"'s timestamp + + // Get the buffer handle for spawning + let buffer_for_edit = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer()); + + // Spawn the edit task - it will wait for executor to run it + let edit_task = cx_a.spawn({ + let buffer = buffer_for_edit; + async move |mut cx| { + let _ = buffer.update(&mut cx, |buffer, cx| { + buffer.edit([(2..2, "c")], None, cx); + }); + } + }); + + // Allow connections so reconnect can succeed + server.allow_connections(); + + // Advance clock to trigger reconnection attempt + executor.advance_clock(RECEIVE_TIMEOUT); + + // Run the edit task - this races with handle_connect + edit_task.detach(); + + // Let everything settle. + executor.run_until_parked(); + + // Step 7: Read final buffer text from both clients. + let text_a = channel_buffer_a.read_with(cx_a, |buffer, cx| buffer.buffer().read(cx).text()); + let text_b = channel_buffer_b.read_with(cx_b, |buffer, cx| buffer.buffer().read(cx).text()); + + // Both clients must see the same text containing all three edits. + assert_eq!( + text_a, text_b, + "Client A and B diverged! A sees {:?}, B sees {:?}. \ + Operations were lost during reconnection.", + text_a, text_b + ); + assert!( + text_a.contains('a'), + "Initial edit 'a' missing from final text {:?}", + text_a + ); + assert!( + text_a.contains('b'), + "Offline edit 'b' missing from final text {:?}. 
\ + This is the reconnection race bug: the offline operation was \ + filtered out by serialize_ops because the server_version was \ + inflated by a racing UpdateChannelBuffer.", + text_a + ); + assert!( + text_a.contains('c'), + "Racing edit 'c' missing from final text {:?}", + text_a + ); + + // Step 8: Verify the invariant directly — every operation known to + // client A must be observed by client B's version. If any operation + // in A's history is not covered by B's version, it was lost. + channel_buffer_a.read_with(cx_a, |buf_a, cx_a_inner| { + let buffer_a = buf_a.buffer().read(cx_a_inner); + let ops_a = buffer_a.operations(); + channel_buffer_b.read_with(cx_b, |buf_b, cx_b_inner| { + let buffer_b = buf_b.buffer().read(cx_b_inner); + let version_b = buffer_b.version(); + for (lamport, _op) in ops_a.iter() { + assert!( + version_b.observed(*lamport), + "Operation with lamport timestamp {:?} from client A \ + is NOT observed by client B's version. This operation \ + was lost during reconnection.", + lamport + ); + } + }); + }); +} + #[track_caller] fn assert_collaborators(collaborators: &HashMap, ids: &[Option]) { let mut user_ids = collaborators From c9dc794786b104f2398a881b7a27db788765ae51 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Feb 2026 22:49:14 -0700 Subject: [PATCH 058/548] Fix panic in copilot (#50056) Fixes ZED-599 register_buffer() only sometimes registers the buffer Release Notes: - Fixed a panic in Copilot completions --- crates/copilot/src/copilot.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/crates/copilot/src/copilot.rs b/crates/copilot/src/copilot.rs index a1bbc26b9c44d0f68e120a10bf11d0f3cae19d73..179e217d207554bcf226ce905aa9226c1c334b72 100644 --- a/crates/copilot/src/copilot.rs +++ b/crates/copilot/src/copilot.rs @@ -1035,10 +1035,9 @@ impl Copilot { }; let buffer_entity = buffer.clone(); let lsp = server.lsp.clone(); - let registered_buffer = server - .registered_buffers - 
.get_mut(&buffer.entity_id()) - .unwrap(); + let Some(registered_buffer) = server.registered_buffers.get_mut(&buffer.entity_id()) else { + return Task::ready(Err(anyhow::anyhow!("buffer not registered"))); + }; let pending_snapshot = registered_buffer.report_changes(buffer, cx); let buffer = buffer.read(cx); let uri = registered_buffer.uri.clone(); From cb793a4667c63df8541315225ac5c165dc5c0a88 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Feb 2026 22:55:32 -0700 Subject: [PATCH 059/548] Fix a panic when git askpass triggers during commit (#50057) Fixes ZED-597 Release Notes: - Fixed a panic when the askpass dialogue opened while committing. --- crates/git_ui/src/git_panel.rs | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index b8caf478305609b7ea95874333f1483c448ac242..fe7d8975010ecf1055bb45e6986ecca363314e2e 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -3171,18 +3171,16 @@ impl GitPanel { window: &mut Window, cx: &mut Context, ) -> AskPassDelegate { - let this = cx.weak_entity(); + let workspace = self.workspace.clone(); let operation = operation.into(); let window = window.window_handle(); AskPassDelegate::new(&mut cx.to_async(), move |prompt, tx, cx| { window .update(cx, |_, window, cx| { - this.update(cx, |this, cx| { - this.workspace.update(cx, |workspace, cx| { - workspace.toggle_modal(window, cx, |window, cx| { - AskPassModal::new(operation.clone(), prompt.into(), tx, window, cx) - }); - }) + workspace.update(cx, |workspace, cx| { + workspace.toggle_modal(window, cx, |window, cx| { + AskPassModal::new(operation.clone(), prompt.into(), tx, window, cx) + }); }) }) .ok(); From 0103f151c2fefdf210d12275764e94c57037998b Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Wed, 25 Feb 2026 01:27:52 -0500 Subject: [PATCH 060/548] agent_server_store: Broaden Windows asset detection to all architectures (#50061) 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Previously only `x86_64` Windows used ZIP archives, but ARM64 Windows builds also use ZIP format. Closes #50039. > [!NOTE] > The P1 is two-fold: the user cannot download the ZIP file on Windows ARM. BUT -- the Agent Panel is stalled because of that. This ONLY makes it so that the ZIP download doesn’t fail, but if for some reason the download fails, the panel is genuinely stuck with no recovery path. Every restart attempts the same download, hits the same GZIP error, and silently drops it again. Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Broaden Windows asset detection to all architectures --- crates/project/src/agent_server_store.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index 3dcbab8bafb470602c99c88049eb5266886ed1cd..958c422dc70bd53e5a66f007f9ac43fd1c61bf27 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -1615,7 +1615,7 @@ impl ExternalAgentServer for LocalCodex { &asset.browser_download_url, digest, &version_dir, - if cfg!(target_os = "windows") && cfg!(target_arch = "x86_64") { + if cfg!(target_os = "windows") { AssetKind::Zip } else { AssetKind::TarGz From 40d3aa6fea08f8f27449e290f3e390f69b07de99 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 24 Feb 2026 23:33:34 -0700 Subject: [PATCH 061/548] Make Workspace::split infallible (#50060) Fixes ZED-596 Release Notes: - Fixed a panic in editor::GoToDefinitionSplit if you managed to close the current pane before the definitions 
were resolved --- crates/debugger_ui/src/session/running.rs | 6 ++-- crates/terminal_view/src/terminal_panel.rs | 30 +++++++------------ crates/workspace/src/pane_group.rs | 35 ++++++++++++++-------- crates/workspace/src/workspace.rs | 26 ++++------------ 4 files changed, 42 insertions(+), 55 deletions(-) diff --git a/crates/debugger_ui/src/session/running.rs b/crates/debugger_ui/src/session/running.rs index 626c807ded5e0a01b086d7311cc083bab321c7f6..59e7226f596f1266fdeb3c5f3b60e1f97b81c850 100644 --- a/crates/debugger_ui/src/session/running.rs +++ b/crates/debugger_ui/src/session/running.rs @@ -356,11 +356,11 @@ pub(crate) fn new_debugger_pane( debug_assert!(_previous_subscription.is_none()); running .panes - .split(&this_pane, &new_pane, split_direction, cx)?; - anyhow::Ok(new_pane) + .split(&this_pane, &new_pane, split_direction, cx); + new_pane }); - match new_pane.and_then(|r| r) { + match new_pane { Ok(new_pane) => { move_item( &source, diff --git a/crates/terminal_view/src/terminal_panel.rs b/crates/terminal_view/src/terminal_panel.rs index 80926f17f0ce5a4cd464bfe3bf71e5576495d407..88bde3c771f72a0771a405cfbf123ac4e2286ad9 100644 --- a/crates/terminal_view/src/terminal_panel.rs +++ b/crates/terminal_view/src/terminal_panel.rs @@ -397,10 +397,7 @@ impl TerminalPanel { }; panel .update_in(cx, |panel, window, cx| { - panel - .center - .split(&pane, &new_pane, direction, cx) - .log_err(); + panel.center.split(&pane, &new_pane, direction, cx); window.focus(&new_pane.focus_handle(cx), cx); }) .ok(); @@ -424,7 +421,7 @@ impl TerminalPanel { new_pane.update(cx, |pane, cx| { pane.add_item(item, true, true, None, window, cx); }); - self.center.split(&pane, &new_pane, direction, cx).log_err(); + self.center.split(&pane, &new_pane, direction, cx); window.focus(&new_pane.focus_handle(cx), cx); } }; @@ -1303,17 +1300,13 @@ pub fn new_terminal_pane( &new_pane, split_direction, cx, - )?; - anyhow::Ok(new_pane) + ); + new_pane }) else { return; }; - let Some(new_pane) = 
new_pane.log_err() else { - return; - }; - move_item( &source, &new_pane, @@ -1569,15 +1562,12 @@ impl Render for TerminalPanel { _ = terminal_panel.update_in( cx, |terminal_panel, window, cx| { - terminal_panel - .center - .split( - &terminal_panel.active_pane, - &new_pane, - SplitDirection::Right, - cx, - ) - .log_err(); + terminal_panel.center.split( + &terminal_panel.active_pane, + &new_pane, + SplitDirection::Right, + cx, + ); let new_pane = new_pane.read(cx); window.focus(&new_pane.focus_handle(cx), cx); }, diff --git a/crates/workspace/src/pane_group.rs b/crates/workspace/src/pane_group.rs index 1d28b05514baa53244926bfad906e667b0b287cd..0921a19486718c5375ed17ebbb3d7e314546f8d7 100644 --- a/crates/workspace/src/pane_group.rs +++ b/crates/workspace/src/pane_group.rs @@ -61,22 +61,33 @@ impl PaneGroup { new_pane: &Entity, direction: SplitDirection, cx: &mut App, - ) -> Result<()> { - let result = match &mut self.root { + ) { + let found = match &mut self.root { Member::Pane(pane) => { if pane == old_pane { self.root = Member::new_axis(old_pane.clone(), new_pane.clone(), direction); - Ok(()) + true } else { - anyhow::bail!("Pane not found"); + false } } Member::Axis(axis) => axis.split(old_pane, new_pane, direction), }; - if result.is_ok() { - self.mark_positions(cx); + + // If the pane wasn't found, fall back to splitting the first pane in the tree. 
+ if !found { + let first_pane = self.root.first_pane(); + match &mut self.root { + Member::Pane(_) => { + self.root = Member::new_axis(first_pane, new_pane.clone(), direction); + } + Member::Axis(axis) => { + let _ = axis.split(&first_pane, new_pane, direction); + } + } } - result + + self.mark_positions(cx); } pub fn bounding_box_for_pane(&self, pane: &Entity) -> Option> { @@ -612,12 +623,12 @@ impl PaneAxis { old_pane: &Entity, new_pane: &Entity, direction: SplitDirection, - ) -> Result<()> { + ) -> bool { for (mut idx, member) in self.members.iter_mut().enumerate() { match member { Member::Axis(axis) => { - if axis.split(old_pane, new_pane, direction).is_ok() { - return Ok(()); + if axis.split(old_pane, new_pane, direction) { + return true; } } Member::Pane(pane) => { @@ -631,12 +642,12 @@ impl PaneAxis { *member = Member::new_axis(old_pane.clone(), new_pane.clone(), direction); } - return Ok(()); + return true; } } } } - anyhow::bail!("Pane not found"); + false } fn insert_pane(&mut self, idx: usize, new_pane: &Entity) { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index c1d26476544ecf5db51a9c7b358ad12c84aa168f..975f00d589b76e56bcf6d819798c23bd8844a2d6 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -4275,14 +4275,7 @@ impl Workspace { .find_pane_in_direction(direction, cx) .unwrap_or_else(|| self.active_pane.clone()); let new_pane = self.add_pane(window, cx); - if self - .center - .split(&split_off_pane, &new_pane, direction, cx) - .log_err() - .is_none() - { - return; - }; + self.center.split(&split_off_pane, &new_pane, direction, cx); new_pane } }; @@ -4465,14 +4458,8 @@ impl Workspace { return; } let new_pane = self.add_pane(window, cx); - if self - .center - .split(&self.active_pane, &new_pane, action.direction, cx) - .log_err() - .is_none() - { - return; - }; + self.center + .split(&self.active_pane, &new_pane, action.direction, cx); new_pane } }; @@ -4770,8 +4757,7 @@ impl 
Workspace { ) -> Entity { let new_pane = self.add_pane(window, cx); self.center - .split(&pane_to_split, &new_pane, split_direction, cx) - .unwrap(); + .split(&pane_to_split, &new_pane, split_direction, cx); cx.notify(); new_pane } @@ -4790,7 +4776,7 @@ impl Workspace { new_pane.update(cx, |pane, cx| { pane.add_item(item, true, true, None, window, cx) }); - self.center.split(&pane, &new_pane, direction, cx).unwrap(); + self.center.split(&pane, &new_pane, direction, cx); cx.notify(); } @@ -4817,7 +4803,7 @@ impl Workspace { pane.set_nav_history(nav_history, cx); pane.add_item(clone, true, true, None, window, cx) }); - this.center.split(&pane, &new_pane, direction, cx).unwrap(); + this.center.split(&pane, &new_pane, direction, cx); cx.notify(); new_pane }) From a0d7698867a6e05934a12fe0f217933de99bbff3 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 25 Feb 2026 08:10:15 +0100 Subject: [PATCH 062/548] agent: Delay edit tool buffer clearing until the first chunk is sent (#49633) Release Notes: - The agent edit tool no longer clears files until the first edit comes in, preventing a buffer being empty for prolonged time if the agent is slow in reporting the first text chunk --- crates/agent/src/edit_agent.rs | 106 ++++++++++++++++++++++----------- 1 file changed, 71 insertions(+), 35 deletions(-) diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index 3e67cba1b63f4136a03b88c3007aee99489a6e80..9f2f2f1877e20620373b1a7aacbf2f7b3a407bfd 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -166,56 +166,69 @@ impl EditAgent { output_events_tx: mpsc::UnboundedSender, cx: &mut AsyncApp, ) -> Result<()> { - cx.update(|cx| { - buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); - self.action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); + let buffer_id = cx.update(|cx| { + let buffer_id = buffer.read(cx).remote_id(); self.project.update(cx, |project, cx| { project.set_agent_location( 
Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer(buffer.read(cx).remote_id()), + position: language::Anchor::min_for_buffer(buffer_id), }), cx, ) }); + buffer_id + }); + + let send_edit_event = || { output_events_tx .unbounded_send(EditAgentOutputEvent::Edited( - Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()), + Anchor::min_max_range_for_buffer(buffer_id), )) - .ok(); - }); - + .ok() + }; + let set_agent_location = |cx: &mut _| { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: language::Anchor::max_for_buffer(buffer_id), + }), + cx, + ) + }) + }; + let mut first_chunk = true; while let Some(event) = parse_rx.next().await { match event? { CreateFileParserEvent::NewTextChunk { chunk } => { - let buffer_id = cx.update(|cx| { - buffer.update(cx, |buffer, cx| buffer.append(chunk, cx)); + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + if mem::take(&mut first_chunk) { + buffer.set_text(chunk, cx) + } else { + buffer.append(chunk, cx) + } + }); self.action_log .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer( - buffer.read(cx).remote_id(), - ), - }), - cx, - ) - }); - buffer.read(cx).remote_id() + set_agent_location(cx); }); - output_events_tx - .unbounded_send(EditAgentOutputEvent::Edited( - Anchor::min_max_range_for_buffer(buffer_id), - )) - .ok(); + send_edit_event(); } } } + if first_chunk { + cx.update(|cx| { + buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); + self.action_log + .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + set_agent_location(cx); + }); + send_edit_event(); + } + Ok(()) } @@ -1194,19 +1207,16 @@ mod tests { ); cx.run_until_parked(); - assert_matches!( - drain_events(&mut events).as_slice(), - 
[EditAgentOutputEvent::Edited(_)] - ); + assert_eq!(drain_events(&mut events).as_slice(), []); assert_eq!( buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), - "" + "abc\ndef\nghi" ); assert_eq!( project.read_with(cx, |project, _| project.agent_location()), Some(AgentLocation { buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer( + position: language::Anchor::min_for_buffer( cx.update(|cx| buffer.read(cx).remote_id()) ), }) @@ -1290,6 +1300,32 @@ mod tests { ); } + #[gpui::test] + async fn test_overwrite_no_content(cx: &mut TestAppContext) { + let agent = init_test(cx).await; + let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi", cx)); + let (chunks_tx, chunks_rx) = mpsc::unbounded::<&str>(); + let (apply, mut events) = agent.overwrite_with_chunks( + buffer.clone(), + chunks_rx.map(|chunk| Ok(chunk.to_string())), + &mut cx.to_async(), + ); + + drop(chunks_tx); + cx.run_until_parked(); + + let result = apply.await; + assert!(result.is_ok(),); + assert_matches!( + drain_events(&mut events).as_slice(), + [EditAgentOutputEvent::Edited { .. }] + ); + assert_eq!( + buffer.read_with(cx, |buffer, _| buffer.snapshot().text()), + "" + ); + } + #[gpui::test(iterations = 100)] async fn test_indent_new_text_chunks(mut rng: StdRng) { let chunks = to_random_chunks(&mut rng, " abc\n def\n ghi"); From 21bd74a0fb686b218cba856d4574ea6bf985608e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 25 Feb 2026 08:12:04 +0100 Subject: [PATCH 063/548] text: Narrow insertion-relative offsets from `usize` to `u32` (#49801) Reduces memory usage of `InsertionSlice` from 32 to 24 bytes, `Fragment` from 120 to 96 bytes by narrowing offsets that are relative to individual insertion operations from `usize` to `u32`. These offsets are bounded by the size of a single insertion, not the total buffer size, so `u32` is sufficient. 
To prevent any single insertion from exceeding `u32::MAX` bytes, both `Buffer::new_normalized` and `apply_local_edit`/`apply_remote_edit` now split large text insertions into multiple fragments via `push_fragments_for_insertion`. Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/clock/src/clock.rs | 2 +- crates/language/src/proto.rs | 2 +- crates/text/src/anchor.rs | 16 +- crates/text/src/locator.rs | 59 +++++- crates/text/src/tests.rs | 185 +++++++++++++++++++ crates/text/src/text.rs | 339 +++++++++++++++++++++++------------ 6 files changed, 471 insertions(+), 132 deletions(-) diff --git a/crates/clock/src/clock.rs b/crates/clock/src/clock.rs index cb0808abcae1709020f3fd3077436aeb1140a140..57612c5ff70ad7088dc4ff4bc348377b78184bae 100644 --- a/crates/clock/src/clock.rs +++ b/crates/clock/src/clock.rs @@ -61,8 +61,8 @@ pub type Seq = u32; /// used to determine the ordering of events in the editor. #[derive(Clone, Copy, Eq, Hash, PartialEq, Serialize, Deserialize)] pub struct Lamport { - pub replica_id: ReplicaId, pub value: Seq, + pub replica_id: ReplicaId, } /// A [version vector](https://en.wikipedia.org/wiki/Version_vector). diff --git a/crates/language/src/proto.rs b/crates/language/src/proto.rs index 132f971675ede12bb8ef5f941b57415f22d7ba88..89c44513067f6d2309d68a9f38984988358d8877 100644 --- a/crates/language/src/proto.rs +++ b/crates/language/src/proto.rs @@ -496,7 +496,7 @@ pub fn deserialize_anchor(anchor: proto::Anchor) -> Option { }; Some(Anchor::new( timestamp, - anchor.offset as usize, + anchor.offset as u32, bias, buffer_id, )) diff --git a/crates/text/src/anchor.rs b/crates/text/src/anchor.rs index 22a5c3090a1ef9e1c3581893ae8cbe16f79d776b..63e0570e91ef08dfce02fbbca25e97ee7519dc0a 100644 --- a/crates/text/src/anchor.rs +++ b/crates/text/src/anchor.rs @@ -20,7 +20,7 @@ pub struct Anchor { /// The byte offset into the text inserted in the operation /// at `timestamp`. 
- pub offset: usize, + pub offset: u32, /// Whether this anchor stays attached to the character *before* or *after* /// the offset. pub bias: Bias, @@ -49,7 +49,7 @@ impl Anchor { pub const MIN: Self = Self { timestamp_replica_id: clock::Lamport::MIN.replica_id, timestamp_value: clock::Lamport::MIN.value, - offset: usize::MIN, + offset: u32::MIN, bias: Bias::Left, buffer_id: None, }; @@ -57,14 +57,14 @@ impl Anchor { pub const MAX: Self = Self { timestamp_replica_id: clock::Lamport::MAX.replica_id, timestamp_value: clock::Lamport::MAX.value, - offset: usize::MAX, + offset: u32::MAX, bias: Bias::Right, buffer_id: None, }; pub fn new( timestamp: clock::Lamport, - offset: usize, + offset: u32, bias: Bias, buffer_id: Option, ) -> Self { @@ -81,7 +81,7 @@ impl Anchor { Self { timestamp_replica_id: clock::Lamport::MIN.replica_id, timestamp_value: clock::Lamport::MIN.value, - offset: usize::MIN, + offset: u32::MIN, bias: Bias::Left, buffer_id: Some(buffer_id), } @@ -91,7 +91,7 @@ impl Anchor { Self { timestamp_replica_id: clock::Lamport::MAX.replica_id, timestamp_value: clock::Lamport::MAX.value, - offset: usize::MAX, + offset: u32::MAX, bias: Bias::Right, buffer_id: Some(buffer_id), } @@ -190,13 +190,13 @@ impl Anchor { pub fn is_min(&self) -> bool { self.timestamp() == clock::Lamport::MIN - && self.offset == usize::MIN + && self.offset == u32::MIN && self.bias == Bias::Left } pub fn is_max(&self) -> bool { self.timestamp() == clock::Lamport::MAX - && self.offset == usize::MAX + && self.offset == u32::MAX && self.bias == Bias::Right } diff --git a/crates/text/src/locator.rs b/crates/text/src/locator.rs index cc94441a3d1ea2654875cd286d91b9dc2334ab53..be72c4dd9564d4c6024ce0206ff0eb99b0cd457b 100644 --- a/crates/text/src/locator.rs +++ b/crates/text/src/locator.rs @@ -8,18 +8,32 @@ use std::iter; /// /// The initial location for a collection should be `Locator::between(Locator::min(), Locator::max())`, /// leaving room for items to be inserted before and after it. 
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Locator(SmallVec<[u64; 4]>); +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Locator(SmallVec<[u64; 2]>); + +impl Clone for Locator { + fn clone(&self) -> Self { + // We manually implement clone to avoid the overhead of SmallVec's clone implementation. + // Using `from_slice` is faster than `clone` for SmallVec as we can use our `Copy` implementation of u64. + Self { + 0: SmallVec::from_slice(&self.0), + } + } + + fn clone_from(&mut self, source: &Self) { + self.0.clone_from(&source.0); + } +} impl Locator { pub const fn min() -> Self { - // SAFETY: 1 is <= 4 - Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MIN; 4], 1) }) + // SAFETY: 1 is <= 2 + Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MIN; 2], 1) }) } pub const fn max() -> Self { - // SAFETY: 1 is <= 4 - Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MAX; 4], 1) }) + // SAFETY: 1 is <= 2 + Self(unsafe { SmallVec::from_const_with_len_unchecked([u64::MAX; 2], 1) }) } pub const fn min_ref() -> &'static Self { @@ -40,6 +54,7 @@ impl Locator { let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX)); let mut location = SmallVec::new(); for (lhs, rhs) in lhs.zip(rhs) { + // This shift is essential! It optimizes for the common case of sequential typing. let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48); location.push(mid); if mid > lhs { @@ -127,4 +142,36 @@ mod tests { ); } } + + // Simulates 100,000 sequential forward appends (the pattern used when + // building a buffer's initial fragments and when + // `push_fragments_for_insertion` chains new text fragments). 
+ #[test] + fn test_sequential_forward_append_stays_at_depth_1() { + let mut prev = Locator::min(); + let max = Locator::max(); + for _ in 0..100_000 { + let loc = Locator::between(&prev, &max); + assert_eq!(loc.len(), 1, "sequential forward append grew past depth 1"); + prev = loc; + } + } + + // Simulates the most common real editing pattern: a fragment is split + // (producing a depth-2 prefix), then 10,000 new fragments are inserted + // sequentially forward within that split region. + #[test] + fn test_typing_at_cursor_stays_at_depth_2() { + let initial = Locator::between(&Locator::min(), &Locator::max()); + let prefix = Locator::between(&Locator::min(), &initial); + assert_eq!(prefix.len(), 2); + + let suffix_id = initial; + let mut prev = prefix; + for _ in 0..10_000 { + let loc = Locator::between(&prev, &suffix_id); + assert_eq!(loc.len(), 2, "forward typing after split grew past depth 2"); + prev = loc; + } + } } diff --git a/crates/text/src/tests.rs b/crates/text/src/tests.rs index c9e04e407ffdb8ffde6b139e01d78822e54e1a4b..194ac2a40d5ac96a39177eedd35b991ded30de38 100644 --- a/crates/text/src/tests.rs +++ b/crates/text/src/tests.rs @@ -810,3 +810,188 @@ fn test_random_concurrent_edits(mut rng: StdRng) { buffer.check_invariants(); } } + +#[test] +fn test_new_normalized_splits_large_base_text() { + // ASCII text that exceeds max_insertion_len + let text = "abcdefghij".repeat(10); // 100 bytes + let rope = Rope::from(text.as_str()); + let buffer = Buffer::new_normalized( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + LineEnding::Unix, + rope, + ); + assert_eq!(buffer.text(), text); + buffer.check_invariants(); + + // Verify anchors at various positions, including across chunk boundaries + for offset in [0, 1, 15, 16, 17, 50, 99] { + let anchor = buffer.anchor_before(offset); + assert_eq!( + anchor.to_offset(&buffer), + offset, + "anchor_before({offset}) round-tripped incorrectly" + ); + let anchor = buffer.anchor_after(offset); + assert_eq!( + 
anchor.to_offset(&buffer), + offset, + "anchor_after({offset}) round-tripped incorrectly" + ); + } + + // Verify editing works after a split initialization + let mut buffer = buffer; + buffer.edit([(50..60, "XYZ")]); + let mut expected = text; + expected.replace_range(50..60, "XYZ"); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); +} + +#[test] +fn test_new_normalized_splits_large_base_text_with_multibyte_chars() { + // Use multi-byte chars (é is 2 bytes in UTF-8) so that a naive byte-level + // split would land in the middle of a character. + let unit = "ééééééééé"; // 9 chars × 2 bytes = 18 bytes + let text = unit.repeat(6); // 108 bytes + let rope = Rope::from(text.as_str()); + let buffer = Buffer::new_normalized( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + LineEnding::Unix, + rope, + ); + assert_eq!(buffer.text(), text); + buffer.check_invariants(); + + // Every anchor should resolve correctly even though chunks had to be + // rounded down to a char boundary. + let snapshot = buffer.snapshot(); + for offset in (0..text.len()).filter(|o| text.is_char_boundary(*o)) { + let anchor = snapshot.anchor_before(offset); + assert_eq!( + anchor.to_offset(snapshot), + offset, + "anchor round-trip failed at byte offset {offset}" + ); + } +} + +#[test] +fn test_new_normalized_small_text_unchanged() { + // Text that fits in a single chunk should produce exactly one fragment, + // matching the original single-fragment behaviour. 
+ let text = "hello world"; + let rope = Rope::from(text); + let buffer = Buffer::new_normalized( + ReplicaId::LOCAL, + BufferId::new(1).unwrap(), + LineEnding::Unix, + rope, + ); + assert_eq!(buffer.text(), text); + buffer.check_invariants(); + assert_eq!(buffer.snapshot().fragments.items(&None).len(), 1); +} + +#[test] +fn test_edit_splits_large_insertion() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abcdefghij"); + + let large_text: Arc = "X".repeat(100).into(); + let edits = vec![(3..7, large_text.clone())]; + + buffer.edit(edits); + + let expected = format!("abc{}hij", large_text); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); + + // Anchors should resolve correctly throughout the buffer. + for offset in [0, 3, 50, 103, expected.len()] { + let anchor = buffer.anchor_before(offset); + assert_eq!( + anchor.to_offset(&buffer), + offset, + "anchor_before({offset}) round-tripped incorrectly" + ); + } +} + +#[test] +fn test_edit_splits_large_insertion_with_multibyte_chars() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "abcdefghij"); + + // 4-byte chars so that naive byte splits would land mid-character. + let large_text: Arc = "😀".repeat(30).into(); // 30 × 4 = 120 bytes + let edits = vec![(5..5, large_text.clone())]; + + buffer.edit(edits); + + let expected = format!("abcde{}fghij", large_text); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); +} + +#[test] +fn test_edit_splits_large_insertion_among_multiple_edits() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "ABCDEFGHIJ"); + + let large_text: Arc = "x".repeat(60).into(); + // Three edits: small, large, small. The large one must be split while + // preserving the correct positions of the surrounding edits. 
+ let edits = vec![ + (1..2, Arc::from("y")), // replace "B" with "y" + (4..6, large_text.clone()), // replace "EF" with 60 x's + (9..9, Arc::from("z")), // insert "z" before "J" + ]; + + buffer.edit(edits); + + // Original: A B C D E F G H I J + // After (1..2, "y"): A y C D E F G H I J + // After (4..6, large): A y C D <60 x's> G H I J + // After (9..9, "z"): A y C D <60 x's> G H I z J + let expected = format!("AyCD{}GHIzJ", large_text); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); +} + +#[test] +fn test_edit_splits_multiple_large_insertions() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "ABCDE"); + + let text1: Arc = "a".repeat(40).into(); + let text2: Arc = "b".repeat(40).into(); + let edits = vec![ + (1..2, text1.clone()), // replace "B" with 40 a's + (3..4, text2.clone()), // replace "D" with 40 b's + ]; + + buffer.edit(edits); + + let expected = format!("A{}C{}E", text1, text2); + assert_eq!(buffer.text(), expected); + buffer.check_invariants(); +} + +#[test] +fn test_edit_undo_after_split() { + let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), "hello world"); + buffer.set_group_interval(Duration::from_secs(0)); + let original = buffer.text(); + + let large_text: Arc = "Z".repeat(50).into(); + let edits = vec![(5..6, large_text)]; + buffer.edit(edits); + assert_ne!(buffer.text(), original); + buffer.check_invariants(); + + // Undo should restore the original text even though the edit was split + // into multiple internal operations grouped in one transaction. 
+ buffer.undo(); + assert_eq!(buffer.text(), original); + buffer.check_invariants(); +} diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 2c51a0d5e5b29bc08fdacc6b8b90edd8f65cd83d..a5bdbe443bbaa4723c8d3104bfed28e4c2fe8fdb 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -48,6 +48,12 @@ use util::RandomCharIter; static LINE_SEPARATORS_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX")); +/// The maximum length of a single insertion operation. +/// Fragments larger than this will be split into multiple smaller +/// fragments. This allows us to use relative `u32` offsets instead of `usize`, +/// reducing memory usage. +const MAX_INSERTION_LEN: usize = if cfg!(test) { 16 } else { u32::MAX as usize }; + pub type TransactionId = clock::Lamport; pub struct Buffer { @@ -155,18 +161,38 @@ struct History { #[derive(Clone, Debug, Eq, PartialEq)] struct InsertionSlice { - edit_id: clock::Lamport, - insertion_id: clock::Lamport, - range: Range, + // Inline the lamports to allow the replica ids to share the same alignment + // saving 4 bytes space edit_id: clock::Lamport, + edit_id_value: clock::Seq, + edit_id_replica_id: ReplicaId, + // insertion_id: clock::Lamport, + insertion_id_value: clock::Seq, + insertion_id_replica_id: ReplicaId, + range: Range, } impl Ord for InsertionSlice { fn cmp(&self, other: &Self) -> Ordering { - self.edit_id - .cmp(&other.edit_id) - .then_with(|| self.insertion_id.cmp(&other.insertion_id)) - .then_with(|| self.range.start.cmp(&other.range.start)) - .then_with(|| self.range.end.cmp(&other.range.end)) + Lamport { + value: self.edit_id_value, + replica_id: self.edit_id_replica_id, + } + .cmp(&Lamport { + value: other.edit_id_value, + replica_id: other.edit_id_replica_id, + }) + .then_with(|| { + Lamport { + value: self.insertion_id_value, + replica_id: self.insertion_id_replica_id, + } + .cmp(&Lamport { + value: other.insertion_id_value, + replica_id: 
other.insertion_id_replica_id, + }) + }) + .then_with(|| self.range.start.cmp(&other.range.start)) + .then_with(|| self.range.end.cmp(&other.range.end)) } } @@ -179,8 +205,10 @@ impl PartialOrd for InsertionSlice { impl InsertionSlice { fn from_fragment(edit_id: clock::Lamport, fragment: &Fragment) -> Self { Self { - edit_id, - insertion_id: fragment.timestamp, + edit_id_value: edit_id.value, + edit_id_replica_id: edit_id.replica_id, + insertion_id_value: fragment.timestamp.value, + insertion_id_replica_id: fragment.timestamp.replica_id, range: fragment.insertion_offset..fragment.insertion_offset + fragment.len, } } @@ -309,6 +337,7 @@ impl History { fn finalize_last_transaction(&mut self) -> Option<&Transaction> { self.undo_stack.last_mut().map(|entry| { + entry.transaction.edit_ids.shrink_to_fit(); entry.suppress_grouping = true; &entry.transaction }) @@ -489,7 +518,7 @@ struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> { since: &'a clock::Global, old_end: D, new_end: D, - range: Range<(&'a Locator, usize)>, + range: Range<(&'a Locator, u32)>, buffer_id: BufferId, } @@ -536,18 +565,18 @@ impl Edit<(D1, D2)> { } #[derive(Eq, PartialEq, Clone, Debug)] -pub struct Fragment { - pub id: Locator, - pub timestamp: clock::Lamport, - pub insertion_offset: usize, - pub len: usize, - pub visible: bool, - pub deletions: SmallVec<[clock::Lamport; 2]>, - pub max_undos: clock::Global, +struct Fragment { + id: Locator, + timestamp: clock::Lamport, + insertion_offset: u32, + len: u32, + visible: bool, + deletions: SmallVec<[clock::Lamport; 2]>, + max_undos: clock::Global, } #[derive(Eq, PartialEq, Clone, Debug)] -pub struct FragmentSummary { +struct FragmentSummary { text: FragmentTextSummary, max_id: Locator, max_version: clock::Global, @@ -575,14 +604,14 @@ impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary { #[derive(Eq, PartialEq, Clone, Debug)] struct InsertionFragment { timestamp: clock::Lamport, - split_offset: usize, + 
split_offset: u32, fragment_id: Locator, } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] struct InsertionFragmentKey { timestamp: clock::Lamport, - split_offset: usize, + split_offset: u32, } #[derive(Clone, Debug, Eq, PartialEq)] @@ -740,18 +769,37 @@ impl Buffer { let insertion_timestamp = clock::Lamport::new(ReplicaId::LOCAL); lamport_clock.observe(insertion_timestamp); version.observe(insertion_timestamp); - let fragment_id = Locator::between(&Locator::min(), &Locator::max()); - let fragment = Fragment { - id: fragment_id, - timestamp: insertion_timestamp, - insertion_offset: 0, - len: visible_text.len(), - visible: true, - deletions: Default::default(), - max_undos: Default::default(), - }; - insertions.push(InsertionFragment::new(&fragment), ()); - fragments.push(fragment, &None); + + let mut insertion_offset: u32 = 0; + let mut text_offset: usize = 0; + let mut prev_locator = Locator::min(); + + while text_offset < visible_text.len() { + let target_end = visible_text.len().min(text_offset + MAX_INSERTION_LEN); + let chunk_end = if target_end == visible_text.len() { + target_end + } else { + visible_text.floor_char_boundary(target_end) + }; + let chunk_len = chunk_end - text_offset; + + let fragment_id = Locator::between(&prev_locator, &Locator::max()); + let fragment = Fragment { + id: fragment_id.clone(), + timestamp: insertion_timestamp, + insertion_offset, + len: chunk_len as u32, + visible: true, + deletions: Default::default(), + max_undos: Default::default(), + }; + insertions.push(InsertionFragment::new(&fragment), ()); + fragments.push(fragment, &None); + + prev_locator = fragment_id; + insertion_offset += chunk_len as u32; + text_offset = chunk_end; + } } Buffer { @@ -853,7 +901,7 @@ impl Buffer { new_text: Vec::with_capacity(edits.len()), }; let mut new_insertions = Vec::new(); - let mut insertion_offset = 0; + let mut insertion_offset: u32 = 0; let mut insertion_slices = Vec::new(); let mut edits = edits @@ -879,8 +927,9 @@ impl 
Buffer { if fragment_start > old_fragments.start().visible { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - suffix.insertion_offset += fragment_start - old_fragments.start().visible; + suffix.len = (fragment_end - fragment_start) as u32; + suffix.insertion_offset += + (fragment_start - old_fragments.start().visible) as u32; new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); @@ -899,8 +948,8 @@ impl Buffer { // Preserve any portion of the current fragment that precedes this range. if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); - prefix.len = range.start - fragment_start; - prefix.insertion_offset += fragment_start - old_fragments.start().visible; + prefix.len = (range.start - fragment_start) as u32; + prefix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32; prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); new_insertions.push(InsertionFragment::insert_new(&prefix)); new_ropes.push_fragment(&prefix, prefix.visible); @@ -912,29 +961,24 @@ impl Buffer { if !new_text.is_empty() { let new_start = new_fragments.summary().text.visible; - let fragment = Fragment { - id: Locator::between( - &new_fragments.summary().max_id, - old_fragments - .item() - .map_or(&Locator::max(), |old_fragment| &old_fragment.id), - ), + let next_fragment_id = old_fragments + .item() + .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id); + Self::push_fragments_for_insertion( + new_text.as_ref(), timestamp, - insertion_offset, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }; + &mut insertion_offset, + &mut new_fragments, + &mut new_insertions, + &mut insertion_slices, + &mut new_ropes, + next_fragment_id, + timestamp, + ); edits_patch.push(Edit { old: 
fragment_start..fragment_start, new: new_start..new_start + new_text.len(), }); - insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment)); - new_insertions.push(InsertionFragment::insert_new(&fragment)); - new_ropes.push_str(new_text.as_ref()); - new_fragments.push(fragment, &None); - insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -945,8 +989,9 @@ impl Buffer { let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); if fragment.visible { - intersection.len = intersection_end - fragment_start; - intersection.insertion_offset += fragment_start - old_fragments.start().visible; + intersection.len = (intersection_end - fragment_start) as u32; + intersection.insertion_offset += + (fragment_start - old_fragments.start().visible) as u32; intersection.id = Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.push(timestamp); @@ -983,8 +1028,8 @@ impl Buffer { let fragment_end = old_fragments.end().visible; if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end - fragment_start; - suffix.insertion_offset += fragment_start - old_fragments.start().visible; + suffix.len = (fragment_end - fragment_start) as u32; + suffix.insertion_offset += (fragment_start - old_fragments.start().visible) as u32; new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); @@ -1075,7 +1120,7 @@ impl Buffer { let mut insertion_slices = Vec::new(); let cx = Some(version.clone()); let mut new_insertions = Vec::new(); - let mut insertion_offset = 0; + let mut insertion_offset: u32 = 0; let mut new_ropes = RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0)); let mut old_fragments = self @@ -1097,9 +1142,9 @@ impl Buffer { if fragment_start > 
old_fragments.start().0.full_offset() { if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end.0 - fragment_start.0; + suffix.len = (fragment_end.0 - fragment_start.0) as u32; suffix.insertion_offset += - fragment_start - old_fragments.start().0.full_offset(); + (fragment_start - old_fragments.start().0.full_offset()) as u32; new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); @@ -1118,8 +1163,9 @@ impl Buffer { let fragment_end = old_fragments.end().0.full_offset(); if fragment_end == range.start && fragment_end > fragment_start { let mut fragment = old_fragments.item().unwrap().clone(); - fragment.len = fragment_end.0 - fragment_start.0; - fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + fragment.len = (fragment_end.0 - fragment_start.0) as u32; + fragment.insertion_offset += + (fragment_start - old_fragments.start().0.full_offset()) as u32; new_insertions.push(InsertionFragment::insert_new(&fragment)); new_ropes.push_fragment(&fragment, fragment.visible); new_fragments.push(fragment, &None); @@ -1144,8 +1190,9 @@ impl Buffer { // Preserve any portion of the current fragment that precedes this range. 
if fragment_start < range.start { let mut prefix = old_fragments.item().unwrap().clone(); - prefix.len = range.start.0 - fragment_start.0; - prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + prefix.len = (range.start.0 - fragment_start.0) as u32; + prefix.insertion_offset += + (fragment_start - old_fragments.start().0.full_offset()) as u32; prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id); new_insertions.push(InsertionFragment::insert_new(&prefix)); fragment_start = range.start; @@ -1160,29 +1207,24 @@ impl Buffer { old_start += fragment_start.0 - old_fragments.start().0.full_offset().0; } let new_start = new_fragments.summary().text.visible; - let fragment = Fragment { - id: Locator::between( - &new_fragments.summary().max_id, - old_fragments - .item() - .map_or(&Locator::max(), |old_fragment| &old_fragment.id), - ), + let next_fragment_id = old_fragments + .item() + .map_or(Locator::max_ref(), |old_fragment| &old_fragment.id); + Self::push_fragments_for_insertion( + new_text, timestamp, - insertion_offset, - len: new_text.len(), - deletions: Default::default(), - max_undos: Default::default(), - visible: true, - }; + &mut insertion_offset, + &mut new_fragments, + &mut new_insertions, + &mut insertion_slices, + &mut new_ropes, + next_fragment_id, + timestamp, + ); edits_patch.push(Edit { old: old_start..old_start, new: new_start..new_start + new_text.len(), }); - insertion_slices.push(InsertionSlice::from_fragment(timestamp, &fragment)); - new_insertions.push(InsertionFragment::insert_new(&fragment)); - new_ropes.push_str(new_text); - new_fragments.push(fragment, &None); - insertion_offset += new_text.len(); } // Advance through every fragment that intersects this range, marking the intersecting @@ -1193,9 +1235,9 @@ impl Buffer { let mut intersection = fragment.clone(); let intersection_end = cmp::min(range.end, fragment_end); if fragment.was_visible(version, &self.undo_map) { - intersection.len = 
intersection_end.0 - fragment_start.0; + intersection.len = (intersection_end.0 - fragment_start.0) as u32; intersection.insertion_offset += - fragment_start - old_fragments.start().0.full_offset(); + (fragment_start - old_fragments.start().0.full_offset()) as u32; intersection.id = Locator::between(&new_fragments.summary().max_id, &intersection.id); intersection.deletions.push(timestamp); @@ -1208,7 +1250,7 @@ impl Buffer { + (fragment_start.0 - old_fragments.start().0.full_offset().0); let new_start = new_fragments.summary().text.visible; edits_patch.push(Edit { - old: old_start..old_start + intersection.len, + old: old_start..old_start + intersection.len as usize, new: new_start..new_start, }); } @@ -1229,8 +1271,9 @@ impl Buffer { let fragment_end = old_fragments.end().0.full_offset(); if fragment_end > fragment_start { let mut suffix = old_fragments.item().unwrap().clone(); - suffix.len = fragment_end.0 - fragment_start.0; - suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset(); + suffix.len = (fragment_end.0 - fragment_start.0) as u32; + suffix.insertion_offset += + (fragment_start - old_fragments.start().0.full_offset()) as u32; new_insertions.push(InsertionFragment::insert_new(&suffix)); new_ropes.push_fragment(&suffix, suffix.visible); new_fragments.push(suffix, &None); @@ -1252,6 +1295,49 @@ impl Buffer { self.subscriptions.publish_mut(&edits_patch) } + fn push_fragments_for_insertion( + new_text: &str, + timestamp: clock::Lamport, + insertion_offset: &mut u32, + new_fragments: &mut SumTree, + new_insertions: &mut Vec>, + insertion_slices: &mut Vec, + new_ropes: &mut RopeBuilder, + next_fragment_id: &Locator, + edit_timestamp: clock::Lamport, + ) { + let mut text_offset = 0; + while text_offset < new_text.len() { + let target_end = new_text.len().min(text_offset + MAX_INSERTION_LEN); + let chunk_end = if target_end == new_text.len() { + target_end + } else { + new_text.floor_char_boundary(target_end) + }; + if chunk_end == 
text_offset { + break; + } + let chunk_len = chunk_end - text_offset; + + let fragment = Fragment { + id: Locator::between(&new_fragments.summary().max_id, next_fragment_id), + timestamp, + insertion_offset: *insertion_offset, + len: chunk_len as u32, + deletions: Default::default(), + max_undos: Default::default(), + visible: true, + }; + insertion_slices.push(InsertionSlice::from_fragment(edit_timestamp, &fragment)); + new_insertions.push(InsertionFragment::insert_new(&fragment)); + new_fragments.push(fragment, &None); + + *insertion_offset += chunk_len as u32; + text_offset = chunk_end; + } + new_ropes.push_str(new_text); + } + fn fragment_ids_for_edits<'a>( &'a self, edit_ids: impl Iterator, @@ -1260,38 +1346,56 @@ impl Buffer { let mut insertion_slices = Vec::new(); for edit_id in edit_ids { let insertion_slice = InsertionSlice { - edit_id: *edit_id, - insertion_id: clock::Lamport::MIN, + edit_id_value: edit_id.value, + edit_id_replica_id: edit_id.replica_id, + insertion_id_value: Lamport::MIN.value, + insertion_id_replica_id: Lamport::MIN.replica_id, range: 0..0, }; let slices = self .snapshot .insertion_slices .iter_from(&insertion_slice) - .take_while(|slice| slice.edit_id == *edit_id); + .take_while(|slice| { + Lamport { + value: slice.edit_id_value, + replica_id: slice.edit_id_replica_id, + } == *edit_id + }); insertion_slices.extend(slices) } - insertion_slices - .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end))); + insertion_slices.sort_unstable_by_key(|s| { + ( + Lamport { + value: s.insertion_id_value, + replica_id: s.insertion_id_replica_id, + }, + s.range.start, + Reverse(s.range.end), + ) + }); // Get all of the fragments corresponding to these insertion slices. 
let mut fragment_ids = Vec::new(); let mut insertions_cursor = self.insertions.cursor::(()); for insertion_slice in &insertion_slices { - if insertion_slice.insertion_id != insertions_cursor.start().timestamp + let insertion_id = Lamport { + value: insertion_slice.insertion_id_value, + replica_id: insertion_slice.insertion_id_replica_id, + }; + if insertion_id != insertions_cursor.start().timestamp || insertion_slice.range.start > insertions_cursor.start().split_offset { insertions_cursor.seek_forward( &InsertionFragmentKey { - timestamp: insertion_slice.insertion_id, + timestamp: insertion_id, split_offset: insertion_slice.range.start, }, Bias::Left, ); } while let Some(item) = insertions_cursor.item() { - if item.timestamp != insertion_slice.insertion_id - || item.split_offset >= insertion_slice.range.end + if item.timestamp != insertion_id || item.split_offset >= insertion_slice.range.end { break; } @@ -1330,13 +1434,13 @@ impl Buffer { let new_start = new_fragments.summary().text.visible; if fragment_was_visible && !fragment.visible { edits.push(Edit { - old: old_start..old_start + fragment.len, + old: old_start..old_start + fragment.len as usize, new: new_start..new_start, }); } else if !fragment_was_visible && fragment.visible { edits.push(Edit { old: old_start..old_start, - new: new_start..new_start + fragment.len, + new: new_start..new_start + fragment.len as usize, }); } new_ropes.push_fragment(&fragment, fragment_was_visible); @@ -1582,7 +1686,12 @@ impl Buffer { cursor.seek_forward(&Some(fragment_id), Bias::Left); let fragment = cursor.item()?; let start_offset = cursor.start().1; - let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 }; + let end_offset = start_offset + + if fragment.visible { + fragment.len as usize + } else { + 0 + }; Some(start_offset..end_offset) }); @@ -2038,10 +2147,6 @@ impl BufferSnapshot { self.deleted_text.to_string() } - pub fn fragments(&self) -> impl Iterator { - self.fragments.iter() - } - pub fn 
text_summary(&self) -> TextSummary { self.visible_text.summary() } @@ -2287,7 +2392,7 @@ impl BufferSnapshot { let fragment = fragment_cursor.item().unwrap(); let mut fragment_offset = fragment_cursor.start().1; if fragment.visible { - fragment_offset += anchor.offset - insertion.split_offset; + fragment_offset += (anchor.offset - insertion.split_offset) as usize; } position.add_assign(&text_cursor.summary(fragment_offset)); @@ -2332,7 +2437,7 @@ impl BufferSnapshot { let fragment = item.unwrap(); let mut fragment_offset = start.1; if fragment.visible { - fragment_offset += anchor.offset - insertion.split_offset; + fragment_offset += (anchor.offset - insertion.split_offset) as usize; } fragment_offset } @@ -2457,7 +2562,7 @@ impl BufferSnapshot { let overshoot = offset - start; Anchor::new( fragment.timestamp, - fragment.insertion_offset + overshoot, + fragment.insertion_offset + overshoot as u32, bias, Some(self.remote_id), ) @@ -2546,7 +2651,7 @@ impl BufferSnapshot { let mut visible_start = start.1.visible; let mut deleted_start = start.1.deleted; if let Some(fragment) = item { - let overshoot = range.start.offset - fragment.insertion_offset; + let overshoot = (range.start.offset - fragment.insertion_offset) as usize; if fragment.visible { visible_start += overshoot; } else { @@ -2706,7 +2811,7 @@ impl<'a> RopeBuilder<'a> { fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) { debug_assert!(fragment.len > 0); - self.push(fragment.len, was_visible, fragment.visible) + self.push(fragment.len as usize, was_visible, fragment.visible) } fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) { @@ -2781,7 +2886,8 @@ impl bool> Iterator for Ed if fragment.id == *self.range.end.0 { visible_end = cmp::min( visible_end, - cursor.start().visible + (self.range.end.1 - fragment.insertion_offset), + cursor.start().visible + + (self.range.end.1 - fragment.insertion_offset) as usize, ); } @@ -2807,7 +2913,8 @@ impl bool> Iterator for Ed if 
fragment.id == *self.range.end.0 { deleted_end = cmp::min( deleted_end, - cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset), + cursor.start().deleted + + (self.range.end.1 - fragment.insertion_offset) as usize, ); } @@ -2872,7 +2979,7 @@ impl sum_tree::Item for Fragment { FragmentSummary { max_id: self.id.clone(), text: FragmentTextSummary { - visible: self.len, + visible: self.len as usize, deleted: 0, }, max_version, @@ -2884,7 +2991,7 @@ impl sum_tree::Item for Fragment { max_id: self.id.clone(), text: FragmentTextSummary { visible: 0, - deleted: self.len, + deleted: self.len as usize, }, max_version, min_insertion_version, From 7e3e16876107e1dd4e8bb35093480989ea41fda0 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 25 Feb 2026 00:08:00 -0800 Subject: [PATCH 064/548] Fix window bounds related bugs from multi-workspace serialization (#50065) Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/workspace/src/persistence.rs | 110 ++++++++++++++++++++++++++++ crates/workspace/src/workspace.rs | 74 +++++++++++-------- 2 files changed, 153 insertions(+), 31 deletions(-) diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index ba6ee548348cf1dc3da612e112d497c1da011559..492b7a8f385730feaa06dfe3b5e8b4cc0a20bb59 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -4359,4 +4359,114 @@ mod tests { "Pending removal task should have deleted the workspace row when awaited" ); } + + #[gpui::test] + async fn test_create_workspace_bounds_observer_uses_fresh_id(cx: &mut gpui::TestAppContext) { + use crate::multi_workspace::MultiWorkspace; + use 
feature_flags::FeatureFlagAppExt; + use project::Project; + + crate::tests::init_test(cx); + + cx.update(|cx| { + cx.set_staff(true); + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + let fs = fs::FakeFs::new(cx.executor()); + let project = Project::test(fs.clone(), [], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + multi_workspace.update_in(cx, |mw, _, cx| { + mw.set_random_database_id(cx); + }); + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.create_workspace(window, cx); + }); + + cx.run_until_parked(); + + let new_workspace_db_id = + multi_workspace.read_with(cx, |mw, cx| mw.workspace().read(cx).database_id()); + assert!( + new_workspace_db_id.is_some(), + "After run_until_parked, the workspace should have a database_id" + ); + + let workspace_id = new_workspace_db_id.unwrap(); + + assert!( + DB.workspace_for_id(workspace_id).is_some(), + "The workspace row should exist in the DB" + ); + + cx.simulate_resize(gpui::size(px(1024.0), px(768.0))); + + // Advance the clock past the 100ms debounce timer so the bounds + // observer task fires + cx.executor().advance_clock(Duration::from_millis(200)); + cx.run_until_parked(); + + let serialized = DB + .workspace_for_id(workspace_id) + .expect("workspace row should still exist"); + assert!( + serialized.window_bounds.is_some(), + "The bounds observer should write bounds for the workspace's real DB ID, \ + even when the workspace was created via create_workspace (where the ID \ + is assigned asynchronously after construction)." 
+ ); + } + + #[gpui::test] + async fn test_flush_serialization_writes_bounds(cx: &mut gpui::TestAppContext) { + use crate::multi_workspace::MultiWorkspace; + use feature_flags::FeatureFlagAppExt; + use project::Project; + + crate::tests::init_test(cx); + + cx.update(|cx| { + cx.set_staff(true); + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + let fs = fs::FakeFs::new(cx.executor()); + let dir = tempfile::TempDir::with_prefix("flush_bounds_test").unwrap(); + fs.insert_tree(dir.path(), json!({})).await; + + let project = Project::test(fs.clone(), [dir.path()], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace_id = DB.next_id().await.unwrap(); + multi_workspace.update_in(cx, |mw, _, cx| { + mw.workspace().update(cx, |ws, _cx| { + ws.set_database_id(workspace_id); + }); + }); + + let task = multi_workspace.update_in(cx, |mw, window, cx| { + mw.workspace() + .update(cx, |ws, cx| ws.flush_serialization(window, cx)) + }); + task.await; + + let after = DB + .workspace_for_id(workspace_id) + .expect("workspace row should exist after flush_serialization"); + assert!( + !after.paths.is_empty(), + "flush_serialization should have written paths via save_workspace" + ); + assert!( + after.window_bounds.is_some(), + "flush_serialization should ensure window bounds are persisted to the DB \ + before the process exits." 
+ ); + } } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 975f00d589b76e56bcf6d819798c23bd8844a2d6..cab4b4974f15f4b68da7c4a5abd0fca34a4af00c 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1601,36 +1601,7 @@ impl Workspace { .timer(Duration::from_millis(100)) .await; this.update_in(cx, |this, window, cx| { - if let Some(display) = window.display(cx) - && let Ok(display_uuid) = display.uuid() - { - let window_bounds = window.inner_window_bounds(); - let has_paths = !this.root_paths(cx).is_empty(); - if !has_paths { - cx.background_executor() - .spawn(persistence::write_default_window_bounds( - window_bounds, - display_uuid, - )) - .detach_and_log_err(cx); - } - if let Some(database_id) = workspace_id { - cx.background_executor() - .spawn(DB.set_window_open_status( - database_id, - SerializedWindowBounds(window_bounds), - display_uuid, - )) - .detach_and_log_err(cx); - } else { - cx.background_executor() - .spawn(persistence::write_default_window_bounds( - window_bounds, - display_uuid, - )) - .detach_and_log_err(cx); - } - } + this.save_window_bounds(window, cx).detach(); this.bounds_save_task_queued.take(); }) .ok(); @@ -5857,6 +5828,40 @@ impl Workspace { self.session_id.clone() } + fn save_window_bounds(&self, window: &mut Window, cx: &mut App) -> Task<()> { + let Some(display) = window.display(cx) else { + return Task::ready(()); + }; + let Ok(display_uuid) = display.uuid() else { + return Task::ready(()); + }; + + let window_bounds = window.inner_window_bounds(); + let database_id = self.database_id; + let has_paths = !self.root_paths(cx).is_empty(); + + cx.background_executor().spawn(async move { + if !has_paths { + persistence::write_default_window_bounds(window_bounds, display_uuid) + .await + .log_err(); + } + if let Some(database_id) = database_id { + DB.set_window_open_status( + database_id, + SerializedWindowBounds(window_bounds), + display_uuid, + ) + .await + 
.log_err(); + } else { + persistence::write_default_window_bounds(window_bounds, display_uuid) + .await + .log_err(); + } + }) + } + /// Bypass the 200ms serialization throttle and write workspace state to /// the DB immediately. Returns a task the caller can await to ensure the /// write completes. Used by the quit handler so the most recent state @@ -5864,7 +5869,14 @@ impl Workspace { pub fn flush_serialization(&mut self, window: &mut Window, cx: &mut App) -> Task<()> { self._schedule_serialize_workspace.take(); self._serialize_workspace_task.take(); - self.serialize_workspace_internal(window, cx) + self.bounds_save_task_queued.take(); + + let bounds_task = self.save_window_bounds(window, cx); + let serialize_task = self.serialize_workspace_internal(window, cx); + cx.spawn(async move |_| { + bounds_task.await; + serialize_task.await; + }) } pub fn root_paths(&self, cx: &App) -> Vec> { From ca066cbefd3f11a67ddf0738269e954349b94d91 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 25 Feb 2026 09:10:29 +0100 Subject: [PATCH 065/548] Replace some hot `TreeMaps` with `Arc` (#49984) These tend to be very often mutated while we don't really need the subtree sharing of TreeMaps, so replacing them here is generally cheaper. Release Notes: - N/A *or* Added/Fixed/Improved ... 
Co-authored by: John Tur --- crates/editor/src/display_map.rs | 40 +++++++++++++--------- crates/editor/src/display_map/inlay_map.rs | 6 ++-- crates/editor/src/semantic_tokens.rs | 11 +++--- crates/multi_buffer/src/multi_buffer.rs | 7 ++-- crates/multi_buffer/src/path_key.rs | 8 ++--- 5 files changed, 41 insertions(+), 31 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 31a1f68300d6d24ef449f4f0eb0b072f0cbe0ea9..888a3729ca769551954712dc2e8c3fb197367551 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -113,6 +113,7 @@ use unicode_segmentation::UnicodeSegmentation; use ztracing::instrument; use std::cell::RefCell; +use std::collections::hash_map::Entry; use std::{ any::TypeId, borrow::Cow, @@ -175,9 +176,9 @@ pub trait ToDisplayPoint { fn to_display_point(&self, map: &DisplaySnapshot) -> DisplayPoint; } -type TextHighlights = TreeMap>)>>; +type TextHighlights = Arc>)>>>; type SemanticTokensHighlights = - TreeMap, Arc)>; + Arc, Arc)>>; type InlayHighlights = TreeMap>; #[derive(Debug)] @@ -478,7 +479,7 @@ impl DisplayMap { diagnostics_max_severity, text_highlights: Default::default(), inlay_highlights: Default::default(), - semantic_token_highlights: TreeMap::default(), + semantic_token_highlights: Default::default(), clip_at_line_ends: false, masked: false, companion: None, @@ -1226,22 +1227,25 @@ impl DisplayMap { cx: &App, ) { let multi_buffer_snapshot = self.buffer.read(cx).snapshot(cx); - let to_insert = match self.text_highlights.remove(&key) { - Some(mut previous) if merge => match Arc::get_mut(&mut previous) { - Some((_, previous_ranges)) => { + match Arc::make_mut(&mut self.text_highlights).entry(key) { + Entry::Occupied(mut slot) => match Arc::get_mut(slot.get_mut()) { + Some((_, previous_ranges)) if merge => { previous_ranges.extend(ranges); previous_ranges.sort_by(|a, b| a.start.cmp(&b.start, &multi_buffer_snapshot)); - previous } - None => Arc::new((style, { - 
ranges.extend(previous.1.iter().cloned()); + Some((previous_style, previous_ranges)) => { + *previous_style = style; + *previous_ranges = ranges; + } + None if merge => { + ranges.extend(slot.get().1.iter().cloned()); ranges.sort_by(|a, b| a.start.cmp(&b.start, &multi_buffer_snapshot)); - ranges - })), + slot.insert(Arc::new((style, ranges))); + } + None => _ = slot.insert(Arc::new((style, ranges))), }, - _ => Arc::new((style, ranges)), - }; - self.text_highlights.insert(key, to_insert); + Entry::Vacant(slot) => _ = slot.insert(Arc::new((style, ranges))), + } } #[instrument(skip_all)] @@ -1288,14 +1292,16 @@ impl DisplayMap { } pub fn clear_highlights(&mut self, key: HighlightKey) -> bool { - let mut cleared = self.text_highlights.remove(&key).is_some(); + let mut cleared = Arc::make_mut(&mut self.text_highlights) + .remove(&key) + .is_some(); cleared |= self.inlay_highlights.remove(&key).is_some(); cleared } pub fn clear_highlights_with(&mut self, f: &mut dyn FnMut(&HighlightKey) -> bool) -> bool { let mut cleared = false; - self.text_highlights.retain(|k, _| { + Arc::make_mut(&mut self.text_highlights).retain(|k, _| { let b = !f(k); cleared |= b; b @@ -1448,7 +1454,7 @@ impl DisplayMap { } pub fn invalidate_semantic_highlights(&mut self, buffer_id: BufferId) { - self.semantic_token_highlights.remove(&buffer_id); + Arc::make_mut(&mut self.semantic_token_highlights).remove(&buffer_id); } } diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 8bc66123c41a1e7af4e2814bc79ea80c97219f14..9e853c8292c8073f20af58ee4d8f71c8db269cfa 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -1328,9 +1328,10 @@ mod tests { use super::*; use crate::{ MultiBuffer, - display_map::{HighlightKey, InlayHighlights, TextHighlights}, + display_map::{HighlightKey, InlayHighlights}, hover_links::InlayHighlight, }; + use collections::HashMap; use gpui::{App, HighlightStyle}; use 
multi_buffer::Anchor; use project::{InlayHint, InlayHintLabel, ResolveState}; @@ -1897,7 +1898,7 @@ mod tests { ); } - let mut text_highlights = TextHighlights::default(); + let mut text_highlights = HashMap::default(); let text_highlight_count = rng.random_range(0_usize..10); let mut text_highlight_ranges = (0..text_highlight_count) .map(|_| buffer_snapshot.random_byte_range(MultiBufferOffset(0), &mut rng)) @@ -1917,6 +1918,7 @@ mod tests { .collect(), )), ); + let text_highlights = Arc::new(text_highlights); let mut inlay_highlights = InlayHighlights::default(); if !inlays.is_empty() { diff --git a/crates/editor/src/semantic_tokens.rs b/crates/editor/src/semantic_tokens.rs index d2bbe533124efdb252dc80c5677165521ef76bab..8742ae82a80a7a623d5e6c98afc908f34f6e4b84 100644 --- a/crates/editor/src/semantic_tokens.rs +++ b/crates/editor/src/semantic_tokens.rs @@ -122,7 +122,10 @@ impl Editor { if !self.mode().is_full() || !self.semantic_token_state.enabled() { self.invalidate_semantic_tokens(None); self.display_map.update(cx, |display_map, _| { - display_map.semantic_token_highlights.clear(); + match Arc::get_mut(&mut display_map.semantic_token_highlights) { + Some(highlights) => highlights.clear(), + None => display_map.semantic_token_highlights = Arc::new(Default::default()), + }; }); self.semantic_token_state.update_task = Task::ready(()); cx.notify(); @@ -171,8 +174,8 @@ impl Editor { .display_map .read(cx) .semantic_token_highlights - .iter() - .map(|(buffer_id, _)| *buffer_id) + .keys() + .copied() .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id)) .filter(|buffer_id| { !self @@ -308,7 +311,7 @@ impl Editor { token_highlights.sort_by(|a, b| { a.range.start.cmp(&b.range.start, &multi_buffer_snapshot) }); - display_map.semantic_token_highlights.insert( + Arc::make_mut(&mut display_map.semantic_token_highlights).insert( buffer_id, (Arc::from(token_highlights), Arc::new(interner)), ); diff --git a/crates/multi_buffer/src/multi_buffer.rs 
b/crates/multi_buffer/src/multi_buffer.rs index 6a90d9c410859324d31ebbd59c909e31127ecc6a..03615ffcabb2a5c46469bc16198adfce95e0c6ef 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -624,7 +624,7 @@ pub struct MultiBufferSnapshot { diffs: TreeMap, diff_transforms: SumTree, excerpt_ids: SumTree, - replaced_excerpts: TreeMap, + replaced_excerpts: Arc>, non_text_state_update_count: usize, edit_count: usize, is_dirty: bool, @@ -1967,7 +1967,10 @@ impl MultiBuffer { *has_deleted_file = false; *has_conflict = false; *has_inverted_diff = false; - replaced_excerpts.clear(); + match Arc::get_mut(replaced_excerpts) { + Some(replaced_excerpts) => replaced_excerpts.clear(), + None => *replaced_excerpts = Default::default(), + } let edits = Self::sync_diff_transforms( self.snapshot.get_mut(), diff --git a/crates/multi_buffer/src/path_key.rs b/crates/multi_buffer/src/path_key.rs index 475ed3c9d623382c5ed989918ee3224afc95cd25..eeaf3c5995dc446a704857b2a111c0aeb50e48a1 100644 --- a/crates/multi_buffer/src/path_key.rs +++ b/crates/multi_buffer/src/path_key.rs @@ -382,9 +382,7 @@ impl MultiBuffer { { last.context.end = last.context.end.max(existing_range.end); to_remove.push(*existing_id); - self.snapshot - .get_mut() - .replaced_excerpts + Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) .insert(*existing_id, *last_id); existing_iter.next(); continue; @@ -462,9 +460,7 @@ impl MultiBuffer { (Some(_), Some((_, existing_range))) => { let existing_id = existing_iter.next().unwrap(); let new_id = next_excerpt_id(); - self.snapshot - .get_mut() - .replaced_excerpts + Arc::make_mut(&mut self.snapshot.get_mut().replaced_excerpts) .insert(existing_id, new_id); to_remove.push(existing_id); let mut range = new_iter.next().unwrap(); From 3d5c8a53ae54848dccf6a503a748fe60be416d36 Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Wed, 25 Feb 2026 03:25:14 -0500 Subject: [PATCH 066/548] thread_view: Add fallback error handling for 
connect failures (#50063) Following up from https://github.com/zed-industries/zed/pull/50061: when connecting to an ACP adapter fails before any thread is active, errors would not display in the Agent Panel. Falling back to `handle_load_error` to show the error UI properly as it already handles this. Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Added fallback error handling for connect failures in the Agent Panel --- crates/agent_ui/src/acp/thread_view.rs | 64 ++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index cbdc3ad5d1e5d28b1597ba405846ac48dbfeb928..6ea3ec6317313822de78b3e08735250308eddbc4 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -609,6 +609,8 @@ impl AcpServerView { this.handle_load_error(err, window, cx); } else if let Some(active) = this.active_thread() { active.update(cx, |active, cx| active.handle_any_thread_error(err, cx)); + } else { + this.handle_load_error(err, window, cx); } cx.notify(); }) @@ -3092,6 +3094,38 @@ pub(crate) mod tests { }); } + #[gpui::test] + async fn test_connect_failure_transitions_to_load_error(cx: &mut TestAppContext) { + init_test(cx); + + let (thread_view, cx) = setup_thread_view(FailingAgentServer, cx).await; + + thread_view.read_with(cx, |view, cx| { + let title = view.title(cx); + assert_eq!( + title.as_ref(), + "Error Loading Codex CLI", + "Tab title should show the agent name with an error prefix" + ); + match &view.server_state { + ServerState::LoadError(LoadError::Other(msg)) => { + assert!( + msg.contains("Invalid gzip header"), + 
"Error callout should contain the underlying extraction error, got: {msg}" + ); + } + other => panic!( + "Expected LoadError::Other, got: {}", + match other { + ServerState::Loading(_) => "Loading (stuck!)", + ServerState::LoadError(_) => "LoadError (wrong variant)", + ServerState::Connected(_) => "Connected", + } + ), + } + }); + } + #[gpui::test] async fn test_auth_required_on_initial_connect(cx: &mut TestAppContext) { init_test(cx); @@ -3602,6 +3636,36 @@ pub(crate) mod tests { } } + struct FailingAgentServer; + + impl AgentServer for FailingAgentServer { + fn logo(&self) -> ui::IconName { + ui::IconName::AiOpenAi + } + + fn name(&self) -> SharedString { + "Codex CLI".into() + } + + fn connect( + &self, + _root_dir: Option<&Path>, + _delegate: AgentServerDelegate, + _cx: &mut App, + ) -> Task, Option)>> { + Task::ready(Err(anyhow!( + "extracting downloaded asset for \ + https://github.com/zed-industries/codex-acp/releases/download/v0.9.4/\ + codex-acp-0.9.4-aarch64-pc-windows-msvc.zip: \ + failed to iterate over archive: Invalid gzip header" + ))) + } + + fn into_any(self: Rc) -> Rc { + self + } + } + #[derive(Clone)] struct StubSessionList { sessions: Vec, From 938b8ab746ee699cdcf1f329de501c8a7e781531 Mon Sep 17 00:00:00 2001 From: Avi Fenesh <55848801+avifenesh@users.noreply.github.com> Date: Wed, 25 Feb 2026 12:41:24 +0200 Subject: [PATCH 067/548] languages: Add `.mdc` as a recognized Markdown file suffix (#50074) Highlight files ending in `.mdc` as Markdown. The `.mdc` extension is used by Cursor for its Markdown-based rule files (`.cursor/rules/*.mdc`). These files are standard Markdown with optional YAML frontmatter, which the existing Markdown grammar already handles well. Adding `.mdc` to the recognized suffixes ensures proper syntax highlighting out of the box. 
This was requested during review of the agnix extension PR ([zed-industries/extensions#4743](https://github.com/zed-industries/extensions/pull/4743)) by @MrSubidubi as the preferred approach over defining a custom MDC language in an extension. Release Notes: - Added `.mdc` as a recognized Markdown file extension. --- crates/languages/src/markdown/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/markdown/config.toml b/crates/languages/src/markdown/config.toml index 10b1e49757edc106c76e0dc7c591098ebdc6723f..5e7acd230b6f191aebff609bbc1087fbff8d3909 100644 --- a/crates/languages/src/markdown/config.toml +++ b/crates/languages/src/markdown/config.toml @@ -1,6 +1,6 @@ name = "Markdown" grammar = "markdown" -path_suffixes = ["md", "mdx", "mdwn", "markdown", "MD"] +path_suffixes = ["md", "mdx", "mdwn", "mdc", "markdown", "MD"] completion_query_characters = ["-"] block_comment = { start = "", tab_size = 0 } autoclose_before = ";:.,=}])>" From 88299472783a4cf85b31d32325b2a49203c18c84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Samuel=20Dom=C3=ADnguez=20Lorenzo?= Date: Wed, 25 Feb 2026 12:41:39 +0000 Subject: [PATCH 068/548] Add debug panel toggle (#48020) This adds `debug_panel::Toggle` which is the same as `terminal_panel::Toggle` but for the debug panel. It also moves `debug_panel::{Toggle, ToggleFocus}` to its own `pub mod` for consistency with other keybinds. 
[Related discussion](https://github.com/zed-industries/zed/discussions/47931#discussion-9404091) Release Notes: - Added `debug_panel::Toggle` to show/hide debug panel like `terminal_panel::Toggle` --- crates/debugger_ui/src/debugger_panel.rs | 2 +- crates/debugger_ui/src/debugger_ui.rs | 2 +- crates/zed/src/zed/app_menus.rs | 4 ++-- crates/zed_actions/src/lib.rs | 22 +++++++++++++--------- 4 files changed, 17 insertions(+), 13 deletions(-) diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index 51b11a24923c11205b9845bb98b3f2692ebd0e3d..d0b744bfe793ea31367cc48178903190a9becf59 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -43,7 +43,7 @@ use workspace::{ Item, Pane, Workspace, dock::{DockPosition, Panel, PanelEvent}, }; -use zed_actions::ToggleFocus; +use zed_actions::debug_panel::ToggleFocus; pub struct DebuggerHistoryFeatureFlag; diff --git a/crates/debugger_ui/src/debugger_ui.rs b/crates/debugger_ui/src/debugger_ui.rs index 3a70216ec743f78ebeaa98ad86d3c0dddba60efb..310a9036821a7071199eed2c22231fa8a8be18a0 100644 --- a/crates/debugger_ui/src/debugger_ui.rs +++ b/crates/debugger_ui/src/debugger_ui.rs @@ -13,7 +13,7 @@ use tasks_ui::{Spawn, TaskOverrides}; use ui::{FluentBuilder, InteractiveElement}; use util::maybe; use workspace::{ItemHandle, ShutdownDebugAdapters, Workspace}; -use zed_actions::{Toggle, ToggleFocus}; +use zed_actions::debug_panel::{Toggle, ToggleFocus}; pub mod attach_modal; pub mod debugger_panel; diff --git a/crates/zed/src/zed/app_menus.rs b/crates/zed/src/zed/app_menus.rs index 40582c8e13ff822189c9b3a1a467a9ff7f9d597a..debcb605f222dc7c983b9d061803720df5ff727c 100644 --- a/crates/zed/src/zed/app_menus.rs +++ b/crates/zed/src/zed/app_menus.rs @@ -2,7 +2,7 @@ use collab_ui::collab_panel; use gpui::{App, Menu, MenuItem, OsAction}; use release_channel::ReleaseChannel; use terminal_view::terminal_panel; -use zed_actions::{ToggleFocus as 
ToggleDebugPanel, dev}; +use zed_actions::{debug_panel, dev}; pub fn app_menus(cx: &mut App) -> Vec

{ use zed_actions::Quit; @@ -43,7 +43,7 @@ pub fn app_menus(cx: &mut App) -> Vec { MenuItem::action("Outline Panel", outline_panel::ToggleFocus), MenuItem::action("Collab Panel", collab_panel::ToggleFocus), MenuItem::action("Terminal Panel", terminal_panel::ToggleFocus), - MenuItem::action("Debugger Panel", ToggleDebugPanel), + MenuItem::action("Debugger Panel", debug_panel::ToggleFocus), MenuItem::separator(), MenuItem::action("Diagnostics", diagnostics::Deploy), MenuItem::separator(), diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index 848673b9377a4947053f0bb8d79de9863c58408c..ed3fd5b5a933ee69f7aa06104c08b85c1cbcd052 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -639,15 +639,19 @@ actions!( ] ); -actions!( - debug_panel, - [ - /// Toggles the debug panel. - Toggle, - /// Toggles focus on the debug panel. - ToggleFocus - ] -); +pub mod debug_panel { + use gpui::actions; + actions!( + debug_panel, + [ + /// Toggles the debug panel. + Toggle, + /// Toggles focus on the debug panel. 
+ ToggleFocus + ] + ); +} + actions!( debugger, [ From 6fb5109d9b7f7306ea8ab69e88d710b16b90d73e Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 25 Feb 2026 14:33:56 +0100 Subject: [PATCH 069/548] agent_ui: Refresh agent registry when reopening page (#50078) Make sure we get an up-to-date list whenever you actually visit the page Release Notes: - N/A --- crates/agent_ui/src/agent_ui.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 3f082e86b1f4f8e9ea601ec0de15b22a972c1d67..7f05437cbae82ef4ae4953c91d33c0b6c7a296bc 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -310,6 +310,10 @@ pub fn init( .find_map(|item| item.downcast::()); if let Some(existing) = existing { + existing.update(cx, |_, cx| { + project::AgentRegistryStore::global(cx) + .update(cx, |store, cx| store.refresh(cx)); + }); workspace.activate_item(&existing, true, true, window, cx); } else { let registry_page = AgentRegistryPage::new(workspace, window, cx); From bc023b3f80c9daa36365948710813d75b3226eb7 Mon Sep 17 00:00:00 2001 From: Xin Zhao Date: Wed, 25 Feb 2026 21:37:54 +0800 Subject: [PATCH 070/548] languages: Improve completion sorting for Python-based LSPs (#47160) Closes #47086 This PR detects completion items ending with `=` (which typically represent keyword arguments in function calls provided by `Pyright`/`BasedPyright`/`pylsp`) and assigns them the highest sorting priority. This ensures that when a user is filling out function arguments, the named parameters appear at the top of the list, rather than being buried mixed with other symbols. After fix: image > **Note on Sorting:** Currently, these named arguments will be sorted alphabetically by label. Preserving the original order of the function definition would be ideal, but it requires information not currently available in this logical block. Insights on how to retrieve the definition order would be appreciated. 
> **Note on other LSPs:** > * **`ty`**: Already provides well-sorted completions natively, so no intervention is required. Release Notes: - Improved completion order for Python-based LSPs --- crates/languages/src/python.rs | 47 ++++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 13 deletions(-) diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index b1192464e9601183ac5d91196bfbe529feaa693f..9eaf9764f100428b4bbbc80238f7da5847001470 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -112,6 +112,8 @@ impl FromStr for TestRunner { /// Decided to ignore Pyright's sortText() completely and to manually sort all entries fn process_pyright_completions(items: &mut [lsp::CompletionItem]) { for item in items { + let is_named_argument = item.label.ends_with('='); + let is_dunder = item.label.starts_with("__") && item.label.ends_with("__"); let visibility_priority = if is_dunder { @@ -124,23 +126,35 @@ fn process_pyright_completions(items: &mut [lsp::CompletionItem]) { '0' // public }; + let is_external = item + .detail + .as_ref() + .is_some_and(|detail| detail == "Auto-import"); + + let source_priority = if is_external { '1' } else { '0' }; + // Kind priority within same visibility level let kind_priority = match item.kind { - Some(lsp::CompletionItemKind::ENUM_MEMBER) => '0', - Some(lsp::CompletionItemKind::FIELD) => '1', - Some(lsp::CompletionItemKind::PROPERTY) => '2', - Some(lsp::CompletionItemKind::VARIABLE) => '3', - Some(lsp::CompletionItemKind::CONSTANT) => '4', - Some(lsp::CompletionItemKind::METHOD) => '5', - Some(lsp::CompletionItemKind::FUNCTION) => '5', - Some(lsp::CompletionItemKind::CLASS) => '6', - Some(lsp::CompletionItemKind::MODULE) => '7', - _ => '8', + Some(lsp::CompletionItemKind::KEYWORD) => '0', + Some(lsp::CompletionItemKind::ENUM_MEMBER) => '1', + Some(lsp::CompletionItemKind::FIELD) => '2', + Some(lsp::CompletionItemKind::PROPERTY) => '3', + 
Some(lsp::CompletionItemKind::VARIABLE) => '4', + Some(lsp::CompletionItemKind::CONSTANT) => '5', + Some(lsp::CompletionItemKind::METHOD) => '6', + Some(lsp::CompletionItemKind::FUNCTION) => '6', + Some(lsp::CompletionItemKind::CLASS) => '7', + Some(lsp::CompletionItemKind::MODULE) => '8', + + _ => 'z', }; + // Named arguments get higher priority + let argument_priority = if is_named_argument { '0' } else { '1' }; + item.sort_text = Some(format!( - "{}{}{}", - visibility_priority, kind_priority, item.label + "{}{}{}{}{}", + argument_priority, source_priority, visibility_priority, kind_priority, item.label )); } } @@ -1689,7 +1703,14 @@ impl LspAdapter for PyLspAdapter { Self::SERVER_NAME } - async fn process_completions(&self, _items: &mut [lsp::CompletionItem]) {} + async fn process_completions(&self, items: &mut [lsp::CompletionItem]) { + for item in items { + let is_named_argument = item.label.ends_with('='); + let priority = if is_named_argument { '0' } else { '1' }; + let sort_text = item.sort_text.take().unwrap_or_else(|| item.label.clone()); + item.sort_text = Some(format!("{}{}", priority, sort_text)); + } + } async fn label_for_completion( &self, From 13eb0f68327177a859131e375cfadd056321dc69 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 25 Feb 2026 10:45:14 -0300 Subject: [PATCH 071/548] git_ui: Improve connection between the graph and commit views (#50027) - Enabled opening the Git Graph, with the corresponding commit detail drawer open, from the commit view - Redesigned the commit view's header and toolbar to allow addition of the Git Graph icon button - Redesigned icons for the Git Graph and commit view https://github.com/user-attachments/assets/8efef60a-0893-4752-9b40-838da21ceb54 --- Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and 
performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A (_Git Graph is still feature flagged, so no release notes for now_) --- Cargo.lock | 1 + assets/icons/git_commit.svg | 5 + assets/icons/git_graph.svg | 7 +- crates/feature_flags/src/flags.rs | 6 + crates/git_graph/src/git_graph.rs | 149 ++++++++++++---- crates/git_ui/Cargo.toml | 1 + crates/git_ui/src/commit_view.rs | 273 ++++++++++++++++-------------- crates/git_ui/src/git_panel.rs | 7 + crates/icons/src/icons.rs | 1 + 9 files changed, 290 insertions(+), 160 deletions(-) create mode 100644 assets/icons/git_commit.svg diff --git a/Cargo.lock b/Cargo.lock index ef6fd4e2c22cf53a5aa145600435983beae86437..dae0fef9c224c0dda72996dc2c58dc75768569fa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7217,6 +7217,7 @@ dependencies = [ "ctor", "db", "editor", + "feature_flags", "futures 0.3.31", "fuzzy", "git", diff --git a/assets/icons/git_commit.svg b/assets/icons/git_commit.svg new file mode 100644 index 0000000000000000000000000000000000000000..38b36ec7efb72275e5e6efbbe761deb54050cfe7 --- /dev/null +++ b/assets/icons/git_commit.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/git_graph.svg b/assets/icons/git_graph.svg index 8f372a305d3fddf2901756108c83d09b31fb657e..7ae33e365d40bfccd9c48e4f7e94b10d3687f8dc 100644 --- a/assets/icons/git_graph.svg +++ b/assets/icons/git_graph.svg @@ -1,4 +1,7 @@ - - + + + + + diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 8f96de0e7b6d9b385fcda533a31ecc34b5afdbcc..087e76c4129254d3b6f488259bc8fa19aa91370d 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -57,6 +57,12 @@ impl FeatureFlag for DiffReviewFeatureFlag { } } +pub struct GitGraphFeatureFlag; + +impl FeatureFlag for GitGraphFeatureFlag { + const NAME: &'static str = "git-graph"; +} + pub struct StreamingEditFileToolFeatureFlag; impl 
FeatureFlag for StreamingEditFileToolFeatureFlag { diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 37f170ada5ecd23daf5ee58ee1011af95bfc6b8d..3bdb2b0d717ca4cae181fee9dd690755e29075d0 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -1,5 +1,5 @@ use collections::{BTreeMap, HashMap}; -use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; +use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::{ BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, ParsedGitRemote, parse_git_remote_url, @@ -39,7 +39,7 @@ use ui::{ }; use workspace::{ Workspace, - item::{Item, ItemEvent, SerializableItem}, + item::{Item, ItemEvent, SerializableItem, TabTooltipContent}, }; const COMMIT_CIRCLE_RADIUS: Pixels = px(3.5); @@ -48,6 +48,7 @@ const LANE_WIDTH: Pixels = px(16.0); const LEFT_PADDING: Pixels = px(12.0); const LINE_WIDTH: Pixels = px(1.5); const RESIZE_HANDLE_WIDTH: f32 = 8.0; +const PENDING_SELECT_MAX_RETRIES: usize = 5; const COPIED_STATE_DURATION: Duration = Duration::from_secs(2); struct CopiedState { @@ -246,12 +247,6 @@ actions!( ] ); -pub struct GitGraphFeatureFlag; - -impl FeatureFlag for GitGraphFeatureFlag { - const NAME: &'static str = "git-graph"; -} - fn timestamp_format() -> &'static [BorrowedFormatItem<'static>] { static FORMAT: OnceLock>> = OnceLock::new(); FORMAT.get_or_init(|| { @@ -710,29 +705,66 @@ pub fn init(cx: &mut App) { |div| { let workspace = workspace.weak_handle(); - div.on_action(move |_: &git_ui::git_panel::Open, window, cx| { - workspace - .update(cx, |workspace, cx| { - let existing = workspace.items_of_type::(cx).next(); - if let Some(existing) = existing { - workspace.activate_item(&existing, true, true, window, cx); - return; - } + div.on_action({ + let workspace = workspace.clone(); + move |_: &git_ui::git_panel::Open, window, cx| { + workspace + .update(cx, |workspace, cx| { + let existing = 
workspace.items_of_type::(cx).next(); + if let Some(existing) = existing { + workspace.activate_item(&existing, true, true, window, cx); + return; + } - let project = workspace.project().clone(); - let workspace_handle = workspace.weak_handle(); - let git_graph = cx - .new(|cx| GitGraph::new(project, workspace_handle, window, cx)); - workspace.add_item_to_active_pane( - Box::new(git_graph), - None, - true, - window, - cx, - ); - }) - .ok(); + let project = workspace.project().clone(); + let workspace_handle = workspace.weak_handle(); + let git_graph = cx.new(|cx| { + GitGraph::new(project, workspace_handle, window, cx) + }); + workspace.add_item_to_active_pane( + Box::new(git_graph), + None, + true, + window, + cx, + ); + }) + .ok(); + } }) + .on_action( + move |action: &git_ui::git_panel::OpenAtCommit, window, cx| { + let sha = action.sha.clone(); + workspace + .update(cx, |workspace, cx| { + let existing = workspace.items_of_type::(cx).next(); + if let Some(existing) = existing { + existing.update(cx, |graph, cx| { + graph.select_commit_by_sha(&sha, cx); + }); + workspace.activate_item(&existing, true, true, window, cx); + return; + } + + let project = workspace.project().clone(); + let workspace_handle = workspace.weak_handle(); + let git_graph = cx.new(|cx| { + let mut graph = + GitGraph::new(project, workspace_handle, window, cx); + graph.select_commit_by_sha(&sha, cx); + graph + }); + workspace.add_item_to_active_pane( + Box::new(git_graph), + None, + true, + window, + cx, + ); + }) + .ok(); + }, + ) }, ) }); @@ -821,6 +853,7 @@ pub struct GitGraph { commit_details_split_state: Entity, selected_repo_id: Option, changed_files_scroll_handle: UniformListScrollHandle, + pending_select_sha: Option<(String, usize)>, } impl GitGraph { @@ -918,6 +951,7 @@ impl GitGraph { commit_details_split_state: cx.new(|_cx| SplitState::new()), selected_repo_id: active_repository, changed_files_scroll_handle: UniformListScrollHandle::new(), + pending_select_sha: None, }; 
this.fetch_initial_graph_data(cx); @@ -944,8 +978,10 @@ impl GitGraph { self.graph_data.add_commits(commits); }); cx.notify(); + self.retry_pending_select(cx); } RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => { + self.pending_select_sha = None; // Only invalidate if we scanned atleast once, // meaning we are not inside the initial repo loading state // NOTE: this fixes an loading performance regression @@ -1153,6 +1189,37 @@ impl GitGraph { cx.notify(); } + pub fn select_commit_by_sha(&mut self, sha: &str, cx: &mut Context) { + let Ok(oid) = sha.parse::() else { + return; + }; + for (idx, commit) in self.graph_data.commits.iter().enumerate() { + if commit.data.sha == oid { + self.pending_select_sha = None; + self.select_entry(idx, cx); + return; + } + } + self.pending_select_sha = Some((sha.to_string(), PENDING_SELECT_MAX_RETRIES)); + } + + fn retry_pending_select(&mut self, cx: &mut Context) { + let Some((sha, retries_remaining)) = self.pending_select_sha.take() else { + return; + }; + if let Ok(oid) = sha.parse::() { + for (idx, commit) in self.graph_data.commits.iter().enumerate() { + if commit.data.sha == oid { + self.select_entry(idx, cx); + return; + } + } + } + if retries_remaining > 0 { + self.pending_select_sha = Some((sha, retries_remaining - 1)); + } + } + fn open_selected_commit_view(&mut self, window: &mut Window, cx: &mut Context) { let Some(selected_entry_index) = self.selected_entry_idx else { return; @@ -2179,6 +2246,30 @@ impl Focusable for GitGraph { impl Item for GitGraph { type Event = ItemEvent; + fn tab_icon(&self, _window: &Window, _cx: &App) -> Option { + Some(Icon::new(IconName::GitGraph)) + } + + fn tab_tooltip_content(&self, cx: &App) -> Option { + let repo_name = self.get_selected_repository(cx).and_then(|repo| { + repo.read(cx) + .work_directory_abs_path + .file_name() + .map(|name| name.to_string_lossy().to_string()) + }); + + Some(TabTooltipContent::Custom(Box::new(Tooltip::element({ + move |_, _| { + v_flex() 
+ .child(Label::new("Git Graph")) + .when_some(repo_name.clone(), |this, name| { + this.child(Label::new(name).color(Color::Muted).size(LabelSize::Small)) + }) + .into_any_element() + } + })))) + } + fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { "Git Graph".into() } diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index f779570be471fd1a097e350d59ef2fb1d4003d2b..28fac0f849a487c6654e2ac5976191cd3e1a733f 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -27,6 +27,7 @@ component.workspace = true db.workspace = true editor.workspace = true futures.workspace = true +feature_flags.workspace = true fuzzy.workspace = true git.workspace = true gpui.workspace = true diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index f5ed23a6a84e7649ddf7f1e7b6b3651a323ee3c6..8f2a019fddf0513c100a53956c81012d11c2ca30 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -3,6 +3,7 @@ use buffer_diff::BufferDiff; use collections::HashMap; use editor::display_map::{BlockPlacement, BlockProperties, BlockStyle}; use editor::{Addon, Editor, EditorEvent, ExcerptRange, MultiBuffer, multibuffer_context_lines}; +use feature_flags::{FeatureFlagAppExt as _, GitGraphFeatureFlag}; use git::repository::{CommitDetails, CommitDiff, RepoPath, is_binary_content}; use git::status::{FileStatus, StatusCode, TrackedStatus}; use git::{ @@ -27,7 +28,7 @@ use std::{ sync::Arc, }; use theme::ActiveTheme; -use ui::{ButtonLike, DiffStat, Tooltip, prelude::*}; +use ui::{DiffStat, Divider, Tooltip, prelude::*}; use util::{ResultExt, paths::PathStyle, rel_path::RelPath, truncate_and_trailoff}; use workspace::item::TabTooltipContent; use workspace::{ @@ -450,6 +451,7 @@ impl CommitView { fn render_header(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let commit = &self.commit; let author_name = commit.author_name.clone(); + let author_email = commit.author_email.clone(); let 
commit_sha = commit.sha.clone(); let commit_date = time::OffsetDateTime::from_unix_timestamp(commit.commit_timestamp) .unwrap_or_else(|_| time::OffsetDateTime::now_utc()); @@ -461,36 +463,6 @@ impl CommitView { time_format::TimestampFormat::MediumAbsolute, ); - let remote_info = self - .remote - .as_ref() - .filter(|_| self.stash.is_none()) - .map(|remote| { - let provider = remote.host.name(); - let parsed_remote = ParsedGitRemote { - owner: remote.owner.as_ref().into(), - repo: remote.repo.as_ref().into(), - }; - let params = BuildCommitPermalinkParams { sha: &commit.sha }; - let url = remote - .host - .build_commit_permalink(&parsed_remote, params) - .to_string(); - (provider, url) - }); - - let (additions, deletions) = self.calculate_changed_lines(cx); - - let commit_diff_stat = if additions > 0 || deletions > 0 { - Some(DiffStat::new( - "commit-diff-stat", - additions as usize, - deletions as usize, - )) - } else { - None - }; - let gutter_width = self.editor.update(cx, |editor, cx| { let snapshot = editor.snapshot(window, cx); let style = editor.style(cx); @@ -501,116 +473,75 @@ impl CommitView { .full_width() }); - let clipboard_has_link = cx + let clipboard_has_sha = cx .read_from_clipboard() .and_then(|entry| entry.text()) .map_or(false, |clipboard_text| { clipboard_text.trim() == commit_sha.as_ref() }); - let (copy_icon, copy_icon_color) = if clipboard_has_link { + let (copy_icon, copy_icon_color) = if clipboard_has_sha { (IconName::Check, Color::Success) } else { (IconName::Copy, Color::Muted) }; h_flex() + .py_2() + .pr_2p5() + .w_full() + .justify_between() .border_b_1() .border_color(cx.theme().colors().border_variant) - .w_full() - .child( - h_flex() - .w(gutter_width) - .justify_center() - .child(self.render_commit_avatar(&commit.sha, rems_from_px(48.), window, cx)), - ) .child( h_flex() - .py_4() - .pl_1() - .pr_4() - .w_full() - .items_start() - .justify_between() - .flex_wrap() + .child(h_flex().w(gutter_width).justify_center().child( + 
self.render_commit_avatar(&commit.sha, rems_from_px(40.), window, cx), + )) .child( - v_flex() - .child( - h_flex() - .gap_1() - .child(Label::new(author_name).color(Color::Default)) - .child({ - ButtonLike::new("sha") - .child( - h_flex() - .group("sha_btn") - .size_full() - .max_w_32() - .gap_0p5() - .child( - Label::new(commit_sha.clone()) - .color(Color::Muted) - .size(LabelSize::Small) - .truncate() - .buffer_font(cx), - ) - .child( - div().visible_on_hover("sha_btn").child( - Icon::new(copy_icon) - .color(copy_icon_color) - .size(IconSize::Small), - ), - ), - ) - .tooltip({ - let commit_sha = commit_sha.clone(); - move |_, cx| { - Tooltip::with_meta( - "Copy Commit SHA", - None, - commit_sha.clone(), - cx, - ) - } - }) - .on_click(move |_, _, cx| { - cx.stop_propagation(); - cx.write_to_clipboard(ClipboardItem::new_string( - commit_sha.to_string(), - )); - }) - }), - ) - .child( - h_flex() - .gap_1p5() - .child( - Label::new(date_string) - .color(Color::Muted) - .size(LabelSize::Small), - ) - .child( - Label::new("•") - .color(Color::Ignored) - .size(LabelSize::Small), - ) - .children(commit_diff_stat), - ), - ) - .children(remote_info.map(|(provider_name, url)| { - let icon = match provider_name.as_str() { - "GitHub" => IconName::Github, - _ => IconName::Link, - }; - - Button::new("view_on_provider", format!("View on {}", provider_name)) - .icon(icon) - .icon_color(Color::Muted) - .icon_size(IconSize::Small) - .icon_position(IconPosition::Start) - .on_click(move |_, _, cx| cx.open_url(&url)) - })), + v_flex().child(Label::new(author_name)).child( + h_flex() + .gap_1p5() + .child( + Label::new(date_string) + .color(Color::Muted) + .size(LabelSize::Small), + ) + .child( + Label::new("•") + .size(LabelSize::Small) + .color(Color::Muted) + .alpha(0.5), + ) + .child( + Label::new(author_email) + .color(Color::Muted) + .size(LabelSize::Small), + ), + ), + ), ) + .when(self.stash.is_none(), |this| { + this.child( + Button::new("sha", "Commit SHA") + 
.icon(copy_icon) + .icon_color(copy_icon_color) + .icon_position(IconPosition::Start) + .icon_size(IconSize::Small) + .tooltip({ + let commit_sha = commit_sha.clone(); + move |_, cx| { + Tooltip::with_meta("Copy Commit SHA", None, commit_sha.clone(), cx) + } + }) + .on_click(move |_, _, cx| { + cx.stop_propagation(); + cx.write_to_clipboard(ClipboardItem::new_string( + commit_sha.to_string(), + )); + }), + ) + }) } fn apply_stash(workspace: &mut Workspace, window: &mut Window, cx: &mut App) { @@ -898,7 +829,7 @@ impl Item for CommitView { type Event = EditorEvent; fn tab_icon(&self, _window: &Window, _cx: &App) -> Option { - Some(Icon::new(IconName::GitBranch).color(Color::Muted)) + Some(Icon::new(IconName::GitCommit).color(Color::Muted)) } fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement { @@ -1081,8 +1012,93 @@ impl CommitViewToolbar { impl EventEmitter for CommitViewToolbar {} impl Render for CommitViewToolbar { - fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { - div().hidden() + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let Some(commit_view) = self.commit_view.as_ref().and_then(|w| w.upgrade()) else { + return div(); + }; + + let commit_view_ref = commit_view.read(cx); + let is_stash = commit_view_ref.stash.is_some(); + + let (additions, deletions) = commit_view_ref.calculate_changed_lines(cx); + + let commit_sha = commit_view_ref.commit.sha.clone(); + + let remote_info = commit_view_ref.remote.as_ref().map(|remote| { + let provider = remote.host.name(); + let parsed_remote = ParsedGitRemote { + owner: remote.owner.as_ref().into(), + repo: remote.repo.as_ref().into(), + }; + let params = BuildCommitPermalinkParams { sha: &commit_sha }; + let url = remote + .host + .build_commit_permalink(&parsed_remote, params) + .to_string(); + (provider, url) + }); + + let sha_for_graph = commit_sha.to_string(); + + h_flex() + .gap_1() + .when(additions > 0 
|| deletions > 0, |this| { + this.child( + h_flex() + .gap_2() + .child(DiffStat::new( + "toolbar-diff-stat", + additions as usize, + deletions as usize, + )) + .child(Divider::vertical()), + ) + }) + .child( + IconButton::new("buffer-search", IconName::MagnifyingGlass) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action( + "Buffer Search", + &zed_actions::buffer_search::Deploy::find(), + cx, + ) + }) + .on_click(|_, window, cx| { + window.dispatch_action( + Box::new(zed_actions::buffer_search::Deploy::find()), + cx, + ); + }), + ) + .when(!is_stash, |this| { + this.when(cx.has_flag::(), |this| { + this.child( + IconButton::new("show-in-git-graph", IconName::GitGraph) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Show in Git Graph")) + .on_click(move |_, window, cx| { + window.dispatch_action( + Box::new(crate::git_panel::OpenAtCommit { + sha: sha_for_graph.clone(), + }), + cx, + ); + }), + ) + }) + .children(remote_info.map(|(provider_name, url)| { + let icon = match provider_name.as_str() { + "GitHub" => IconName::Github, + _ => IconName::Link, + }; + + IconButton::new("view_on_provider", icon) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text(format!("View on {}", provider_name))) + .on_click(move |_, _, cx| cx.open_url(&url)) + })) + }) } } @@ -1093,12 +1109,11 @@ impl ToolbarItemView for CommitViewToolbar { _: &mut Window, cx: &mut Context, ) -> ToolbarItemLocation { - if let Some(entity) = active_pane_item.and_then(|i| i.act_as::(cx)) - && entity.read(cx).stash.is_some() - { + if let Some(entity) = active_pane_item.and_then(|i| i.act_as::(cx)) { self.commit_view = Some(entity.downgrade()); return ToolbarItemLocation::PrimaryRight; } + self.commit_view = None; ToolbarItemLocation::Hidden } diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index fe7d8975010ecf1055bb45e6986ecca363314e2e..b86fa0196ae786db7a981427628295c4f9d81061 100644 --- a/crates/git_ui/src/git_panel.rs +++ 
b/crates/git_ui/src/git_panel.rs @@ -123,6 +123,13 @@ actions!( ] ); +/// Opens the Git Graph Tab at a specific commit. +#[derive(Clone, PartialEq, serde::Deserialize, schemars::JsonSchema, gpui::Action)] +#[action(namespace = git_graph)] +pub struct OpenAtCommit { + pub sha: String, +} + fn prompt( msg: &str, detail: Option<&str>, diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 9ed9a8b658cc8bbf89c9d14d131fc8faefbc80ed..d6356f831ea9bbbaec5313da1a5b56f101471411 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -142,6 +142,7 @@ pub enum IconName { GitBranch, GitBranchAlt, GitBranchPlus, + GitCommit, GitGraph, Github, Hash, From 94d66ff137e4b417dd81d12d003872d92e5abfa2 Mon Sep 17 00:00:00 2001 From: Davide Scaccia Date: Wed, 25 Feb 2026 16:09:30 +0100 Subject: [PATCH 072/548] project_panel: Add diagnostic count badges (#49802) Discussed in #6668 specifically this comment from @zackangelo: > The biggest thing keeping me from using Zed as a daily driver is error indication in the project panel. When I'm making big project-wide changes I can't clearly see which files have errors (in editors like VSCode the filenames turn red). > VSCode seems to use a letter on the right gutter to indicate git status and a number next to it to indicate diagnostic status. The color indicates either. This PR implements that, I added an opt-in `diagnostic_badges` setting (default is false) that shows error and warning counts as colored labels on the right side of each project panel entry. Counts bubble up to parent directories. When `diagnostic_badges` is enabled, diagnostic severity takes priority over git status for entry text color. Since warnings and git-modified share the same yellow, git status with this option on is readable through the file icon decoration and the absence of a number badge on the right. 
Example: image image Release Notes: - Added diagnostic count badges to the project panel, displaying error and warning counts next to file names. You can modify this setting using the `diagnostic_badges` option, which is enabled by default. --------- Co-authored-by: Smit Barmase --- assets/settings/default.json | 2 + crates/project_panel/src/project_panel.rs | 119 +++++++++++++++--- .../src/project_panel_settings.rs | 6 +- crates/settings/src/vscode_import.rs | 1 + crates/settings_content/src/workspace.rs | 4 + crates/settings_ui/src/page_data.rs | 24 +++- 6 files changed, 136 insertions(+), 20 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 0a57472a5f21657cab89bd3e6f64e259a4a220e6..9dc077fb29458089e68061d5bd121ed9770108d7 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -799,6 +799,8 @@ // 3. Show files first, then directories: // "files_first" "sort_mode": "directories_first", + // Whether to show error and warning count badges next to file names in the project panel. + "diagnostic_badges": true, // Whether to enable drag-and-drop operations in the project panel. "drag_and_drop": true, // Whether to hide the root entry when only one folder is open in the window; diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 6c0c10c0715a35de25efaa7f6fddbcb5c0257934..e11c04755e59b7d62ea16340d6ed23bdb36daf6d 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -146,6 +146,7 @@ pub struct ProjectPanel { width: Option, pending_serialization: Task>, diagnostics: HashMap<(WorktreeId, Arc), DiagnosticSeverity>, + diagnostic_counts: HashMap<(WorktreeId, Arc), DiagnosticCount>, diagnostic_summary_update: Task<()>, // We keep track of the mouse down state on entries so we don't flash the UI // in case a user clicks to open a file. 
@@ -232,6 +233,30 @@ enum ClipboardEntry { Cut(BTreeSet), } +#[derive(Debug, Default, PartialEq, Eq, Clone, Copy)] +struct DiagnosticCount { + error_count: usize, + warning_count: usize, +} + +impl DiagnosticCount { + fn capped_error_count(&self) -> String { + Self::capped_count(self.error_count) + } + + fn capped_warning_count(&self) -> String { + Self::capped_count(self.warning_count) + } + + fn capped_count(count: usize) -> String { + if count > 99 { + "99+".to_string() + } else { + count.to_string() + } + } +} + #[derive(Debug, PartialEq, Eq, Clone)] struct EntryDetails { filename: String, @@ -249,6 +274,7 @@ struct EntryDetails { sticky: Option, filename_text_color: Color, diagnostic_severity: Option, + diagnostic_count: Option, git_status: GitSummary, is_private: bool, worktree_id: WorktreeId, @@ -847,6 +873,7 @@ impl ProjectPanel { width: None, pending_serialization: Task::ready(None), diagnostics: Default::default(), + diagnostic_counts: Default::default(), diagnostic_summary_update: Task::ready(()), scroll_handle, mouse_down: false, @@ -1029,6 +1056,26 @@ impl ProjectPanel { }); } self.diagnostics = diagnostics; + + let diagnostic_badges = ProjectPanelSettings::get_global(cx).diagnostic_badges; + self.diagnostic_counts = + if diagnostic_badges && show_diagnostics_setting != ShowDiagnostics::Off { + self.project.read(cx).diagnostic_summaries(false, cx).fold( + HashMap::default(), + |mut counts, (project_path, _, summary)| { + let entry = counts + .entry((project_path.worktree_id, project_path.path)) + .or_default(); + entry.error_count += summary.error_count; + if show_diagnostics_setting == ShowDiagnostics::All { + entry.warning_count += summary.warning_count; + } + counts + }, + ) + } else { + Default::default() + }; } fn update_strongest_diagnostic_severity( @@ -5044,6 +5091,7 @@ impl ProjectPanel { let filename_text_color = details.filename_text_color; let diagnostic_severity = details.diagnostic_severity; + let diagnostic_count = 
details.diagnostic_count; let item_colors = get_item_color(is_sticky, cx); let canonical_path = details @@ -5482,22 +5530,55 @@ impl ProjectPanel { ProjectPanelEntrySpacing::Standard => ListItemSpacing::ExtraDense, }) .selectable(false) - .when_some(canonical_path, |this, path| { - this.end_slot::( - div() - .id("symlink_icon") - .pr_3() - .tooltip(move |_window, cx| { - Tooltip::with_meta(path.to_string(), None, "Symbolic Link", cx) - }) - .child( - Icon::new(IconName::ArrowUpRight) - .size(IconSize::Indicator) - .color(filename_text_color), - ) - .into_any_element(), - ) - }) + .when( + canonical_path.is_some() || diagnostic_count.is_some(), + |this| { + let symlink_element = canonical_path.map(|path| { + div() + .id("symlink_icon") + .tooltip(move |_window, cx| { + Tooltip::with_meta( + path.to_string(), + None, + "Symbolic Link", + cx, + ) + }) + .child( + Icon::new(IconName::ArrowUpRight) + .size(IconSize::Indicator) + .color(filename_text_color), + ) + }); + this.end_slot::( + h_flex() + .gap_1() + .flex_none() + .pr_3() + .when_some(diagnostic_count, |this, count| { + this.when(count.error_count > 0, |this| { + this.child( + Label::new(count.capped_error_count()) + .size(LabelSize::Small) + .color(Color::Error), + ) + }) + .when( + count.warning_count > 0, + |this| { + this.child( + Label::new(count.capped_warning_count()) + .size(LabelSize::Small) + .color(Color::Warning), + ) + }, + ) + }) + .when_some(symlink_element, |this, el| this.child(el)) + .into_any_element(), + ) + }, + ) .child(if let Some(icon) = &icon { if let Some((_, decoration_color)) = entry_diagnostic_aware_icon_decoration_and_color(diagnostic_severity) @@ -5907,6 +5988,11 @@ impl ProjectPanel { .get(&(worktree_id, entry.path.clone())) .cloned(); + let diagnostic_count = self + .diagnostic_counts + .get(&(worktree_id, entry.path.clone())) + .copied(); + let filename_text_color = entry_git_aware_label_color(git_status, entry.is_ignored, is_marked); @@ -5931,6 +6017,7 @@ impl ProjectPanel { 
sticky, filename_text_color, diagnostic_severity, + diagnostic_count, git_status, is_private: entry.is_private, worktree_id, diff --git a/crates/project_panel/src/project_panel_settings.rs b/crates/project_panel/src/project_panel_settings.rs index 6b6b7a377276a9fb8b812e495a07a6c4c7aac15e..0d703c55c06dfff2976fe59f6e030ad9eb1d758b 100644 --- a/crates/project_panel/src/project_panel_settings.rs +++ b/crates/project_panel/src/project_panel_settings.rs @@ -35,6 +35,7 @@ pub struct ProjectPanelSettings { pub drag_and_drop: bool, pub auto_open: AutoOpenSettings, pub sort_mode: ProjectPanelSortMode, + pub diagnostic_badges: bool, } #[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] @@ -125,9 +126,8 @@ impl Settings for ProjectPanelSettings { on_drop: auto_open.on_drop.unwrap(), } }, - sort_mode: project_panel - .sort_mode - .unwrap_or(ProjectPanelSortMode::DirectoriesFirst), + sort_mode: project_panel.sort_mode.unwrap(), + diagnostic_badges: project_panel.diagnostic_badges.unwrap(), } } } diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index d0643be3bbee82be02c9c461a5f18ba62893a3cd..8a5a497d265c02787d6944915c0dba56e2381a79 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -801,6 +801,7 @@ impl VsCodeSettings { starts_open: None, sticky_scroll: None, auto_open: None, + diagnostic_badges: None, }; if let (Some(false), Some(false)) = ( diff --git a/crates/settings_content/src/workspace.rs b/crates/settings_content/src/workspace.rs index 3778ccc0373f4b937a08e3a435de40ad6a6d2cff..7262a83b384665b0bcd868bf14dbfaa2928a35c1 100644 --- a/crates/settings_content/src/workspace.rs +++ b/crates/settings_content/src/workspace.rs @@ -739,6 +739,10 @@ pub struct ProjectPanelSettingsContent { /// /// Default: directories_first pub sort_mode: Option, + /// Whether to show error and warning count badges next to file names in the project panel. 
+ /// + /// Default: true + pub diagnostic_badges: Option, } #[derive( diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 738eff917bc57a7a2543f9c31494af02883299d1..40bc8705920e5d30d69a22cf8967a8931181db9b 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -4256,7 +4256,7 @@ fn window_and_layout_page() -> SettingsPage { } fn panels_page() -> SettingsPage { - fn project_panel_section() -> [SettingsPageItem; 21] { + fn project_panel_section() -> [SettingsPageItem; 22] { [ SettingsPageItem::SectionHeader("Project Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -4556,6 +4556,28 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Diagnostic Badges", + description: "Show error and warning count badges next to file names in the project panel.", + field: Box::new(SettingField { + json_path: Some("project_panel.diagnostic_badges"), + pick: |settings_content| { + settings_content + .project_panel + .as_ref()? + .diagnostic_badges + .as_ref() + }, + write: |settings_content, value| { + settings_content + .project_panel + .get_or_insert_default() + .diagnostic_badges = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Sticky Scroll", description: "Whether to stick parent directories at top of the project panel.", From 54ac5323880e35b225540eae6c84f52d9555bb0a Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 25 Feb 2026 16:12:00 +0100 Subject: [PATCH 073/548] agent_servers: Use agent display_name for session titles (#50092) Use a separate `display_name` field (distinct from `server_name`) so that session titles show a human-readable name. For custom agents this resolves to the configured display name; for built-ins it falls back to the server name. 
Release Notes: - N/A --- crates/agent_servers/src/acp.rs | 19 ++++++++++++++++--- crates/agent_servers/src/claude.rs | 1 + crates/agent_servers/src/codex.rs | 1 + crates/agent_servers/src/custom.rs | 6 ++++++ crates/agent_servers/src/gemini.rs | 1 + 5 files changed, 25 insertions(+), 3 deletions(-) diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index bb7c21083e1ee411d805d34e5676f4300dd5dce9..f64b7c8679484c5edfcedcd82cce3e34f7ae5916 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -36,6 +36,7 @@ pub struct UnsupportedVersion; pub struct AcpConnection { server_name: SharedString, + display_name: SharedString, telemetry_id: SharedString, connection: Rc, sessions: Rc>>, @@ -158,6 +159,7 @@ impl AgentSessionList for AcpSessionList { pub async fn connect( server_name: SharedString, + display_name: SharedString, command: AgentServerCommand, root_dir: &Path, default_mode: Option, @@ -168,6 +170,7 @@ pub async fn connect( ) -> Result> { let conn = AcpConnection::stdio( server_name, + display_name, command.clone(), root_dir, default_mode, @@ -185,6 +188,7 @@ const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::ProtocolVersion::V1 impl AcpConnection { pub async fn stdio( server_name: SharedString, + display_name: SharedString, command: AgentServerCommand, root_dir: &Path, default_mode: Option, @@ -330,6 +334,7 @@ impl AcpConnection { root_dir: root_dir.to_owned(), connection, server_name, + display_name, telemetry_id, sessions, agent_capabilities: response.agent_capabilities, @@ -550,7 +555,7 @@ impl AgentConnection for AcpConnection { let thread: Entity = cx.new(|cx| { AcpThread::new( None, - self.server_name.clone(), + self.display_name.clone(), self.clone(), project, action_log, @@ -603,10 +608,14 @@ impl AgentConnection for AcpConnection { let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); + let title 
= session + .title + .clone() + .unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, - self.server_name.clone(), + title, self.clone(), project, action_log, @@ -676,10 +685,14 @@ impl AgentConnection for AcpConnection { let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); + let title = session + .title + .clone() + .unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, - self.server_name.clone(), + title, self.clone(), project, action_log, diff --git a/crates/agent_servers/src/claude.rs b/crates/agent_servers/src/claude.rs index f711708383fb6bdce42b08b26f0aa37ce173d9c3..51063bc73fb07b3bbe0dfbd0b8efc36b5efa0534 100644 --- a/crates/agent_servers/src/claude.rs +++ b/crates/agent_servers/src/claude.rs @@ -244,6 +244,7 @@ impl AgentServer for ClaudeCode { })?? .await?; let connection = crate::acp::connect( + name.clone(), name, command, root_dir.as_ref(), diff --git a/crates/agent_servers/src/codex.rs b/crates/agent_servers/src/codex.rs index 49deaa6192bb96bd41113f696e53a45a9e31dec0..f04ab868ce04819bcd1a2f495d3151d0305fceb9 100644 --- a/crates/agent_servers/src/codex.rs +++ b/crates/agent_servers/src/codex.rs @@ -248,6 +248,7 @@ impl AgentServer for Codex { .await?; let connection = crate::acp::connect( + name.clone(), name, command, root_dir.as_ref(), diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs index dc65a65fc74ce303393b6cca43836e000f1dafa9..65f45d74a499e5ffcd26e7ac7a7d8a52e40aec5b 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -332,6 +332,11 @@ impl AgentServer for CustomAgentServer { cx: &mut App, ) -> Task, Option)>> { let name = self.name(); + let display_name = delegate + .store + .read(cx) + .agent_display_name(&ExternalAgentServerName(name.clone())) + .unwrap_or_else(|| name.clone()); 
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); let is_remote = delegate.project.read(cx).is_via_remote_server(); let default_mode = self.default_mode(cx); @@ -399,6 +404,7 @@ impl AgentServer for CustomAgentServer { .await?; let connection = crate::acp::connect( name, + display_name, command, root_dir.as_ref(), default_mode, diff --git a/crates/agent_servers/src/gemini.rs b/crates/agent_servers/src/gemini.rs index 1805e64a3a94dddd2b7b3c8762123b98a384ec23..87404ebf5b509e61cf4d71567e6df14cf66e8808 100644 --- a/crates/agent_servers/src/gemini.rs +++ b/crates/agent_servers/src/gemini.rs @@ -87,6 +87,7 @@ impl AgentServer for Gemini { .await?; let connection = crate::acp::connect( + name.clone(), name, command, root_dir.as_ref(), From aa3a12b566da85006ed2cf4c6274b2cdea6877d2 Mon Sep 17 00:00:00 2001 From: Cameron Mcloughlin Date: Wed, 25 Feb 2026 15:14:20 +0000 Subject: [PATCH 074/548] sidebar: Zoom temporarily hides sidebar (#50088) Using zoom now temporarily hides the sidebar. Unzooming reopens the sidebar Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/workspace/src/multi_workspace.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index 478e33418c68efa1cf5da0868fe16d6dec693447..567c24f5d73887289445fb8367bdd950097ba073 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -623,6 +623,7 @@ impl MultiWorkspace { impl Render for MultiWorkspace { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let multi_workspace_enabled = self.multi_workspace_enabled(cx); + let is_zoomed = self.workspace().read(cx).zoomed_item().is_some(); let sidebar: Option = if multi_workspace_enabled && self.sidebar_open { self.sidebar.as_ref().map(|sidebar_handle| { @@ -676,6 +677,7 @@ impl Render for MultiWorkspace { client_side_decorations( h_flex() .key_context("Workspace") + .relative() .size_full() .on_action( cx.listener(|this: &mut Self, _: &NewWorkspaceInWindow, window, cx| { @@ -722,12 +724,13 @@ impl Render for MultiWorkspace { .flex_1() .size_full() .overflow_hidden() + .when(is_zoomed, |this| this.absolute().inset_0()) .child(self.workspace().clone()), ), window, cx, Tiling { - left: multi_workspace_enabled && self.sidebar_open, + left: multi_workspace_enabled && self.sidebar_open && !is_zoomed, ..Tiling::default() }, ) From ae53f5651e22ee923dd578e5e6e2edcc5374470e Mon Sep 17 00:00:00 2001 From: Peter Tripp Date: Wed, 25 Feb 2026 10:18:42 -0500 Subject: [PATCH 075/548] Redact environment variables from debugger errors (#50008) Closes #50007 - Follow-up to: https://github.com/zed-industries/zed/pull/44783 Release Notes: - Improved redaction of sensitive environment variables from debugger error logs. 
--- crates/debugger_ui/src/debugger_panel.rs | 6 ++++-- crates/util/src/process.rs | 14 ++++++++++++-- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/crates/debugger_ui/src/debugger_panel.rs b/crates/debugger_ui/src/debugger_panel.rs index d0b744bfe793ea31367cc48178903190a9becf59..cac96918e32cde4770bedac69fb92a08825e3b25 100644 --- a/crates/debugger_ui/src/debugger_panel.rs +++ b/crates/debugger_ui/src/debugger_panel.rs @@ -35,6 +35,7 @@ use tree_sitter::{Query, StreamingIterator as _}; use ui::{ ContextMenu, Divider, PopoverMenu, PopoverMenuHandle, SplitButton, Tab, Tooltip, prelude::*, }; +use util::redact::redact_command; use util::rel_path::RelPath; use util::{ResultExt, debug_panic, maybe}; use workspace::SplitDirection; @@ -275,12 +276,13 @@ impl DebugPanel { async move |_, cx| { if let Err(error) = task.await { - log::error!("{error:#}"); + let redacted_error = redact_command(&format!("{error:#}")); + log::error!("{redacted_error}"); session .update(cx, |session, cx| { session .console_output(cx) - .unbounded_send(format!("error: {:#}", error)) + .unbounded_send(format!("error: {:#}", redacted_error)) .ok(); session.shutdown(cx) }) diff --git a/crates/util/src/process.rs b/crates/util/src/process.rs index 6c3d4e0c41eaeabf4e0d485e4d70dd340ae7afc9..eaf543dbd817ba9b30e42eb17b7115aec39d44c9 100644 --- a/crates/util/src/process.rs +++ b/crates/util/src/process.rs @@ -36,7 +36,12 @@ impl Child { .stdout(stdout) .stderr(stderr) .spawn() - .with_context(|| format!("failed to spawn command {command:?}"))?; + .with_context(|| { + format!( + "failed to spawn command {}", + crate::redact::redact_command(&format!("{command:?}")) + ) + })?; Ok(Self { process }) } @@ -55,7 +60,12 @@ impl Child { .stdout(stdout) .stderr(stderr) .spawn() - .with_context(|| format!("failed to spawn command {command:?}"))?; + .with_context(|| { + format!( + "failed to spawn command {}", + crate::redact::redact_command(&format!("{command:?}")) + ) + })?; Ok(Self { process }) } 
From 8b41a64f60f459298b7a5515ff2444b61deb4e86 Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 25 Feb 2026 10:27:21 -0500 Subject: [PATCH 076/548] Bump Zed to v0.227 (#50095) Release Notes: - N/A --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dae0fef9c224c0dda72996dc2c58dc75768569fa..a995f9c5c15ee1f5f2bfcdce20c144c2eb6a2fa8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21529,7 +21529,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.226.0" +version = "0.227.0" dependencies = [ "acp_thread", "acp_tools", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 856c9b9dab4884773ec7d53dd210e81bbc4bedbf..c326c1bbf5acac632dd5bc8ea2e40eb7bc1a6703 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition.workspace = true name = "zed" -version = "0.226.0" +version = "0.227.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] From afadd4bca4234d4da8538822064ec7ce8fa9835d Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 25 Feb 2026 16:40:10 +0100 Subject: [PATCH 077/548] agent_server: Remove root_dir from agent server connect APIs (#50093) This isn't necessary and allows us to potentially share processes across threads. 
Release Notes: - N/A --- crates/agent/src/native_agent_server.rs | 8 +- crates/agent/src/thread_store.rs | 29 ----- crates/agent_servers/src/acp.rs | 15 --- crates/agent_servers/src/agent_servers.rs | 3 +- crates/agent_servers/src/claude.rs | 9 +- crates/agent_servers/src/codex.rs | 10 +- crates/agent_servers/src/custom.rs | 10 +- crates/agent_servers/src/e2e_tests.rs | 5 +- crates/agent_servers/src/gemini.rs | 10 +- crates/agent_ui/src/acp/thread_view.rs | 15 +-- crates/agent_ui/src/mention_set.rs | 2 +- crates/project/src/agent_server_store.rs | 107 +++++------------- .../tests/integration/ext_agent_tests.rs | 4 +- .../integration/extension_agent_tests.rs | 4 +- .../remote_server/src/remote_editing_tests.rs | 4 +- crates/zed/src/visual_test_runner.rs | 1 - 16 files changed, 46 insertions(+), 190 deletions(-) diff --git a/crates/agent/src/native_agent_server.rs b/crates/agent/src/native_agent_server.rs index 4d8bdaf698cb6bc50f6080c9b029954242a56f14..cdd017f76a1840edc2742588131f5ba57d968d85 100644 --- a/crates/agent/src/native_agent_server.rs +++ b/crates/agent/src/native_agent_server.rs @@ -1,4 +1,4 @@ -use std::{any::Any, path::Path, rc::Rc, sync::Arc}; +use std::{any::Any, rc::Rc, sync::Arc}; use agent_client_protocol as acp; use agent_servers::{AgentServer, AgentServerDelegate}; @@ -35,7 +35,6 @@ impl AgentServer for NativeAgentServer { fn connect( &self, - _root_dir: Option<&Path>, delegate: AgentServerDelegate, cx: &mut App, ) -> Task< @@ -44,10 +43,7 @@ impl AgentServer for NativeAgentServer { Option, )>, > { - log::debug!( - "NativeAgentServer::connect called for path: {:?}", - _root_dir - ); + log::debug!("NativeAgentServer::connect"); let project = delegate.project().clone(); let fs = self.fs.clone(); let thread_store = self.thread_store.clone(); diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index 6add31fdb39302d3d02c250829dc14b0c10850af..8dd1ac36e8f6667ec5ecec2286d85ade2b12ee72 100644 --- 
a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -2,40 +2,11 @@ use crate::{DbThread, DbThreadMetadata, ThreadsDatabase}; use agent_client_protocol as acp; use anyhow::{Result, anyhow}; use gpui::{App, Context, Entity, Global, Task, prelude::*}; -use project::Project; -use std::rc::Rc; struct GlobalThreadStore(Entity); impl Global for GlobalThreadStore {} -// TODO: Remove once ACP thread loading is fully handled elsewhere. -pub fn load_agent_thread( - session_id: acp::SessionId, - thread_store: Entity, - project: Entity, - cx: &mut App, -) -> Task>> { - use agent_servers::{AgentServer, AgentServerDelegate}; - - let server = Rc::new(crate::NativeAgentServer::new( - project.read(cx).fs().clone(), - thread_store, - )); - let delegate = AgentServerDelegate::new( - project.read(cx).agent_server_store().clone(), - project.clone(), - None, - None, - ); - let connection = server.connect(None, delegate, cx); - cx.spawn(async move |cx| { - let (agent, _) = connection.await?; - let agent = agent.downcast::().unwrap(); - cx.update(|cx| agent.load_thread(session_id, cx)).await - }) -} - pub struct ThreadStore { threads: Vec, } diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index f64b7c8679484c5edfcedcd82cce3e34f7ae5916..14b7616bd0405ab3eaa3b52e6c4c64f3691dc56a 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -45,7 +45,6 @@ pub struct AcpConnection { default_mode: Option, default_model: Option, default_config_options: HashMap, - root_dir: PathBuf, child: Child, session_list: Option>, _io_task: Task>, @@ -161,22 +160,18 @@ pub async fn connect( server_name: SharedString, display_name: SharedString, command: AgentServerCommand, - root_dir: &Path, default_mode: Option, default_model: Option, default_config_options: HashMap, - is_remote: bool, cx: &mut AsyncApp, ) -> Result> { let conn = AcpConnection::stdio( server_name, display_name, command.clone(), - root_dir, default_mode, 
default_model, default_config_options, - is_remote, cx, ) .await?; @@ -190,11 +185,9 @@ impl AcpConnection { server_name: SharedString, display_name: SharedString, command: AgentServerCommand, - root_dir: &Path, default_mode: Option, default_model: Option, default_config_options: HashMap, - is_remote: bool, cx: &mut AsyncApp, ) -> Result { let shell = cx.update(|cx| TerminalSettings::get(None, cx).shell.clone()); @@ -202,9 +195,6 @@ impl AcpConnection { let mut child = builder.build_std_command(Some(command.path.display().to_string()), &command.args); child.envs(command.env.iter().flatten()); - if !is_remote { - child.current_dir(root_dir); - } let mut child = Child::spawn(child, Stdio::piped(), Stdio::piped(), Stdio::piped())?; let stdout = child.stdout.take().context("Failed to take stdout")?; @@ -331,7 +321,6 @@ impl AcpConnection { Ok(Self { auth_methods: response.auth_methods, - root_dir: root_dir.to_owned(), connection, server_name, display_name, @@ -352,10 +341,6 @@ impl AcpConnection { pub fn prompt_capabilities(&self) -> &acp::PromptCapabilities { &self.agent_capabilities.prompt_capabilities } - - pub fn root_dir(&self) -> &Path { - &self.root_dir - } } impl Drop for AcpConnection { diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index 6877c93342c22db3426bcf497fd9d45fe15c14ef..15ab23adcfbea5a61838e15a4002db9fc8de06d6 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -22,7 +22,7 @@ use anyhow::Result; use gpui::{App, AppContext, Entity, SharedString, Task}; use project::Project; use settings::SettingsStore; -use std::{any::Any, path::Path, rc::Rc, sync::Arc}; +use std::{any::Any, rc::Rc, sync::Arc}; pub use acp::AcpConnection; @@ -58,7 +58,6 @@ pub trait AgentServer: Send { fn name(&self) -> SharedString; fn connect( &self, - root_dir: Option<&Path>, delegate: AgentServerDelegate, cx: &mut App, ) -> Task, Option)>>; diff --git 
a/crates/agent_servers/src/claude.rs b/crates/agent_servers/src/claude.rs index 51063bc73fb07b3bbe0dfbd0b8efc36b5efa0534..6b491d6808c8ebfc36af2495a5581f502ba3c961 100644 --- a/crates/agent_servers/src/claude.rs +++ b/crates/agent_servers/src/claude.rs @@ -2,7 +2,6 @@ use agent_client_protocol as acp; use collections::HashSet; use fs::Fs; use settings::{SettingsStore, update_settings_file}; -use std::path::Path; use std::rc::Rc; use std::sync::Arc; use std::{any::Any, path::PathBuf}; @@ -208,13 +207,10 @@ impl AgentServer for ClaudeCode { fn connect( &self, - root_dir: Option<&Path>, delegate: AgentServerDelegate, cx: &mut App, ) -> Task, Option)>> { let name = self.name(); - let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); - let is_remote = delegate.project.read(cx).is_via_remote_server(); let store = delegate.store.downgrade(); let extra_env = load_proxy_env(cx); let default_mode = self.default_mode(cx); @@ -229,13 +225,12 @@ impl AgentServer for ClaudeCode { }); cx.spawn(async move |cx| { - let (command, root_dir, login) = store + let (command, login) = store .update(cx, |store, cx| { let agent = store .get_external_agent(&CLAUDE_AGENT_NAME.into()) .context("Claude Agent is not registered")?; anyhow::Ok(agent.get_command( - root_dir.as_deref(), extra_env, delegate.status_tx, delegate.new_version_available, @@ -247,11 +242,9 @@ impl AgentServer for ClaudeCode { name.clone(), name, command, - root_dir.as_ref(), default_mode, default_model, default_config_options, - is_remote, cx, ) .await?; diff --git a/crates/agent_servers/src/codex.rs b/crates/agent_servers/src/codex.rs index f04ab868ce04819bcd1a2f495d3151d0305fceb9..587e207a82bbb0401c7c2bfa4a8199a21afd2da4 100644 --- a/crates/agent_servers/src/codex.rs +++ b/crates/agent_servers/src/codex.rs @@ -1,6 +1,6 @@ +use std::any::Any; use std::rc::Rc; use std::sync::Arc; -use std::{any::Any, path::Path}; use acp_thread::AgentConnection; use agent_client_protocol as acp; @@ -205,13 +205,10 
@@ impl AgentServer for Codex { fn connect( &self, - root_dir: Option<&Path>, delegate: AgentServerDelegate, cx: &mut App, ) -> Task, Option)>> { let name = self.name(); - let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); - let is_remote = delegate.project.read(cx).is_via_remote_server(); let store = delegate.store.downgrade(); let mut extra_env = load_proxy_env(cx); let default_mode = self.default_mode(cx); @@ -232,13 +229,12 @@ impl AgentServer for Codex { } cx.spawn(async move |cx| { - let (command, root_dir, login) = store + let (command, login) = store .update(cx, |store, cx| { let agent = store .get_external_agent(&CODEX_NAME.into()) .context("Codex is not registered")?; anyhow::Ok(agent.get_command( - root_dir.as_deref(), extra_env, delegate.status_tx, delegate.new_version_available, @@ -251,11 +247,9 @@ impl AgentServer for Codex { name.clone(), name, command, - root_dir.as_ref(), default_mode, default_model, default_config_options, - is_remote, cx, ) .await?; diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs index 65f45d74a499e5ffcd26e7ac7a7d8a52e40aec5b..0dd4df7e7b33493f9f5efff041d3076a91eb21e6 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -7,7 +7,7 @@ use fs::Fs; use gpui::{App, AppContext as _, SharedString, Task}; use project::agent_server_store::{AllAgentServersSettings, ExternalAgentServerName}; use settings::{SettingsStore, update_settings_file}; -use std::{path::Path, rc::Rc, sync::Arc}; +use std::{rc::Rc, sync::Arc}; use ui::IconName; /// A generic agent server implementation for custom user-defined agents @@ -327,7 +327,6 @@ impl AgentServer for CustomAgentServer { fn connect( &self, - root_dir: Option<&Path>, delegate: AgentServerDelegate, cx: &mut App, ) -> Task, Option)>> { @@ -337,8 +336,6 @@ impl AgentServer for CustomAgentServer { .read(cx) .agent_display_name(&ExternalAgentServerName(name.clone())) .unwrap_or_else(|| 
name.clone()); - let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); - let is_remote = delegate.project.read(cx).is_via_remote_server(); let default_mode = self.default_mode(cx); let default_model = self.default_model(cx); let (default_config_options, is_registry_agent) = @@ -386,7 +383,7 @@ impl AgentServer for CustomAgentServer { let store = delegate.store.downgrade(); let extra_env = load_proxy_env(cx); cx.spawn(async move |cx| { - let (command, root_dir, login) = store + let (command, login) = store .update(cx, |store, cx| { let agent = store .get_external_agent(&ExternalAgentServerName(name.clone())) @@ -394,7 +391,6 @@ impl AgentServer for CustomAgentServer { format!("Custom agent server `{}` is not registered", name) })?; anyhow::Ok(agent.get_command( - root_dir.as_deref(), extra_env, delegate.status_tx, delegate.new_version_available, @@ -406,11 +402,9 @@ impl AgentServer for CustomAgentServer { name, display_name, command, - root_dir.as_ref(), default_mode, default_model, default_config_options, - is_remote, cx, ) .await?; diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index 4fe068ee5a9b68ce87bba27fb82db967e7a8aa4a..bc91ff958c93b6f1d43cd5d84323b25638b26d36 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -444,10 +444,7 @@ pub async fn new_test_thread( let store = project.read_with(cx, |project, _| project.agent_server_store().clone()); let delegate = AgentServerDelegate::new(store, project.clone(), None, None); - let (connection, _) = cx - .update(|cx| server.connect(Some(current_dir.as_ref()), delegate, cx)) - .await - .unwrap(); + let (connection, _) = cx.update(|cx| server.connect(delegate, cx)).await.unwrap(); cx.update(|cx| connection.new_session(project.clone(), current_dir.as_ref(), cx)) .await diff --git a/crates/agent_servers/src/gemini.rs b/crates/agent_servers/src/gemini.rs index 
87404ebf5b509e61cf4d71567e6df14cf66e8808..6ec57500f8edd24c72c666282b8f360dd5069605 100644 --- a/crates/agent_servers/src/gemini.rs +++ b/crates/agent_servers/src/gemini.rs @@ -1,5 +1,5 @@ +use std::any::Any; use std::rc::Rc; -use std::{any::Any, path::Path}; use crate::{AgentServer, AgentServerDelegate, load_proxy_env}; use acp_thread::AgentConnection; @@ -45,13 +45,10 @@ impl AgentServer for Gemini { fn connect( &self, - root_dir: Option<&Path>, delegate: AgentServerDelegate, cx: &mut App, ) -> Task, Option)>> { let name = self.name(); - let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned()); - let is_remote = delegate.project.read(cx).is_via_remote_server(); let store = delegate.store.downgrade(); let mut extra_env = load_proxy_env(cx); let default_mode = self.default_mode(cx); @@ -71,13 +68,12 @@ impl AgentServer for Gemini { if let Some(api_key) = cx.update(api_key_for_gemini_cli).await.ok() { extra_env.insert("GEMINI_API_KEY".into(), api_key); } - let (command, root_dir, login) = store + let (command, login) = store .update(cx, |store, cx| { let agent = store .get_external_agent(&GEMINI_NAME.into()) .context("Gemini CLI is not registered")?; anyhow::Ok(agent.get_command( - root_dir.as_deref(), extra_env, delegate.status_tx, delegate.new_version_available, @@ -90,11 +86,9 @@ impl AgentServer for Gemini { name.clone(), name, command, - root_dir.as_ref(), default_mode, default_model, default_config_options, - is_remote, cx, ) .await?; diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 6ea3ec6317313822de78b3e08735250308eddbc4..f6f734158fdc5a3a2827d91d53513410d527b24c 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -569,7 +569,6 @@ impl AcpServerView { } }) .collect(); - let root_dir = worktree_roots.first().cloned(); let session_cwd = resume_thread .as_ref() .and_then(|resume| { @@ -584,7 +583,7 @@ impl AcpServerView { }) .map(|path| 
path.into()) }) - .or_else(|| root_dir.clone()) + .or_else(|| worktree_roots.first().cloned()) .unwrap_or_else(|| paths::home_dir().as_path().into()); let (status_tx, mut status_rx) = watch::channel("Loading…".into()); @@ -596,7 +595,7 @@ impl AcpServerView { Some(new_version_available_tx), ); - let connect_task = agent.connect(root_dir.as_deref(), delegate, cx); + let connect_task = agent.connect(delegate, cx); let load_task = cx.spawn_in(window, async move |this, cx| { let connection = match connect_task.await { Ok((connection, login)) => { @@ -1419,13 +1418,6 @@ impl AcpServerView { }) .unwrap_or_default(); - // Run SpawnInTerminal in the same dir as the ACP server - let cwd = connected - .connection - .clone() - .downcast::() - .map(|acp_conn| acp_conn.root_dir().to_path_buf()); - // Build SpawnInTerminal from _meta let login = task::SpawnInTerminal { id: task::TaskId(format!("external-agent-{}-login", label)), @@ -1434,7 +1426,6 @@ impl AcpServerView { command: Some(command.to_string()), args, command_label: label.to_string(), - cwd, env, use_new_terminal: true, allow_concurrent_runs: true, @@ -3624,7 +3615,6 @@ pub(crate) mod tests { fn connect( &self, - _root_dir: Option<&Path>, _delegate: AgentServerDelegate, _cx: &mut App, ) -> Task, Option)>> { @@ -3649,7 +3639,6 @@ pub(crate) mod tests { fn connect( &self, - _root_dir: Option<&Path>, _delegate: AgentServerDelegate, _cx: &mut App, ) -> Task, Option)>> { diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index b1dab681ade325d0d47fa9f9310cb3e98bf72974..2302417a3c5b9d54bf2070ed40119992ff35f24a 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -547,7 +547,7 @@ impl MentionSet { None, None, ); - let connection = server.connect(None, delegate, cx); + let connection = server.connect(delegate, cx); cx.spawn(async move |_, cx| { let (agent, _) = connection.await?; let agent = agent.downcast::().unwrap(); diff --git 
a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index 958c422dc70bd53e5a66f007f9ac43fd1c61bf27..c0f33d868c82d226fb1071a1edf33f838c1f2670 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -105,12 +105,11 @@ pub enum ExternalAgentSource { pub trait ExternalAgentServer { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, status_tx: Option>, new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>>; + ) -> Task)>>; fn as_any_mut(&mut self) -> &mut dyn Any; } @@ -799,7 +798,7 @@ impl AgentServerStore { envelope: TypedEnvelope, mut cx: AsyncApp, ) -> Result { - let (command, root_dir, login_command) = this + let (command, login_command) = this .update(&mut cx, |this, cx| { let AgentServerStoreState::Local { downstream_client, .. @@ -858,7 +857,6 @@ impl AgentServerStore { }) .unzip(); anyhow::Ok(agent.get_command( - envelope.payload.root_dir.as_deref(), HashMap::default(), status_tx, new_version_available_tx, @@ -873,7 +871,8 @@ impl AgentServerStore { .env .map(|env| env.into_iter().collect()) .unwrap_or_default(), - root_dir: root_dir, + // This is no longer used, but returned for backwards compatibility + root_dir: paths::home_dir().to_string_lossy().to_string(), login: login_command.map(|cmd| cmd.to_proto()), }) } @@ -1254,16 +1253,14 @@ struct RemoteExternalAgentServer { impl ExternalAgentServer for RemoteExternalAgentServer { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, status_tx: Option>, new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { let project_id = self.project_id; let name = self.name.to_string(); let upstream_client = self.upstream_client.downgrade(); - let root_dir = root_dir.map(|root_dir| root_dir.to_owned()); self.status_tx = status_tx; self.new_version_available_tx = new_version_available_tx; cx.spawn(async move |cx| { @@ -1274,7 +1271,7 @@ impl 
ExternalAgentServer for RemoteExternalAgentServer { .request(proto::GetAgentServerCommand { project_id, name, - root_dir: root_dir.clone(), + root_dir: None, }) })? .await?; @@ -1296,7 +1293,6 @@ impl ExternalAgentServer for RemoteExternalAgentServer { args: command.args, env: Some(command.env), }, - root_dir, response.login.map(SpawnInTerminal::from_proto), )) }) @@ -1319,29 +1315,25 @@ struct LocalGemini { impl ExternalAgentServer for LocalGemini { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, status_tx: Option>, new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { let fs = self.fs.clone(); let node_runtime = self.node_runtime.clone(); let project_environment = self.project_environment.downgrade(); let custom_command = self.custom_command.clone(); let settings_env = self.settings_env.clone(); let ignore_system_version = self.ignore_system_version; - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); + let home_dir = paths::home_dir(); cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - root_dir.clone(), + home_dir.as_path().into(), cx, ) })? 
@@ -1355,7 +1347,7 @@ impl ExternalAgentServer for LocalGemini { custom_command } else if !ignore_system_version && let Some(bin) = - find_bin_in_path("gemini".into(), root_dir.to_path_buf(), env.clone(), cx).await + find_bin_in_path("gemini".into(), home_dir.to_path_buf(), env.clone(), cx).await { AgentServerCommand { path: bin, @@ -1395,11 +1387,7 @@ impl ExternalAgentServer for LocalGemini { command.env.get_or_insert_default().extend(extra_env); command.args.push("--experimental-acp".into()); - Ok(( - command, - root_dir.to_string_lossy().into_owned(), - Some(login), - )) + Ok((command, Some(login))) }) } @@ -1419,28 +1407,23 @@ struct LocalClaudeCode { impl ExternalAgentServer for LocalClaudeCode { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, status_tx: Option>, new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { let fs = self.fs.clone(); let node_runtime = self.node_runtime.clone(); let project_environment = self.project_environment.downgrade(); let custom_command = self.custom_command.clone(); let settings_env = self.settings_env.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? 
@@ -1472,11 +1455,7 @@ impl ExternalAgentServer for LocalClaudeCode { }; command.env.get_or_insert_default().extend(extra_env); - Ok(( - command, - root_dir.to_string_lossy().into_owned(), - login_command, - )) + Ok((command, login_command)) }) } @@ -1497,21 +1476,16 @@ struct LocalCodex { impl ExternalAgentServer for LocalCodex { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, mut status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { let fs = self.fs.clone(); let project_environment = self.project_environment.downgrade(); let http = self.http_client.clone(); let custom_command = self.custom_command.clone(); let settings_env = self.settings_env.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); let no_browser = self.no_browser; cx.spawn(async move |cx| { @@ -1519,7 +1493,7 @@ impl ExternalAgentServer for LocalCodex { .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? 
@@ -1664,7 +1638,7 @@ impl ExternalAgentServer for LocalCodex { }; command.env.get_or_insert_default().extend(extra_env); - Ok((command, root_dir.to_string_lossy().into_owned(), None)) + Ok((command, None)) }) } @@ -1723,12 +1697,11 @@ pub struct LocalExtensionArchiveAgent { impl ExternalAgentServer for LocalExtensionArchiveAgent { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { let fs = self.fs.clone(); let http_client = self.http_client.clone(); let node_runtime = self.node_runtime.clone(); @@ -1738,18 +1711,13 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { let targets = self.targets.clone(); let base_env = self.env.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); - cx.spawn(async move |cx| { // Get project environment let mut env = project_environment .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? 
@@ -1909,7 +1877,7 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { env: Some(env), }; - Ok((command, version_dir.to_string_lossy().into_owned(), None)) + Ok((command, None)) }) } @@ -1931,12 +1899,11 @@ struct LocalRegistryArchiveAgent { impl ExternalAgentServer for LocalRegistryArchiveAgent { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { let fs = self.fs.clone(); let http_client = self.http_client.clone(); let node_runtime = self.node_runtime.clone(); @@ -1945,17 +1912,12 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { let targets = self.targets.clone(); let settings_env = self.env.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); - cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? 
@@ -2099,7 +2061,7 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { env: Some(env), }; - Ok((command, version_dir.to_string_lossy().into_owned(), None)) + Ok((command, None)) }) } @@ -2120,12 +2082,11 @@ struct LocalRegistryNpxAgent { impl ExternalAgentServer for LocalRegistryNpxAgent { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { let node_runtime = self.node_runtime.clone(); let project_environment = self.project_environment.downgrade(); let package = self.package.clone(); @@ -2133,17 +2094,12 @@ impl ExternalAgentServer for LocalRegistryNpxAgent { let distribution_env = self.distribution_env.clone(); let settings_env = self.settings_env.clone(); - let env_root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); - cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - env_root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? 
@@ -2176,7 +2132,7 @@ impl ExternalAgentServer for LocalRegistryNpxAgent { env: Some(env), }; - Ok((command, env_root_dir.to_string_lossy().into_owned(), None)) + Ok((command, None)) }) } @@ -2193,24 +2149,19 @@ struct LocalCustomAgent { impl ExternalAgentServer for LocalCustomAgent { fn get_command( &mut self, - root_dir: Option<&str>, extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { let mut command = self.command.clone(); - let root_dir: Arc = root_dir - .map(|root_dir| Path::new(root_dir)) - .unwrap_or(paths::home_dir()) - .into(); let project_environment = self.project_environment.downgrade(); cx.spawn(async move |cx| { let mut env = project_environment .update(cx, |project_environment, cx| { project_environment.local_directory_environment( &Shell::System, - root_dir.clone(), + paths::home_dir().as_path().into(), cx, ) })? @@ -2219,7 +2170,7 @@ impl ExternalAgentServer for LocalCustomAgent { env.extend(command.env.unwrap_or_default()); env.extend(extra_env); command.env = Some(env); - Ok((command, root_dir.to_string_lossy().into_owned(), None)) + Ok((command, None)) }) } diff --git a/crates/project/tests/integration/ext_agent_tests.rs b/crates/project/tests/integration/ext_agent_tests.rs index 74f762981a4f15f6d3d528e45374f542f30fa5ec..5cb75f54bc366a0661f6fe6c360e2b863974deda 100644 --- a/crates/project/tests/integration/ext_agent_tests.rs +++ b/crates/project/tests/integration/ext_agent_tests.rs @@ -9,19 +9,17 @@ struct NoopExternalAgent; impl ExternalAgentServer for NoopExternalAgent { fn get_command( &mut self, - _root_dir: Option<&str>, _extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, _cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { Task::ready(Ok(( AgentServerCommand { path: PathBuf::from("noop"), args: Vec::new(), env: None, }, - "".to_string(), None, ))) } diff --git a/crates/project/tests/integration/extension_agent_tests.rs 
b/crates/project/tests/integration/extension_agent_tests.rs index f237b9dc7deaf220fbed8fd3ff6f7c8cec99898d..ca73612d07bb5f9efd7ffeb21e00b9ad35ab0347 100644 --- a/crates/project/tests/integration/extension_agent_tests.rs +++ b/crates/project/tests/integration/extension_agent_tests.rs @@ -25,19 +25,17 @@ struct NoopExternalAgent; impl ExternalAgentServer for NoopExternalAgent { fn get_command( &mut self, - _root_dir: Option<&str>, _extra_env: HashMap, _status_tx: Option>, _new_version_available_tx: Option>>, _cx: &mut AsyncApp, - ) -> Task)>> { + ) -> Task)>> { Task::ready(Ok(( AgentServerCommand { path: PathBuf::from("noop"), args: Vec::new(), env: None, }, - "".to_string(), None, ))) } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index a744f733e72aef7cb7a1f878d14412c8f9b742e3..9d673182bc64e192e6db13a927392d611c53407d 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -2030,14 +2030,13 @@ async fn test_remote_external_agent_server( .collect::>() }); pretty_assertions::assert_eq!(names, ["gemini", "codex", "claude", "foo"]); - let (command, root, login) = project + let (command, login) = project .update(cx, |project, cx| { project.agent_server_store().update(cx, |store, cx| { store .get_external_agent(&"foo".into()) .unwrap() .get_command( - None, HashMap::from_iter([("OTHER_VAR".into(), "other-val".into())]), None, None, @@ -2058,7 +2057,6 @@ async fn test_remote_external_agent_server( ])) } ); - assert_eq!(&PathBuf::from(root), paths::home_dir()); assert!(login.is_none()); } diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index 6b1aca5939bf9a7874dd7a590d64a133f96a9dea..09340dcec641ae2a6c1ea871e770886d14276529 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -1945,7 +1945,6 @@ impl AgentServer for StubAgentServer { fn connect( &self, - 
_root_dir: Option<&Path>, _delegate: AgentServerDelegate, _cx: &mut App, ) -> gpui::Task, Option)>> { From ff83f082434f9ee876de25054f0e3f8467e82db8 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Wed, 25 Feb 2026 16:46:27 +0100 Subject: [PATCH 078/548] python: Fix warning in injections query (#49397) Release Notes: - N/A --- crates/languages/src/python/injections.scm | 50 +++++++++------------- 1 file changed, 21 insertions(+), 29 deletions(-) diff --git a/crates/languages/src/python/injections.scm b/crates/languages/src/python/injections.scm index d8470140e999f3dc649c0a498987cfae7df6bf59..bc47469dc870c4dec13f4c30fafc8a2fb29749fd 100644 --- a/crates/languages/src/python/injections.scm +++ b/crates/languages/src/python/injections.scm @@ -1,34 +1,26 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ; SQL ----------------------------------------------------------------------------- -( +([ + ; function calls + (call [ - ; function calls - (call - [ - (attribute attribute: (identifier) @function_name) - (identifier) @function_name - ] - arguments: (argument_list - (comment) @comment - (string - (string_content) @injection.content - ) - )) - - ; string variables - ((comment) @comment - . - (expression_statement - (assignment - right: (string - (string_content) @injection.content - ) - ) - )) + (attribute + attribute: (identifier)) + (identifier) ] - (#match? @comment "^(#|#\\s+)(?i:sql)\\s*$") - (#set! injection.language "sql") -) + arguments: (argument_list + (comment) @_comment + (string + (string_content) @injection.content))) + ; string variables + ((comment) @_comment + . + (expression_statement + (assignment + right: (string + (string_content) @injection.content)))) +] + (#match? @_comment "^(#|#\\s+)(?i:sql)\\s*$") + (#set! 
injection.language "sql")) From ea09744584deaa2217c7b4dcef251767e04328fb Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Wed, 25 Feb 2026 10:12:12 -0600 Subject: [PATCH 079/548] zeta2: Try to fix ep disabled in buffer bugs (#50098) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... --- .../zed/src/zed/edit_prediction_registry.rs | 127 +++++++++++++++++- 1 file changed, 123 insertions(+), 4 deletions(-) diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 326ddef2d4b1e08b656a9381b1a632fbce1bdac3..9381dae22b055b4bd008ee63d0d283581bd513f4 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -60,13 +60,13 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { cx.on_action(clear_edit_prediction_store_edit_history); - let mut provider_config = edit_prediction_provider_config_for_settings(cx); cx.subscribe(&user_store, { let editors = editors.clone(); let client = client.clone(); move |user_store, event, cx| { if let client::user::Event::PrivateUserInfoUpdated = event { + let provider_config = edit_prediction_provider_config_for_settings(cx); assign_edit_prediction_providers( &editors, provider_config, @@ -80,18 +80,39 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { .detach(); cx.observe_global::({ + let editors = editors.clone(); + let client = client.clone(); let user_store = user_store.clone(); + let mut previous_config = edit_prediction_provider_config_for_settings(cx); move |cx| { let new_provider_config = 
edit_prediction_provider_config_for_settings(cx); - if new_provider_config != provider_config { + if new_provider_config != previous_config { telemetry::event!( "Edit Prediction Provider Changed", - from = provider_config.map(|config| config.name()), + from = previous_config.map(|config| config.name()), to = new_provider_config.map(|config| config.name()) ); - provider_config = new_provider_config; + previous_config = new_provider_config; + assign_edit_prediction_providers( + &editors, + new_provider_config, + &client, + user_store.clone(), + cx, + ); + } + } + }) + .detach(); + + cx.observe_flag::({ + let mut previous_config = edit_prediction_provider_config_for_settings(cx); + move |_is_enabled, cx| { + let new_provider_config = edit_prediction_provider_config_for_settings(cx); + if new_provider_config != previous_config { + previous_config = new_provider_config; assign_edit_prediction_providers( &editors, new_provider_config, @@ -324,3 +345,101 @@ fn assign_edit_prediction_provider( } } } + +#[cfg(test)] +mod tests { + use super::*; + use editor::MultiBuffer; + use gpui::{BorrowAppContext, TestAppContext}; + use settings::{EditPredictionProvider, SettingsStore}; + use workspace::AppState; + + #[gpui::test] + async fn test_subscribe_uses_stale_provider_config_after_settings_change( + cx: &mut TestAppContext, + ) { + let app_state = cx.update(|cx| { + let app_state = AppState::test(cx); + client::init(&app_state.client, cx); + language_model::init(app_state.client.clone(), cx); + editor::init(cx); + app_state + }); + + // Override the default provider to None so the subscribe closure + // captures None at init time. (The test default is Zed/Zeta1, which + // is a no-op on project-less editors and would mask the bug.) 
+ cx.update(|cx| { + cx.update_global::(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.edit_predictions = + Some(settings::EditPredictionSettingsContent { + provider: Some(EditPredictionProvider::None), + ..Default::default() + }); + }); + }); + }); + + cx.update(|cx| { + init(app_state.client.clone(), app_state.user_store.clone(), cx); + }); + + // Create an editor in a window so observe_new registers it. + let editor = cx.add_window(|window, cx| { + let buffer = cx.new(|_cx| MultiBuffer::new(language::Capability::ReadWrite)); + Editor::new(editor::EditorMode::full(), buffer, None, window, cx) + }); + + editor + .update(cx, |editor, _window, _cx| { + assert!( + editor.edit_prediction_provider().is_none(), + "editor should start with no provider when settings = None" + ); + }) + .unwrap(); + + // Change settings to Codestral. The observe_global closure updates its + // own copy of provider_config and assigns Codestral to all editors. + cx.update(|cx| { + cx.update_global::(|store: &mut SettingsStore, cx| { + store.update_user_settings(cx, |settings| { + settings.project.all_languages.edit_predictions = + Some(settings::EditPredictionSettingsContent { + provider: Some(EditPredictionProvider::Codestral), + ..Default::default() + }); + }); + }); + }); + + editor + .update(cx, |editor, _window, _cx| { + assert!( + editor.edit_prediction_provider().is_some(), + "editor should have a provider after changing settings to Codestral" + ); + }) + .unwrap(); + + // Emit PrivateUserInfoUpdated. The subscribe closure should use the + // CURRENT provider config (Codestral), but due to the bug it uses the + // stale init-time value (None) and clears the provider. 
+ cx.update(|cx| { + app_state.user_store.update(cx, |_, cx| { + cx.emit(client::user::Event::PrivateUserInfoUpdated); + }); + }); + cx.run_until_parked(); + + editor + .update(cx, |editor, _window, _cx| { + assert!( + editor.edit_prediction_provider().is_some(), + "BUG: subscribe closure used stale provider_config (None) instead of current (Codestral)" + ); + }) + .unwrap(); + } +} From 533cdb899b90f0d0e7a59059ac7af9c0a70b8d13 Mon Sep 17 00:00:00 2001 From: cardinalpointstudio Date: Wed, 25 Feb 2026 11:18:52 -0500 Subject: [PATCH 080/548] gpui(linux): Fix RefCell borrow panic when callbacks register new callbacks (#49533) ## Summary Fixes RefCell borrow panic on Linux (Wayland and X11) when callbacks try to register new callbacks. **Root cause:** Linux GPUI backends invoked callbacks while still holding a `RefCell` borrow on the `Callbacks` struct. If a callback tried to register a new callback (e.g., `on_window_should_close`), it would panic with "already borrowed: BorrowMutError". **Bug pattern:** ```rust // Callback runs while borrow is held - panics if callback borrows callbacks if let Some(ref mut fun) = self.callbacks.borrow_mut().input { fun(input); } Fix: Apply the take-call-restore pattern (already used in macOS backend): // Take callback out, release borrow, call, restore let callback = self.callbacks.borrow_mut().input.take(); if let Some(mut fun) = callback { let result = fun(input); self.callbacks.borrow_mut().input = Some(fun); } Changes - Wayland (window.rs): Fixed 6 callback invocations - X11 (window.rs): Fixed 4 callback invocations Test plan - cargo check -p gpui compiles successfully - Tested on Linux (Wayland) - no more RefCell panic Release Notes: - Fixed a crash on Linux when window callbacks attempted to register new callbacks Co-authored-by: Claude Opus 4.5 --- crates/agent_ui/src/agent_ui.rs | 1 - crates/gpui_linux/src/linux/wayland/window.rs | 46 +++++++++++-------- crates/gpui_linux/src/linux/x11/window.rs | 31 ++++++++----- 3 files 
changed, 48 insertions(+), 30 deletions(-) diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 7f05437cbae82ef4ae4953c91d33c0b6c7a296bc..736f69855f0e1cac0c7eb82f6596360e32489939 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -25,7 +25,6 @@ mod ui; use std::rc::Rc; use std::sync::Arc; -// Another comment use agent_settings::{AgentProfileId, AgentSettings}; use assistant_slash_command::SlashCommandRegistry; use client::Client; diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index c1006a816a3844db22ea8932177b0f0b2ff1c99f..4a4c4060bdc31b95bd4b90d930afdc54727a9667 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -640,19 +640,19 @@ impl WaylandWindowStatePtr { match mode { WEnum::Value(zxdg_toplevel_decoration_v1::Mode::ServerSide) => { self.state.borrow_mut().decorations = WindowDecorations::Server; - if let Some(appearance_changed) = - self.callbacks.borrow_mut().appearance_changed.as_mut() - { - appearance_changed(); + let callback = self.callbacks.borrow_mut().appearance_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().appearance_changed = Some(fun); } } WEnum::Value(zxdg_toplevel_decoration_v1::Mode::ClientSide) => { self.state.borrow_mut().decorations = WindowDecorations::Client; // Update background to be transparent - if let Some(appearance_changed) = - self.callbacks.borrow_mut().appearance_changed.as_mut() - { - appearance_changed(); + let callback = self.callbacks.borrow_mut().appearance_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().appearance_changed = Some(fun); } } WEnum::Value(_) => { @@ -924,8 +924,10 @@ impl WaylandWindowStatePtr { (state.bounds.size, state.scale) }; - if let Some(ref mut fun) = self.callbacks.borrow_mut().resize { + let callback = 
self.callbacks.borrow_mut().resize.take(); + if let Some(mut fun) = callback { fun(size, scale); + self.callbacks.borrow_mut().resize = Some(fun); } { @@ -971,10 +973,13 @@ impl WaylandWindowStatePtr { if self.is_blocked() { return; } - if let Some(ref mut fun) = self.callbacks.borrow_mut().input - && !fun(input.clone()).propagate - { - return; + let callback = self.callbacks.borrow_mut().input.take(); + if let Some(mut fun) = callback { + let result = fun(input.clone()); + self.callbacks.borrow_mut().input = Some(fun); + if !result.propagate { + return; + } } if let PlatformInput::KeyDown(event) = input && event.keystroke.modifiers.is_subset_of(&Modifiers::shift()) @@ -991,23 +996,28 @@ impl WaylandWindowStatePtr { pub fn set_focused(&self, focus: bool) { self.state.borrow_mut().active = focus; - if let Some(ref mut fun) = self.callbacks.borrow_mut().active_status_change { + let callback = self.callbacks.borrow_mut().active_status_change.take(); + if let Some(mut fun) = callback { fun(focus); + self.callbacks.borrow_mut().active_status_change = Some(fun); } } pub fn set_hovered(&self, focus: bool) { - if let Some(ref mut fun) = self.callbacks.borrow_mut().hover_status_change { + let callback = self.callbacks.borrow_mut().hover_status_change.take(); + if let Some(mut fun) = callback { fun(focus); + self.callbacks.borrow_mut().hover_status_change = Some(fun); } } pub fn set_appearance(&mut self, appearance: WindowAppearance) { self.state.borrow_mut().appearance = appearance; - let mut callbacks = self.callbacks.borrow_mut(); - if let Some(ref mut fun) = callbacks.appearance_changed { - (fun)() + let callback = self.callbacks.borrow_mut().appearance_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().appearance_changed = Some(fun); } } diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index 8060e4c4457c6ef4575d86c4d975e3ead901f693..0ddd6e7adff84908e6a1c06d661347d39bdc5c9e 100644 
--- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -1045,9 +1045,10 @@ impl X11WindowStatePtr { } pub fn refresh(&self, request_frame_options: RequestFrameOptions) { - let mut cb = self.callbacks.borrow_mut(); - if let Some(ref mut fun) = cb.request_frame { + let callback = self.callbacks.borrow_mut().request_frame.take(); + if let Some(mut fun) = callback { fun(request_frame_options); + self.callbacks.borrow_mut().request_frame = Some(fun); } } @@ -1055,10 +1056,13 @@ impl X11WindowStatePtr { if self.is_blocked() { return; } - if let Some(ref mut fun) = self.callbacks.borrow_mut().input - && !fun(input.clone()).propagate - { - return; + let callback = self.callbacks.borrow_mut().input.take(); + if let Some(mut fun) = callback { + let result = fun(input.clone()); + self.callbacks.borrow_mut().input = Some(fun); + if !result.propagate { + return; + } } if let PlatformInput::KeyDown(event) = input { // only allow shift modifier when inserting text @@ -1191,14 +1195,18 @@ impl X11WindowStatePtr { } pub fn set_active(&self, focus: bool) { - if let Some(ref mut fun) = self.callbacks.borrow_mut().active_status_change { + let callback = self.callbacks.borrow_mut().active_status_change.take(); + if let Some(mut fun) = callback { fun(focus); + self.callbacks.borrow_mut().active_status_change = Some(fun); } } pub fn set_hovered(&self, focus: bool) { - if let Some(ref mut fun) = self.callbacks.borrow_mut().hovered_status_change { + let callback = self.callbacks.borrow_mut().hovered_status_change.take(); + if let Some(mut fun) = callback { fun(focus); + self.callbacks.borrow_mut().hovered_status_change = Some(fun); } } @@ -1209,9 +1217,10 @@ impl X11WindowStatePtr { state.renderer.update_transparency(is_transparent); state.appearance = appearance; drop(state); - let mut callbacks = self.callbacks.borrow_mut(); - if let Some(ref mut fun) = callbacks.appearance_changed { - (fun)() + let callback = 
self.callbacks.borrow_mut().appearance_changed.take(); + if let Some(mut fun) = callback { + fun(); + self.callbacks.borrow_mut().appearance_changed = Some(fun); } } } From 3714f3173e8e08293a28764cd01e08f21dace747 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Wed, 25 Feb 2026 17:23:31 +0100 Subject: [PATCH 081/548] extension_ci: Fix condition comparison type (#50100) No comment. No string. And this definitely does not spark joy. Release Notes: - N/A --- .github/workflows/extension_bump.yml | 2 +- tooling/xtask/src/tasks/workflows/extension_bump.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/extension_bump.yml b/.github/workflows/extension_bump.yml index cbd4da3e4066984cbabb1ad603e9d74aa2f29b64..b7bb78363ce4ff97680b2a53967938280c3de902 100644 --- a/.github/workflows/extension_bump.yml +++ b/.github/workflows/extension_bump.yml @@ -64,7 +64,7 @@ jobs: - check_version_changed if: |- (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && - (inputs.force-bump == 'true' || needs.check_version_changed.outputs.version_changed == 'false') + (inputs.force-bump == true || needs.check_version_changed.outputs.version_changed == 'false') runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: generate-token diff --git a/tooling/xtask/src/tasks/workflows/extension_bump.rs b/tooling/xtask/src/tasks/workflows/extension_bump.rs index 88d0cf85300b793aa386d50638fafc7e444e8519..746b842f18dfcc8805be9285facefdfa52085b84 100644 --- a/tooling/xtask/src/tasks/workflows/extension_bump.rs +++ b/tooling/xtask/src/tasks/workflows/extension_bump.rs @@ -191,7 +191,7 @@ fn bump_extension_version( let job = steps::dependant_job(dependencies) .cond(Expression::new(format!( - "{DEFAULT_REPOSITORY_OWNER_GUARD} &&\n({force_bump} == 'true' || {version_changed} == 'false')", + "{DEFAULT_REPOSITORY_OWNER_GUARD} &&\n({force_bump} == true || {version_changed} == 'false')", force_bump = force_bump_output.expr(), 
version_changed = version_changed_output.expr(), ))) From c235d539dd720a1e224c4e5cbf2e430da2353e38 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 25 Feb 2026 17:46:27 +0100 Subject: [PATCH 082/548] agent: Support streaming tool input (#50099) This PR introduces a `ToolInput` struct which allows tools to receive their inputs incrementally as they stream in. Right now no tool makes use of the streaming APIs, will be used for the streaming edit file tool in #50004 Release Notes: - N/A --- crates/agent/src/tests/mod.rs | 66 ++--- crates/agent/src/tests/test_tools.rs | 89 +++++-- crates/agent/src/thread.rs | 248 +++++++++++++++--- .../src/tools/context_server_registry.rs | 17 +- crates/agent/src/tools/copy_path_tool.rs | 35 ++- .../agent/src/tools/create_directory_tool.rs | 43 +-- crates/agent/src/tools/delete_path_tool.rs | 45 ++-- crates/agent/src/tools/diagnostics_tool.rs | 88 ++++--- crates/agent/src/tools/edit_file_tool.rs | 143 +++++----- crates/agent/src/tools/fetch_tool.rs | 72 ++--- crates/agent/src/tools/find_path_tool.rs | 13 +- crates/agent/src/tools/grep_tool.rs | 114 ++++---- crates/agent/src/tools/list_directory_tool.rs | 185 +++++++++---- crates/agent/src/tools/move_path_tool.rs | 35 ++- crates/agent/src/tools/now_tool.rs | 22 +- crates/agent/src/tools/open_tool.rs | 20 +- crates/agent/src/tools/read_file_tool.rs | 164 +++++++++--- .../src/tools/restore_file_from_disk_tool.rs | 62 +++-- crates/agent/src/tools/save_file_tool.rs | 66 +++-- crates/agent/src/tools/spawn_agent_tool.rs | 119 +++++---- .../src/tools/streaming_edit_file_tool.rs | 129 +++++---- crates/agent/src/tools/terminal_tool.rs | 59 +++-- crates/agent/src/tools/web_search_tool.rs | 75 +++--- .../remote_server/src/remote_editing_tests.rs | 13 +- crates/zed/src/visual_test_runner.rs | 7 +- 25 files changed, 1257 insertions(+), 672 deletions(-) diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 
139242fdee9da968986b3fc9537bf9e5292b7dc5..e8c95c630b65870bfc8a78b9e965373a2604879d 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -310,11 +310,11 @@ async fn test_terminal_tool_timeout_kills_handle(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "sleep 1000".to_string(), cd: ".".to_string(), timeout_ms: Some(5), - }, + }), event_stream, cx, ) @@ -377,11 +377,11 @@ async fn test_terminal_tool_without_timeout_does_not_kill_handle(cx: &mut TestAp let _task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "sleep 1000".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -3991,11 +3991,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "rm -rf /".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -4043,11 +4043,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "echo hello".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -4101,11 +4101,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) { let _task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "sudo rm file".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -4148,11 +4148,11 @@ async fn test_terminal_tool_permission_rules(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::TerminalToolInput { + ToolInput::resolved(crate::TerminalToolInput { command: "echo 
hello".to_string(), cd: ".".to_string(), timeout_ms: None, - }, + }), event_stream, cx, ) @@ -5309,11 +5309,11 @@ async fn test_edit_file_tool_deny_rule_blocks_edit(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::EditFileToolInput { + ToolInput::resolved(crate::EditFileToolInput { display_description: "Edit sensitive file".to_string(), path: "root/sensitive_config.txt".into(), mode: crate::EditFileMode::Edit, - }, + }), event_stream, cx, ) @@ -5359,9 +5359,9 @@ async fn test_delete_path_tool_deny_rule_blocks_deletion(cx: &mut TestAppContext let task = cx.update(|cx| { tool.run( - crate::DeletePathToolInput { + ToolInput::resolved(crate::DeletePathToolInput { path: "root/important_data.txt".to_string(), - }, + }), event_stream, cx, ) @@ -5411,10 +5411,10 @@ async fn test_move_path_tool_denies_if_destination_denied(cx: &mut TestAppContex let task = cx.update(|cx| { tool.run( - crate::MovePathToolInput { + ToolInput::resolved(crate::MovePathToolInput { source_path: "root/safe.txt".to_string(), destination_path: "root/protected/safe.txt".to_string(), - }, + }), event_stream, cx, ) @@ -5467,10 +5467,10 @@ async fn test_move_path_tool_denies_if_source_denied(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::MovePathToolInput { + ToolInput::resolved(crate::MovePathToolInput { source_path: "root/secret.txt".to_string(), destination_path: "root/public/not_secret.txt".to_string(), - }, + }), event_stream, cx, ) @@ -5525,10 +5525,10 @@ async fn test_copy_path_tool_deny_rule_blocks_copy(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::CopyPathToolInput { + ToolInput::resolved(crate::CopyPathToolInput { source_path: "root/confidential.txt".to_string(), destination_path: "root/dest/copy.txt".to_string(), - }, + }), event_stream, cx, ) @@ -5580,12 +5580,12 @@ async fn test_save_file_tool_denies_if_any_path_denied(cx: &mut TestAppContext) let task = cx.update(|cx| { tool.run( - crate::SaveFileToolInput { + 
ToolInput::resolved(crate::SaveFileToolInput { paths: vec![ std::path::PathBuf::from("root/normal.txt"), std::path::PathBuf::from("root/readonly/config.txt"), ], - }, + }), event_stream, cx, ) @@ -5632,9 +5632,9 @@ async fn test_save_file_tool_respects_deny_rules(cx: &mut TestAppContext) { let task = cx.update(|cx| { tool.run( - crate::SaveFileToolInput { + ToolInput::resolved(crate::SaveFileToolInput { paths: vec![std::path::PathBuf::from("root/config.secret")], - }, + }), event_stream, cx, ) @@ -5676,7 +5676,7 @@ async fn test_web_search_tool_deny_rule_blocks_search(cx: &mut TestAppContext) { let input: crate::WebSearchToolInput = serde_json::from_value(json!({"query": "internal.company.com secrets"})).unwrap(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let result = task.await; assert!(result.is_err(), "expected search to be blocked"); @@ -5741,11 +5741,11 @@ async fn test_edit_file_tool_allow_rule_skips_confirmation(cx: &mut TestAppConte let _task = cx.update(|cx| { tool.run( - crate::EditFileToolInput { + ToolInput::resolved(crate::EditFileToolInput { display_description: "Edit README".to_string(), path: "root/README.md".into(), mode: crate::EditFileMode::Edit, - }, + }), event_stream, cx, ) @@ -5811,11 +5811,11 @@ async fn test_edit_file_tool_allow_still_prompts_for_local_settings(cx: &mut Tes let (event_stream, mut rx) = crate::ToolCallEventStream::test(); let _task = cx.update(|cx| { tool.run( - crate::EditFileToolInput { + ToolInput::resolved(crate::EditFileToolInput { display_description: "Edit local settings".to_string(), path: "root/.zed/settings.json".into(), mode: crate::EditFileMode::Edit, - }, + }), event_stream, cx, ) @@ -5855,7 +5855,7 @@ async fn test_fetch_tool_deny_rule_blocks_url(cx: &mut TestAppContext) { let input: crate::FetchToolInput = serde_json::from_value(json!({"url": "https://internal.company.com/api"})).unwrap(); - let task = 
cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let result = task.await; assert!(result.is_err(), "expected fetch to be blocked"); @@ -5893,7 +5893,7 @@ async fn test_fetch_tool_allow_rule_skips_confirmation(cx: &mut TestAppContext) let input: crate::FetchToolInput = serde_json::from_value(json!({"url": "https://docs.rs/some-crate"})).unwrap(); - let _task = cx.update(|cx| tool.run(input, event_stream, cx)); + let _task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); cx.run_until_parked(); diff --git a/crates/agent/src/tests/test_tools.rs b/crates/agent/src/tests/test_tools.rs index 0ed2eef90271538c575cc84b56a28df106e4bd41..e0794ee322cdf2c77c37d1d22f30ec77c5642d24 100644 --- a/crates/agent/src/tests/test_tools.rs +++ b/crates/agent/src/tests/test_tools.rs @@ -3,6 +3,7 @@ use agent_settings::AgentSettings; use gpui::{App, SharedString, Task}; use std::future; use std::sync::atomic::{AtomicBool, Ordering}; +use std::time::Duration; /// A tool that echoes its input #[derive(JsonSchema, Serialize, Deserialize)] @@ -33,11 +34,17 @@ impl AgentTool for EchoTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, - _cx: &mut App, + cx: &mut App, ) -> Task> { - Task::ready(Ok(input.text)) + cx.spawn(async move |_cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + Ok(input.text) + }) } } @@ -74,7 +81,7 @@ impl AgentTool for DelayTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> @@ -83,6 +90,10 @@ impl AgentTool for DelayTool { { let executor = cx.background_executor().clone(); cx.foreground_executor().spawn(async move { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; executor.timer(Duration::from_millis(input.ms)).await; 
Ok("Ding".to_string()) }) @@ -114,28 +125,38 @@ impl AgentTool for ToolRequiringPermission { fn run( self: Arc, - _input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let decision = decide_permission_from_settings(Self::NAME, &[String::new()], settings); - - let authorize = match decision { - ToolPermissionDecision::Allow => None, - ToolPermissionDecision::Deny(reason) => { - return Task::ready(Err(reason)); - } - ToolPermissionDecision::Confirm => { - let context = crate::ToolPermissionContext::new( - "tool_requiring_permission", - vec![String::new()], - ); - Some(event_stream.authorize("Authorize?", context, cx)) - } - }; + cx.spawn(async move |cx| { + let _input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let decision = cx.update(|cx| { + decide_permission_from_settings( + Self::NAME, + &[String::new()], + AgentSettings::get_global(cx), + ) + }); + + let authorize = match decision { + ToolPermissionDecision::Allow => None, + ToolPermissionDecision::Deny(reason) => { + return Err(reason); + } + ToolPermissionDecision::Confirm => Some(cx.update(|cx| { + let context = crate::ToolPermissionContext::new( + "tool_requiring_permission", + vec![String::new()], + ); + event_stream.authorize("Authorize?", context, cx) + })), + }; - cx.foreground_executor().spawn(async move { if let Some(authorize) = authorize { authorize.await.map_err(|e| e.to_string())?; } @@ -169,11 +190,15 @@ impl AgentTool for InfiniteTool { fn run( self: Arc, - _input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { cx.foreground_executor().spawn(async move { + let _input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; future::pending::<()>().await; unreachable!() }) @@ -221,11 +246,15 @@ impl AgentTool for CancellationAwareTool { fn run( self: Arc, - _input: 
Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { cx.foreground_executor().spawn(async move { + let _input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; // Wait for cancellation - this tool does nothing but wait to be cancelled event_stream.cancelled_by_user().await; self.was_cancelled.store(true, Ordering::SeqCst); @@ -276,10 +305,16 @@ impl AgentTool for WordListTool { fn run( self: Arc, - _input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, - _cx: &mut App, + cx: &mut App, ) -> Task> { - Task::ready(Ok("ok".to_string())) + cx.spawn(async move |_cx| { + let _input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + Ok("ok".to_string()) + }) } } diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 5d4de36cb69335de7a77eb7ad7a15f75b8e2b0b7..f9be3bfbeacfd137b06da7dc99eef7ae34422325 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -45,11 +45,13 @@ use language_model::{ use project::Project; use prompt_store::ProjectContext; use schemars::{JsonSchema, Schema}; +use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use settings::{LanguageModelSelection, Settings, ToolPermissionMode, update_settings_file}; use smol::stream::StreamExt; use std::{ collections::BTreeMap, + marker::PhantomData, ops::RangeInclusive, path::Path, rc::Rc, @@ -1360,7 +1362,6 @@ impl Thread { self.project.clone(), cx.weak_entity(), language_registry, - Templates::new(), )); self.add_tool(FetchTool::new(self.project.read(cx).client().http_client())); self.add_tool(FindPathTool::new(self.project.clone())); @@ -1664,6 +1665,7 @@ impl Thread { event_stream: event_stream.clone(), tools: self.enabled_tools(profile, &model, cx), cancellation_tx, + streaming_tool_inputs: HashMap::default(), _task: cx.spawn(async move |this, cx| { log::debug!("Starting agent turn execution"); @@ -2068,10 
+2070,6 @@ impl Thread { self.send_or_update_tool_use(&tool_use, title, kind, event_stream); - if !tool_use.is_input_complete { - return None; - } - let Some(tool) = tool else { let content = format!("No tool named {} exists", tool_use.name); return Some(Task::ready(LanguageModelToolResult { @@ -2083,9 +2081,72 @@ impl Thread { })); }; + if !tool_use.is_input_complete { + if tool.supports_input_streaming() { + let running_turn = self.running_turn.as_mut()?; + if let Some(sender) = running_turn.streaming_tool_inputs.get(&tool_use.id) { + sender.send_partial(tool_use.input); + return None; + } + + let (sender, tool_input) = ToolInputSender::channel(); + sender.send_partial(tool_use.input); + running_turn + .streaming_tool_inputs + .insert(tool_use.id.clone(), sender); + + let tool = tool.clone(); + log::debug!("Running streaming tool {}", tool_use.name); + return Some(self.run_tool( + tool, + tool_input, + tool_use.id, + tool_use.name, + event_stream, + cancellation_rx, + cx, + )); + } else { + return None; + } + } + + if let Some(sender) = self + .running_turn + .as_mut()? 
+ .streaming_tool_inputs + .remove(&tool_use.id) + { + sender.send_final(tool_use.input); + return None; + } + + log::debug!("Running tool {}", tool_use.name); + let tool_input = ToolInput::ready(tool_use.input); + Some(self.run_tool( + tool, + tool_input, + tool_use.id, + tool_use.name, + event_stream, + cancellation_rx, + cx, + )) + } + + fn run_tool( + &self, + tool: Arc, + tool_input: ToolInput, + tool_use_id: LanguageModelToolUseId, + tool_name: Arc, + event_stream: &ThreadEventStream, + cancellation_rx: watch::Receiver, + cx: &mut Context, + ) -> Task { let fs = self.project.read(cx).fs().clone(); let tool_event_stream = ToolCallEventStream::new( - tool_use.id.clone(), + tool_use_id.clone(), event_stream.clone(), Some(fs), cancellation_rx, @@ -2094,9 +2155,8 @@ impl Thread { acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress), ); let supports_images = self.model().is_some_and(|model| model.supports_images()); - let tool_result = tool.run(tool_use.input, tool_event_stream, cx); - log::debug!("Running tool {}", tool_use.name); - Some(cx.foreground_executor().spawn(async move { + let tool_result = tool.run(tool_input, tool_event_stream, cx); + cx.foreground_executor().spawn(async move { let (is_error, output) = match tool_result.await { Ok(mut output) => { if let LanguageModelToolResultContent::Image(_) = &output.llm_output @@ -2114,13 +2174,13 @@ impl Thread { }; LanguageModelToolResult { - tool_use_id: tool_use.id, - tool_name: tool_use.name, + tool_use_id, + tool_name, is_error, content: output.llm_output, output: Some(output.raw_output), } - })) + }) } fn handle_tool_use_json_parse_error_event( @@ -2776,6 +2836,9 @@ struct RunningTurn { /// Sender to signal tool cancellation. When cancel is called, this is /// set to true so all tools can detect user-initiated cancellation. cancellation_tx: watch::Sender, + /// Senders for tools that support input streaming and have already been + /// started but are still receiving input from the LLM. 
+ streaming_tool_inputs: HashMap, } impl RunningTurn { @@ -2795,6 +2858,103 @@ pub struct TitleUpdated; impl EventEmitter for Thread {} +/// A channel-based wrapper that delivers tool input to a running tool. +/// +/// For non-streaming tools, created via `ToolInput::ready()` so `.recv()` resolves immediately. +/// For streaming tools, partial JSON snapshots arrive via `.recv_partial()` as the LLM streams +/// them, followed by the final complete input available through `.recv()`. +pub struct ToolInput { + partial_rx: mpsc::UnboundedReceiver, + final_rx: oneshot::Receiver, + _phantom: PhantomData, +} + +impl ToolInput { + #[cfg(any(test, feature = "test-support"))] + pub fn resolved(input: impl Serialize) -> Self { + let value = serde_json::to_value(input).expect("failed to serialize tool input"); + Self::ready(value) + } + + pub fn ready(value: serde_json::Value) -> Self { + let (partial_tx, partial_rx) = mpsc::unbounded(); + drop(partial_tx); + let (final_tx, final_rx) = oneshot::channel(); + final_tx.send(value).ok(); + Self { + partial_rx, + final_rx, + _phantom: PhantomData, + } + } + + #[cfg(any(test, feature = "test-support"))] + pub fn test() -> (ToolInputSender, Self) { + let (sender, input) = ToolInputSender::channel(); + (sender, input.cast()) + } + + /// Wait for the final deserialized input, ignoring all partial updates. + /// Non-streaming tools can use this to wait until the whole input is available. + pub async fn recv(mut self) -> Result { + // Drain any remaining partials + while self.partial_rx.next().await.is_some() {} + let value = self + .final_rx + .await + .map_err(|_| anyhow!("tool input sender was dropped before sending final input"))?; + serde_json::from_value(value).map_err(Into::into) + } + + /// Returns the next partial JSON snapshot, or `None` when input is complete. + /// Once this returns `None`, call `recv()` to get the final input. 
+ pub async fn recv_partial(&mut self) -> Option { + self.partial_rx.next().await + } + + fn cast(self) -> ToolInput { + ToolInput { + partial_rx: self.partial_rx, + final_rx: self.final_rx, + _phantom: PhantomData, + } + } +} + +pub struct ToolInputSender { + partial_tx: mpsc::UnboundedSender, + final_tx: Option>, +} + +impl ToolInputSender { + pub(crate) fn channel() -> (Self, ToolInput) { + let (partial_tx, partial_rx) = mpsc::unbounded(); + let (final_tx, final_rx) = oneshot::channel(); + let sender = Self { + partial_tx, + final_tx: Some(final_tx), + }; + let input = ToolInput { + partial_rx, + final_rx, + _phantom: PhantomData, + }; + (sender, input) + } + + pub(crate) fn send_partial(&self, value: serde_json::Value) { + self.partial_tx.unbounded_send(value).ok(); + } + + pub(crate) fn send_final(mut self, value: serde_json::Value) { + // Close the partial channel so recv_partial() returns None + self.partial_tx.close_channel(); + if let Some(final_tx) = self.final_tx.take() { + final_tx.send(value).ok(); + } + } +} + pub trait AgentTool where Self: 'static + Sized, @@ -2828,6 +2988,11 @@ where language_model::tool_schema::root_schema_for::(format) } + /// Returns whether the tool supports streaming of tool use parameters. + fn supports_input_streaming() -> bool { + false + } + /// Some tools rely on a provider for the underlying billing or other reasons. /// Allow the tool to check if they are compatible, or should be filtered out. fn supports_provider(_provider: &LanguageModelProviderId) -> bool { @@ -2843,7 +3008,7 @@ where /// still signaling whether the invocation succeeded or failed. 
fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task>; @@ -2888,13 +3053,16 @@ pub trait AnyAgentTool { fn kind(&self) -> acp::ToolKind; fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString; fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result; + fn supports_input_streaming(&self) -> bool { + false + } fn supports_provider(&self, _provider: &LanguageModelProviderId) -> bool { true } /// See [`AgentTool::run`] for why this returns `Result`. fn run( self: Arc, - input: serde_json::Value, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task>; @@ -2923,6 +3091,10 @@ where T::kind() } + fn supports_input_streaming(&self) -> bool { + T::supports_input_streaming() + } + fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString { let parsed_input = serde_json::from_value(input.clone()).map_err(|_| input); self.0.initial_title(parsed_input, _cx) @@ -2940,35 +3112,31 @@ where fn run( self: Arc, - input: serde_json::Value, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - cx.spawn(async move |cx| { - let input: T::Input = serde_json::from_value(input).map_err(|e| { - AgentToolOutput::from_error(format!("Failed to parse tool input: {e}")) - })?; - let task = cx.update(|cx| self.0.clone().run(input, event_stream, cx)); - match task.await { - Ok(output) => { - let raw_output = serde_json::to_value(&output).map_err(|e| { - AgentToolOutput::from_error(format!("Failed to serialize tool output: {e}")) - })?; - Ok(AgentToolOutput { - llm_output: output.into(), - raw_output, - }) - } - Err(error_output) => { - let raw_output = serde_json::to_value(&error_output).unwrap_or_else(|e| { - log::error!("Failed to serialize tool error output: {e}"); - serde_json::Value::Null - }); - Err(AgentToolOutput { - llm_output: error_output.into(), - raw_output, - }) - } + let tool_input: ToolInput = 
input.cast(); + let task = self.0.clone().run(tool_input, event_stream, cx); + cx.spawn(async move |_cx| match task.await { + Ok(output) => { + let raw_output = serde_json::to_value(&output).map_err(|e| { + AgentToolOutput::from_error(format!("Failed to serialize tool output: {e}")) + })?; + Ok(AgentToolOutput { + llm_output: output.into(), + raw_output, + }) + } + Err(error_output) => { + let raw_output = serde_json::to_value(&error_output).unwrap_or_else(|e| { + log::error!("Failed to serialize tool error output: {e}"); + serde_json::Value::Null + }); + Err(AgentToolOutput { + llm_output: error_output.into(), + raw_output, + }) } }) } diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index 694e28750cd69facc49b7a0bf862203a00043b4c..1c7590d8097a5de50b879d5b253c5dbabd3dcbab 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -1,4 +1,4 @@ -use crate::{AgentToolOutput, AnyAgentTool, ToolCallEventStream}; +use crate::{AgentToolOutput, AnyAgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol::ToolKind; use anyhow::Result; use collections::{BTreeMap, HashMap}; @@ -329,7 +329,7 @@ impl AnyAgentTool for ContextServerTool { fn run( self: Arc, - input: serde_json::Value, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { @@ -339,14 +339,15 @@ impl AnyAgentTool for ContextServerTool { let tool_name = self.tool.name.clone(); let tool_id = mcp_tool_id(&self.server_id.0, &self.tool.name); let display_name = self.tool.name.clone(); - let authorize = event_stream.authorize_third_party_tool( - self.initial_title(input.clone(), cx), - tool_id, - display_name, - cx, - ); + let initial_title = self.initial_title(serde_json::Value::Null, cx); + let authorize = + event_stream.authorize_third_party_tool(initial_title, tool_id, display_name, cx); cx.spawn(async move |_cx| { + let input = 
input.recv().await.map_err(|e| { + AgentToolOutput::from_error(format!("Failed to receive tool input: {e}")) + })?; + authorize.await.map_err(|e| AgentToolOutput::from_error(e.to_string()))?; let Some(protocol) = server.client() else { diff --git a/crates/agent/src/tools/copy_path_tool.rs b/crates/agent/src/tools/copy_path_tool.rs index c82d9e930e1987d389ece84347c1a0f43c601182..7f53a5c36a7979a01de96535f19e421fa3119e16 100644 --- a/crates/agent/src/tools/copy_path_tool.rs +++ b/crates/agent/src/tools/copy_path_tool.rs @@ -2,7 +2,9 @@ use super::tool_permissions::{ SensitiveSettingsKind, authorize_symlink_escapes, canonicalize_worktree_roots, collect_symlink_escapes, sensitive_settings_kind, }; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_paths}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_paths, +}; use agent_client_protocol::ToolKind; use agent_settings::AgentSettings; use futures::FutureExt as _; @@ -79,19 +81,24 @@ impl AgentTool for CopyPathTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let paths = vec![input.source_path.clone(), input.destination_path.clone()]; - let decision = decide_permission_for_paths(Self::NAME, &paths, settings); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let paths = vec![input.source_path.clone(), input.destination_path.clone()]; + let decision = cx.update(|cx| { + decide_permission_for_paths(Self::NAME, &paths, &AgentSettings::get_global(cx)) + }); + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + let fs = project.read_with(cx, |project, _cx| 
project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -248,7 +255,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; let title = auth.tool_call.fields.title.as_deref().unwrap_or(""); @@ -302,7 +309,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; drop(auth); @@ -354,7 +361,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; let title = auth.tool_call.fields.title.as_deref().unwrap_or(""); @@ -430,7 +437,9 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let result = cx.update(|cx| tool.run(input, event_stream, cx)).await; + let result = cx + .update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)) + .await; assert!(result.is_err(), "Tool should fail when policy denies"); assert!( diff --git a/crates/agent/src/tools/create_directory_tool.rs b/crates/agent/src/tools/create_directory_tool.rs index 500b5f00289db245898d5918a79dc684a6f0f110..5d8930f3c7400428d55cfe7d14bafc16d94be43a 100644 --- a/crates/agent/src/tools/create_directory_tool.rs +++ b/crates/agent/src/tools/create_directory_tool.rs @@ -13,7 +13,9 @@ use settings::Settings; use std::sync::Arc; use util::markdown::MarkdownInlineCode; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path}; 
+use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path, +}; use std::path::Path; /// Creates a new directory at the specified path within the project. Returns confirmation that the directory was created. @@ -68,21 +70,26 @@ impl AgentTool for CreateDirectoryTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let decision = decide_permission_for_path(Self::NAME, &input.path, settings); + let project = self.project.clone(); + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &input.path, AgentSettings::get_global(cx)) + }); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } - let destination_path: Arc = input.path.as_str().into(); + let destination_path: Arc = input.path.as_str().into(); - let project = self.project.clone(); - cx.spawn(async move |cx| { let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -218,9 +225,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - CreateDirectoryToolInput { + ToolInput::resolved(CreateDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -277,9 +284,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - CreateDirectoryToolInput { + ToolInput::resolved(CreateDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -336,9 +343,9 @@ mod tests { let (event_stream, mut 
event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - CreateDirectoryToolInput { + ToolInput::resolved(CreateDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -415,9 +422,9 @@ mod tests { let result = cx .update(|cx| { tool.run( - CreateDirectoryToolInput { + ToolInput::resolved(CreateDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs index 048f4bd8292077874b49bd74b09cbea38b4fafc5..27ab68db667a4cf3223e6521682814dc1c245bb7 100644 --- a/crates/agent/src/tools/delete_path_tool.rs +++ b/crates/agent/src/tools/delete_path_tool.rs @@ -2,7 +2,9 @@ use super::tool_permissions::{ SensitiveSettingsKind, authorize_symlink_access, canonicalize_worktree_roots, detect_symlink_escape, sensitive_settings_kind, }; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path, +}; use action_log::ActionLog; use agent_client_protocol::ToolKind; use agent_settings::AgentSettings; @@ -71,22 +73,27 @@ impl AgentTool for DeletePathTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let path = input.path; - - let settings = AgentSettings::get_global(cx); - let decision = decide_permission_for_path(Self::NAME, &path, settings); - - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - let project = self.project.clone(); let action_log = self.action_log.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let path = input.path; + + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path, 
AgentSettings::get_global(cx)) + }); + + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -278,9 +285,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - DeletePathToolInput { + ToolInput::resolved(DeletePathToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -345,9 +352,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - DeletePathToolInput { + ToolInput::resolved(DeletePathToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -405,9 +412,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.run( - DeletePathToolInput { + ToolInput::resolved(DeletePathToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -488,9 +495,9 @@ mod tests { let result = cx .update(|cx| { tool.run( - DeletePathToolInput { + ToolInput::resolved(DeletePathToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/diagnostics_tool.rs b/crates/agent/src/tools/diagnostics_tool.rs index fea16d531ed5f4212e6b1374aee04cee67b2fc0b..5889f66c2edbe06055678b19474447e0f23e2b0f 100644 --- a/crates/agent/src/tools/diagnostics_tool.rs +++ b/crates/agent/src/tools/diagnostics_tool.rs @@ -1,4 +1,4 @@ -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol as acp; use anyhow::Result; use futures::FutureExt as _; @@ -87,21 +87,27 @@ impl AgentTool for DiagnosticsTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - match 
input.path { - Some(path) if !path.is_empty() => { - let Some(project_path) = self.project.read(cx).find_project_path(&path, cx) else { - return Task::ready(Err(format!("Could not find path {path} in project"))); - }; - - let open_buffer_task = self - .project - .update(cx, |project, cx| project.open_buffer(project_path, cx)); + let project = self.project.clone(); + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + match input.path { + Some(path) if !path.is_empty() => { + let (_project_path, open_buffer_task) = project.update(cx, |project, cx| { + let Some(project_path) = project.find_project_path(&path, cx) else { + return Err(format!("Could not find path {path} in project")); + }; + let task = project.open_buffer(project_path.clone(), cx); + Ok((project_path, task)) + })?; - cx.spawn(async move |cx| { let buffer = futures::select! { result = open_buffer_task.fuse() => result.map_err(|e| e.to_string())?, _ = event_stream.cancelled_by_user().fuse() => { @@ -135,36 +141,40 @@ impl AgentTool for DiagnosticsTool { } else { Ok(output) } - }) - } - _ => { - let project = self.project.read(cx); - let mut output = String::new(); - let mut has_diagnostics = false; - - for (project_path, _, summary) in project.diagnostic_summaries(true, cx) { - if summary.error_count > 0 || summary.warning_count > 0 { - let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) - else { - continue; - }; - - has_diagnostics = true; - output.push_str(&format!( - "{}: {} error(s), {} warning(s)\n", - worktree.read(cx).absolutize(&project_path.path).display(), - summary.error_count, - summary.warning_count - )); - } } + _ => { + let (output, has_diagnostics) = project.read_with(cx, |project, cx| { + let mut output = String::new(); + let mut has_diagnostics = false; + + for (project_path, _, summary) in project.diagnostic_summaries(true, cx) { + if summary.error_count > 0 || summary.warning_count 
> 0 { + let Some(worktree) = + project.worktree_for_id(project_path.worktree_id, cx) + else { + continue; + }; + + has_diagnostics = true; + output.push_str(&format!( + "{}: {} error(s), {} warning(s)\n", + worktree.read(cx).absolutize(&project_path.path).display(), + summary.error_count, + summary.warning_count + )); + } + } + + (output, has_diagnostics) + }); - if has_diagnostics { - Task::ready(Ok(output)) - } else { - Task::ready(Ok("No errors or warnings found in the project.".into())) + if has_diagnostics { + Ok(output) + } else { + Ok("No errors or warnings found in the project.".into()) + } } } - } + }) } } diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 788bf06529a6f0b87242379ffcdb83f38e4c7126..3e1e0661f126d464c8d4611e2b3d85a9f668a5ca 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -2,7 +2,7 @@ use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; use super::tool_permissions::authorize_file_edit; use crate::{ - AgentTool, Templates, Thread, ToolCallEventStream, + AgentTool, Templates, Thread, ToolCallEventStream, ToolInput, edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat}, }; use acp_thread::Diff; @@ -237,39 +237,44 @@ impl AgentTool for EditFileTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let Ok(project) = self - .thread - .read_with(cx, |thread, _cx| thread.project().clone()) - else { - return Task::ready(Err(EditFileToolOutput::Error { - error: "thread was dropped".to_string(), - })); - }; - let project_path = match resolve_path(&input, project.clone(), cx) { - Ok(path) => path, - Err(err) => { - return Task::ready(Err(EditFileToolOutput::Error { - error: err.to_string(), - })); - } - }; - let abs_path = project.read(cx).absolute_path(&project_path, cx); - if let Some(abs_path) = 
abs_path.clone() { - event_stream.update_fields( - ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path)]), - ); - } - let allow_thinking = self - .thread - .read_with(cx, |thread, _cx| thread.thinking_enabled()) - .unwrap_or(true); - - let authorize = self.authorize(&input, &event_stream, cx); cx.spawn(async move |cx: &mut AsyncApp| { + let input = input.recv().await.map_err(|e| EditFileToolOutput::Error { + error: format!("Failed to receive tool input: {e}"), + })?; + + let project = self + .thread + .read_with(cx, |thread, _cx| thread.project().clone()) + .map_err(|_| EditFileToolOutput::Error { + error: "thread was dropped".to_string(), + })?; + + let (project_path, abs_path, allow_thinking, authorize) = + cx.update(|cx| { + let project_path = resolve_path(&input, project.clone(), cx).map_err(|err| { + EditFileToolOutput::Error { + error: err.to_string(), + } + })?; + let abs_path = project.read(cx).absolute_path(&project_path, cx); + if let Some(abs_path) = abs_path.clone() { + event_stream.update_fields( + ToolCallUpdateFields::new() + .locations(vec![acp::ToolCallLocation::new(abs_path)]), + ); + } + let allow_thinking = self + .thread + .read_with(cx, |thread, _cx| thread.thinking_enabled()) + .unwrap_or(true); + let authorize = self.authorize(&input, &event_stream, cx); + Ok::<_, EditFileToolOutput>((project_path, abs_path, allow_thinking, authorize)) + })?; + let result: anyhow::Result = async { authorize.await?; @@ -672,7 +677,11 @@ mod tests { language_registry, Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!( @@ -881,7 +890,11 @@ mod tests { language_registry.clone(), Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }); // Stream the unformatted content @@ -940,7 +953,11 @@ mod tests { language_registry, 
Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }); // Stream the unformatted content @@ -1027,7 +1044,11 @@ mod tests { language_registry.clone(), Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }); // Stream the content with trailing whitespace @@ -1082,7 +1103,11 @@ mod tests { language_registry, Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }); // Stream the content with trailing whitespace @@ -2081,11 +2106,11 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { tool.run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), mode: EditFileMode::Edit, - }, + }), stream_tx, cx, ) @@ -2111,11 +2136,11 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { tool.run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), mode: EditFileMode::Edit, - }, + }), stream_tx, cx, ) @@ -2139,11 +2164,11 @@ mod tests { let (stream_tx, mut stream_rx) = ToolCallEventStream::test(); let edit = cx.update(|cx| { tool.run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), mode: EditFileMode::Edit, - }, + }), stream_tx, cx, ) @@ -2199,11 +2224,11 @@ mod tests { // Read the file to record the read time cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { + ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, 
) @@ -2227,11 +2252,11 @@ mod tests { // Read the file again - should update the entry cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { + ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2298,11 +2323,11 @@ mod tests { // Read the file first cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { + ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2314,11 +2339,11 @@ mod tests { let edit_result = { let edit_task = cx.update(|cx| { edit_tool.clone().run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "First edit".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2343,11 +2368,11 @@ mod tests { let edit_result = { let edit_task = cx.update(|cx| { edit_tool.clone().run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Second edit".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2412,11 +2437,11 @@ mod tests { // Read the file first cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { + ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2456,11 +2481,11 @@ mod tests { let result = cx .update(|cx| { edit_tool.clone().run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit after external change".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2523,11 +2548,11 @@ mod tests { // Read the file first cx.update(|cx| { read_tool.clone().run( - crate::ReadFileToolInput { 
+ ToolInput::resolved(crate::ReadFileToolInput { path: "root/test.txt".to_string(), start_line: None, end_line: None, - }, + }), ToolCallEventStream::test().0, cx, ) @@ -2560,11 +2585,11 @@ mod tests { let result = cx .update(|cx| { edit_tool.clone().run( - EditFileToolInput { + ToolInput::resolved(EditFileToolInput { display_description: "Edit with dirty buffer".into(), path: "root/test.txt".into(), mode: EditFileMode::Edit, - }, + }), ToolCallEventStream::test().0, cx, ) diff --git a/crates/agent/src/tools/fetch_tool.rs b/crates/agent/src/tools/fetch_tool.rs index e573c2202b09d1283d75c3eda20b65be1bcd82a7..75880801595ad0604c9f3a1fac58bd916809a8ba 100644 --- a/crates/agent/src/tools/fetch_tool.rs +++ b/crates/agent/src/tools/fetch_tool.rs @@ -16,7 +16,8 @@ use ui::SharedString; use util::markdown::{MarkdownEscaped, MarkdownInlineCode}; use crate::{ - AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_from_settings, + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, + decide_permission_from_settings, }; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)] @@ -141,41 +142,52 @@ impl AgentTool for FetchTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let decision = - decide_permission_from_settings(Self::NAME, std::slice::from_ref(&input.url), settings); - - let authorize = match decision { - ToolPermissionDecision::Allow => None, - ToolPermissionDecision::Deny(reason) => { - return Task::ready(Err(reason)); - } - ToolPermissionDecision::Confirm => { - let context = - crate::ToolPermissionContext::new(Self::NAME, vec![input.url.clone()]); - Some(event_stream.authorize( - format!("Fetch {}", MarkdownInlineCode(&input.url)), - context, - cx, - )) - } - }; + let http_client = self.http_client.clone(); + cx.spawn(async move |cx| { + let input: FetchToolInput = input + .recv() + .await + 
.map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let decision = cx.update(|cx| { + decide_permission_from_settings( + Self::NAME, + std::slice::from_ref(&input.url), + AgentSettings::get_global(cx), + ) + }); + + let authorize = match decision { + ToolPermissionDecision::Allow => None, + ToolPermissionDecision::Deny(reason) => { + return Err(reason); + } + ToolPermissionDecision::Confirm => Some(cx.update(|cx| { + let context = + crate::ToolPermissionContext::new(Self::NAME, vec![input.url.clone()]); + event_stream.authorize( + format!("Fetch {}", MarkdownInlineCode(&input.url)), + context, + cx, + ) + })), + }; - let fetch_task = cx.background_spawn({ - let http_client = self.http_client.clone(); - async move { - if let Some(authorize) = authorize { - authorize.await?; + let fetch_task = cx.background_spawn({ + let http_client = http_client.clone(); + let url = input.url.clone(); + async move { + if let Some(authorize) = authorize { + authorize.await?; + } + Self::build_message(http_client, &url).await } - Self::build_message(http_client, &input.url).await - } - }); + }); - cx.foreground_executor().spawn(async move { let text = futures::select! 
{ result = fetch_task.fuse() => result.map_err(|e| e.to_string())?, _ = event_stream.cancelled_by_user().fuse() => { diff --git a/crates/agent/src/tools/find_path_tool.rs b/crates/agent/src/tools/find_path_tool.rs index 4ba60c61063c08ac002dc7dc16fa11b987cbab74..9c65461503225171bcda482d58871a94743481e3 100644 --- a/crates/agent/src/tools/find_path_tool.rs +++ b/crates/agent/src/tools/find_path_tool.rs @@ -1,4 +1,4 @@ -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol as acp; use anyhow::{Result, anyhow}; use futures::FutureExt as _; @@ -121,13 +121,18 @@ impl AgentTool for FindPathTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let search_paths_task = search_paths(&input.glob, self.project.clone(), cx); + let project = self.project.clone(); + cx.spawn(async move |cx| { + let input = input.recv().await.map_err(|e| FindPathToolOutput::Error { + error: format!("Failed to receive tool input: {e}"), + })?; + + let search_paths_task = cx.update(|cx| search_paths(&input.glob, project, cx)); - cx.background_spawn(async move { let matches = futures::select! 
{ result = search_paths_task.fuse() => result.map_err(|e| FindPathToolOutput::Error { error: e.to_string() })?, _ = event_stream.cancelled_by_user().fuse() => { diff --git a/crates/agent/src/tools/grep_tool.rs b/crates/agent/src/tools/grep_tool.rs index 16162107dff84ab40117b7783e04b633d144a214..fbfdc18585b822361effb6fd770e678b3e434a17 100644 --- a/crates/agent/src/tools/grep_tool.rs +++ b/crates/agent/src/tools/grep_tool.rs @@ -1,4 +1,4 @@ -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol as acp; use anyhow::Result; use futures::{FutureExt as _, StreamExt}; @@ -114,66 +114,64 @@ impl AgentTool for GrepTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { const CONTEXT_LINES: u32 = 2; const MAX_ANCESTOR_LINES: u32 = 10; - let path_style = self.project.read(cx).path_style(cx); - - let include_matcher = match PathMatcher::new( - input - .include_pattern - .as_ref() - .into_iter() - .collect::>(), - path_style, - ) { - Ok(matcher) => matcher, - Err(error) => { - return Task::ready(Err(format!("invalid include glob pattern: {error}"))); - } - }; - - // Exclude global file_scan_exclusions and private_files settings - let exclude_matcher = { - let global_settings = WorktreeSettings::get_global(cx); - let exclude_patterns = global_settings - .file_scan_exclusions - .sources() - .chain(global_settings.private_files.sources()); - - match PathMatcher::new(exclude_patterns, path_style) { - Ok(matcher) => matcher, - Err(error) => { - return Task::ready(Err(format!("invalid exclude pattern: {error}"))); - } - } - }; - - let query = match SearchQuery::regex( - &input.regex, - false, - input.case_sensitive, - false, - false, - include_matcher, - exclude_matcher, - true, // Always match file include pattern against *full project paths* that start with a project root. 
- None, - ) { - Ok(query) => query, - Err(error) => return Task::ready(Err(error.to_string())), - }; - - let results = self - .project - .update(cx, |project, cx| project.search(query, cx)); - - let project = self.project.downgrade(); + let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + let results = cx.update(|cx| { + let path_style = project.read(cx).path_style(cx); + + let include_matcher = PathMatcher::new( + input + .include_pattern + .as_ref() + .into_iter() + .collect::>(), + path_style, + ) + .map_err(|error| format!("invalid include glob pattern: {error}"))?; + + // Exclude global file_scan_exclusions and private_files settings + let exclude_matcher = { + let global_settings = WorktreeSettings::get_global(cx); + let exclude_patterns = global_settings + .file_scan_exclusions + .sources() + .chain(global_settings.private_files.sources()); + + PathMatcher::new(exclude_patterns, path_style) + .map_err(|error| format!("invalid exclude pattern: {error}"))? + }; + + let query = SearchQuery::regex( + &input.regex, + false, + input.case_sensitive, + false, + false, + include_matcher, + exclude_matcher, + true, // Always match file include pattern against *full project paths* that start with a project root. + None, + ) + .map_err(|error| error.to_string())?; + + Ok::<_, String>( + project.update(cx, |project, cx| project.search(query, cx)), + ) + })?; + + let project = project.downgrade(); // Keep the search alive for the duration of result iteration. Dropping this task is the // cancellation mechanism; we intentionally do not detach it. 
let SearchResults {rx, _task_handle} = results; @@ -787,7 +785,13 @@ mod tests { cx: &mut TestAppContext, ) -> String { let tool = Arc::new(GrepTool { project }); - let task = cx.update(|cx| tool.run(input, ToolCallEventStream::test().0, cx)); + let task = cx.update(|cx| { + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }); match task.await { Ok(result) => { diff --git a/crates/agent/src/tools/list_directory_tool.rs b/crates/agent/src/tools/list_directory_tool.rs index 5dddee94904283ccb9198ce56aa4005250b5908a..1a674aaa71fef5bf9c11688e82982a5dbcfee331 100644 --- a/crates/agent/src/tools/list_directory_tool.rs +++ b/crates/agent/src/tools/list_directory_tool.rs @@ -2,7 +2,7 @@ use super::tool_permissions::{ ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots, resolve_project_path, }; -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; use agent_client_protocol::ToolKind; use anyhow::{Context as _, Result, anyhow}; use gpui::{App, Entity, SharedString, Task}; @@ -146,34 +146,39 @@ impl AgentTool for ListDirectoryTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - // Sometimes models will return these even though we tell it to give a path and not a glob. - // When this happens, just list the root worktree directories. - if matches!(input.path.as_str(), "." 
| "" | "./" | "*") { - let output = self - .project - .read(cx) - .worktrees(cx) - .filter_map(|worktree| { - let worktree = worktree.read(cx); - let root_entry = worktree.root_entry()?; - if root_entry.is_dir() { - Some(root_entry.path.display(worktree.path_style())) - } else { - None - } - }) - .collect::>() - .join("\n"); - - return Task::ready(Ok(output)); - } - let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + // Sometimes models will return these even though we tell it to give a path and not a glob. + // When this happens, just list the root worktree directories. + if matches!(input.path.as_str(), "." | "" | "./" | "*") { + let output = project.read_with(cx, |project, cx| { + project + .worktrees(cx) + .filter_map(|worktree| { + let worktree = worktree.read(cx); + let root_entry = worktree.root_entry()?; + if root_entry.is_dir() { + Some(root_entry.path.display(worktree.path_style())) + } else { + None + } + }) + .collect::>() + .join("\n") + }); + + return Ok(output); + } + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -323,7 +328,13 @@ mod tests { path: "project".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert_eq!( @@ -344,7 +355,13 @@ mod tests { path: "project/src".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert_eq!( @@ -365,7 +382,13 @@ mod tests { path: "project/tests".into(), }; let output = cx - .update(|cx| tool.clone().run(input, 
ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(!output.contains("# Folders:")); @@ -393,7 +416,13 @@ mod tests { path: "project/empty_dir".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert_eq!(output, "project/empty_dir is empty.\n"); @@ -420,7 +449,13 @@ mod tests { path: "project/nonexistent".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await; assert!(output.unwrap_err().contains("Path not found")); @@ -429,7 +464,13 @@ mod tests { path: "project/file.txt".into(), }; let output = cx - .update(|cx| tool.run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await; assert!(output.unwrap_err().contains("is not a directory")); } @@ -493,7 +534,13 @@ mod tests { path: "project".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); @@ -520,7 +567,13 @@ mod tests { path: "project/.secretdir".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await; assert!( output.unwrap_err().contains("file_scan_exclusions"), @@ -532,7 +585,13 @@ mod tests { path: "project/visible_dir".into(), }; let output = cx - .update(|cx| tool.clone().run(input, 
ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); @@ -637,7 +696,13 @@ mod tests { path: "worktree1/src".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(output.contains("main.rs"), "Should list main.rs"); @@ -655,7 +720,13 @@ mod tests { path: "worktree1/tests".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(output.contains("test.rs"), "Should list test.rs"); @@ -669,7 +740,13 @@ mod tests { path: "worktree2/lib".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(output.contains("public.js"), "Should list public.js"); @@ -687,7 +764,13 @@ mod tests { path: "worktree2/docs".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await .unwrap(); assert!(output.contains("README.md"), "Should list README.md"); @@ -701,7 +784,13 @@ mod tests { path: "worktree1/src/secret.rs".into(), }; let output = cx - .update(|cx| tool.clone().run(input, ToolCallEventStream::test().0, cx)) + .update(|cx| { + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) + }) .await; assert!(output.unwrap_err().contains("Cannot list directory"),); } @@ -743,9 +832,9 @@ mod tests { let (event_stream, mut 
event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -804,9 +893,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -871,9 +960,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/link_to_external".into(), - }, + }), event_stream, cx, ) @@ -924,9 +1013,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/src".into(), - }, + }), event_stream, cx, ) @@ -981,9 +1070,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - ListDirectoryToolInput { + ToolInput::resolved(ListDirectoryToolInput { path: "project/link_dir".into(), - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/move_path_tool.rs b/crates/agent/src/tools/move_path_tool.rs index 4c337d0ec2827ad7c63c87ef206f0e74dc63091f..c246b3c5b0661546f4617bb5521766f9da3839fb 100644 --- a/crates/agent/src/tools/move_path_tool.rs +++ b/crates/agent/src/tools/move_path_tool.rs @@ -2,7 +2,9 @@ use super::tool_permissions::{ SensitiveSettingsKind, authorize_symlink_escapes, canonicalize_worktree_roots, collect_symlink_escapes, sensitive_settings_kind, }; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_paths}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_paths, +}; use agent_client_protocol::ToolKind; use agent_settings::AgentSettings; use futures::FutureExt as _; @@ -92,19 +94,24 @@ impl AgentTool 
for MovePathTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let paths = vec![input.source_path.clone(), input.destination_path.clone()]; - let decision = decide_permission_for_paths(Self::NAME, &paths, settings); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let paths = vec![input.source_path.clone(), input.destination_path.clone()]; + let decision = cx.update(|cx| { + decide_permission_for_paths(Self::NAME, &paths, AgentSettings::get_global(cx)) + }); + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -255,7 +262,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; let title = auth.tool_call.fields.title.as_deref().unwrap_or(""); @@ -309,7 +316,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = event_rx.expect_authorization().await; drop(auth); @@ -361,7 +368,7 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let task = cx.update(|cx| tool.run(input, event_stream, cx)); + let task = cx.update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)); let auth = 
event_rx.expect_authorization().await; let title = auth.tool_call.fields.title.as_deref().unwrap_or(""); @@ -437,7 +444,9 @@ mod tests { }; let (event_stream, mut event_rx) = ToolCallEventStream::test(); - let result = cx.update(|cx| tool.run(input, event_stream, cx)).await; + let result = cx + .update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)) + .await; assert!(result.is_err(), "Tool should fail when policy denies"); assert!( diff --git a/crates/agent/src/tools/now_tool.rs b/crates/agent/src/tools/now_tool.rs index 689d70ff20d15cbc56fcc0e14a3b46408647f737..fe1cafe5881d14c9700813f742e1f2df0aa1203e 100644 --- a/crates/agent/src/tools/now_tool.rs +++ b/crates/agent/src/tools/now_tool.rs @@ -6,7 +6,7 @@ use gpui::{App, SharedString, Task}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use crate::{AgentTool, ToolCallEventStream}; +use crate::{AgentTool, ToolCallEventStream, ToolInput}; #[derive(Debug, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] @@ -48,14 +48,20 @@ impl AgentTool for NowTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, _event_stream: ToolCallEventStream, - _cx: &mut App, + cx: &mut App, ) -> Task> { - let now = match input.timezone { - Timezone::Utc => Utc::now().to_rfc3339(), - Timezone::Local => Local::now().to_rfc3339(), - }; - Task::ready(Ok(format!("The current datetime is {now}."))) + cx.spawn(async move |_cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + let now = match input.timezone { + Timezone::Utc => Utc::now().to_rfc3339(), + Timezone::Local => Local::now().to_rfc3339(), + }; + Ok(format!("The current datetime is {now}.")) + }) } } diff --git a/crates/agent/src/tools/open_tool.rs b/crates/agent/src/tools/open_tool.rs index c0b24efbec6418c437e9e3d14ffb5d40b45c91b0..344a513d10c2d62e4247dd3e47bcdf428586d6f0 100644 --- a/crates/agent/src/tools/open_tool.rs +++ b/crates/agent/src/tools/open_tool.rs @@ 
-2,7 +2,7 @@ use super::tool_permissions::{ ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots, resolve_project_path, }; -use crate::AgentTool; +use crate::{AgentTool, ToolInput}; use agent_client_protocol::ToolKind; use futures::FutureExt as _; use gpui::{App, AppContext as _, Entity, SharedString, Task}; @@ -61,16 +61,24 @@ impl AgentTool for OpenTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: crate::ToolCallEventStream, cx: &mut App, ) -> Task> { - // If path_or_url turns out to be a path in the project, make it absolute. - let abs_path = to_absolute_path(&input.path_or_url, self.project.clone(), cx); - let initial_title = self.initial_title(Ok(input.clone()), cx); - let project = self.project.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + // If path_or_url turns out to be a path in the project, make it absolute. + let (abs_path, initial_title) = cx.update(|cx| { + let abs_path = to_absolute_path(&input.path_or_url, project.clone(), cx); + let initial_title = self.initial_title(Ok(input.clone()), cx); + (abs_path, initial_title) + }); + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs index efd33fe5caece4cee4fc02aab8c1a0ebee92f94e..bbc67cf68c7d104772c18ad222478621ce4d7a54 100644 --- a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -21,7 +21,7 @@ use super::tool_permissions::{ ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots, resolve_project_path, }; -use crate::{AgentTool, Thread, ToolCallEventStream, outline}; +use crate::{AgentTool, Thread, ToolCallEventStream, ToolInput, outline}; /// Reads the content of the given file in the project. 
/// @@ -114,7 +114,7 @@ impl AgentTool for ReadFileTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { @@ -122,6 +122,10 @@ impl AgentTool for ReadFileTool { let thread = self.thread.clone(); let action_log = self.action_log.clone(); cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(tool_content_err)?; let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -398,7 +402,7 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, event_stream, cx) + tool.run(ToolInput::resolved(input), event_stream, cx) }) .await; assert_eq!( @@ -442,7 +446,11 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, ToolCallEventStream::test().0, cx) + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "This is a small file content".into()); @@ -485,7 +493,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await .unwrap(); @@ -510,7 +522,11 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, ToolCallEventStream::test().0, cx) + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await .unwrap(); @@ -570,7 +586,11 @@ mod test { start_line: Some(2), end_line: Some(4), }; - tool.run(input, ToolCallEventStream::test().0, cx) + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "Line 2\nLine 3\nLine 4\n".into()); @@ -613,7 +633,11 @@ mod test { start_line: Some(0), end_line: Some(2), }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + 
ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "Line 1\nLine 2\n".into()); @@ -626,7 +650,11 @@ mod test { start_line: Some(1), end_line: Some(0), }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "Line 1\n".into()); @@ -639,7 +667,11 @@ mod test { start_line: Some(3), end_line: Some(2), }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert_eq!(result.unwrap(), "Line 3\n".into()); @@ -744,7 +776,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -760,7 +796,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -776,7 +816,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -791,7 +835,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -807,7 +855,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -822,7 +874,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, 
ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -837,7 +893,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -853,7 +913,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!(result.is_ok(), "Should be able to read normal files"); @@ -867,7 +931,11 @@ mod test { start_line: None, end_line: None, }; - tool.run(input, ToolCallEventStream::test().0, cx) + tool.run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; assert!( @@ -911,11 +979,11 @@ mod test { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let read_task = cx.update(|cx| { tool.run( - ReadFileToolInput { + ToolInput::resolved(ReadFileToolInput { path: "root/secret.png".to_string(), start_line: None, end_line: None, - }, + }), event_stream, cx, ) @@ -1039,7 +1107,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await .unwrap(); @@ -1057,7 +1129,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1075,7 +1151,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1093,7 +1173,11 @@ mod test { start_line: None, 
end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await .unwrap(); @@ -1111,7 +1195,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1129,7 +1217,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1148,7 +1240,11 @@ mod test { start_line: None, end_line: None, }; - tool.clone().run(input, ToolCallEventStream::test().0, cx) + tool.clone().run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1210,11 +1306,11 @@ mod test { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - ReadFileToolInput { + ToolInput::resolved(ReadFileToolInput { path: "project/secret_link.txt".to_string(), start_line: None, end_line: None, - }, + }), event_stream, cx, ) @@ -1286,11 +1382,11 @@ mod test { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - ReadFileToolInput { + ToolInput::resolved(ReadFileToolInput { path: "project/secret_link.txt".to_string(), start_line: None, end_line: None, - }, + }), event_stream, cx, ) @@ -1367,11 +1463,11 @@ mod test { let result = cx .update(|cx| { tool.clone().run( - ReadFileToolInput { + ToolInput::resolved(ReadFileToolInput { path: "project/secret_link.txt".to_string(), start_line: None, end_line: None, - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs index 
304e0d1180fe626482206bfdc2dfa6d53f529816..c1aa8690a840ea6911dcb94c26c8cef3cb5f313d 100644 --- a/crates/agent/src/tools/restore_file_from_disk_tool.rs +++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs @@ -17,7 +17,9 @@ use std::path::{Path, PathBuf}; use std::sync::Arc; use util::markdown::MarkdownInlineCode; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path, +}; /// Discards unsaved changes in open buffers by reloading file contents from disk. /// @@ -66,25 +68,31 @@ impl AgentTool for RestoreFileFromDiskTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx).clone(); - - // Check for any immediate deny before spawning async work. - for path in &input.paths { - let path_str = path.to_string_lossy(); - let decision = decide_permission_for_path(Self::NAME, &path_str, &settings); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - } - let project = self.project.clone(); - let input_paths = input.paths; cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + // Check for any immediate deny before doing async work. 
+ for path in &input.paths { + let path_str = path.to_string_lossy(); + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx)) + }); + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + } + + let input_paths = input.paths; + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -92,7 +100,9 @@ impl AgentTool for RestoreFileFromDiskTool { for path in &input_paths { let path_str = path.to_string_lossy(); - let decision = decide_permission_for_path(Self::NAME, &path_str, &settings); + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx)) + }); let symlink_escape = project.read_with(cx, |project, cx| { path_has_symlink_escape(project, path, &canonical_roots, cx) }); @@ -378,12 +388,12 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![ PathBuf::from("root/dirty.txt"), PathBuf::from("root/clean.txt"), ], - }, + }), ToolCallEventStream::test().0, cx, ) @@ -428,7 +438,7 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { paths: vec![] }, + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![] }), ToolCallEventStream::test().0, cx, ) @@ -441,9 +451,9 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![PathBuf::from("nonexistent/path.txt")], - }, + }), ToolCallEventStream::test().0, cx, ) @@ -495,9 +505,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: 
vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -564,9 +574,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -623,9 +633,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - RestoreFileFromDiskToolInput { + ToolInput::resolved(RestoreFileFromDiskToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs index 20140c77d113d96c741d5afbe672882f708870d6..99e937b9dff2a1b4781dde16bd2bf6d64edd25ad 100644 --- a/crates/agent/src/tools/save_file_tool.rs +++ b/crates/agent/src/tools/save_file_tool.rs @@ -17,7 +17,9 @@ use super::tool_permissions::{ canonicalize_worktree_roots, path_has_symlink_escape, resolve_project_path, sensitive_settings_kind, }; -use crate::{AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_for_path}; +use crate::{ + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_for_path, +}; /// Saves files that have unsaved changes. /// @@ -63,25 +65,31 @@ impl AgentTool for SaveFileTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx).clone(); - - // Check for any immediate deny before spawning async work. 
- for path in &input.paths { - let path_str = path.to_string_lossy(); - let decision = decide_permission_for_path(Self::NAME, &path_str, &settings); - if let ToolPermissionDecision::Deny(reason) = decision { - return Task::ready(Err(reason)); - } - } - let project = self.project.clone(); - let input_paths = input.paths; cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + // Check for any immediate deny before doing async work. + for path in &input.paths { + let path_str = path.to_string_lossy(); + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx)) + }); + if let ToolPermissionDecision::Deny(reason) = decision { + return Err(reason); + } + } + + let input_paths = input.paths; + let fs = project.read_with(cx, |project, _cx| project.fs().clone()); let canonical_roots = canonicalize_worktree_roots(&project, &fs, cx).await; @@ -89,7 +97,9 @@ impl AgentTool for SaveFileTool { for path in &input_paths { let path_str = path.to_string_lossy(); - let decision = decide_permission_for_path(Self::NAME, &path_str, &settings); + let decision = cx.update(|cx| { + decide_permission_for_path(Self::NAME, &path_str, AgentSettings::get_global(cx)) + }); let symlink_escape = project.read_with(cx, |project, cx| { path_has_symlink_escape(project, path, &canonical_roots, cx) }); @@ -382,12 +392,12 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![ PathBuf::from("root/dirty.txt"), PathBuf::from("root/clean.txt"), ], - }, + }), ToolCallEventStream::test().0, cx, ) @@ -425,7 +435,7 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - SaveFileToolInput { paths: vec![] }, + ToolInput::resolved(SaveFileToolInput { paths: vec![] }), ToolCallEventStream::test().0, cx, ) @@ -438,9 +448,9 @@ mod tests { let output = cx .update(|cx| { tool.clone().run( - 
SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![PathBuf::from("nonexistent/path.txt")], - }, + }), ToolCallEventStream::test().0, cx, ) @@ -490,9 +500,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -559,9 +569,9 @@ mod tests { let result = cx .update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -618,9 +628,9 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![PathBuf::from("project/link.txt")], - }, + }), event_stream, cx, ) @@ -702,12 +712,12 @@ mod tests { let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { tool.clone().run( - SaveFileToolInput { + ToolInput::resolved(SaveFileToolInput { paths: vec![ PathBuf::from("project/dirty.txt"), PathBuf::from("project/link.txt"), ], - }, + }), event_stream, cx, ) diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index e2dd78d4476de48465cb5c48e225e2ae5a0a7767..69529282544cc35a01f792dcb45df6eb8bdf67d5 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize}; use std::rc::Rc; use std::sync::Arc; -use crate::{AgentTool, Thread, ThreadEnvironment, ToolCallEventStream}; +use crate::{AgentTool, Thread, ThreadEnvironment, ToolCallEventStream, ToolInput}; /// Spawns an agent to perform a delegated task. 
/// @@ -97,61 +97,78 @@ impl AgentTool for SpawnAgentTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let Some(parent_thread_entity) = self.parent_thread.upgrade() else { - return Task::ready(Err(SpawnAgentToolOutput::Error { - session_id: None, - error: "Parent thread no longer exists".to_string(), - })); - }; - - let subagent = if let Some(session_id) = input.session_id { - self.environment - .resume_subagent(parent_thread_entity, session_id, input.message, cx) - } else { - self.environment - .create_subagent(parent_thread_entity, input.label, input.message, cx) - }; - let subagent = match subagent { - Ok(subagent) => subagent, - Err(err) => { - return Task::ready(Err(SpawnAgentToolOutput::Error { + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| SpawnAgentToolOutput::Error { + session_id: None, + error: format!("Failed to receive tool input: {e}"), + })?; + + let (subagent, subagent_session_id) = cx.update(|cx| { + let Some(parent_thread_entity) = self.parent_thread.upgrade() else { + return Err(SpawnAgentToolOutput::Error { + session_id: None, + error: "Parent thread no longer exists".to_string(), + }); + }; + + let subagent = if let Some(session_id) = input.session_id { + self.environment.resume_subagent( + parent_thread_entity, + session_id, + input.message, + cx, + ) + } else { + self.environment.create_subagent( + parent_thread_entity, + input.label, + input.message, + cx, + ) + }; + let subagent = subagent.map_err(|err| SpawnAgentToolOutput::Error { session_id: None, error: err.to_string(), - })); - } - }; - let subagent_session_id = subagent.id(); - - event_stream.subagent_spawned(subagent_session_id.clone()); - let meta = acp::Meta::from_iter([( - SUBAGENT_SESSION_ID_META_KEY.into(), - subagent_session_id.to_string().into(), - )]); - event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta)); - - cx.spawn(async move 
|cx| match subagent.wait_for_output(cx).await { - Ok(output) => { - event_stream.update_fields( - acp::ToolCallUpdateFields::new().content(vec![output.clone().into()]), - ); - Ok(SpawnAgentToolOutput::Success { - session_id: subagent_session_id, - output, - }) - } - Err(e) => { - let error = e.to_string(); - event_stream.update_fields( - acp::ToolCallUpdateFields::new().content(vec![error.clone().into()]), - ); - Err(SpawnAgentToolOutput::Error { - session_id: Some(subagent_session_id), - error, - }) + })?; + let subagent_session_id = subagent.id(); + + event_stream.subagent_spawned(subagent_session_id.clone()); + let meta = acp::Meta::from_iter([( + SUBAGENT_SESSION_ID_META_KEY.into(), + subagent_session_id.to_string().into(), + )]); + event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta)); + + Ok((subagent, subagent_session_id)) + })?; + + match subagent.wait_for_output(cx).await { + Ok(output) => { + event_stream.update_fields( + acp::ToolCallUpdateFields::new().content(vec![output.clone().into()]), + ); + Ok(SpawnAgentToolOutput::Success { + session_id: subagent_session_id, + output, + }) + } + Err(e) => { + let error = e.to_string(); + event_stream.update_fields( + acp::ToolCallUpdateFields::new().content(vec![error.clone().into()]), + ); + Err(SpawnAgentToolOutput::Error { + session_id: Some(subagent_session_id), + error, + }) + } } }) } diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index dd5445142a001fbd9106af548444165bc8331581..95651b44bac44ad3cc67c25c0ef13fc885342ce3 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -2,7 +2,7 @@ use super::edit_file_tool::EditFileTool; use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; use crate::{ - AgentTool, Templates, Thread, ToolCallEventStream, + AgentTool, Thread, ToolCallEventStream, 
ToolInput, edit_agent::streaming_fuzzy_matcher::StreamingFuzzyMatcher, }; use acp_thread::Diff; @@ -164,8 +164,6 @@ pub struct StreamingEditFileTool { thread: WeakEntity, language_registry: Arc, project: Entity, - #[allow(dead_code)] - templates: Arc, } impl StreamingEditFileTool { @@ -173,13 +171,11 @@ impl StreamingEditFileTool { project: Entity, thread: WeakEntity, language_registry: Arc, - templates: Arc, ) -> Self { Self { project, thread, language_registry, - templates, } } @@ -188,7 +184,6 @@ impl StreamingEditFileTool { project: self.project.clone(), thread: new_thread, language_registry: self.language_registry.clone(), - templates: self.templates.clone(), } } @@ -268,38 +263,41 @@ impl AgentTool for StreamingEditFileTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let Ok(project) = self - .thread - .read_with(cx, |thread, _cx| thread.project().clone()) - else { - return Task::ready(Err(StreamingEditFileToolOutput::Error { - error: "thread was dropped".to_string(), - })); - }; - - let project_path = match resolve_path(&input, project.clone(), cx) { - Ok(path) => path, - Err(err) => { - return Task::ready(Err(StreamingEditFileToolOutput::Error { - error: err.to_string(), - })); - } - }; - - let abs_path = project.read(cx).absolute_path(&project_path, cx); - if let Some(abs_path) = abs_path.clone() { - event_stream.update_fields( - ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path)]), - ); - } - - let authorize = self.authorize(&input, &event_stream, cx); - cx.spawn(async move |cx: &mut AsyncApp| { + let input = input.recv().await.map_err(|e| { + StreamingEditFileToolOutput::Error { + error: format!("Failed to receive tool input: {e}"), + } + })?; + + let project = self + .thread + .read_with(cx, |thread, _cx| thread.project().clone()) + .map_err(|_| StreamingEditFileToolOutput::Error { + error: "thread was dropped".to_string(), + })?; + + let 
(project_path, abs_path, authorize) = cx.update(|cx| { + let project_path = + resolve_path(&input, project.clone(), cx).map_err(|err| { + StreamingEditFileToolOutput::Error { + error: err.to_string(), + } + })?; + let abs_path = project.read(cx).absolute_path(&project_path, cx); + if let Some(abs_path) = abs_path.clone() { + event_stream.update_fields( + ToolCallUpdateFields::new() + .locations(vec![acp::ToolCallLocation::new(abs_path)]), + ); + } + let authorize = self.authorize(&input, &event_stream, cx); + Ok::<_, StreamingEditFileToolOutput>((project_path, abs_path, authorize)) + })?; let result: anyhow::Result = async { authorize.await?; @@ -787,9 +785,12 @@ mod tests { project.clone(), thread.downgrade(), language_registry, - Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -836,9 +837,12 @@ mod tests { project.clone(), thread.downgrade(), language_registry, - Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -896,9 +900,12 @@ mod tests { project.clone(), thread.downgrade(), language_registry, - Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -958,9 +965,12 @@ mod tests { project.clone(), thread.downgrade(), language_registry, - Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1023,9 +1033,12 @@ mod tests { project.clone(), thread.downgrade(), language_registry, - Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1088,9 +1101,12 @@ mod tests { project.clone(), thread.downgrade(), 
language_registry, - Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1141,9 +1157,12 @@ mod tests { project, thread.downgrade(), language_registry, - Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1192,9 +1211,12 @@ mod tests { project, thread.downgrade(), language_registry, - Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; @@ -1262,9 +1284,12 @@ mod tests { project, thread.downgrade(), language_registry, - Templates::new(), )) - .run(input, ToolCallEventStream::test().0, cx) + .run( + ToolInput::resolved(input), + ToolCallEventStream::test().0, + cx, + ) }) .await; diff --git a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs index 57b3278da256c01408f704a8e2f6f7e075057597..6396bd1b0e63b46a0207dd7df9b9f2fcd00176b7 100644 --- a/crates/agent/src/tools/terminal_tool.rs +++ b/crates/agent/src/tools/terminal_tool.rs @@ -15,7 +15,7 @@ use std::{ }; use crate::{ - AgentTool, ThreadEnvironment, ToolCallEventStream, ToolPermissionDecision, + AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput, ToolPermissionDecision, decide_permission_from_settings, }; @@ -85,34 +85,45 @@ impl AgentTool for TerminalTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let working_dir = match working_dir(&input, &self.project, cx) { - Ok(dir) => dir, - Err(err) => return Task::ready(Err(err.to_string())), - }; + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; - let settings = AgentSettings::get_global(cx); - let decision = 
decide_permission_from_settings( - Self::NAME, - std::slice::from_ref(&input.command), - settings, - ); + let (working_dir, authorize) = cx.update(|cx| { + let working_dir = + working_dir(&input, &self.project, cx).map_err(|err| err.to_string())?; - let authorize = match decision { - ToolPermissionDecision::Allow => None, - ToolPermissionDecision::Deny(reason) => { - return Task::ready(Err(reason)); - } - ToolPermissionDecision::Confirm => { - let context = - crate::ToolPermissionContext::new(Self::NAME, vec![input.command.clone()]); - Some(event_stream.authorize(self.initial_title(Ok(input.clone()), cx), context, cx)) - } - }; - cx.spawn(async move |cx| { + let decision = decide_permission_from_settings( + Self::NAME, + std::slice::from_ref(&input.command), + AgentSettings::get_global(cx), + ); + + let authorize = match decision { + ToolPermissionDecision::Allow => None, + ToolPermissionDecision::Deny(reason) => { + return Err(reason); + } + ToolPermissionDecision::Confirm => { + let context = crate::ToolPermissionContext::new( + Self::NAME, + vec![input.command.clone()], + ); + Some(event_stream.authorize( + self.initial_title(Ok(input.clone()), cx), + context, + cx, + )) + } + }; + Ok((working_dir, authorize)) + })?; if let Some(authorize) = authorize { authorize.await.map_err(|e| e.to_string())?; } diff --git a/crates/agent/src/tools/web_search_tool.rs b/crates/agent/src/tools/web_search_tool.rs index c536f45ba65c109d3068b0722db1ffb1cad8b87c..c697a5b78f1fe8c84d6ed58db13f651a493ae8c3 100644 --- a/crates/agent/src/tools/web_search_tool.rs +++ b/crates/agent/src/tools/web_search_tool.rs @@ -1,14 +1,15 @@ use std::sync::Arc; use crate::{ - AgentTool, ToolCallEventStream, ToolPermissionDecision, decide_permission_from_settings, + AgentTool, ToolCallEventStream, ToolInput, ToolPermissionDecision, + decide_permission_from_settings, }; use agent_client_protocol as acp; use agent_settings::AgentSettings; use anyhow::Result; use cloud_llm_client::WebSearchResponse; use 
futures::FutureExt as _; -use gpui::{App, AppContext, Task}; +use gpui::{App, Task}; use language_model::{ LanguageModelProviderId, LanguageModelToolResultContent, ZED_CLOUD_PROVIDER_ID, }; @@ -73,41 +74,51 @@ impl AgentTool for WebSearchTool { fn run( self: Arc, - input: Self::Input, + input: ToolInput, event_stream: ToolCallEventStream, cx: &mut App, ) -> Task> { - let settings = AgentSettings::get_global(cx); - let decision = decide_permission_from_settings( - Self::NAME, - std::slice::from_ref(&input.query), - settings, - ); - - let authorize = match decision { - ToolPermissionDecision::Allow => None, - ToolPermissionDecision::Deny(reason) => { - return Task::ready(Err(WebSearchToolOutput::Error { error: reason })); - } - ToolPermissionDecision::Confirm => { - let context = - crate::ToolPermissionContext::new(Self::NAME, vec![input.query.clone()]); - Some(event_stream.authorize( - format!("Search the web for {}", MarkdownInlineCode(&input.query)), - context, - cx, - )) - } - }; + cx.spawn(async move |cx| { + let input = input + .recv() + .await + .map_err(|e| WebSearchToolOutput::Error { + error: format!("Failed to receive tool input: {e}"), + })?; + + let (authorize, search_task) = cx.update(|cx| { + let decision = decide_permission_from_settings( + Self::NAME, + std::slice::from_ref(&input.query), + AgentSettings::get_global(cx), + ); + + let authorize = match decision { + ToolPermissionDecision::Allow => None, + ToolPermissionDecision::Deny(reason) => { + return Err(WebSearchToolOutput::Error { error: reason }); + } + ToolPermissionDecision::Confirm => { + let context = + crate::ToolPermissionContext::new(Self::NAME, vec![input.query.clone()]); + Some(event_stream.authorize( + format!("Search the web for {}", MarkdownInlineCode(&input.query)), + context, + cx, + )) + } + }; + + let Some(provider) = WebSearchRegistry::read_global(cx).active_provider() else { + return Err(WebSearchToolOutput::Error { + error: "Web search is not available.".to_string(), + }); + 
}; - let Some(provider) = WebSearchRegistry::read_global(cx).active_provider() else { - return Task::ready(Err(WebSearchToolOutput::Error { - error: "Web search is not available.".to_string(), - })); - }; + let search_task = provider.search(input.query, cx); + Ok((authorize, search_task)) + })?; - let search_task = provider.search(input.query, cx); - cx.background_spawn(async move { if let Some(authorize) = authorize { authorize.await.map_err(|e| WebSearchToolOutput::Error { error: e.to_string() })?; } diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 9d673182bc64e192e6db13a927392d611c53407d..f15382b67557fa9a9b0eda2a9d4438aa33c7cff3 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -2,7 +2,9 @@ /// The tests in this file assume that server_cx is running on Windows too. /// We neead to find a way to test Windows-Non-Windows interactions. use crate::headless_project::HeadlessProject; -use agent::{AgentTool, ReadFileTool, ReadFileToolInput, Templates, Thread, ToolCallEventStream}; +use agent::{ + AgentTool, ReadFileTool, ReadFileToolInput, Templates, Thread, ToolCallEventStream, ToolInput, +}; use client::{Client, UserStore}; use clock::FakeSystemClock; use collections::{HashMap, HashSet}; @@ -1962,7 +1964,11 @@ async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mu let read_tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); let (event_stream, _) = ToolCallEventStream::test(); - let exists_result = cx.update(|cx| read_tool.clone().run(input, event_stream.clone(), cx)); + let exists_result = cx.update(|cx| { + read_tool + .clone() + .run(ToolInput::resolved(input), event_stream.clone(), cx) + }); let output = exists_result.await.unwrap(); assert_eq!(output, LanguageModelToolResultContent::Text("B".into())); @@ -1971,7 +1977,8 @@ async fn test_remote_agent_fs_tool_calls(cx: &mut 
TestAppContext, server_cx: &mu start_line: None, end_line: None, }; - let does_not_exist_result = cx.update(|cx| read_tool.run(input, event_stream, cx)); + let does_not_exist_result = + cx.update(|cx| read_tool.run(ToolInput::resolved(input), event_stream, cx)); does_not_exist_result.await.unwrap_err(); } diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index 09340dcec641ae2a6c1ea871e770886d14276529..b7471321db203075ac6c71eee0b3ef29c5edaefc 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -1962,7 +1962,7 @@ fn run_agent_thread_view_test( cx: &mut VisualTestAppContext, update_baseline: bool, ) -> Result { - use agent::AgentTool; + use agent::{AgentTool, ToolInput}; use agent_ui::AgentPanel; // Create a temporary directory with the test image @@ -2047,7 +2047,10 @@ fn run_agent_thread_view_test( start_line: None, end_line: None, }; - let run_task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + let run_task = cx.update(|cx| { + tool.clone() + .run(ToolInput::resolved(input), event_stream, cx) + }); cx.background_executor.allow_parking(); let run_result = cx.foreground_executor.block_test(run_task); From f786e045f4a8b7c9a4dce364b61673913cdaeb7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Wed, 25 Feb 2026 17:48:20 +0100 Subject: [PATCH 083/548] Notify after populating MCP server IDs (#50089) --- crates/project/src/context_server_store.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index e4cac4768d48db8aecf0b4499cce070c2c2c914c..2f67077a2c49014c63e7ae4927f5613351b3a4f4 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -855,6 +855,7 @@ impl ContextServerStore { this.update(cx, |this, cx| { this.populate_server_ids(cx); + cx.notify(); 
this.update_servers_task.take(); if this.needs_server_update { this.available_context_servers_changed(cx); From 21fdf703e8c86d03ad620094c978d74fc75f20ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Wed, 25 Feb 2026 17:48:54 +0100 Subject: [PATCH 084/548] Fix "add custom context server" modal hanging indefinitely (#50085) --- .../configure_context_server_modal.rs | 23 ++++++++--- crates/project/src/context_server_store.rs | 41 +++++++++++++++---- 2 files changed, 50 insertions(+), 14 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs index 78c032a565522a7eac145add3f65568d559ceb24..38805f2c26693f168c7273afddf5aceea44f83e3 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs @@ -877,9 +877,14 @@ fn wait_for_context_server( context_server_id: ContextServerId, cx: &mut App, ) -> Task>> { + use std::time::Duration; + + const WAIT_TIMEOUT: Duration = Duration::from_secs(120); + let (tx, rx) = futures::channel::oneshot::channel(); let tx = Arc::new(Mutex::new(Some(tx))); + let context_server_id_for_timeout = context_server_id.clone(); let subscription = cx.subscribe(context_server_store, move |_, event, _cx| { let project::context_server_store::ServerStatusChangedEvent { server_id, status } = event; @@ -909,12 +914,20 @@ fn wait_for_context_server( } }); - cx.spawn(async move |_cx| { - let result = rx - .await - .map_err(|_| Arc::from("Context server store was dropped"))?; + cx.spawn(async move |cx| { + let timeout = cx.background_executor().timer(WAIT_TIMEOUT); + let result = futures::future::select(rx, timeout).await; drop(subscription); - result + match result { + futures::future::Either::Left((Ok(inner), _)) => inner, + futures::future::Either::Left((Err(_), _)) => { + 
Err(Arc::from("Context server store was dropped")) + } + futures::future::Either::Right(_) => Err(Arc::from(format!( + "Timed out waiting for context server `{}` to start. Check the Zed log for details.", + context_server_id_for_timeout + ))), + } }) } diff --git a/crates/project/src/context_server_store.rs b/crates/project/src/context_server_store.rs index 2f67077a2c49014c63e7ae4927f5613351b3a4f4..88dc64fcbe8795ae4826dcaa2813744f525b9258 100644 --- a/crates/project/src/context_server_store.rs +++ b/crates/project/src/context_server_store.rs @@ -8,7 +8,7 @@ use std::time::Duration; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; use context_server::{ContextServer, ContextServerCommand, ContextServerId}; -use futures::{FutureExt as _, future::join_all}; +use futures::{FutureExt as _, future::Either, future::join_all}; use gpui::{App, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity, actions}; use itertools::Itertools; use registry::ContextServerDescriptorRegistry; @@ -141,6 +141,8 @@ impl ContextServerConfiguration { worktree_store: Entity, cx: &AsyncApp, ) -> Option { + const EXTENSION_COMMAND_TIMEOUT: Duration = Duration::from_secs(30); + match settings { ContextServerSettings::Stdio { enabled: _, @@ -155,18 +157,27 @@ impl ContextServerConfiguration { let descriptor = cx.update(|cx| registry.read(cx).context_server_descriptor(&id.0))?; - match descriptor.command(worktree_store, cx).await { - Ok(command) => Some(ContextServerConfiguration::Extension { + let command_future = descriptor.command(worktree_store, cx); + let timeout_future = cx.background_executor().timer(EXTENSION_COMMAND_TIMEOUT); + + match futures::future::select(command_future, timeout_future).await { + Either::Left((Ok(command), _)) => Some(ContextServerConfiguration::Extension { command, settings, remote, }), - Err(e) => { + Either::Left((Err(e), _)) => { log::error!( "Failed to create context server configuration from settings: {e:#}" ); None } + 
Either::Right(_) => { + log::error!( + "Timed out resolving command for extension context server {id}" + ); + None + } } } ContextServerSettings::Http { @@ -960,11 +971,23 @@ impl ContextServerStore { })??; for (id, config) in servers_to_start { - let (server, config) = - Self::create_context_server(this.clone(), id, config, cx).await?; - this.update(cx, |this, cx| { - this.run_server(server, config, cx); - })?; + match Self::create_context_server(this.clone(), id.clone(), config, cx).await { + Ok((server, config)) => { + this.update(cx, |this, cx| { + this.run_server(server, config, cx); + })?; + } + Err(err) => { + log::error!("{id} context server failed to create: {err:#}"); + this.update(cx, |_this, cx| { + cx.emit(ServerStatusChangedEvent { + server_id: id, + status: ContextServerStatus::Error(err.to_string().into()), + }); + cx.notify(); + })?; + } + } } Ok(()) From c40cc0cd6ef0ee3a392325081684a7e2776fe8e0 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Wed, 25 Feb 2026 18:09:07 +0100 Subject: [PATCH 085/548] extension_ci: Ensure version bump does not happen too often (#50108) Sigh.... 
Release Notes: - N/A --- extensions/workflows/shared/bump_version.yml | 2 +- tooling/xtask/src/tasks/workflows/extensions/bump_version.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/extensions/workflows/shared/bump_version.yml b/extensions/workflows/shared/bump_version.yml index bbf7e9b11ca02d15cdee2c300d3a93caffe3f650..dbe92a43a5a3c7900f6d23fffd8ebd3eee9ca95f 100644 --- a/extensions/workflows/shared/bump_version.yml +++ b/extensions/workflows/shared/bump_version.yml @@ -52,7 +52,7 @@ jobs: app-secret: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} with: bump-type: ${{ needs.determine_bump_type.outputs.bump_type }} - force-bump: true + force-bump: ${{ github.event_name != 'push' }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}labels cancel-in-progress: true diff --git a/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs b/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs index d6fa479095b594707675e300ca3cda4514c544bf..2d82f1351f21645a77b1d13e158bd4142dbec069 100644 --- a/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs +++ b/tooling/xtask/src/tasks/workflows/extensions/bump_version.rs @@ -57,7 +57,7 @@ pub(crate) fn call_bump_version( .with( Input::default() .add("bump-type", bump_type.to_string()) - .add("force-bump", true), + .add("force-bump", "${{ github.event_name != 'push' }}"), ) .with_app_secrets(); From f4920f46513825f68e8942b1cb499bfab18dcb9d Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 25 Feb 2026 18:13:49 +0100 Subject: [PATCH 086/548] Fix Zed panicking on invalid ranges in semantic token deltas (#50106) Closes ZED-59J Release Notes: - Fixed Zed panicking on invalid ranges in semantic token deltas --- .../project/src/lsp_store/semantic_tokens.rs | 38 ++++++++++++++++++- 1 file changed, 36 insertions(+), 2 deletions(-) diff --git a/crates/project/src/lsp_store/semantic_tokens.rs 
b/crates/project/src/lsp_store/semantic_tokens.rs index 516fb75eaae13752c235d0ad42db460740529c4d..e71b05d47b0cf105429bf50648787fb1db2bad87 100644 --- a/crates/project/src/lsp_store/semantic_tokens.rs +++ b/crates/project/src/lsp_store/semantic_tokens.rs @@ -653,8 +653,8 @@ impl ServerSemanticTokens { pub(crate) fn apply(&mut self, edits: &[SemanticTokensEdit]) { for edit in edits { - let start = edit.start as usize; - let end = start + edit.delete_count as usize; + let start = (edit.start as usize).min(self.data.len()); + let end = (start + edit.delete_count as usize).min(self.data.len()); self.data.splice(start..end, edit.data.iter().copied()); } } @@ -1000,4 +1000,38 @@ mod tests { ] ); } + + #[test] + fn applies_out_of_bounds_delta_edit_without_panic() { + let mut tokens = ServerSemanticTokens::from_full(vec![2, 5, 3, 0, 3, 0, 5, 4, 1, 0], None); + + // start beyond data length + tokens.apply(&[SemanticTokensEdit { + start: 100, + delete_count: 5, + data: vec![1, 2, 3, 4, 5], + }]); + assert_eq!( + tokens.data, + vec![2, 5, 3, 0, 3, 0, 5, 4, 1, 0, 1, 2, 3, 4, 5] + ); + + // delete_count extends past data length + let mut tokens = ServerSemanticTokens::from_full(vec![2, 5, 3, 0, 3], None); + tokens.apply(&[SemanticTokensEdit { + start: 3, + delete_count: 100, + data: vec![9, 9], + }]); + assert_eq!(tokens.data, vec![2, 5, 3, 9, 9]); + + // empty data + let mut tokens = ServerSemanticTokens::from_full(Vec::new(), None); + tokens.apply(&[SemanticTokensEdit { + start: 0, + delete_count: 5, + data: vec![1, 2, 3, 4, 5], + }]); + assert_eq!(tokens.data, vec![1, 2, 3, 4, 5]); + } } From bbbe7239afa0eecaa3682395ebaac024428df91c Mon Sep 17 00:00:00 2001 From: Bob Mannino Date: Wed, 25 Feb 2026 17:32:22 +0000 Subject: [PATCH 087/548] git: Add diff stats in git_panel (#49519) This PR adds the small UI change of `git diff --numstat` to the git panel so you can see the number of additions/deletions per file. 
There is an option in the settings UI for this under `git_panel`.`diff_stats`. This option is set to `false` by default. Screenshot 2026-02-18 at 21 25 02 Release Notes: - Added git diff stats to git panel entries --------- Co-authored-by: Danilo Leal Co-authored-by: Anthony Eid --- assets/settings/default.json | 4 + crates/fs/src/fake_git_repo.rs | 130 ++++++++++++++++++ crates/git/src/repository.rs | 56 ++++++++ crates/git/src/status.rs | 127 +++++++++++++++++ crates/git_ui/src/git_panel.rs | 114 ++++++++++++++- crates/git_ui/src/git_panel_settings.rs | 2 + crates/project/src/git_store.rs | 97 +++++++++++++ crates/proto/proto/git.proto | 23 ++++ crates/proto/proto/zed.proto | 4 +- crates/proto/src/proto.rs | 4 + .../remote_server/src/remote_editing_tests.rs | 124 +++++++++++++++++ .../settings_content/src/settings_content.rs | 5 + crates/settings_ui/src/page_data.rs | 20 ++- 13 files changed, 706 insertions(+), 4 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 9dc077fb29458089e68061d5bd121ed9770108d7..f9f4fb417e4b0664170f9f6958966018bb48bc63 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -915,6 +915,10 @@ // Default: inherits editor scrollbar settings // "show": null }, + // Whether to show the addition/deletion change count next to each file in the Git panel. + // + // Default: false + "diff_stats": false, }, "message_editor": { // Whether to automatically replace emoji shortcodes with emoji characters. 
diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 6513d5a33b6eb96f7a69c5f96530f1d44a71c3ec..12cd67cdae1a250d07468047617c8cc7a52737fa 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -768,6 +768,136 @@ impl GitRepository for FakeGitRepository { unimplemented!() } + fn diff_stat( + &self, + diff_type: git::repository::DiffType, + ) -> BoxFuture<'_, Result>> { + fn count_lines(s: &str) -> u32 { + if s.is_empty() { + 0 + } else { + s.lines().count() as u32 + } + } + + match diff_type { + git::repository::DiffType::HeadToIndex => self + .with_state_async(false, |state| { + let mut result = HashMap::default(); + let all_paths: HashSet<&RepoPath> = state + .head_contents + .keys() + .chain(state.index_contents.keys()) + .collect(); + for path in all_paths { + let head = state.head_contents.get(path); + let index = state.index_contents.get(path); + match (head, index) { + (Some(old), Some(new)) if old != new => { + result.insert( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: count_lines(old), + }, + ); + } + (Some(old), None) => { + result.insert( + path.clone(), + git::status::DiffStat { + added: 0, + deleted: count_lines(old), + }, + ); + } + (None, Some(new)) => { + result.insert( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: 0, + }, + ); + } + _ => {} + } + } + Ok(result) + }) + .boxed(), + git::repository::DiffType::HeadToWorktree => { + let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf(); + let worktree_files: HashMap = self + .fs + .files() + .iter() + .filter_map(|path| { + let repo_path = path.strip_prefix(&workdir_path).ok()?; + if repo_path.starts_with(".git") { + return None; + } + let content = self + .fs + .read_file_sync(path) + .ok() + .and_then(|bytes| String::from_utf8(bytes).ok())?; + let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?; + Some((RepoPath::from_rel_path(&repo_path), content)) 
+ }) + .collect(); + + self.with_state_async(false, move |state| { + let mut result = HashMap::default(); + let all_paths: HashSet<&RepoPath> = state + .head_contents + .keys() + .chain(worktree_files.keys()) + .collect(); + for path in all_paths { + let head = state.head_contents.get(path); + let worktree = worktree_files.get(path); + match (head, worktree) { + (Some(old), Some(new)) if old != new => { + result.insert( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: count_lines(old), + }, + ); + } + (Some(old), None) => { + result.insert( + path.clone(), + git::status::DiffStat { + added: 0, + deleted: count_lines(old), + }, + ); + } + (None, Some(new)) => { + result.insert( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: 0, + }, + ); + } + _ => {} + } + } + Ok(result) + }) + .boxed() + } + git::repository::DiffType::MergeBase { .. } => { + future::ready(Ok(HashMap::default())).boxed() + } + } + } + fn checkpoint(&self) -> BoxFuture<'static, Result> { let executor = self.executor.clone(); let fs = self.fs.clone(); diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index ab445a1cd830a726491fab1fc6209686e80960b1..1925e84735a8020c7e1896f3cf2e7ee20ae3f712 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -898,6 +898,11 @@ pub trait GitRepository: Send + Sync { /// Run git diff fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result>; + fn diff_stat( + &self, + diff: DiffType, + ) -> BoxFuture<'_, Result>>; + /// Creates a checkpoint for the repository. 
fn checkpoint(&self) -> BoxFuture<'static, Result>; @@ -2031,6 +2036,57 @@ impl GitRepository for RealGitRepository { .boxed() } + fn diff_stat( + &self, + diff: DiffType, + ) -> BoxFuture<'_, Result>> { + let working_directory = self.working_directory(); + let git_binary_path = self.any_git_binary_path.clone(); + self.executor + .spawn(async move { + let working_directory = working_directory?; + let output = match diff { + DiffType::HeadToIndex => { + new_command(&git_binary_path) + .current_dir(&working_directory) + .args(["diff", "--numstat", "--staged"]) + .output() + .await? + } + DiffType::HeadToWorktree => { + new_command(&git_binary_path) + .current_dir(&working_directory) + .args(["diff", "--numstat"]) + .output() + .await? + } + DiffType::MergeBase { base_ref } => { + new_command(&git_binary_path) + .current_dir(&working_directory) + .args([ + "diff", + "--numstat", + "--merge-base", + base_ref.as_ref(), + "HEAD", + ]) + .output() + .await? + } + }; + + anyhow::ensure!( + output.status.success(), + "Failed to run git diff --numstat:\n{}", + String::from_utf8_lossy(&output.stderr) + ); + Ok(crate::status::parse_numstat(&String::from_utf8_lossy( + &output.stdout, + ))) + }) + .boxed() + } + fn stage_paths( &self, paths: Vec, diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index be8b0a3a588b40638a895d610cc4b5735d4ae51d..b20919e7ecf4748d0035a003ed5eadebae752dd7 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -580,6 +580,45 @@ impl FromStr for TreeDiff { } } +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] +pub struct DiffStat { + pub added: u32, + pub deleted: u32, +} + +/// Parses the output of `git diff --numstat` where output looks like: +/// +/// ```text +/// 24 12 dir/file.txt +/// ``` +pub fn parse_numstat(output: &str) -> HashMap { + let mut stats = HashMap::default(); + for line in output.lines() { + let line = line.trim(); + if line.is_empty() { + continue; + } + let mut parts = line.splitn(3, '\t'); + 
let (Some(added_str), Some(deleted_str), Some(path_str)) = + (parts.next(), parts.next(), parts.next()) + else { + continue; + }; + let Ok(added) = added_str.parse::() else { + continue; + }; + let Ok(deleted) = deleted_str.parse::() else { + continue; + }; + let Ok(path) = RepoPath::new(path_str) else { + continue; + }; + let stat = DiffStat { added, deleted }; + stats.insert(path, stat); + } + stats +} + #[cfg(test)] mod tests { @@ -588,6 +627,94 @@ mod tests { status::{FileStatus, GitStatus, TreeDiff, TreeDiffStatus}, }; + use super::{DiffStat, parse_numstat}; + + #[test] + fn test_parse_numstat_normal() { + let input = "10\t5\tsrc/main.rs\n3\t1\tREADME.md\n"; + let result = parse_numstat(input); + assert_eq!(result.len(), 2); + assert_eq!( + result.get(&RepoPath::new("src/main.rs").unwrap()), + Some(&DiffStat { + added: 10, + deleted: 5 + }) + ); + assert_eq!( + result.get(&RepoPath::new("README.md").unwrap()), + Some(&DiffStat { + added: 3, + deleted: 1 + }) + ); + } + + #[test] + fn test_parse_numstat_binary_files_skipped() { + // git diff --numstat outputs "-\t-\tpath" for binary files + let input = "-\t-\timage.png\n5\t2\tsrc/lib.rs\n"; + let result = parse_numstat(input); + assert_eq!(result.len(), 1); + assert!(!result.contains_key(&RepoPath::new("image.png").unwrap())); + assert_eq!( + result.get(&RepoPath::new("src/lib.rs").unwrap()), + Some(&DiffStat { + added: 5, + deleted: 2 + }) + ); + } + + #[test] + fn test_parse_numstat_empty_input() { + assert!(parse_numstat("").is_empty()); + assert!(parse_numstat("\n\n").is_empty()); + assert!(parse_numstat(" \n \n").is_empty()); + } + + #[test] + fn test_parse_numstat_malformed_lines_skipped() { + let input = "not_a_number\t5\tfile.rs\n10\t5\tvalid.rs\n"; + let result = parse_numstat(input); + assert_eq!(result.len(), 1); + assert_eq!( + result.get(&RepoPath::new("valid.rs").unwrap()), + Some(&DiffStat { + added: 10, + deleted: 5 + }) + ); + } + + #[test] + fn test_parse_numstat_incomplete_lines_skipped() { + 
// Lines with fewer than 3 tab-separated fields are skipped + let input = "10\t5\n7\t3\tok.rs\n"; + let result = parse_numstat(input); + assert_eq!(result.len(), 1); + assert_eq!( + result.get(&RepoPath::new("ok.rs").unwrap()), + Some(&DiffStat { + added: 7, + deleted: 3 + }) + ); + } + + #[test] + fn test_parse_numstat_zero_stats() { + let input = "0\t0\tunchanged_but_present.rs\n"; + let result = parse_numstat(input); + assert_eq!( + result.get(&RepoPath::new("unchanged_but_present.rs").unwrap()), + Some(&DiffStat { + added: 0, + deleted: 0 + }) + ); + } + #[test] fn test_duplicate_untracked_entries() { // Regression test for ZED-2XA: git can produce duplicate untracked entries diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index b86fa0196ae786db7a981427628295c4f9d81061..1c8c09d7fdeaa51b8780f29aa13028355864924f 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -28,7 +28,7 @@ use git::repository::{ UpstreamTrackingStatus, get_git_committer, }; use git::stash::GitStash; -use git::status::StageStatus; +use git::status::{DiffStat, StageStatus}; use git::{Amend, Signoff, ToggleStaged, repository::RepoPath, status::FileStatus}; use git::{ ExpandCommitEditor, GitHostingProviderRegistry, RestoreTrackedFiles, StageAll, StashAll, @@ -41,7 +41,7 @@ use gpui::{ WeakEntity, actions, anchored, deferred, point, size, uniform_list, }; use itertools::Itertools; -use language::{Buffer, File}; +use language::{Buffer, BufferEvent, File}; use language_model::{ ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, }; @@ -51,6 +51,7 @@ use notifications::status_toast::{StatusToast, ToastIcon}; use panel::{PanelHeader, panel_button, panel_filled_button, panel_icon_button}; use project::{ Fs, Project, ProjectPath, + buffer_store::BufferStoreEvent, git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId, pending_op}, project_settings::{GitPathStyle, ProjectSettings}, }; 
@@ -651,6 +652,8 @@ pub struct GitPanel { local_committer_task: Option>, bulk_staging: Option, stash_entries: GitStash, + diff_stats: HashMap, + diff_stats_task: Task<()>, _settings_subscription: Subscription, } @@ -711,9 +714,11 @@ impl GitPanel { let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; let mut was_tree_view = GitPanelSettings::get_global(cx).tree_view; + let mut was_diff_stats = GitPanelSettings::get_global(cx).diff_stats; cx.observe_global_in::(window, move |this, window, cx| { let sort_by_path = GitPanelSettings::get_global(cx).sort_by_path; let tree_view = GitPanelSettings::get_global(cx).tree_view; + let diff_stats = GitPanelSettings::get_global(cx).diff_stats; if tree_view != was_tree_view { this.view_mode = GitPanelViewMode::from_settings(cx); } @@ -721,8 +726,18 @@ impl GitPanel { this.bulk_staging.take(); this.update_visible_entries(window, cx); } + if diff_stats != was_diff_stats { + if diff_stats { + this.fetch_diff_stats(cx); + } else { + this.diff_stats.clear(); + this.diff_stats_task = Task::ready(()); + cx.notify(); + } + } was_sort_by_path = sort_by_path; was_tree_view = tree_view; + was_diff_stats = diff_stats; }) .detach(); @@ -777,6 +792,33 @@ impl GitPanel { ) .detach(); + let buffer_store = project.read(cx).buffer_store().clone(); + + for buffer in project.read(cx).opened_buffers(cx) { + cx.subscribe(&buffer, |this, _buffer, event, cx| { + if matches!(event, BufferEvent::Saved) { + if GitPanelSettings::get_global(cx).diff_stats { + this.fetch_diff_stats(cx); + } + } + }) + .detach(); + } + + cx.subscribe(&buffer_store, |_this, _store, event, cx| { + if let BufferStoreEvent::BufferAdded(buffer) = event { + cx.subscribe(buffer, |this, _buffer, event, cx| { + if matches!(event, BufferEvent::Saved) { + if GitPanelSettings::get_global(cx).diff_stats { + this.fetch_diff_stats(cx); + } + } + }) + .detach(); + } + }) + .detach(); + let mut this = Self { active_repository, commit_editor, @@ -817,6 +859,8 @@ impl 
GitPanel { entry_count: 0, bulk_staging: None, stash_entries: Default::default(), + diff_stats: HashMap::default(), + diff_stats_task: Task::ready(()), _settings_subscription, }; @@ -3699,9 +3743,60 @@ impl GitPanel { editor.set_placeholder_text(&placeholder_text, window, cx) }); + if GitPanelSettings::get_global(cx).diff_stats { + self.fetch_diff_stats(cx); + } + cx.notify(); } + fn fetch_diff_stats(&mut self, cx: &mut Context) { + let Some(repo) = self.active_repository.clone() else { + self.diff_stats.clear(); + return; + }; + + let unstaged_rx = repo.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToWorktree, cx)); + let staged_rx = repo.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToIndex, cx)); + + self.diff_stats_task = cx.spawn(async move |this, cx| { + let (unstaged_result, staged_result) = + futures::future::join(unstaged_rx, staged_rx).await; + + let mut combined = match unstaged_result { + Ok(Ok(stats)) => stats, + Ok(Err(err)) => { + log::warn!("Failed to fetch unstaged diff stats: {err:?}"); + HashMap::default() + } + Err(_) => HashMap::default(), + }; + + let staged = match staged_result { + Ok(Ok(stats)) => Some(stats), + Ok(Err(err)) => { + log::warn!("Failed to fetch staged diff stats: {err:?}"); + None + } + Err(_) => None, + }; + + if let Some(staged) = staged { + for (path, stat) in staged { + let entry = combined.entry(path).or_default(); + entry.added += stat.added; + entry.deleted += stat.deleted; + } + } + + this.update(cx, |this, cx| { + this.diff_stats = combined; + cx.notify(); + }) + .ok(); + }); + } + fn header_state(&self, header_type: Section) -> ToggleState { let (staged_count, count) = match header_type { Section::New => (self.new_staged_count, self.new_count), @@ -5113,6 +5208,8 @@ impl GitPanel { } }); + let id_for_diff_stat = id.clone(); + h_flex() .id(id) .h(self.list_item_height()) @@ -5129,6 +5226,19 @@ impl GitPanel { .hover(|s| s.bg(hover_bg)) .active(|s| s.bg(active_bg)) .child(name_row) + 
.when(GitPanelSettings::get_global(cx).diff_stats, |el| { + el.when_some( + self.diff_stats.get(&entry.repo_path).copied(), + move |this, stat| { + let id = format!("diff-stat-{}", id_for_diff_stat); + this.child(ui::DiffStat::new( + id, + stat.added as usize, + stat.deleted as usize, + )) + }, + ) + }) .child( div() .id(checkbox_wrapper_id) diff --git a/crates/git_ui/src/git_panel_settings.rs b/crates/git_ui/src/git_panel_settings.rs index 6b5334e55544b465864fe3afb780c4673bb5961e..2a7480de355a6190494211d823e4aa440d191371 100644 --- a/crates/git_ui/src/git_panel_settings.rs +++ b/crates/git_ui/src/git_panel_settings.rs @@ -25,6 +25,7 @@ pub struct GitPanelSettings { pub sort_by_path: bool, pub collapse_untracked_diff: bool, pub tree_view: bool, + pub diff_stats: bool, } impl ScrollbarVisibility for GitPanelSettings { @@ -58,6 +59,7 @@ impl Settings for GitPanelSettings { sort_by_path: git_panel.sort_by_path.unwrap(), collapse_untracked_diff: git_panel.collapse_untracked_diff.unwrap(), tree_view: git_panel.tree_view.unwrap(), + diff_stats: git_panel.diff_stats.unwrap(), } } } diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 1272a689b908413fff5eef71cf5e0e98fd72429b..67bc21c94227e8f53356ef1b7f626ff922326d29 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -529,6 +529,7 @@ impl GitStore { client.add_entity_request_handler(Self::handle_askpass); client.add_entity_request_handler(Self::handle_check_for_pushed_commits); client.add_entity_request_handler(Self::handle_git_diff); + client.add_entity_request_handler(Self::handle_git_diff_stat); client.add_entity_request_handler(Self::handle_tree_diff); client.add_entity_request_handler(Self::handle_get_blob_content); client.add_entity_request_handler(Self::handle_open_unstaged_diff); @@ -2684,6 +2685,45 @@ impl GitStore { Ok(proto::GitDiffResponse { diff }) } + async fn handle_git_diff_stat( + this: Entity, + envelope: TypedEnvelope, + mut cx: AsyncApp, 
+ ) -> Result { + let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); + let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + let diff_type = match envelope.payload.diff_type() { + proto::git_diff_stat::DiffType::HeadToIndex => DiffType::HeadToIndex, + proto::git_diff_stat::DiffType::HeadToWorktree => DiffType::HeadToWorktree, + proto::git_diff_stat::DiffType::MergeBase => { + let base_ref = envelope + .payload + .merge_base_ref + .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?; + DiffType::MergeBase { + base_ref: base_ref.into(), + } + } + }; + + let stats = repository_handle + .update(&mut cx, |repository_handle, cx| { + repository_handle.diff_stat(diff_type, cx) + }) + .await??; + + let entries = stats + .into_iter() + .map(|(path, stat)| proto::GitDiffStatEntry { + path: path.to_proto(), + added: stat.added, + deleted: stat.deleted, + }) + .collect(); + + Ok(proto::GitDiffStatResponse { entries }) + } + async fn handle_tree_diff( this: Entity, request: TypedEnvelope, @@ -5690,6 +5730,63 @@ impl Repository { }) } + /// Fetches per-line diff statistics (additions/deletions) via `git diff --numstat`. + pub fn diff_stat( + &mut self, + diff_type: DiffType, + _cx: &App, + ) -> oneshot::Receiver< + Result>, + > { + let id = self.id; + self.send_job(None, move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. 
}) => { + backend.diff_stat(diff_type).await + } + RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { + let (proto_diff_type, merge_base_ref) = match &diff_type { + DiffType::HeadToIndex => { + (proto::git_diff_stat::DiffType::HeadToIndex.into(), None) + } + DiffType::HeadToWorktree => { + (proto::git_diff_stat::DiffType::HeadToWorktree.into(), None) + } + DiffType::MergeBase { base_ref } => ( + proto::git_diff_stat::DiffType::MergeBase.into(), + Some(base_ref.to_string()), + ), + }; + let response = client + .request(proto::GitDiffStat { + project_id: project_id.0, + repository_id: id.to_proto(), + diff_type: proto_diff_type, + merge_base_ref, + }) + .await?; + + let stats = response + .entries + .into_iter() + .filter_map(|entry| { + let path = RepoPath::from_proto(&entry.path).log_err()?; + Some(( + path, + git::status::DiffStat { + added: entry.added, + deleted: entry.deleted, + }, + )) + }) + .collect(); + + Ok(stats) + } + } + }) + } + pub fn create_branch( &mut self, branch_name: String, diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index 994d319913c6d84c2e639ccd78bade4547449a7a..facaf43fd5ae3e7ff655f0b4006dc1661d503e10 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -229,6 +229,29 @@ message GitDiffResponse { string diff = 1; } +message GitDiffStat { + uint64 project_id = 1; + uint64 repository_id = 2; + DiffType diff_type = 3; + optional string merge_base_ref = 4; + + enum DiffType { + HEAD_TO_WORKTREE = 0; + HEAD_TO_INDEX = 1; + MERGE_BASE = 2; + } +} + +message GitDiffStatResponse { + repeated GitDiffStatEntry entries = 1; +} + +message GitDiffStatEntry { + string path = 1; + uint32 added = 2; + uint32 deleted = 3; +} + message GitInit { uint64 project_id = 1; string abs_path = 2; diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index 387ed25027230c7e407983ff5c098ae24bbecc9e..fa55e1f27330fb5fee88fb19296f607b1bf9f3a6 100644 --- 
a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -476,7 +476,9 @@ message Envelope { SpawnKernel spawn_kernel = 426; SpawnKernelResponse spawn_kernel_response = 427; - KillKernel kill_kernel = 428; // current max + KillKernel kill_kernel = 428; + GitDiffStat git_diff_stat = 429; + GitDiffStatResponse git_diff_stat_response = 430; // current max } reserved 87 to 88; diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index dd0a77beb29345021563b21bafd261d02b87e1ab..3d30551557000c305a82b328828b566c9d78f75e 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -322,6 +322,8 @@ messages!( (CheckForPushedCommitsResponse, Background), (GitDiff, Background), (GitDiffResponse, Background), + (GitDiffStat, Background), + (GitDiffStatResponse, Background), (GitInit, Background), (GetDebugAdapterBinary, Background), (DebugAdapterBinary, Background), @@ -539,6 +541,7 @@ request_messages!( (GitRenameBranch, Ack), (CheckForPushedCommits, CheckForPushedCommitsResponse), (GitDiff, GitDiffResponse), + (GitDiffStat, GitDiffStatResponse), (GitInit, Ack), (ToggleBreakpoint, Ack), (GetDebugAdapterBinary, DebugAdapterBinary), @@ -727,6 +730,7 @@ entity_messages!( GitRemoveRemote, CheckForPushedCommits, GitDiff, + GitDiffStat, GitInit, BreakpointsForFile, ToggleBreakpoint, diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index f15382b67557fa9a9b0eda2a9d4438aa33c7cff3..b3fe30a472c2d098bc6fb9b2a4e276be8867e94b 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -8,6 +8,7 @@ use agent::{ use client::{Client, UserStore}; use clock::FakeSystemClock; use collections::{HashMap, HashSet}; +use git::repository::DiffType; use language_model::{LanguageModelToolResultContent, fake_provider::FakeLanguageModel}; use prompt_store::ProjectContext; @@ -1919,6 +1920,129 @@ async fn test_remote_git_branches(cx: &mut 
TestAppContext, server_cx: &mut TestA assert_eq!(server_branch.name(), "totally-new-branch"); } +#[gpui::test] +async fn test_remote_git_diff_stat(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { + let fs = FakeFs::new(server_cx.executor()); + fs.insert_tree( + path!("/code"), + json!({ + "project1": { + ".git": {}, + "src": { + "lib.rs": "line1\nline2\nline3\n", + "new_file.rs": "added1\nadded2\n", + }, + "README.md": "# project 1", + }, + }), + ) + .await; + + let dot_git = Path::new(path!("/code/project1/.git")); + + // HEAD: lib.rs (2 lines), deleted.rs (1 line) + fs.set_head_for_repo( + dot_git, + &[ + ("src/lib.rs", "line1\nold_line2\n".into()), + ("src/deleted.rs", "was_here\n".into()), + ], + "deadbeef", + ); + // Index: lib.rs modified (4 lines), staged_only.rs new (2 lines) + fs.set_index_for_repo( + dot_git, + &[ + ("src/lib.rs", "line1\nold_line2\nline3\nline4\n".into()), + ("src/staged_only.rs", "x\ny\n".into()), + ], + ); + + let (project, _headless) = init_test(&fs, cx, server_cx).await; + let (_worktree, _) = project + .update(cx, |project, cx| { + project.find_or_create_worktree(path!("/code/project1"), true, cx) + }) + .await + .unwrap(); + cx.run_until_parked(); + + let repo_path = |s: &str| git::repository::RepoPath::new(s).unwrap(); + + let repository = project.update(cx, |project, cx| project.active_repository(cx).unwrap()); + + // --- HeadToWorktree --- + let stats = cx + .update(|cx| repository.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToWorktree, cx))) + .await + .unwrap() + .unwrap(); + + // src/lib.rs: worktree 3 lines vs HEAD 2 lines + let stat = stats.get(&repo_path("src/lib.rs")).expect("src/lib.rs"); + assert_eq!((stat.added, stat.deleted), (3, 2)); + + // src/new_file.rs: only in worktree (2 lines) + let stat = stats + .get(&repo_path("src/new_file.rs")) + .expect("src/new_file.rs"); + assert_eq!((stat.added, stat.deleted), (2, 0)); + + // src/deleted.rs: only in HEAD (1 line) + let stat = stats + 
.get(&repo_path("src/deleted.rs")) + .expect("src/deleted.rs"); + assert_eq!((stat.added, stat.deleted), (0, 1)); + + // README.md: only in worktree (1 line) + let stat = stats.get(&repo_path("README.md")).expect("README.md"); + assert_eq!((stat.added, stat.deleted), (1, 0)); + + // --- HeadToIndex --- + let stats = cx + .update(|cx| repository.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToIndex, cx))) + .await + .unwrap() + .unwrap(); + + // src/lib.rs: index 4 lines vs HEAD 2 lines + let stat = stats.get(&repo_path("src/lib.rs")).expect("src/lib.rs"); + assert_eq!((stat.added, stat.deleted), (4, 2)); + + // src/staged_only.rs: only in index (2 lines) + let stat = stats + .get(&repo_path("src/staged_only.rs")) + .expect("src/staged_only.rs"); + assert_eq!((stat.added, stat.deleted), (2, 0)); + + // src/deleted.rs: in HEAD but not in index + let stat = stats + .get(&repo_path("src/deleted.rs")) + .expect("src/deleted.rs"); + assert_eq!((stat.added, stat.deleted), (0, 1)); + + // --- MergeBase (not implemented in FakeGitRepository) --- + let stats = cx + .update(|cx| { + repository.update(cx, |repo, cx| { + repo.diff_stat( + DiffType::MergeBase { + base_ref: "main".into(), + }, + cx, + ) + }) + }) + .await + .unwrap() + .unwrap(); + + assert!( + stats.is_empty(), + "MergeBase diff_stat should return empty from FakeGitRepository" + ); +} + #[gpui::test] async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); diff --git a/crates/settings_content/src/settings_content.rs b/crates/settings_content/src/settings_content.rs index 788917b5ebb0fc0f4ba29e29fc95b0da148c6f0f..8c4845e05cbf16d0aacb089a5d16dcdb0ff6d7c7 100644 --- a/crates/settings_content/src/settings_content.rs +++ b/crates/settings_content/src/settings_content.rs @@ -619,6 +619,11 @@ pub struct GitPanelSettingsContent { /// /// Default: false pub tree_view: Option, + + /// Whether to show the addition/deletion change 
count next to each file in the Git panel. + /// + /// Default: false + pub diff_stats: Option, } #[derive( diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 40bc8705920e5d30d69a22cf8967a8931181db9b..5b3f5480148c30ef89bcae29b23986eac29808d9 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -5039,7 +5039,7 @@ fn panels_page() -> SettingsPage { ] } - fn git_panel_section() -> [SettingsPageItem; 10] { + fn git_panel_section() -> [SettingsPageItem; 11] { [ SettingsPageItem::SectionHeader("Git Panel"), SettingsPageItem::SettingItem(SettingItem { @@ -5181,6 +5181,24 @@ fn panels_page() -> SettingsPage { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Diff Stats", + description: "Whether to show the addition/deletion change count next to each file in the Git panel.", + field: Box::new(SettingField { + json_path: Some("git_panel.diff_stats"), + pick: |settings_content| { + settings_content.git_panel.as_ref()?.diff_stats.as_ref() + }, + write: |settings_content, value| { + settings_content + .git_panel + .get_or_insert_default() + .diff_stats = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Scroll Bar", description: "How and when the scrollbar should be displayed.", From 1b2c1b48649aa60341cb174b60311058ae712c5e Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Wed, 25 Feb 2026 09:33:05 -0800 Subject: [PATCH 088/548] Fix a bug where closing the workspace could skip the dirty check for other workspaces (#50105) Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- 
.../tests/integration/following_tests.rs | 10 +- crates/workspace/src/multi_workspace.rs | 42 +++++++- crates/workspace/src/workspace.rs | 98 ++++++++++++++++--- crates/zed/src/zed.rs | 24 ++--- 4 files changed, 143 insertions(+), 31 deletions(-) diff --git a/crates/collab/tests/integration/following_tests.rs b/crates/collab/tests/integration/following_tests.rs index b761bef9ec3be679d55d1c82e3cb5cce0ac7f14e..c4031788c87f747c3125f4dbc509d68ea3720b43 100644 --- a/crates/collab/tests/integration/following_tests.rs +++ b/crates/collab/tests/integration/following_tests.rs @@ -8,8 +8,8 @@ use collab_ui::{ }; use editor::{Editor, MultiBuffer, MultiBufferOffset, PathKey, SelectionEffects}; use gpui::{ - AppContext as _, BackgroundExecutor, BorrowAppContext, Entity, SharedString, TestAppContext, - VisualContext, VisualTestContext, point, + Action, AppContext as _, BackgroundExecutor, BorrowAppContext, Entity, SharedString, + TestAppContext, VisualContext, VisualTestContext, point, }; use language::Capability; use rpc::proto::PeerId; @@ -18,7 +18,7 @@ use settings::SettingsStore; use text::{Point, ToPoint}; use util::{path, rel_path::rel_path, test::sample_text}; use workspace::{ - CollaboratorId, MultiWorkspace, ParticipantLocation, SplitDirection, Workspace, + CloseWindow, CollaboratorId, MultiWorkspace, ParticipantLocation, SplitDirection, Workspace, item::ItemHandle as _, }; @@ -259,8 +259,8 @@ async fn test_basic_following( // Client C closes the project. let weak_workspace_c = workspace_c.downgrade(); - workspace_c.update_in(cx_c, |workspace, window, cx| { - workspace.close_window(&Default::default(), window, cx); + workspace_c.update_in(cx_c, |_, window, cx| { + window.dispatch_action(Box::new(CloseWindow) as Box, cx); }); executor.run_until_parked(); // are you sure you want to leave the call? 
diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index 567c24f5d73887289445fb8367bdd950097ba073..e5d529556be690298b57fbb864a7010729e8c170 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -14,8 +14,8 @@ use util::ResultExt; const SIDEBAR_RESIZE_HANDLE_SIZE: Pixels = px(6.0); use crate::{ - DockPosition, Item, ModalView, Panel, Toast, Workspace, WorkspaceId, client_side_decorations, - notifications::NotificationId, + CloseIntent, CloseWindow, DockPosition, Event as WorkspaceEvent, Item, ModalView, Panel, Toast, + Workspace, WorkspaceId, client_side_decorations, notifications::NotificationId, }; actions!( @@ -122,6 +122,7 @@ impl MultiWorkspace { } }); let quit_subscription = cx.on_app_quit(Self::app_will_quit); + Self::subscribe_to_workspace(&workspace, cx); Self { window_id: window.window_handle().window_id(), workspaces: vec![workspace], @@ -237,6 +238,41 @@ impl MultiWorkspace { cx.notify(); } + pub fn close_window(&mut self, _: &CloseWindow, window: &mut Window, cx: &mut Context) { + cx.spawn_in(window, async move |this, cx| { + let workspaces = this.update(cx, |multi_workspace, _cx| { + multi_workspace.workspaces().to_vec() + })?; + + for workspace in workspaces { + let should_continue = workspace + .update_in(cx, |workspace, window, cx| { + workspace.prepare_to_close(CloseIntent::CloseWindow, window, cx) + })? 
+ .await?; + if !should_continue { + return anyhow::Ok(()); + } + } + + cx.update(|window, _cx| { + window.remove_window(); + })?; + + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + + fn subscribe_to_workspace(workspace: &Entity, cx: &mut Context) { + cx.subscribe(workspace, |this, workspace, event, cx| { + if let WorkspaceEvent::Activate = event { + this.activate(workspace, cx); + } + }) + .detach(); + } + pub fn is_sidebar_open(&self) -> bool { self.sidebar_open } @@ -290,6 +326,7 @@ impl MultiWorkspace { workspace.set_workspace_sidebar_open(true, cx); }); } + Self::subscribe_to_workspace(&workspace, cx); self.workspaces.push(workspace); cx.notify(); self.workspaces.len() - 1 @@ -679,6 +716,7 @@ impl Render for MultiWorkspace { .key_context("Workspace") .relative() .size_full() + .on_action(cx.listener(Self::close_window)) .on_action( cx.listener(|this: &mut Self, _: &NewWorkspaceInWindow, window, cx| { this.create_workspace(window, cx); diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index cab4b4974f15f4b68da7c4a5abd0fca34a4af00c..f680007924f5061756a864f8c4330345a69403f4 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -1182,6 +1182,7 @@ pub enum Event { }, ZoomChanged, ModalOpened, + Activate, } #[derive(Debug, Clone)] @@ -2629,17 +2630,6 @@ impl Workspace { }); } - pub fn close_window(&mut self, _: &CloseWindow, window: &mut Window, cx: &mut Context) { - let prepare = self.prepare_to_close(CloseIntent::CloseWindow, window, cx); - cx.spawn_in(window, async move |_, cx| { - if prepare.await? 
{ - cx.update(|window, _cx| window.remove_window())?; - } - anyhow::Ok(()) - }) - .detach_and_log_err(cx) - } - pub fn move_focused_panel_to_next_position( &mut self, _: &MoveFocusedPanelToNextPosition, @@ -2717,6 +2707,7 @@ impl Workspace { .unwrap_or(false) { if close_intent == CloseIntent::CloseWindow { + this.update(cx, |_, cx| cx.emit(Event::Activate))?; let answer = cx.update(|window, cx| { window.prompt( PromptLevel::Warning, @@ -2905,6 +2896,10 @@ impl Workspace { futures::future::try_join_all(serialize_tasks).await?; + if !remaining_dirty_items.is_empty() { + workspace.update(cx, |_, cx| cx.emit(Event::Activate))?; + } + if remaining_dirty_items.len() > 1 { let answer = workspace.update_in(cx, |_, window, cx| { let detail = Pane::file_names_for_prompt( @@ -6354,7 +6349,6 @@ impl Workspace { .on_action(cx.listener(Self::send_keystrokes)) .on_action(cx.listener(Self::add_folder_to_project)) .on_action(cx.listener(Self::follow_next_collaborator)) - .on_action(cx.listener(Self::close_window)) .on_action(cx.listener(Self::activate_pane_at_index)) .on_action(cx.listener(Self::move_item_to_pane_at_index)) .on_action(cx.listener(Self::move_focused_panel_to_next_position)) @@ -10052,6 +10046,86 @@ mod tests { assert!(!task.await.unwrap()); } + #[gpui::test] + async fn test_multi_workspace_close_window_multiple_workspaces_cancel(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/root", json!({ "one": "" })).await; + + let project_a = Project::test(fs.clone(), ["root".as_ref()], cx).await; + let project_b = Project::test(fs, ["root".as_ref()], cx).await; + let multi_workspace_handle = + cx.add_window(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + + let workspace_a = multi_workspace_handle + .read_with(cx, |mw, _| mw.workspace().clone()) + .unwrap(); + + let workspace_b = multi_workspace_handle + .update(cx, |mw, window, cx| { + mw.test_add_workspace(project_b, window, cx) + }) + .unwrap(); 
+ + // Activate workspace A + multi_workspace_handle + .update(cx, |mw, window, cx| { + mw.activate_index(0, window, cx); + }) + .unwrap(); + + let cx = &mut VisualTestContext::from_window(multi_workspace_handle.into(), cx); + + // Workspace A has a clean item + let item_a = cx.new(TestItem::new); + workspace_a.update_in(cx, |w, window, cx| { + w.add_item_to_active_pane(Box::new(item_a.clone()), None, true, window, cx) + }); + + // Workspace B has a dirty item + let item_b = cx.new(|cx| TestItem::new(cx).with_dirty(true)); + workspace_b.update_in(cx, |w, window, cx| { + w.add_item_to_active_pane(Box::new(item_b.clone()), None, true, window, cx) + }); + + // Verify workspace A is active + multi_workspace_handle + .read_with(cx, |mw, _| { + assert_eq!(mw.active_workspace_index(), 0); + }) + .unwrap(); + + // Dispatch CloseWindow — workspace A will pass, workspace B will prompt + multi_workspace_handle + .update(cx, |mw, window, cx| { + mw.close_window(&CloseWindow, window, cx); + }) + .unwrap(); + cx.run_until_parked(); + + // Workspace B should now be active since it has dirty items that need attention + multi_workspace_handle + .read_with(cx, |mw, _| { + assert_eq!( + mw.active_workspace_index(), + 1, + "workspace B should be activated when it prompts" + ); + }) + .unwrap(); + + // User cancels the save prompt from workspace B + cx.simulate_prompt_answer("Cancel"); + cx.run_until_parked(); + + // Window should still exist because workspace B's close was cancelled + assert!( + multi_workspace_handle.update(cx, |_, _, _| ()).is_ok(), + "window should still exist after cancelling one workspace's close" + ); + } + #[gpui::test] async fn test_close_window_with_serializable_items(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 83d504ea8f1cfbb13b5f0ea97cea6508a04126aa..bbbce4986607aa5b64453e8bceb61375a49a7122 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -376,8 +376,19 @@ pub fn 
initialize_workspace( return; }; let multi_workspace_handle = cx.entity(); - let sidebar = cx.new(|cx| Sidebar::new(multi_workspace_handle, window, cx)); + let sidebar = cx.new(|cx| Sidebar::new(multi_workspace_handle.clone(), window, cx)); multi_workspace.register_sidebar(sidebar, window, cx); + + let multi_workspace_handle = multi_workspace_handle.downgrade(); + window.on_window_should_close(cx, move |window, cx| { + multi_workspace_handle + .update(cx, |multi_workspace, cx| { + // We'll handle closing asynchronously + multi_workspace.close_window(&CloseWindow, window, cx); + false + }) + .unwrap_or(true) + }); }) .detach(); @@ -485,17 +496,6 @@ pub fn initialize_workspace( status_bar.add_right_item(image_info, window, cx); }); - let handle = cx.entity().downgrade(); - window.on_window_should_close(cx, move |window, cx| { - handle - .update(cx, |workspace, cx| { - // We'll handle closing asynchronously - workspace.close_window(&CloseWindow, window, cx); - false - }) - .unwrap_or(true) - }); - initialize_panels(prompt_builder.clone(), window, cx); register_actions(app_state.clone(), workspace, window, cx); From 6acc1a33441e7c89b46d112480a41d3616538b65 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Wed, 25 Feb 2026 12:36:40 -0500 Subject: [PATCH 089/548] Remove dead AgentGitWorktreeInfo code (#50101) This code was part of a series of stacked diff PRs that became obsolete because we changed the UI design, so none of this code is necessary anymore. 
Release Notes: - N/A --- crates/agent/src/db.rs | 142 ++----------------------------- crates/agent/src/thread.rs | 13 +-- crates/agent/src/thread_store.rs | 1 - 3 files changed, 10 insertions(+), 146 deletions(-) diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index 14ec9bb9af92c2f9720af5714c7344b986f5f7b5..fa4b37dba3e789b499bfe5db4f0b76ccf12e5a09 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -23,17 +23,6 @@ pub type DbMessage = crate::Message; pub type DbSummary = crate::legacy_thread::DetailedSummaryState; pub type DbLanguageModel = crate::legacy_thread::SerializedLanguageModel; -/// Metadata about the git worktree associated with an agent thread. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AgentGitWorktreeInfo { - /// The branch name in the git worktree. - pub branch: String, - /// Absolute path to the git worktree on disk. - pub worktree_path: std::path::PathBuf, - /// The base branch/commit the worktree was created from. - pub base_ref: String, -} - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DbThreadMetadata { pub id: acp::SessionId, @@ -41,10 +30,6 @@ pub struct DbThreadMetadata { #[serde(alias = "summary")] pub title: SharedString, pub updated_at: DateTime, - /// Denormalized from `DbThread::git_worktree_info.branch` for efficient - /// listing without decompressing thread data. The blob is the source of - /// truth; this column is populated on save for query convenience. 
- pub worktree_branch: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -68,8 +53,6 @@ pub struct DbThread { pub imported: bool, #[serde(default)] pub subagent_context: Option, - #[serde(default)] - pub git_worktree_info: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -108,7 +91,6 @@ impl SharedThread { profile: None, imported: true, subagent_context: None, - git_worktree_info: None, } } @@ -283,7 +265,6 @@ impl DbThread { profile: thread.profile, imported: false, subagent_context: None, - git_worktree_info: None, }) } } @@ -388,13 +369,6 @@ impl ThreadsDatabase { s().ok(); } - if let Ok(mut s) = connection.exec(indoc! {" - ALTER TABLE threads ADD COLUMN worktree_branch TEXT - "}) - { - s().ok(); - } - let db = Self { executor, connection: Arc::new(Mutex::new(connection)), @@ -423,10 +397,6 @@ impl ThreadsDatabase { .subagent_context .as_ref() .map(|ctx| ctx.parent_thread_id.0.clone()); - let worktree_branch = thread - .git_worktree_info - .as_ref() - .map(|info| info.branch.clone()); let json_data = serde_json::to_string(&SerializedThread { thread, version: DbThread::VERSION, @@ -438,19 +408,11 @@ impl ThreadsDatabase { let data_type = DataType::Zstd; let data = compressed; - let mut insert = connection.exec_bound::<(Arc, Option>, Option, String, String, DataType, Vec)>(indoc! {" - INSERT OR REPLACE INTO threads (id, parent_id, worktree_branch, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?, ?, ?) + let mut insert = connection.exec_bound::<(Arc, Option>, String, String, DataType, Vec)>(indoc! {" + INSERT OR REPLACE INTO threads (id, parent_id, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?, ?) 
"})?; - insert(( - id.0, - parent_id, - worktree_branch, - title, - updated_at, - data_type, - data, - ))?; + insert((id.0, parent_id, title, updated_at, data_type, data))?; Ok(()) } @@ -462,20 +424,19 @@ impl ThreadsDatabase { let connection = connection.lock(); let mut select = connection - .select_bound::<(), (Arc, Option>, Option, String, String)>(indoc! {" - SELECT id, parent_id, worktree_branch, summary, updated_at FROM threads ORDER BY updated_at DESC + .select_bound::<(), (Arc, Option>, String, String)>(indoc! {" + SELECT id, parent_id, summary, updated_at FROM threads ORDER BY updated_at DESC "})?; let rows = select(())?; let mut threads = Vec::new(); - for (id, parent_id, worktree_branch, summary, updated_at) in rows { + for (id, parent_id, summary, updated_at) in rows { threads.push(DbThreadMetadata { id: acp::SessionId::new(id), parent_session_id: parent_id.map(acp::SessionId::new), title: summary.into(), updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc), - worktree_branch, }); } @@ -609,7 +570,6 @@ mod tests { profile: None, imported: false, subagent_context: None, - git_worktree_info: None, } } @@ -753,94 +713,4 @@ mod tests { "Regular threads should have no subagent_context" ); } - - #[gpui::test] - async fn test_git_worktree_info_roundtrip(cx: &mut TestAppContext) { - let database = ThreadsDatabase::new(cx.executor()).unwrap(); - - let thread_id = session_id("worktree-thread"); - let mut thread = make_thread( - "Worktree Thread", - Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap(), - ); - thread.git_worktree_info = Some(AgentGitWorktreeInfo { - branch: "zed/agent/a4Xiu".to_string(), - worktree_path: std::path::PathBuf::from("/repo/worktrees/zed/agent/a4Xiu"), - base_ref: "main".to_string(), - }); - - database - .save_thread(thread_id.clone(), thread) - .await - .unwrap(); - - let loaded = database - .load_thread(thread_id) - .await - .unwrap() - .expect("thread should exist"); - - let info = loaded - .git_worktree_info - 
.expect("git_worktree_info should be restored"); - assert_eq!(info.branch, "zed/agent/a4Xiu"); - assert_eq!( - info.worktree_path, - std::path::PathBuf::from("/repo/worktrees/zed/agent/a4Xiu") - ); - assert_eq!(info.base_ref, "main"); - } - - #[gpui::test] - async fn test_session_list_includes_worktree_meta(cx: &mut TestAppContext) { - let database = ThreadsDatabase::new(cx.executor()).unwrap(); - - // Save a thread with worktree info - let worktree_id = session_id("wt-thread"); - let mut worktree_thread = make_thread( - "With Worktree", - Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap(), - ); - worktree_thread.git_worktree_info = Some(AgentGitWorktreeInfo { - branch: "zed/agent/bR9kz".to_string(), - worktree_path: std::path::PathBuf::from("/repo/worktrees/zed/agent/bR9kz"), - base_ref: "develop".to_string(), - }); - - database - .save_thread(worktree_id.clone(), worktree_thread) - .await - .unwrap(); - - // Save a thread without worktree info - let plain_id = session_id("plain-thread"); - let plain_thread = make_thread( - "Without Worktree", - Utc.with_ymd_and_hms(2024, 6, 15, 11, 0, 0).unwrap(), - ); - - database - .save_thread(plain_id.clone(), plain_thread) - .await - .unwrap(); - - // List threads and verify worktree_branch is populated correctly - let threads = database.list_threads().await.unwrap(); - assert_eq!(threads.len(), 2); - - let wt_entry = threads - .iter() - .find(|t| t.id == worktree_id) - .expect("should find worktree thread"); - assert_eq!(wt_entry.worktree_branch.as_deref(), Some("zed/agent/bR9kz")); - - let plain_entry = threads - .iter() - .find(|t| t.id == plain_id) - .expect("should find plain thread"); - assert!( - plain_entry.worktree_branch.is_none(), - "plain thread should have no worktree_branch" - ); - } } diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index f9be3bfbeacfd137b06da7dc99eef7ae34422325..39fef567916eb7d4a7bf04db3a0455bead6eee2f 100644 --- a/crates/agent/src/thread.rs +++ 
b/crates/agent/src/thread.rs @@ -1,8 +1,8 @@ use crate::{ - AgentGitWorktreeInfo, ContextServerRegistry, CopyPathTool, CreateDirectoryTool, - DbLanguageModel, DbThread, DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, - FindPathTool, GrepTool, ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, - ReadFileTool, RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool, + ContextServerRegistry, CopyPathTool, CreateDirectoryTool, DbLanguageModel, DbThread, + DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, + ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool, + RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool, SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, WebSearchTool, decide_permission_from_settings, }; @@ -904,8 +904,6 @@ pub struct Thread { subagent_context: Option, /// Weak references to running subagent threads for cancellation propagation running_subagents: Vec>, - /// Git worktree info if this thread is running in an agent worktree. 
- git_worktree_info: Option, } impl Thread { @@ -996,7 +994,6 @@ impl Thread { imported: false, subagent_context: None, running_subagents: Vec::new(), - git_worktree_info: None, } } @@ -1221,7 +1218,6 @@ impl Thread { imported: db_thread.imported, subagent_context: db_thread.subagent_context, running_subagents: Vec::new(), - git_worktree_info: db_thread.git_worktree_info, } } @@ -1242,7 +1238,6 @@ impl Thread { profile: Some(self.profile_id.clone()), imported: self.imported, subagent_context: self.subagent_context.clone(), - git_worktree_info: self.git_worktree_info.clone(), }; cx.background_spawn(async move { diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index 8dd1ac36e8f6667ec5ecec2286d85ade2b12ee72..d5526b0953cb4342fcbf3b13a883385dfcf609ea 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -133,7 +133,6 @@ mod tests { profile: None, imported: false, subagent_context: None, - git_worktree_info: None, } } From 706faa973e71ecfeb82d5dcef844d6dc66a3d038 Mon Sep 17 00:00:00 2001 From: Efe <67526259+skyline69@users.noreply.github.com> Date: Wed, 25 Feb 2026 19:05:59 +0100 Subject: [PATCH 090/548] project_panel: Fix selection not updating for already-visible gitignored files (#49521) ## Summary - Keep auto-reveal behavior for ignored files unchanged (no implicit reveal). - When an ignored file is already visible in the project panel, mark it as selected on `ActiveEntryChanged`. - Add regression coverage for switching back to a visible gitignored file. ## Testing - `project_panel_tests::test_autoreveal_and_gitignored_files` - `project_panel_tests::test_gitignored_and_always_included` - `project_panel_tests::test_explicit_reveal` Closes #49515 Release Notes: - Fixed project panel not updating selection when switching to a gitignored file that was already visible. 
--- crates/project_panel/src/project_panel.rs | 28 ++++++--- .../project_panel/src/project_panel_tests.rs | 58 +++++++++++++++++++ 2 files changed, 79 insertions(+), 7 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index e11c04755e59b7d62ea16340d6ed23bdb36daf6d..7f746a6ccd7efec2b73354992c593433b0b6f281 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -6053,12 +6053,27 @@ impl ProjectPanel { .worktree_for_entry(entry_id, cx) .context("can't reveal a non-existent entry in the project panel")?; let worktree = worktree.read(cx); - if skip_ignored - && worktree - .entry_for_id(entry_id) - .is_none_or(|entry| entry.is_ignored && !entry.is_always_included) - { - anyhow::bail!("can't reveal an ignored entry in the project panel"); + let worktree_id = worktree.id(); + let is_ignored = worktree + .entry_for_id(entry_id) + .is_none_or(|entry| entry.is_ignored && !entry.is_always_included); + if skip_ignored && is_ignored { + if self.index_for_entry(entry_id, worktree_id).is_none() { + anyhow::bail!("can't reveal an ignored entry in the project panel"); + } + + self.selection = Some(SelectedEntry { + worktree_id, + entry_id, + }); + self.marked_entries.clear(); + self.marked_entries.push(SelectedEntry { + worktree_id, + entry_id, + }); + self.autoscroll(cx); + cx.notify(); + return Ok(()); } let is_active_item_file_diff_view = self .workspace @@ -6070,7 +6085,6 @@ impl ProjectPanel { return Ok(()); } - let worktree_id = worktree.id(); self.expand_entry(worktree_id, entry_id, cx); self.update_visible_entries(Some((worktree_id, entry_id)), false, true, window, cx); self.marked_entries.clear(); diff --git a/crates/project_panel/src/project_panel_tests.rs b/crates/project_panel/src/project_panel_tests.rs index 01d165174784f4ab5360b99e16a514a4b8f669b4..af84a7f522a60abf2608bf1f3435b367d24f6bdc 100644 --- a/crates/project_panel/src/project_panel_tests.rs +++ 
b/crates/project_panel/src/project_panel_tests.rs @@ -4843,6 +4843,64 @@ async fn test_autoreveal_and_gitignored_files(cx: &mut gpui::TestAppContext) { ], "When a gitignored entry is explicitly revealed, it should be shown in the project tree" ); + + panel.update(cx, |panel, cx| { + panel.project.update(cx, |_, cx| { + cx.emit(project::Event::ActiveEntryChanged(Some(dir_2_file))) + }) + }); + cx.run_until_parked(); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v project_root", + " > .git", + " v dir_1", + " v gitignored_dir", + " file_a.py", + " file_b.py", + " file_c.py", + " file_1.py", + " file_2.py", + " file_3.py", + " v dir_2", + " file_1.py <== selected <== marked", + " file_2.py", + " file_3.py", + " .gitignore", + ], + "After switching to dir_2_file, it should be selected and marked" + ); + + panel.update(cx, |panel, cx| { + panel.project.update(cx, |_, cx| { + cx.emit(project::Event::ActiveEntryChanged(Some( + gitignored_dir_file, + ))) + }) + }); + cx.run_until_parked(); + assert_eq!( + visible_entries_as_strings(&panel, 0..20, cx), + &[ + "v project_root", + " > .git", + " v dir_1", + " v gitignored_dir", + " file_a.py <== selected <== marked", + " file_b.py", + " file_c.py", + " file_1.py", + " file_2.py", + " file_3.py", + " v dir_2", + " file_1.py", + " file_2.py", + " file_3.py", + " .gitignore", + ], + "When a gitignored entry is already visible, auto reveal should mark it as selected" + ); } #[gpui::test] From c9aea6f294bdb55d75e2136d2a6cfce5c786cda9 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Wed, 25 Feb 2026 20:15:05 +0200 Subject: [PATCH 091/548] ep: Stratify by cursor_path by default (#50111) Also, `ep split train=100` now means 100 lines, not 100 groups (repos or cursor_paths). 
Release Notes: - N/A --- crates/edit_prediction_cli/src/main.rs | 5 +- crates/edit_prediction_cli/src/score.rs | 42 ++- .../edit_prediction_cli/src/split_dataset.rs | 318 +++++++++++------- 3 files changed, 227 insertions(+), 138 deletions(-) diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs index 03f94a4dc47388c9a56169f2be0280af33dc6f1d..a6a0b2e3145cefbe7dd84a88733fe5d865b6364b 100644 --- a/crates/edit_prediction_cli/src/main.rs +++ b/crates/edit_prediction_cli/src/main.rs @@ -294,6 +294,9 @@ struct EvalArgs { /// Path to write summary scores as JSON #[clap(long)] summary_json: Option, + /// Print all individual example lines (default: up to 20) + #[clap(long)] + verbose: bool, } #[derive(Clone, Copy, Default, Debug, PartialEq, Eq, Hash)] @@ -1238,7 +1241,7 @@ fn main() { match &command { Command::Eval(args) => { let examples = finished_examples.lock().unwrap(); - score::print_report(&examples); + score::print_report(&examples, args.verbose); if let Some(summary_path) = &args.summary_json { score::write_summary_json(&examples, summary_path)?; } diff --git a/crates/edit_prediction_cli/src/score.rs b/crates/edit_prediction_cli/src/score.rs index 8436dc4a4b26206eb41bafd5b9de8645cb0abb5e..b6f745114f6dd2a091b95b724ee53869a04a8c4e 100644 --- a/crates/edit_prediction_cli/src/score.rs +++ b/crates/edit_prediction_cli/src/score.rs @@ -217,7 +217,8 @@ fn compute_cursor_metrics( } } -pub fn print_report(examples: &[Example]) { +pub fn print_report(examples: &[Example], verbose: bool) { + const MAX_EXAMPLES_DEFAULT: usize = 20; use crate::metrics::ClassificationMetrics; const LINE_WIDTH: usize = 101; @@ -250,6 +251,9 @@ pub fn print_report(examples: &[Example]) { let mut patch_deleted_tokens: Vec = Vec::new(); let mut predictions_with_patch: usize = 0; + let mut printed_lines: usize = 0; + let mut skipped_lines: usize = 0; + for example in examples { for (score_idx, score) in example.score.iter().enumerate() { let exact_lines = 
ClassificationMetrics { @@ -284,18 +288,23 @@ pub fn print_report(examples: &[Example]) { (None, _) => "-".to_string(), }; - println!( - "{:<40} {:>8.2} {:>5} {:>6.1}% {:>6.1}% {:>7} {:>7} {:>6} {:>5}", - truncate_name(&example.spec.name, 40), - score.delta_chr_f, - score.braces_disbalance, - exact_lines.f1() * 100.0, - score.reversal_ratio * 100.0, - qa_reverts_str, - qa_conf_str, - cursor_str, - wrong_er_str - ); + if verbose || printed_lines < MAX_EXAMPLES_DEFAULT { + println!( + "{:<40} {:>8.2} {:>5} {:>6.1}% {:>6.1}% {:>7} {:>7} {:>6} {:>5}", + truncate_name(&example.spec.name, 40), + score.delta_chr_f, + score.braces_disbalance, + exact_lines.f1() * 100.0, + score.reversal_ratio * 100.0, + qa_reverts_str, + qa_conf_str, + cursor_str, + wrong_er_str + ); + printed_lines += 1; + } else { + skipped_lines += 1; + } all_delta_chr_f_scores.push(score.delta_chr_f); all_reversal_ratios.push(score.reversal_ratio); @@ -358,6 +367,13 @@ pub fn print_report(examples: &[Example]) { } } + if skipped_lines > 0 { + println!( + "{:<40} (use --verbose to see all {} examples)", + format!("... and {} more", skipped_lines), + printed_lines + skipped_lines + ); + } println!("{}", separator); if !all_delta_chr_f_scores.is_empty() { diff --git a/crates/edit_prediction_cli/src/split_dataset.rs b/crates/edit_prediction_cli/src/split_dataset.rs index b34d7c14c6646442359459ef8d4450dae0b9c40e..f1e0a672695cb940f3c368f71fec3b16a64524a1 100644 --- a/crates/edit_prediction_cli/src/split_dataset.rs +++ b/crates/edit_prediction_cli/src/split_dataset.rs @@ -1,29 +1,34 @@ //! `ep split` implementation. //! //! This command splits a JSONL dataset into multiple files based on size specifications, -//! with stratification by repository URL (if the field is present). +//! with optional stratification by a JSON field. //! //! # Usage //! //! ```text -//! ep split [input.jsonl] = = ... +//! ep split [--stratify=] [input.jsonl] = = ... //! ``` //! //! 
If `input.jsonl` is not provided or is `-`, reads from stdin. //! //! # Size specifications //! -//! - `80%` - percentage of total (repositories if stratified, examples otherwise) -//! - `100` - absolute count of repositories (if stratified) or examples +//! - `80%` - percentage of total examples (lines) +//! - `100` - approximate absolute count of examples (lines) //! - `rest` - all remaining items (only one split can use this) //! //! # Stratification //! -//! When examples have a `repository_url` field, the split is stratified by repository. -//! This ensures each output file contains examples from non-overlapping repositories. -//! Size specifications apply to the number of repositories, not individual examples. +//! The `--stratify` flag controls how examples are grouped before splitting: //! -//! Examples without `repository_url` are distributed proportionally across all outputs. +//! - `cursor-path` (default): group by the `cursor_path` JSON field +//! - `repo`: group by the `repository_url` JSON field +//! - `none`: no grouping, split individual examples +//! +//! When stratifying, the split ensures each output file contains examples from +//! non-overlapping groups. Size specifications always apply to the number of +//! examples (lines), with whole groups assigned greedily to meet the target. +//! Examples missing the stratification field are treated as individual groups. use anyhow::{Context as _, Result, bail}; use clap::Args; @@ -38,23 +43,27 @@ use std::path::{Path, PathBuf}; /// `ep split` CLI args. #[derive(Debug, Args, Clone)] #[command( - about = "Split a JSONL dataset into multiple files (stratified by repository_url if present)", + about = "Split a JSONL dataset into multiple files with optional stratification", after_help = r#"SIZE SPECIFICATIONS: % Percentage of total (e.g., 80%) Absolute number (e.g., 100) rest All remaining items (only one output can use this) - When stratifying by repository_url, sizes apply to repositories, not examples. 
+ Sizes always apply to examples (lines). When stratifying, whole groups + are assigned greedily to approximate the target count. EXAMPLES: - # Split 80% train, 20% validation + # Split 80% train, 20% validation (default: stratify by cursor_path) ep split input.jsonl train.jsonl=80% valid.jsonl=rest # Split into train/valid/test ep split input.jsonl train.jsonl=80% valid.jsonl=10% test.jsonl=rest - # Use absolute counts (100 repos to train, rest to valid) - ep split input.jsonl train.jsonl=100 valid.jsonl=rest + # Stratify by repository_url instead of cursor_path + ep split --stratify=repo input.jsonl train.jsonl=80% valid.jsonl=rest + + # No stratification (split by individual examples) + ep split --stratify=none input.jsonl train.jsonl=80% valid.jsonl=rest # Read from stdin cat input.jsonl | ep split train.jsonl=80% valid.jsonl=rest @@ -62,14 +71,15 @@ EXAMPLES: # Reproducible split with seed ep split --seed 42 input.jsonl train.jsonl=80% valid.jsonl=rest - # Disable stratification (split by examples, not repositories) - ep split --no-stratify input.jsonl train.jsonl=80% valid.jsonl=rest - STRATIFICATION: - When examples have a "repository_url" field, the split ensures each output - file contains examples from non-overlapping repositories. This prevents - data leakage between train/test splits. Use --no-stratify to disable this - behavior and split by individual examples instead. + Controls how examples are grouped before splitting: + cursor-path Group by "cursor_path" field (default) + repo Group by "repository_url" field + none No grouping, split individual examples + + When stratifying, the split ensures each output file contains examples + from non-overlapping groups. This prevents data leakage between + train/test splits. 
"# )] pub struct SplitArgs { @@ -77,9 +87,19 @@ pub struct SplitArgs { #[arg(long)] pub seed: Option, - /// Disable stratification by repository_url (split by examples instead) - #[arg(long)] - pub no_stratify: bool, + /// Stratification field for splitting the dataset + #[arg(long, default_value = "cursor-path")] + pub stratify: Stratify, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, clap::ValueEnum, strum::Display)] +pub enum Stratify { + #[strum(serialize = "cursor_path")] + CursorPath, + #[strum(serialize = "repo")] + Repo, + #[strum(serialize = "none")] + None, } #[derive(Debug, Clone)] @@ -142,29 +162,6 @@ fn read_lines_from_input(input: Option<&Path>) -> Result> { Ok(lines) } -fn get_repository_url(line: &str) -> Option { - let value: Value = serde_json::from_str(line).ok()?; - value - .get("repository_url") - .and_then(|v| v.as_str()) - .map(|s| s.to_string()) -} - -fn group_lines_by_repo(lines: Vec) -> (HashMap>, Vec) { - let mut by_repo: HashMap> = HashMap::new(); - let mut without_repo: Vec = Vec::new(); - - for line in lines { - if let Some(repo_url) = get_repository_url(&line) { - by_repo.entry(repo_url).or_default().push(line); - } else { - without_repo.push(line); - } - } - - (by_repo, without_repo) -} - fn compute_split_counts(specs: &[SplitSpec], total: usize) -> Result> { let mut counts = vec![0usize; specs.len()]; let mut remaining = total; @@ -261,26 +258,20 @@ pub fn run_split(args: &SplitArgs, inputs: &[PathBuf]) -> Result<()> { return Ok(()); } - let (by_repo, without_repo) = group_lines_by_repo(lines); - let has_repos = !by_repo.is_empty() && !args.no_stratify; + let mut grouped_lines = group_lines(&lines, args.stratify); - if args.no_stratify && !by_repo.is_empty() { + if args.stratify != Stratify::None { eprintln!( - "Stratification disabled (--no-stratify), splitting {} examples by line", + "Stratifying by {} ({} unique groups, {} examples)", + args.stratify, + grouped_lines.len(), total_lines ); - } else if has_repos { + } else { 
eprintln!( - "Stratifying by repository_url ({} unique repositories, {} examples)", - by_repo.len(), - total_lines - without_repo.len() + "No stratification, splitting {} examples by line", + total_lines ); - if !without_repo.is_empty() { - eprintln!( - " + {} examples without repository_url (distributed proportionally)", - without_repo.len() - ); - } } let mut rng = match args.seed { @@ -288,53 +279,31 @@ pub fn run_split(args: &SplitArgs, inputs: &[PathBuf]) -> Result<()> { None => rand::rngs::StdRng::from_os_rng(), }; - let mut split_outputs: Vec> = vec![Vec::new(); specs.len()]; - - if has_repos { - let mut repos: Vec = by_repo.keys().cloned().collect(); - repos.shuffle(&mut rng); + grouped_lines.shuffle(&mut rng); - let repo_counts = compute_split_counts(&specs, repos.len())?; + let line_targets = compute_split_counts(&specs, total_lines)?; + let rest_index = specs.iter().position(|s| matches!(s.size, SplitSize::Rest)); + let mut split_outputs: Vec> = vec![Vec::new(); specs.len()]; + let mut group_iter = grouped_lines.into_iter(); - let mut repo_iter = repos.into_iter(); - for (split_idx, &count) in repo_counts.iter().enumerate() { - for _ in 0..count { - if let Some(repo) = repo_iter.next() { - if let Some(repo_lines) = by_repo.get(&repo) { - split_outputs[split_idx].extend(repo_lines.iter().cloned()); - } - } - } + for (split_idx, &target) in line_targets.iter().enumerate() { + if Some(split_idx) == rest_index { + continue; } - - if !without_repo.is_empty() { - let no_repo_counts = compute_split_counts(&specs, without_repo.len())?; - let mut no_repo_shuffled = without_repo; - no_repo_shuffled.shuffle(&mut rng); - - let mut line_iter = no_repo_shuffled.into_iter(); - for (split_idx, &count) in no_repo_counts.iter().enumerate() { - for _ in 0..count { - if let Some(line) = line_iter.next() { - split_outputs[split_idx].push(line); - } - } + let mut accumulated = 0; + while accumulated < target { + if let Some(group) = group_iter.next() { + accumulated += 
group.len(); + split_outputs[split_idx].extend(group); + } else { + break; } } - } else { - let line_counts = compute_split_counts(&specs, total_lines)?; - let mut all_lines: Vec = by_repo.into_values().flatten().collect(); - all_lines.extend(without_repo); - all_lines.shuffle(&mut rng); - - let mut line_iter = all_lines.into_iter(); + } - for (split_idx, &count) in line_counts.iter().enumerate() { - for _ in 0..count { - if let Some(line) = line_iter.next() { - split_outputs[split_idx].push(line); - } - } + if let Some(idx) = rest_index { + for group in group_iter { + split_outputs[idx].extend(group); } } @@ -346,6 +315,39 @@ pub fn run_split(args: &SplitArgs, inputs: &[PathBuf]) -> Result<()> { Ok(()) } +/// Groups lines by the specified stratification field. +/// +/// When `stratify` is `None`, each line becomes its own group. +/// When a line is missing the stratification field, it is also placed in its own group. +fn group_lines(lines: &[String], stratify: Stratify) -> Vec> { + if stratify == Stratify::None { + return lines.iter().map(|line| vec![line.clone()]).collect(); + } + + let field = match stratify { + Stratify::Repo => "repository_url", + Stratify::CursorPath => "cursor_path", + Stratify::None => unreachable!(), + }; + + let mut groups: HashMap> = HashMap::new(); + let mut ungrouped: Vec> = Vec::new(); + + for line in lines { + let key = serde_json::from_str::(line) + .ok() + .and_then(|v| v.get(field)?.as_str().map(|s| s.to_string())); + match key { + Some(key) => groups.entry(key).or_default().push(line.clone()), + None => ungrouped.push(vec![line.clone()]), + } + } + + let mut result: Vec> = groups.into_values().collect(); + result.extend(ungrouped); + result +} + #[cfg(test)] mod tests { use super::*; @@ -389,15 +391,11 @@ mod tests { } #[test] - fn test_get_repository_url() { - let line = r#"{"repository_url": "https://github.com/example/repo", "data": 123}"#; - assert_eq!( - get_repository_url(line), - 
Some("https://github.com/example/repo".to_string()) - ); - - let line_no_repo = r#"{"data": 123}"#; - assert_eq!(get_repository_url(line_no_repo), None); + fn test_group_lines_none() { + let lines = vec!["a".to_string(), "b".to_string(), "c".to_string()]; + let groups = group_lines(&lines, Stratify::None); + assert_eq!(groups.len(), 3); + assert!(groups.iter().all(|g| g.len() == 1)); } #[test] @@ -457,12 +455,30 @@ mod tests { r#"{"id": 4}"#.to_string(), ]; - let (by_repo, without_repo) = group_lines_by_repo(lines); + let groups = group_lines(&lines, Stratify::Repo); + + let grouped_count: usize = groups.iter().filter(|g| g.len() > 1).count(); + let ungrouped_count: usize = groups.iter().filter(|g| g.len() == 1).count(); + let total_lines: usize = groups.iter().map(|g| g.len()).sum(); - assert_eq!(by_repo.len(), 2); - assert_eq!(by_repo.get("repo1").unwrap().len(), 2); - assert_eq!(by_repo.get("repo2").unwrap().len(), 1); - assert_eq!(without_repo.len(), 1); + assert_eq!(grouped_count, 1); // repo1 has 2 lines + assert_eq!(ungrouped_count, 2); // repo2 (1 line) + line without repo + assert_eq!(total_lines, 4); + } + + #[test] + fn test_group_lines_by_cursor_path() { + let lines = vec![ + r#"{"cursor_path": "src/main.rs", "id": 1}"#.to_string(), + r#"{"cursor_path": "src/main.rs", "id": 2}"#.to_string(), + r#"{"cursor_path": "src/lib.rs", "id": 3}"#.to_string(), + ]; + + let groups = group_lines(&lines, Stratify::CursorPath); + + let total_lines: usize = groups.iter().map(|g| g.len()).sum(); + assert_eq!(groups.len(), 2); + assert_eq!(total_lines, 3); } #[test] @@ -484,7 +500,7 @@ mod tests { let args = SplitArgs { seed: Some(42), - no_stratify: false, + stratify: Stratify::Repo, }; let inputs = vec![ input.path().to_path_buf(), @@ -502,14 +518,18 @@ mod tests { assert_eq!(train_lines.len() + valid_lines.len(), 8); - let train_repos: std::collections::HashSet<_> = train_lines - .iter() - .filter_map(|l| get_repository_url(l)) - .collect(); - let valid_repos: 
std::collections::HashSet<_> = valid_lines - .iter() - .filter_map(|l| get_repository_url(l)) - .collect(); + let get_repo = |line: &str| -> Option { + let value: Value = serde_json::from_str(line).ok()?; + value + .get("repository_url") + .and_then(|v| v.as_str()) + .map(|s| s.to_string()) + }; + + let train_repos: std::collections::HashSet<_> = + train_lines.iter().filter_map(|l| get_repo(l)).collect(); + let valid_repos: std::collections::HashSet<_> = + valid_lines.iter().filter_map(|l| get_repo(l)).collect(); assert!( train_repos.is_disjoint(&valid_repos), @@ -531,4 +551,54 @@ mod tests { ]; assert!(compute_split_counts(&specs, 100).is_err()); } + + #[test] + fn test_absolute_targets_lines_not_groups() { + // 5 repos × 3 lines each = 15 total lines. + // `train=6` should target ~6 lines (2 groups), NOT 6 groups (all 15 lines). + let input = create_temp_jsonl(&[ + r#"{"repository_url": "r1", "id": 1}"#, + r#"{"repository_url": "r1", "id": 2}"#, + r#"{"repository_url": "r1", "id": 3}"#, + r#"{"repository_url": "r2", "id": 4}"#, + r#"{"repository_url": "r2", "id": 5}"#, + r#"{"repository_url": "r2", "id": 6}"#, + r#"{"repository_url": "r3", "id": 7}"#, + r#"{"repository_url": "r3", "id": 8}"#, + r#"{"repository_url": "r3", "id": 9}"#, + r#"{"repository_url": "r4", "id": 10}"#, + r#"{"repository_url": "r4", "id": 11}"#, + r#"{"repository_url": "r4", "id": 12}"#, + r#"{"repository_url": "r5", "id": 13}"#, + r#"{"repository_url": "r5", "id": 14}"#, + r#"{"repository_url": "r5", "id": 15}"#, + ]); + + let temp_dir = tempfile::tempdir().unwrap(); + let train_path = temp_dir.path().join("train.jsonl"); + let valid_path = temp_dir.path().join("valid.jsonl"); + + let args = SplitArgs { + seed: Some(42), + stratify: Stratify::Repo, + }; + let inputs = vec![ + input.path().to_path_buf(), + PathBuf::from(format!("{}=6", train_path.display())), + PathBuf::from(format!("{}=rest", valid_path.display())), + ]; + + run_split(&args, &inputs).unwrap(); + + let train_content = 
std::fs::read_to_string(&train_path).unwrap(); + let valid_content = std::fs::read_to_string(&valid_path).unwrap(); + + let train_lines: Vec<&str> = train_content.lines().collect(); + let valid_lines: Vec<&str> = valid_content.lines().collect(); + + // With 3-line groups, train should get 2 groups (6 lines) to meet the + // target of 6, NOT 6 groups (which don't even exist). Valid gets the rest. + assert_eq!(train_lines.len(), 6); + assert_eq!(valid_lines.len(), 9); + } } From 046b173b8720dd335ab8ee47350c0f3cde087f4a Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 25 Feb 2026 19:20:22 +0100 Subject: [PATCH 092/548] agent: Improve SpawnAgentTool instructions for clarity (#50114) Trying to keep it from reiterating instructions Release Notes: - N/A --- crates/agent/src/tools/spawn_agent_tool.rs | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index 69529282544cc35a01f792dcb45df6eb8bdf67d5..e454377ce1a56134ca0677b37c469ff322a6ed90 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -12,27 +12,24 @@ use crate::{AgentTool, Thread, ThreadEnvironment, ToolCallEventStream, ToolInput /// Spawns an agent to perform a delegated task. /// -/// Use this tool when you want to do any of the following: -/// - Run multiple tasks in parallel that would take significantly longer to run sequentially. -/// - Complete a self-contained task where you need to know if it succeeded or failed (and how), but none of its intermediate output. -/// - Perform an investigation where all you need to know is the outcome, not the research that led to that outcome. -/// -/// You control what the agent does by providing a prompt describing what the agent should do. The agent has access to the same tools you do, but does NOT see your conversation history or any context the user attached. 
You must include all relevant context (file paths, requirements, constraints) in the prompt. +/// Use this tool when you want to: +/// - Run multiple tasks in parallel. +/// - Delegate a self-contained task where you only need the final outcome. /// /// You will receive only the agent's final message as output. /// -/// If a response (success or error) includes a session_id, you can send a follow-up message to that session by passing the session_id back. This is useful for multi-turn conversations with an agent, asking clarifying questions about its output, or retrying after timeouts or transient failures. +/// **New session** (no session_id): Creates a new agent that does NOT see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message. +/// +/// **Follow-up** (with session_id): Sends a follow-up to an existing agent session. The agent already has full context, so send only a short, direct message — do NOT repeat the original task or context. Examples: "Also update the tests", "Fix the compile error in foo.rs", "Retry". /// -/// Note: -/// - Agents cannot use tools you don't have access to. /// - If spawning multiple agents that might write to the filesystem, provide guidance on how to avoid conflicts (e.g. assign each to different directories). #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] pub struct SpawnAgentToolInput { /// Short label displayed in the UI while the agent runs (e.g., "Researching alternatives") pub label: String, - /// Describe the task for the agent to perform. Be specific about what you want accomplished. Include all necessary context (file paths, requirements, constraints) since the agent cannot see your conversation. + /// The prompt for the agent. For new sessions, include full context needed for the task. For follow-ups (with session_id), you can rely on the agent already having the previous message. 
pub message: String, - /// Optional session ID of an existing agent session to continue a conversation with. When provided, the message is sent as a follow-up to that session instead of creating a new one. Use this to ask clarifying questions, request changes based on previous output, or retry after errors. + /// Session ID of an existing agent session to continue instead of creating a new one. #[serde(default)] pub session_id: Option, } From b4fe63b097be0f410e358cd0664ce1fc7b4e7290 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Wed, 25 Feb 2026 20:45:01 +0100 Subject: [PATCH 093/548] git_graph: Polish UX (#50123) This is a follow-up on #50027 I address my comments by adding a hash map look-up to find the selected pending commit. I also removed the limitation where we would only retry finding the pending commit 5 times. The pending selection is removed when the graph is fully loaded and doesn't contain the pending commit. This PR also cleans up some internal code structure and starts work to enable search and propagating git log error messages to the UI. UI wise I made the git graph item show the repository name instead of "Git Graph" in Zed. 
Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --------- Co-authored-by: Remco Smits --- crates/git_graph/src/git_graph.rs | 155 ++++++++++++++++++----------- crates/project/src/git_store.rs | 157 ++++++++++++++++++++---------- 2 files changed, 206 insertions(+), 106 deletions(-) diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 3bdb2b0d717ca4cae181fee9dd690755e29075d0..0052d58f5985a29f11043f0bd97edb76bb8d2124 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -18,7 +18,10 @@ use language::line_diff; use menu::{Cancel, SelectNext, SelectPrevious}; use project::{ Project, - git_store::{CommitDataState, GitStoreEvent, Repository, RepositoryEvent, RepositoryId}, + git_store::{ + CommitDataState, GitGraphEvent, GitStoreEvent, GraphDataResponse, Repository, + RepositoryEvent, RepositoryId, + }, }; use settings::Settings; use smallvec::{SmallVec, smallvec}; @@ -48,7 +51,6 @@ const LANE_WIDTH: Pixels = px(16.0); const LEFT_PADDING: Pixels = px(12.0); const LINE_WIDTH: Pixels = px(1.5); const RESIZE_HANDLE_WIDTH: f32 = 8.0; -const PENDING_SELECT_MAX_RETRIES: usize = 5; const COPIED_STATE_DURATION: Duration = Duration::from_secs(2); struct CopiedState { @@ -853,7 +855,7 @@ pub struct GitGraph { commit_details_split_state: Entity, selected_repo_id: Option, changed_files_scroll_handle: UniformListScrollHandle, - pending_select_sha: Option<(String, usize)>, + pending_select_sha: Option, } impl GitGraph { @@ -965,20 +967,62 @@ impl GitGraph { cx: &mut Context, ) { match event { - RepositoryEvent::GitGraphCountUpdated((order, source), commit_count) => { - if order != 
&self.log_order || source != &self.log_source { - return; - } + RepositoryEvent::GraphEvent((source, order), event) + if source == &self.log_source && order == &self.log_order => + { + match event { + GitGraphEvent::FullyLoaded => { + if let Some(pending_sha_index) = + self.pending_select_sha.take().and_then(|oid| { + repository + .read(cx) + .get_graph_data(source.clone(), *order) + .and_then(|data| data.commit_oid_to_index.get(&oid).copied()) + }) + { + self.select_entry(pending_sha_index, cx); + } + } + GitGraphEvent::LoadingError => { + // todo(git_graph): Wire this up with the UI + } + GitGraphEvent::CountUpdated(commit_count) => { + let old_count = self.graph_data.commits.len(); + + if let Some(pending_selection_index) = + repository.update(cx, |repository, cx| { + let GraphDataResponse { + commits, + is_loading, + error: _, + } = repository.graph_data( + source.clone(), + *order, + old_count..*commit_count, + cx, + ); + self.graph_data.add_commits(commits); - let old_count = self.graph_data.commits.len(); + let pending_sha_index = self.pending_select_sha.and_then(|oid| { + repository.get_graph_data(source.clone(), *order).and_then( + |data| data.commit_oid_to_index.get(&oid).copied(), + ) + }); - repository.update(cx, |repository, cx| { - let (commits, _) = - repository.graph_data(source.clone(), *order, old_count..*commit_count, cx); - self.graph_data.add_commits(commits); - }); - cx.notify(); - self.retry_pending_select(cx); + if !is_loading && pending_sha_index.is_none() { + self.pending_select_sha.take(); + } + + pending_sha_index + }) + { + self.select_entry(pending_selection_index, cx); + self.pending_select_sha.take(); + } + + cx.notify(); + } + } } RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => { self.pending_select_sha = None; @@ -990,6 +1034,7 @@ impl GitGraph { cx.notify(); } } + RepositoryEvent::GraphEvent(_, _) => {} _ => {} } } @@ -997,12 +1042,9 @@ impl GitGraph { fn fetch_initial_graph_data(&mut self, cx: &mut App) { 
if let Some(repository) = self.get_selected_repository(cx) { repository.update(cx, |repository, cx| { - let (commits, _) = repository.graph_data( - self.log_source.clone(), - self.log_order, - 0..usize::MAX, - cx, - ); + let commits = repository + .graph_data(self.log_source.clone(), self.log_order, 0..usize::MAX, cx) + .commits; self.graph_data.add_commits(commits); }); } @@ -1145,6 +1187,10 @@ impl GitGraph { } } + fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + self.open_selected_commit_view(window, cx); + } + fn select_entry(&mut self, idx: usize, cx: &mut Context) { if self.selected_entry_idx == Some(idx) { return; @@ -1193,31 +1239,21 @@ impl GitGraph { let Ok(oid) = sha.parse::() else { return; }; - for (idx, commit) in self.graph_data.commits.iter().enumerate() { - if commit.data.sha == oid { - self.pending_select_sha = None; - self.select_entry(idx, cx); - return; - } - } - self.pending_select_sha = Some((sha.to_string(), PENDING_SELECT_MAX_RETRIES)); - } - fn retry_pending_select(&mut self, cx: &mut Context) { - let Some((sha, retries_remaining)) = self.pending_select_sha.take() else { + let Some(selected_repository) = self.get_selected_repository(cx) else { return; }; - if let Ok(oid) = sha.parse::() { - for (idx, commit) in self.graph_data.commits.iter().enumerate() { - if commit.data.sha == oid { - self.select_entry(idx, cx); - return; - } - } - } - if retries_remaining > 0 { - self.pending_select_sha = Some((sha, retries_remaining - 1)); - } + + let Some(index) = selected_repository + .read(cx) + .get_graph_data(self.log_source.clone(), self.log_order) + .and_then(|data| data.commit_oid_to_index.get(&oid)) + .copied() + else { + return; + }; + + self.select_entry(index, cx); } fn open_selected_commit_view(&mut self, window: &mut Window, cx: &mut Context) { @@ -2033,7 +2069,11 @@ impl Render for GitGraph { if let Some(repository) = self.get_selected_repository(cx) { repository.update(cx, |repository, cx| { // Start 
loading the graph data if we haven't started already - let (commits, is_loading) = repository.graph_data( + let GraphDataResponse { + commits, + is_loading, + error: _, + } = repository.graph_data( self.log_source.clone(), self.log_order, 0..usize::MAX, @@ -2212,16 +2252,17 @@ impl Render for GitGraph { }; div() - .size_full() - .bg(cx.theme().colors().editor_background) .key_context("GitGraph") .track_focus(&self.focus_handle) + .size_full() + .bg(cx.theme().colors().editor_background) .on_action(cx.listener(|this, _: &OpenCommitView, window, cx| { this.open_selected_commit_view(window, cx); })) .on_action(cx.listener(Self::cancel)) .on_action(cx.listener(Self::select_prev)) .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::confirm)) .child(content) .children(self.context_menu.as_ref().map(|(menu, position, _)| { deferred( @@ -2270,8 +2311,15 @@ impl Item for GitGraph { })))) } - fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString { - "Git Graph".into() + fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString { + self.get_selected_repository(cx) + .and_then(|repo| { + repo.read(cx) + .work_directory_abs_path + .file_name() + .map(|name| name.to_string_lossy().to_string()) + }) + .map_or_else(|| "Git Graph".into(), |name| SharedString::from(name)) } fn show_toolbar(&self) -> bool { @@ -3049,7 +3097,7 @@ mod tests { 0..usize::MAX, cx, ) - .0 + .commits .to_vec() }); @@ -3132,13 +3180,10 @@ mod tests { .any(|event| matches!(event, RepositoryEvent::MergeHeadsChanged)), "initial repository scan should emit MergeHeadsChanged" ); - - let graph_data_key = (crate::LogOrder::default(), crate::LogSource::default()); let commit_count_after = repository.read_with(cx, |repo, _| { - repo.initial_graph_data - .get(&graph_data_key) - .map(|(_, data)| data.len()) - .unwrap_or(0) + repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default()) + .map(|data| data.commit_data.len()) + .unwrap() }); assert_eq!( 
commits.len(), diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 67bc21c94227e8f53356ef1b7f626ff922326d29..3113163cbaec65d7b439e0cbf46603d60ac3fae0 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -60,7 +60,7 @@ use settings::WorktreeId; use smol::future::yield_now; use std::{ cmp::Ordering, - collections::{BTreeSet, HashSet, VecDeque}, + collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry}, future::Future, mem, ops::Range, @@ -296,6 +296,19 @@ enum GraphCommitHandlerState { Closed, } +pub struct InitialGitGraphData { + fetch_task: Task<()>, + pub error: Option, + pub commit_data: Vec>, + pub commit_oid_to_index: HashMap, +} + +pub struct GraphDataResponse<'a> { + pub commits: &'a [Arc], + pub is_loading: bool, + pub error: Option, +} + pub struct Repository { this: WeakEntity, snapshot: RepositorySnapshot, @@ -311,13 +324,7 @@ pub struct Repository { askpass_delegates: Arc>>, latest_askpass_id: u64, repository_state: Shared>>, - pub initial_graph_data: HashMap< - (LogOrder, LogSource), - ( - Task>, - Vec>, - ), - >, + initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>, graph_commit_data_handler: GraphCommitHandlerState, commit_data: HashMap, } @@ -390,6 +397,13 @@ pub enum RepositoryState { Remote(RemoteRepositoryState), } +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum GitGraphEvent { + CountUpdated(usize), + FullyLoaded, + LoadingError, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub enum RepositoryEvent { StatusesChanged, @@ -397,7 +411,7 @@ pub enum RepositoryEvent { BranchChanged, StashEntriesChanged, PendingOpsChanged { pending_ops: SumTree }, - GitGraphCountUpdated((LogOrder, LogSource), usize), + GraphEvent((LogSource, LogOrder), GitGraphEvent), } #[derive(Clone, Debug)] @@ -4404,47 +4418,82 @@ impl Repository { }) } + pub fn get_graph_data( + &self, + log_source: LogSource, + log_order: LogOrder, + ) -> Option<&InitialGitGraphData> { + 
self.initial_graph_data.get(&(log_source, log_order)) + } + pub fn graph_data( &mut self, log_source: LogSource, log_order: LogOrder, range: Range, cx: &mut Context, - ) -> (&[Arc], bool) { - let (loading_task, initial_commit_data) = self + ) -> GraphDataResponse<'_> { + let initial_commit_data = self .initial_graph_data - .entry((log_order, log_source.clone())) + .entry((log_source.clone(), log_order)) .or_insert_with(|| { let state = self.repository_state.clone(); let log_source = log_source.clone(); - ( - cx.spawn(async move |repository, cx| { - let state = state.await; - match state { - Ok(RepositoryState::Local(LocalRepositoryState { - backend, .. - })) => { - Self::local_git_graph_data( - repository, backend, log_source, log_order, cx, - ) - .await - } - Ok(RepositoryState::Remote(_)) => { - Err("Git graph is not supported for collab yet".into()) - } - Err(e) => Err(SharedString::from(e)), + + let fetch_task = cx.spawn(async move |repository, cx| { + let state = state.await; + let result = match state { + Ok(RepositoryState::Local(LocalRepositoryState { backend, .. 
})) => { + Self::local_git_graph_data( + repository.clone(), + backend, + log_source.clone(), + log_order, + cx, + ) + .await } - }), - vec![], - ) + Ok(RepositoryState::Remote(_)) => { + Err("Git graph is not supported for collab yet".into()) + } + Err(e) => Err(SharedString::from(e)), + }; + + if let Err(fetch_task_error) = result { + repository + .update(cx, |repository, _| { + if let Some(data) = repository + .initial_graph_data + .get_mut(&(log_source, log_order)) + { + data.error = Some(fetch_task_error); + } else { + debug_panic!( + "This task would be dropped if this entry doesn't exist" + ); + } + }) + .ok(); + } + }); + + InitialGitGraphData { + fetch_task, + error: None, + commit_data: Vec::new(), + commit_oid_to_index: HashMap::default(), + } }); - let max_start = initial_commit_data.len().saturating_sub(1); - let max_end = initial_commit_data.len(); - ( - &initial_commit_data[range.start.min(max_start)..range.end.min(max_end)], - !loading_task.is_ready(), - ) + let max_start = initial_commit_data.commit_data.len().saturating_sub(1); + let max_end = initial_commit_data.commit_data.len(); + + GraphDataResponse { + commits: &initial_commit_data.commit_data + [range.start.min(max_start)..range.end.min(max_end)], + is_loading: !initial_commit_data.fetch_task.is_ready(), + error: initial_commit_data.error.clone(), + } } async fn local_git_graph_data( @@ -4467,32 +4516,38 @@ impl Repository { } }); - let graph_data_key = (log_order, log_source.clone()); + let graph_data_key = (log_source, log_order); while let Ok(initial_graph_commit_data) = request_rx.recv().await { this.update(cx, |repository, cx| { let graph_data = repository .initial_graph_data - .get_mut(&graph_data_key) - .map(|(_, graph_data)| graph_data); - debug_assert!( - graph_data.is_some(), - "This task should be dropped if data doesn't exist" - ); + .entry(graph_data_key.clone()) + .and_modify(|graph_data| { + for commit_data in initial_graph_commit_data { + graph_data + .commit_oid_to_index + 
.insert(commit_data.sha, graph_data.commit_data.len()); + graph_data.commit_data.push(commit_data); + + cx.emit(RepositoryEvent::GraphEvent( + graph_data_key.clone(), + GitGraphEvent::CountUpdated(graph_data.commit_data.len()), + )); + } + }); - if let Some(graph_data) = graph_data { - graph_data.extend(initial_graph_commit_data); - cx.emit(RepositoryEvent::GitGraphCountUpdated( - graph_data_key.clone(), - graph_data.len(), - )); + match &graph_data { + Entry::Occupied(_) => {} + Entry::Vacant(_) => { + debug_panic!("This task should be dropped if data doesn't exist"); + } } }) .ok(); } task.await?; - Ok(()) } From 646ec5e866f68419614d6d91876b98b3e5e12b9f Mon Sep 17 00:00:00 2001 From: morgankrey Date: Wed, 25 Feb 2026 14:16:54 -0600 Subject: [PATCH 094/548] docs: Remove Preview callouts for stable release (#50119) This PR removes Preview callouts from documentation for features that are now in Stable. Features documented with Preview callouts are now included in the stable release. Generated by script/docs-strip-preview-callouts Release Notes: - N/A --- docs/src/ai/edit-prediction.md | 2 -- docs/src/finding-navigating.md | 2 -- docs/src/git.md | 2 -- docs/src/globs.md | 2 -- docs/src/languages/json.md | 2 -- docs/src/languages/yaml.md | 2 -- docs/src/reference/all-settings.md | 6 ------ docs/src/reference/cli.md | 2 -- docs/src/repl.md | 4 ---- docs/src/tasks.md | 4 ---- docs/src/vim.md | 4 ---- script/docs-suggest-publish | 18 +++++++++++------- 12 files changed, 11 insertions(+), 39 deletions(-) diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index 56b1c1260ec98bc82de337ac492b1b4aa40723d8..3d0f8c5141a40daa66fc3689deb2d0363acf273a 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -301,8 +301,6 @@ Edit Prediction also works with other providers. ### GitHub Copilot {#github-copilot} -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). 
- To use GitHub Copilot as your provider, set this in your settings file ([how to edit](../configuring-zed.md#settings-files)): ```json [settings] diff --git a/docs/src/finding-navigating.md b/docs/src/finding-navigating.md index b5f6e3fff774281d699276449c11602df543a021..f1d3536f8c909f18240f83eac6f4309159b764e1 100644 --- a/docs/src/finding-navigating.md +++ b/docs/src/finding-navigating.md @@ -19,8 +19,6 @@ Open any file in your project with {#kb file_finder::Toggle}. Type part of the f ## Project Search -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - Search across all files with {#kb pane::DeploySearch}. Start typing in the search field to begin searching—results appear as you type. Results appear in a [multibuffer](./multibuffers.md), letting you edit matches in place. diff --git a/docs/src/git.md b/docs/src/git.md index 5e46dfc322a21dca186dd08389fbf6f72a777288..b33aa0690cbad99f792729dd780ab03716d0dc4c 100644 --- a/docs/src/git.md +++ b/docs/src/git.md @@ -72,8 +72,6 @@ To disable word diff for specific languages only, add this to your settings.json ### Diff View Styles -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - Zed displays diffs in two modes: **split** (side-by-side comparison) or **unified** (inline changes). Split view is the default. #### Changing the diff view diff --git a/docs/src/globs.md b/docs/src/globs.md index 26ecf51da8c0420fb65428eb296887b603a99eb5..f1fb584ee568d2e7393539ec3d74b5020c483aaf 100644 --- a/docs/src/globs.md +++ b/docs/src/globs.md @@ -24,8 +24,6 @@ A glob "pattern" is used to match a file name or complete file path. For example ### Multiple Patterns -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - You can specify multiple glob patterns in Project Search filters by separating them with commas. 
When using comma-separated patterns, Zed correctly handles braces within individual patterns: - `*.ts, *.tsx` — Match TypeScript and TSX files diff --git a/docs/src/languages/json.md b/docs/src/languages/json.md index 253669b2fca3a5007e8ea748a8602d85575b24ce..41644a8b0556c3a21f1c680a2fccb8c901a580cc 100644 --- a/docs/src/languages/json.md +++ b/docs/src/languages/json.md @@ -54,8 +54,6 @@ For example to for a `.luarc.json` for use with [lua-language-server](https://gi ### Schema Specification via Settings -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - You can associate JSON Schemas with file paths using relative paths in your language server settings. Zed resolves paths relative to your project root: ```json [settings] diff --git a/docs/src/languages/yaml.md b/docs/src/languages/yaml.md index 907a07cc4366a29e7481aa5d927de8887ec84e96..b82e14d64bd20f861d505b71f88b73fc4dfdf56f 100644 --- a/docs/src/languages/yaml.md +++ b/docs/src/languages/yaml.md @@ -12,8 +12,6 @@ YAML support is available natively in Zed. ## Configuration -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - You can configure various [yaml-language-server settings](https://github.com/redhat-developer/yaml-language-server?tab=readme-ov-file#language-server-settings) by adding them to your Zed settings.json in a `yaml-language-server` block under the `lsp` key. You can configure custom YAML schemas using relative paths. 
Zed resolves paths relative to your project root: diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index 0211114aee3ce95d63621a2702677290bd2c575b..23b59f0b91002c0a920df0df8d61088652281735 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -519,8 +519,6 @@ Note: Dirty files (files with unsaved changes) will not be automatically closed - `"unified"`: Show changes inline with added and deleted lines stacked vertically - `"split"`: Display old and new versions side by side in separate panes (default) -> **Changed in Preview (v0.225).** Values renamed from `"stacked"`/`"side_by_side"` to `"unified"`/`"split"`. - See [Git documentation](../git.md#diff-view-styles) for more details. ## Disable AI @@ -2774,8 +2772,6 @@ These values take in the same options as the root-level settings with the same n ### Document Symbols -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - - Description: Controls the source of document symbols used for outlines and breadcrumbs. - Setting: `document_symbols` - Default: `off` @@ -5140,8 +5136,6 @@ See the [debugger page](../debugger.md) for more information about debugging sup ## Git Worktree Directory -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - - Description: Directory where git worktrees are created, relative to the repository working directory. - Setting: `git.worktree_directory` - Default: `"../worktrees"` diff --git a/docs/src/reference/cli.md b/docs/src/reference/cli.md index b7c4fafd0b8b30fc64a59a1871a7698b0572fca3..788e287c3abe1f1fe752e00b938de477bcf9d78b 100644 --- a/docs/src/reference/cli.md +++ b/docs/src/reference/cli.md @@ -71,8 +71,6 @@ zed -n ~/projects/myproject ### `-a`, `--add` -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - Add paths to the currently focused workspace instead of opening a new window. 
When multiple workspace windows are open, files open in the focused window: ```sh diff --git a/docs/src/repl.md b/docs/src/repl.md index f1cc0ef08ae384c280a3eaaf3d2de0bcfd5c7395..2e782cb0c14e17cd0ce35dec264d4173a46d404f 100644 --- a/docs/src/repl.md +++ b/docs/src/repl.md @@ -151,8 +151,6 @@ TBD: Improve Julia REPL instructions ## Changing which kernel is used per language {#changing-kernels} -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - Zed automatically detects available kernels and organizes them in the kernel picker: - **Recommended**: The Python environment matching your active toolchain (if detected) @@ -193,8 +191,6 @@ To configure a different default kernel for a language, you can assign a kernel ## Interactive Input -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - When code execution requires user input (such as Python's `input()` function), the REPL displays an input prompt below the cell output. Type your response in the text field and press `Enter` to submit. The kernel receives your input and continues execution. diff --git a/docs/src/tasks.md b/docs/src/tasks.md index 9e0e38690096d674fc84d968c90091761ae43082..2d8afdeefa19485433374b50cfba7f9fa3475f58 100644 --- a/docs/src/tasks.md +++ b/docs/src/tasks.md @@ -249,8 +249,6 @@ When you have a task definition that is bound to the runnable, you can quickly r ## Running Bash Scripts -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - You can run bash scripts directly from Zed. When you open a `.sh` or `.bash` file, Zed automatically detects the script as runnable and makes it available in the task picker. To run a bash script: @@ -276,8 +274,6 @@ If you need to pass arguments or customize the execution environment, add a task ## Shell Initialization -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). 
- When Zed runs a task, it launches the command in a login shell. This ensures your shell's initialization files (`.bash_profile`, `.zshrc`, etc.) are sourced before the task executes. This behavior gives tasks access to the same environment variables, aliases, and PATH modifications you've configured in your shell profile. If a task fails to find a command that works in your terminal, verify your shell configuration files are properly set up. diff --git a/docs/src/vim.md b/docs/src/vim.md index ce6ed57b5c61f7d39ab786ec8fd91574cd5cf6f6..1798f16a93244f2694b30ffa70119da1e4498fdc 100644 --- a/docs/src/vim.md +++ b/docs/src/vim.md @@ -248,10 +248,6 @@ Below, you'll find tables listing the commands you can use in the command palett ### File and window management -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). -> -> The `:bd[elete]` command now closes the active file across all panes. Previously, it only closed the file in the active pane. - This table shows commands for managing windows, tabs, and panes. As commands don't support arguments currently, you cannot specify a filename when saving or creating a new file. | Command | Description | diff --git a/script/docs-suggest-publish b/script/docs-suggest-publish index 24ea1c31addf59ad4abc8ef10e4649fdead4e7bb..4f3e3ac4d552960bb362170081e81283bb76619c 100755 --- a/script/docs-suggest-publish +++ b/script/docs-suggest-publish @@ -132,8 +132,8 @@ if [[ "$DRY_RUN" == "true" ]]; then exit 0 fi -# Ensure clean working state -if [[ -n "$(git status --porcelain)" ]]; then +# Ensure clean working state (ignore untracked files with grep -v '??') +if [[ -n "$(git status --porcelain | grep -v '^??' || true)" ]]; then error "Working directory has uncommitted changes. Please commit or stash first." fi @@ -221,9 +221,14 @@ EOF cat "$SUGGESTIONS_FILE" >> "$APPLY_PROMPT_FILE" log "Running Droid auto-apply with model: $MODEL" -droid exec -m "$MODEL" -f "$APPLY_PROMPT_FILE" > "$APPLY_SUMMARY_FILE" +if ! 
droid exec -m "$MODEL" -f "$APPLY_PROMPT_FILE" --auto high > "$APPLY_SUMMARY_FILE" 2>&1; then + echo "Droid exec output:" + cat "$APPLY_SUMMARY_FILE" + error "Droid exec failed. See output above." +fi +log "Droid completed, checking results..." -if [[ -n "$(git status --porcelain | grep -vE '^.. docs/' || true)" ]]; then +if [[ -n "$(git status --porcelain | grep -v '^??' | grep -vE '^.. docs/' || true)" ]]; then error "Auto-apply modified non-doc files. Revert and re-run." fi @@ -232,7 +237,7 @@ if [[ -z "$(git status --porcelain docs/ | grep '^.. docs/src/' || true)" ]]; th fi log "Running docs formatter" -./script/prettier +./script/prettier --write if [[ -z "$(git status --porcelain docs/ | grep '^.. docs/src/' || true)" ]]; then error "No docs/src changes remain after formatting; aborting PR creation." @@ -324,8 +329,7 @@ log "Creating PR..." PR_URL=$(gh pr create \ --draft \ --title "docs: auto-apply preview release suggestions" \ - --body-file "$PR_BODY_FILE" \ - --label "documentation") + --body-file "$PR_BODY_FILE") echo "" echo -e "${GREEN}PR created:${NC} $PR_URL" From 876086e8419e79691a6266869492709fae362e1f Mon Sep 17 00:00:00 2001 From: morgankrey Date: Wed, 25 Feb 2026 15:15:38 -0600 Subject: [PATCH 095/548] docs: Apply preview release suggestions (#50118) Documentation updates for Preview release - generated by docs-suggest-publish Release Notes: - N/A --- docs/src/ai/agent-panel.md | 12 ++++++-- docs/src/ai/agent-settings.md | 11 +++---- docs/src/ai/edit-prediction.md | 46 ++++++++++++++++++++++++++---- docs/src/ai/llm-providers.md | 40 ++++++++++++++++++++++++-- docs/src/ai/mcp.md | 12 ++++++++ docs/src/ai/models.md | 4 ++- docs/src/collaboration/overview.md | 29 +++++++++++++++++++ docs/src/configuring-languages.md | 4 +++ docs/src/debugger.md | 10 +++++++ docs/src/development.md | 30 +++++++++++++++++++ docs/src/getting-started.md | 8 ++++++ docs/src/outline-panel.md | 4 ++- docs/src/tasks.md | 31 ++++++++++++++++++++ docs/src/troubleshooting.md 
| 21 ++++++++++++++ 14 files changed, 245 insertions(+), 17 deletions(-) diff --git a/docs/src/ai/agent-panel.md b/docs/src/ai/agent-panel.md index b62512d469bee4c1e776f29fddc35bcc4d979467..6204cf386cf2afd42a27fe89f75ee0a771b0ba13 100644 --- a/docs/src/ai/agent-panel.md +++ b/docs/src/ai/agent-panel.md @@ -114,7 +114,13 @@ The agent can search your codebase to find relevant context, but providing it ex Add context by typing `@` in the message editor. You can mention files, directories, symbols, previous threads, rules files, and diagnostics. -Copying images and pasting them in the panel's message editor is also supported. +> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). + +### Images + +You can add images to agent messages on providers that support vision models. OpenAI GPT-4o and later, Anthropic Claude 3 and later, Google Gemini 1.5 and 2.0, and Bedrock vision models (Claude 3+, Amazon Nova Pro and Lite, Meta Llama 3.2 Vision, Mistral Pixtral) all support image inputs. + +To add an image, use the `/file` slash command and select an image file, or drag an image from your file system directly into the agent panel message editor. You can also copy an image and paste it into the message editor. When you paste multi-line code selections copied from a buffer, Zed automatically formats them as @-mentions with the file context. To paste content without this automatic formatting, use {#kb agent::PasteRaw} to paste raw text directly. @@ -168,7 +174,9 @@ You can explore the exact tools enabled in each profile by clicking on the profi Alternatively, you can also use either the command palette, by running {#action agent::ManageProfiles}, or the keybinding directly, {#kb agent::ManageProfiles}, to have access to the profile management modal. -Use {#kb agent::CycleModeSelector} to switch between profiles without opening the modal. +> **Preview:** This keybinding is available in Zed Preview. It will be included in the next Stable release. 
+ +Use {#kb agent::CycleModeSelector} to cycle through available profiles without opening the modal. #### Custom Profiles {#custom-profiles} diff --git a/docs/src/ai/agent-settings.md b/docs/src/ai/agent-settings.md index af02bd5f2072ee8e709c65d6237168c6d2159e70..b351fdfd3b3b15817f05cdf1d1a204d7a91c6eac 100644 --- a/docs/src/ai/agent-settings.md +++ b/docs/src/ai/agent-settings.md @@ -290,20 +290,21 @@ See the [Tool Permissions](./tool-permissions.md) documentation for more example > **Note:** Before Zed v0.224.0, tool approval was controlled by the `agent.always_allow_tool_actions` boolean (default `false`). Set it to `true` to auto-approve tool actions, or leave it `false` to require confirmation for edits and tool calls. -### Single-file Review +### Edit Display Mode -Control whether to display review actions (accept & reject) in single buffers after the agent is done performing edits. -The default value is `true`. +> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). + +By default, agent edits open in multi-file review mode. To display agent edits in single-file editors instead, enable `single_file_review`: ```json [settings] { "agent": { - "single_file_review": false + "single_file_review": true } } ``` -When set to `false`, these controls are only available in the multibuffer review tab. +When enabled, each file modified by an agent opens in its own editor tab for review. When disabled (default), all changes appear in a unified review interface. 
### Sound Notification diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index 3d0f8c5141a40daa66fc3689deb2d0363acf273a..973dc9546a8b81ad58fc996102ff25aed2d241a9 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -406,13 +406,49 @@ After adding your API key, Codestral will appear in the provider dropdown in the ### Self-Hosted OpenAI-compatible servers -To configure Zed to use an arbitrary server for edit predictions: +> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. -1. Open the Settings Editor (`Cmd+,` on macOS, `Ctrl+,` on Linux/Windows) -2. Search for "Edit Predictions" and click **Configure Providers** -3. Find the "OpenAI-compatible API" section and enter the URL and model name. You can also select a prompt format that Zed should use. Zed currently supports several FIM prompt formats, as well as Zed's own Zeta prompt format. If you do not select a prompt format, Zed will attempt to infer it from the model name. +You can use any self-hosted server that implements the OpenAI completion API format. This works with vLLM, llama.cpp server, LocalAI, and other compatible servers. + +#### Configuration + +Set `open_ai_compatible_api` as your provider and configure the API endpoint: + +```json [settings] +{ + "edit_predictions": { + "provider": "open_ai_compatible_api", + "open_ai_compatible_api": { + "api_url": "http://localhost:8080/v1/completions", + "model": "deepseek-coder-6.7b-base", + "prompt_format": "deepseek_coder", + "max_output_tokens": 64 + } + } +} +``` + +The `prompt_format` setting controls how code context is formatted for the model. Use `"infer"` to detect the format from the model name, or specify one explicitly: + +- `code_llama` - CodeLlama format: `
<PRE> prefix <SUF>suffix <MID>`
+- `star_coder` - StarCoder format: `<fim_prefix>prefix<fim_suffix>suffix<fim_middle>`
+- `deepseek_coder` - DeepSeek format with special unicode markers
+- `qwen` - Qwen/CodeGemma format: `<|fim_prefix|>prefix<|fim_suffix|>suffix<|fim_middle|>`
+- `codestral` - Codestral format: `[SUFFIX]suffix[PREFIX]prefix`
+- `glm` - GLM-4 format with code markers
+- `infer` - Auto-detect from model name (default)
 
-The URL must accept requests according to OpenAI's [Completions API](https://developers.openai.com/api/reference/resources/completions/methods/create)
+Your server must implement the OpenAI `/v1/completions` endpoint. Edit predictions will send POST requests with this format:
+
+```json
+{
+  "model": "your-model-name",
+  "prompt": "formatted-code-context",
+  "max_tokens": 256,
+  "temperature": 0.2,
+  "stop": ["<|endoftext|>", ...]
+}
+```
 
 ## See also
 
diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md
index 696427ae6981df5ec75907f7ce2957c07d76eb1f..a8aa7a832736f6042e2c5b6d504b77a7e10f5614 100644
--- a/docs/src/ai/llm-providers.md
+++ b/docs/src/ai/llm-providers.md
@@ -151,7 +151,9 @@ For the most up-to-date supported regions and models, refer to the [Supported Mo
 
 #### Extended Context Window {#bedrock-extended-context}
 
-Anthropic models on Bedrock support a [1M token extended context window](https://docs.anthropic.com/en/docs/build-with-claude/extended-context) beta. To enable this feature, add `"allow_extended_context": true` to your Bedrock configuration:
+> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
+
+Anthropic models on Bedrock support a 1M token extended context window through the `anthropic_beta` API parameter. To enable this feature, set `"allow_extended_context": true` in your Bedrock configuration:
 
 ```json [settings]
 {
@@ -166,9 +168,13 @@ Anthropic models on Bedrock support a [1M token extended context window](https:/
 }
 ```
 
-When enabled, Zed will include the `anthropic_beta` field in requests to Bedrock, enabling the 1M token context window for supported Anthropic models such as Claude Sonnet 4.5 and Claude Opus 4.6.
+Zed enables extended context for supported models (Claude Sonnet 4.5 and Claude Opus 4.6). Extended context usage may increase API costs—refer to AWS Bedrock pricing for details.
+
+#### Image Support {#bedrock-image-support}
+
+> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
 
-> **Note**: Extended context usage may incur additional API costs. Refer to your AWS Bedrock pricing for details.
+Bedrock models that support vision (Claude 3 and later, Amazon Nova Pro and Lite, Meta Llama 3.2 Vision models, Mistral Pixtral) can receive images in conversations and tool results. To send an image, use the slash command `/file` followed by an image path, or drag an image directly into the agent panel.
 
 ### Anthropic {#anthropic}
 
@@ -303,6 +309,15 @@ Here is an example of a custom Google AI model you could add to your Zed setting
   "language_models": {
     "google": {
       "available_models": [
+        {
+          "name": "gemini-3.1-pro-preview",
+          "display_name": "Gemini 3.1 Pro",
+          "max_tokens": 1000000,
+          "mode": {
+            "type": "thinking",
+            "budget_tokens": 24000
+          }
+        },
         {
           "name": "gemini-3-flash-preview",
           "display_name": "Gemini 3 Flash (Thinking)",
@@ -614,6 +629,25 @@ The OpenRouter API key will be saved in your keychain.
 
 Zed will also use the `OPENROUTER_API_KEY` environment variable if it's defined.
 
+> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
+
+When using OpenRouter as your assistant provider, you must explicitly select a model in your settings. OpenRouter no longer provides a default model selection.
+
+Configure your preferred OpenRouter model in `settings.json`:
+
+```json [settings]
+{
+  "agent": {
+    "default_model": {
+      "provider": "openrouter",
+      "model": "openrouter/auto"
+    }
+  }
+}
+```
+
+The `openrouter/auto` model automatically routes your requests to the most appropriate available model. You can also specify any model available through OpenRouter's API.
+
 #### Custom Models {#openrouter-custom-models}
 
 You can add custom models to the OpenRouter provider by adding the following to your Zed settings file ([how to edit](../configuring-zed.md#settings-files)):
diff --git a/docs/src/ai/mcp.md b/docs/src/ai/mcp.md
index 1a3ee231f204eb3ed0f216fe2bc71590e74c3afc..482b2e371de71a6c8cab41149ea1759779d5ca25 100644
--- a/docs/src/ai/mcp.md
+++ b/docs/src/ai/mcp.md
@@ -86,6 +86,18 @@ Once installation is complete, you can return to the Agent Panel and start promp
 How reliably MCP tools get called can vary from model to model.
 Mentioning the MCP server by name can help the model pick tools from that server.
 
+#### Error Handling
+
+> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
+
+When a context server encounters an error while processing a tool call, the agent receives the error message directly and the operation fails. Common error scenarios include:
+
+- Invalid parameters passed to the tool
+- Server-side failures (database connection issues, rate limits)
+- Unsupported operations or missing resources
+
+The error message from the context server will be shown in the agent's response, allowing you to diagnose and correct the issue. Check the context server's logs or documentation for details about specific error codes.
+
 If you want to _ensure_ a given MCP server will be used, you can create [a custom profile](./agent-panel.md#custom-profiles) where all built-in tools (or the ones that could cause conflicts with the server's tools) are turned off and only the tools coming from the MCP server are turned on.
 
 As an example, [the Dagger team suggests](https://container-use.com/agent-integrations#zed) doing that with their [Container Use MCP server](https://zed.dev/extensions/mcp-server-container-use):
diff --git a/docs/src/ai/models.md b/docs/src/ai/models.md
index 09c26305d277d9afc592bac9b554f123957db03b..a86b873ef8aff112ceddbe7da000e4350023ec42 100644
--- a/docs/src/ai/models.md
+++ b/docs/src/ai/models.md
@@ -43,6 +43,8 @@ Zed's plans offer hosted versions of major LLMs with higher rate limits than dir
 |                        | OpenAI    | Cached Input        | $0.005                       | $0.0055                 |
 | Gemini 3.1 Pro         | Google    | Input               | $2.00                        | $2.20                   |
 |                        | Google    | Output              | $12.00                       | $13.20                  |
+| Gemini 3.1 Pro         | Google    | Input               | $2.00                        | $2.20                   |
+|                        | Google    | Output              | $12.00                       | $13.20                  |
 | Gemini 3 Pro           | Google    | Input               | $2.00                        | $2.20                   |
 |                        | Google    | Output              | $12.00                       | $13.20                  |
 | Gemini 3 Flash         | Google    | Input               | $0.30                        | $0.33                   |
@@ -68,7 +70,7 @@ As of February 19, 2026, Zed Pro serves newer model versions in place of the ret
 - Claude Sonnet 4 → Claude Sonnet 4.5 or Claude Sonnet 4.6
 - Claude Sonnet 3.7 (retired Feb 19) → Claude Sonnet 4.5 or Claude Sonnet 4.6
 - GPT-5.1 and GPT-5 → GPT-5.2 or GPT-5.2 Codex
-- Gemini 2.5 Pro → Gemini 3 Pro
+- Gemini 2.5 Pro → Gemini 3 Pro or Gemini 3.1 Pro
 - Gemini 2.5 Flash → Gemini 3 Flash
 
 ## Usage {#usage}
diff --git a/docs/src/collaboration/overview.md b/docs/src/collaboration/overview.md
index ce74a74ddc6c831433ce64dd67701fd221cf1eb3..97efdae088d1692ad5840e23c13bc50d4ecb75c7 100644
--- a/docs/src/collaboration/overview.md
+++ b/docs/src/collaboration/overview.md
@@ -19,3 +19,32 @@ The Collaboration Panel has two sections:
 > **Warning:** Sharing a project gives collaborators access to your local file system within that project. Only collaborate with people you trust.
 
 See the [Data and Privacy FAQs](https://zed.dev/faq#data-and-privacy) for more details.
+
+## Audio Settings {#audio-settings}
+
+### Selecting Audio Devices
+
+> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
+
+You can select specific input and output audio devices instead of using system defaults. To configure audio devices:
+
+1. Open {#kb zed::OpenSettings}
+2. Navigate to **Collaboration** > **Experimental**
+3. Use the **Output Audio Device** and **Input Audio Device** dropdowns to select your preferred devices
+
+Changes take effect immediately. If you select a device that becomes unavailable, Zed falls back to system defaults.
+
+To test your audio configuration, click **Test Audio** in the same section. This opens a window where you can verify your microphone and speaker work correctly with the selected devices.
+
+**JSON configuration:**
+
+```json [settings]
+{
+  "audio": {
+    "experimental.output_audio_device": "Device Name (device-id)",
+    "experimental.input_audio_device": "Device Name (device-id)"
+  }
+}
+```
+
+Set either value to `null` to use system defaults.
diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md
index 90fec03c0b96a05d9ab193da240d045314404204..4e9bbce822f2f0d87ac2a8c9617698acd5983243 100644
--- a/docs/src/configuring-languages.md
+++ b/docs/src/configuring-languages.md
@@ -136,6 +136,10 @@ Not all languages in Zed support toolchain discovery and selection, but for thos
 
 ### Configuring Language Servers
 
+> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
+
+When configuring language servers in your `settings.json`, autocomplete suggestions include all available LSP adapters recognized by Zed, not only those currently active for loaded languages. This helps you discover and configure language servers before opening files that use them.
+
 Many language servers accept custom configuration options. You can set these in the `lsp` section of your `settings.json`:
 
 ```json [settings]
diff --git a/docs/src/debugger.md b/docs/src/debugger.md
index 2a84821cac88097e61e744f41d74abefd21d3b8b..c659c1410b38166cf11da0af728e18f8c9282054 100644
--- a/docs/src/debugger.md
+++ b/docs/src/debugger.md
@@ -163,6 +163,16 @@ Some debug adapters (e.g. CodeLLDB and JavaScript) will also _verify_ whether yo
 All breakpoints enabled for a given project are also listed in "Breakpoints" item in your debugging session UI. From "Breakpoints" item in your UI you can also manage exception breakpoints.
 The debug adapter will then stop whenever an exception of a given kind occurs. Which exception types are supported depends on the debug adapter.
 
+## Working with Split Panes
+
+> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
+
+When debugging with multiple split panes open, Zed shows the active debug line in one pane and preserves your layout in others. If you have the same file open in multiple panes, the debugger picks a pane where the file is already the active tab—it won't switch tabs in panes where the file is inactive.
+
+Once the debugger picks a pane, it continues using that pane for subsequent breakpoints during the session. If you drag the tab with the active debug line to a different split, the debugger tracks the move and uses the new pane.
+
+This ensures the debugger doesn't disrupt your workflow when stepping through code across different files.
+
 ## Settings
 
 The settings for the debugger are grouped under the `debugger` key in `settings.json`:
diff --git a/docs/src/development.md b/docs/src/development.md
index 529ce2a69c08f2d3ebb77a5747762de642a1f841..d8ea0b3f980317ff1f38a8325534e57f321bd8de 100644
--- a/docs/src/development.md
+++ b/docs/src/development.md
@@ -86,6 +86,36 @@ For benchmarking unit tests, annotate them with the `#[perf]` attribute from the
 perf-test -p $CRATE` to benchmark them. See the rustdoc documentation on `crates/util_macros` and `tooling/perf` for
 in-depth examples and explanations.
 
+## ETW Profiling on Windows
+
+> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
+
+Zed supports Event Tracing for Windows (ETW) to capture detailed performance data. You can record CPU, GPU, disk I/O, and file I/O activity, with optional heap allocation tracking.
+
+### Recording a trace
+
+Open the command palette and run:
+
+- **`etw_tracing: Record Etw Trace`** — Records CPU, GPU, and I/O activity
+- **`etw_tracing: Record Etw Trace With Heap Tracing`** — Includes heap allocation data for the Zed process
+
+Zed prompts you to choose a save location for the `.etl` trace file.
+
+### Saving or canceling
+
+While recording:
+
+- **`etw_tracing: Save Etw Trace`** — Stops recording and saves the trace to disk
+- **`etw_tracing: Cancel Etw Trace`** — Stops recording without saving
+
+Zed buffers trace data in memory. Recordings automatically save after 60 seconds if you don't manually stop them.
+
+### Analyzing traces
+
+Open `.etl` files with [Windows Performance Analyzer](https://learn.microsoft.com/en-us/windows-hardware/test/wpt/windows-performance-analyzer) to inspect CPU stacks, GPU usage, disk I/O patterns, and heap allocations.
+
+**Note for existing keybindings**: The `etw_tracing::StopEtwTrace` action was renamed to `etw_tracing::SaveEtwTrace`. Update any custom keybindings.
+
 ## Contributor links
 
 - [CONTRIBUTING.md](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md)
diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md
index be5281b56091740d274b70b1ee11c348418cc9da..af6a41c26a6f70f073b2d7e45267871962bb1697 100644
--- a/docs/src/getting-started.md
+++ b/docs/src/getting-started.md
@@ -11,6 +11,14 @@ This guide covers the essential commands, environment setup, and navigation basi
 
 ## Quick Start
 
+### Welcome Page
+
+> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
+
+When you open Zed without a folder, you see the welcome page in the main editor area. The welcome page offers quick actions to open a folder, clone a repository, or view documentation. Once you open a folder or file, the welcome page disappears. If you split the editor into multiple panes, the welcome page appears only in the center pane when empty—other panes show a standard empty state.
+
+To reopen the welcome page, close all items in the center pane or use the command palette to search for "Welcome".
+
 ### 1. Open a Project
 
 Open a folder from the command line:
diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md
index e5f1f911a4e0257427a86b30c835abd2dfa7fd0f..1bacc3cacf4f556c9c3a06e59d6f3fac9b8c74b0 100644
--- a/docs/src/outline-panel.md
+++ b/docs/src/outline-panel.md
@@ -7,7 +7,9 @@ description: Navigate code structure with Zed's outline panel. View symbols, jum
 
 In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. The outline panel can be deployed via `cmd-shift-b` (`outline panel: toggle focus` via the command palette), or by clicking the `Outline Panel` button in the status bar.
 
-When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal-it displays the outline of the current buffer's symbols, as reported by tree-sitter. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file.
+> **Changed in Preview (v0.225).** See [release notes](/releases#0.225).
+
+When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal—it displays the outline of the current buffer's symbols. Each symbol entry shows its type prefix (such as "struct", "fn", "mod", "impl") along with the symbol name, helping you quickly identify what kind of symbol you're looking at. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file.
 
 ![Using the outline panel in a singleton buffer](https://zed.dev/img/outline-panel/singleton.png)
 
diff --git a/docs/src/tasks.md b/docs/src/tasks.md
index 2d8afdeefa19485433374b50cfba7f9fa3475f58..0fa659eb2cc58fe63536e721475b0093e0650618 100644
--- a/docs/src/tasks.md
+++ b/docs/src/tasks.md
@@ -223,6 +223,37 @@ This could be useful for launching a terminal application that you want to use i
 }
 ```
 
+## VS Code Task Format
+
+> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release.
+
+When importing VS Code tasks from `.vscode/tasks.json`, you can omit the `label` field. Zed automatically generates labels based on the task type:
+
+- **npm tasks**: `npm: 
+
+
+"#
+    )
+}
+
+fn make_gallery_html(examples: &[&str]) -> String {
+    let mut buttons = String::new();
+    for name in examples {
+        buttons.push_str(&format!(
+            "                \n"
+        ));
+    }
+
+    let first = examples.first().copied().unwrap_or("hello_web");
+
+    format!(
+        r##"
+
+
+    
+    
+    GPUI Web Examples
+    
+
+
+    
+ +
+
+ {first} + Open in new tab ↗ +
+ +
+
+ + + +"##, + count = examples.len(), + ) +} diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index c234f46f3dd2edc4bd861d7df46f966a1e623708..8b633edab6d81ad71c31e25c5171af076402fa9d 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -50,6 +50,7 @@ pub(crate) fn run_tests() -> Workflow { should_run_tests.guard(run_platform_tests(Platform::Mac)), should_run_tests.guard(doctests()), should_run_tests.guard(check_workspace_binaries()), + should_run_tests.guard(check_wasm()), should_run_tests.guard(check_dependencies()), // could be more specific here? should_check_docs.guard(check_docs()), should_check_licences.guard(check_licenses()), @@ -335,6 +336,38 @@ fn check_dependencies() -> NamedJob { ) } +fn check_wasm() -> NamedJob { + fn install_nightly_wasm_toolchain() -> Step { + named::bash( + "rustup toolchain install nightly --component rust-src --target wasm32-unknown-unknown", + ) + } + + fn cargo_check_wasm() -> Step { + named::bash(concat!( + "cargo +nightly -Zbuild-std=std,panic_abort ", + "check --target wasm32-unknown-unknown -p gpui_platform", + )) + .add_env(( + "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS", + "-C target-feature=+atomics,+bulk-memory,+mutable-globals", + )) + } + + named::job( + release_job(&[]) + .runs_on(runners::LINUX_LARGE) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(Platform::Linux)) + .add_step(steps::cache_rust_dependencies_namespace()) + .add_step(install_nightly_wasm_toolchain()) + .add_step(steps::setup_sccache(Platform::Linux)) + .add_step(cargo_check_wasm()) + .add_step(steps::show_sccache_stats(Platform::Linux)) + .add_step(steps::cleanup_cargo_config(Platform::Linux)), + ) +} + fn check_workspace_binaries() -> NamedJob { named::job( release_job(&[]) From d858f5236f72aec6369dd3b0eec15acab987dc67 Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Thu, 26 Feb 2026 13:53:17 
-0500 Subject: [PATCH 133/548] settings: Remove unused `file_finder.git_status` setting (#49889) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit From the looks of it, this setting was never used by the File Finder. It also doesn’t make much sense to show git info in the File Finder. The Project Panel already exposes this information to the user. Closes #49709. | Before | After | |--------|--------| | image | image | Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Removed unused `file_finder.git_status` setting --- assets/settings/default.json | 2 -- .../settings_content/src/settings_content.rs | 4 ---- crates/settings_ui/src/page_data.rs | 20 +------------------ docs/src/visual-customization.md | 1 - 4 files changed, 1 insertion(+), 26 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index f9f4fb417e4b0664170f9f6958966018bb48bc63..cb1e30bbee45ba06de8866fe3b8b6f0ab38b61da 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1271,8 +1271,6 @@ // // Default: true "skip_focus_for_active_in_search": true, - // Whether to show the git status in the file finder. - "git_status": true, // Whether to use gitignored files when searching. // Only the file Zed had indexed will be used, not necessary all the gitignored files. 
// diff --git a/crates/settings_content/src/settings_content.rs b/crates/settings_content/src/settings_content.rs index 8c4845e05cbf16d0aacb089a5d16dcdb0ff6d7c7..f94c6a0b98d7fa23686dc1c89012e3b1fe476c70 100644 --- a/crates/settings_content/src/settings_content.rs +++ b/crates/settings_content/src/settings_content.rs @@ -716,10 +716,6 @@ pub struct FileFinderSettingsContent { /// /// Default: true pub skip_focus_for_active_in_search: Option, - /// Determines whether to show the git status in the file finder - /// - /// Default: true - pub git_status: Option, /// Whether to use gitignored files when searching. /// Only the file Zed had indexed will be used, not necessary all the gitignored files. /// diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 5b3f5480148c30ef89bcae29b23986eac29808d9..afc84a9f9b91e32f3a110e19dc78db5634369458 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -3154,7 +3154,7 @@ fn search_and_files_page() -> SettingsPage { ] } - fn file_finder_section() -> [SettingsPageItem; 6] { + fn file_finder_section() -> [SettingsPageItem; 5] { [ SettingsPageItem::SectionHeader("File Finder"), // todo: null by default @@ -3242,24 +3242,6 @@ fn search_and_files_page() -> SettingsPage { metadata: None, files: USER, }), - SettingsPageItem::SettingItem(SettingItem { - title: "Git Status", - description: "Show the Git status in the file finder.", - field: Box::new(SettingField { - json_path: Some("file_finder.git_status"), - pick: |settings_content| { - settings_content.file_finder.as_ref()?.git_status.as_ref() - }, - write: |settings_content, value| { - settings_content - .file_finder - .get_or_insert_default() - .git_status = value; - }, - }), - metadata: None, - files: USER, - }), ] } diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 7f84fe8b289295c2562b5eba21ec512948da3552..0e18c59fda21014a80ea8f362486711e204016e0 100644 --- 
a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -436,7 +436,6 @@ TBD: Centered layout related settings "file_finder": { "file_icons": true, // Show/hide file icons "modal_max_width": "small", // Horizontal size: small, medium, large, xlarge, full - "git_status": true, // Show the git status for each entry "include_ignored": null // gitignored files in results: true, false, null }, ``` From 6a749380aa45d2c2e6eb3307e67ab6739cc0a1ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Thu, 26 Feb 2026 21:19:41 +0100 Subject: [PATCH 134/548] Add fast mode toggle in agent panel (#49714) This is a staff only toggle for now, since the consequences of activating it are not obvious and quite dire (tokens costs 6 times more). Also, persist thinking, thinking effort and fast mode in DbThread so the thinking mode toggle and thinking effort are persisted. Release Notes: - Agent: The thinking mode toggle and thinking effort are now persisted when selecting a thread from history. 
--- assets/icons/fast_forward.svg | 4 ++ assets/icons/fast_forward_off.svg | 5 ++ assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 1 + assets/keymaps/default-windows.json | 1 + crates/agent/src/db.rs | 16 +++++ crates/agent/src/edit_agent.rs | 1 + crates/agent/src/thread.rs | 33 +++++---- crates/agent/src/thread_store.rs | 3 + crates/agent_ui/src/agent_ui.rs | 2 + crates/agent_ui/src/buffer_codegen.rs | 2 + crates/agent_ui/src/connection_view.rs | 4 +- .../src/connection_view/thread_view.rs | 67 ++++++++++++++++++- .../agent_ui/src/terminal_inline_assistant.rs | 1 + crates/anthropic/src/anthropic.rs | 10 +++ .../assistant_text_thread/src/text_thread.rs | 1 + .../cloud_llm_client/src/cloud_llm_client.rs | 2 + .../src/anthropic_client.rs | 3 + crates/eval/src/instance.rs | 1 + crates/git_ui/src/git_panel.rs | 1 + crates/icons/src/icons.rs | 2 + crates/language_model/src/language_model.rs | 4 ++ crates/language_model/src/request.rs | 27 ++++++++ .../language_models/src/provider/anthropic.rs | 3 + crates/language_models/src/provider/cloud.rs | 4 ++ .../src/provider/copilot_chat.rs | 1 + .../language_models/src/provider/mistral.rs | 2 + .../language_models/src/provider/open_ai.rs | 3 + crates/rules_library/src/rules_library.rs | 1 + 29 files changed, 190 insertions(+), 16 deletions(-) create mode 100644 assets/icons/fast_forward.svg create mode 100644 assets/icons/fast_forward_off.svg diff --git a/assets/icons/fast_forward.svg b/assets/icons/fast_forward.svg new file mode 100644 index 0000000000000000000000000000000000000000..240bc65aca3558561bb52f2f8c5e860d38596223 --- /dev/null +++ b/assets/icons/fast_forward.svg @@ -0,0 +1,4 @@ + + + + diff --git a/assets/icons/fast_forward_off.svg b/assets/icons/fast_forward_off.svg new file mode 100644 index 0000000000000000000000000000000000000000..8ea7c41c6582b031f066f590dd425641945aadc9 --- /dev/null +++ b/assets/icons/fast_forward_off.svg @@ -0,0 +1,5 @@ + + + + + diff --git 
a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index f3247e936f2b6d2d5ee5275304ea445729046afa..9b8f2d337b1f1073bca818cf0b9c66773a3ce4e9 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -333,6 +333,7 @@ "ctrl-alt-k": "agent::ToggleThinkingMode", "ctrl-alt-'": "agent::ToggleThinkingEffortMenu", "ctrl-'": "agent::CycleThinkingEffort", + "ctrl-alt-.": "agent::ToggleFastMode", }, }, { diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 77e01368462cdfcce24cf1cba39d6a2a11cdcce0..5f210cb4da35f9909767035c941289ee24a2ee3f 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -377,6 +377,7 @@ "cmd-alt-k": "agent::ToggleThinkingMode", "cmd-alt-'": "agent::ToggleThinkingEffortMenu", "ctrl-'": "agent::CycleThinkingEffort", + "cmd-alt-.": "agent::ToggleFastMode", }, }, { diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 51b221c8389d1588d80a8186ddceb68e8cb025c7..19f75f858cd45192c4cf30dd6bd0799046c26268 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -335,6 +335,7 @@ "ctrl-alt-k": "agent::ToggleThinkingMode", "ctrl-alt-'": "agent::ToggleThinkingEffortMenu", "ctrl-'": "agent::CycleThinkingEffort", + "ctrl-alt-.": "agent::ToggleFastMode", }, }, { diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index fa4b37dba3e789b499bfe5db4f0b76ccf12e5a09..7dba2f078adac47b951dcec9dd30883fdea618ad 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -8,6 +8,7 @@ use collections::{HashMap, IndexMap}; use futures::{FutureExt, future::Shared}; use gpui::{BackgroundExecutor, Global, Task}; use indoc::indoc; +use language_model::Speed; use parking_lot::Mutex; use serde::{Deserialize, Serialize}; use sqlez::{ @@ -53,6 +54,12 @@ pub struct DbThread { pub imported: bool, #[serde(default)] pub subagent_context: Option, + #[serde(default)] + pub speed: 
Option, + #[serde(default)] + pub thinking_enabled: bool, + #[serde(default)] + pub thinking_effort: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -91,6 +98,9 @@ impl SharedThread { profile: None, imported: true, subagent_context: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, } } @@ -265,6 +275,9 @@ impl DbThread { profile: thread.profile, imported: false, subagent_context: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, }) } } @@ -570,6 +583,9 @@ mod tests { profile: None, imported: false, subagent_context: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, } } diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index 9f2f2f1877e20620373b1a7aacbf2f7b3a407bfd..288a3178f3c4501ae9de65d19624b66cbda2548d 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -749,6 +749,7 @@ impl EditAgent { temperature: None, thinking_allowed: self.thinking_allowed, thinking_effort: None, + speed: None, }; Ok(self.model.stream_completion_text(request, cx).await?.stream) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index cfac50aba7daa9bf799b561bb06f14309bcf53dd..9fccf441a54bb1c3705ad1db6b6cc1ac079e38ff 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -40,7 +40,8 @@ use language_model::{ LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse, - LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID, + LanguageModelToolUseId, Role, SelectedModel, Speed, StopReason, TokenUsage, + ZED_CLOUD_PROVIDER_ID, }; use project::Project; use prompt_store::ProjectContext; @@ -884,6 +885,7 @@ pub struct Thread { summarization_model: Option>, thinking_enabled: bool, 
thinking_effort: Option, + speed: Option, prompt_capabilities_tx: watch::Sender, pub(crate) prompt_capabilities_rx: watch::Receiver, pub(crate) project: Entity, @@ -977,6 +979,7 @@ impl Thread { model, summarization_model: None, thinking_enabled: enable_thinking, + speed: None, thinking_effort, prompt_capabilities_tx, prompt_capabilities_rx, @@ -1134,10 +1137,6 @@ impl Thread { let profile_id = db_thread .profile .unwrap_or_else(|| settings.default_profile.clone()); - let thinking_effort = settings - .default_model - .as_ref() - .and_then(|model| model.effort.clone()); let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { db_thread @@ -1166,12 +1165,6 @@ impl Thread { watch::channel(Self::prompt_capabilities(model.as_deref())); let action_log = cx.new(|_| ActionLog::new(project.clone())); - // TODO: We should serialize the user's configured thinking parameter on `DbThread` - // rather than deriving it from the model's capability. A user may have explicitly - // toggled thinking off for a model that supports it, and we'd lose that preference here. 
- let enable_thinking = model - .as_deref() - .is_some_and(|model| model.supports_thinking()); Self { id, @@ -1199,8 +1192,9 @@ impl Thread { templates, model, summarization_model: None, - thinking_enabled: enable_thinking, - thinking_effort, + thinking_enabled: db_thread.thinking_enabled, + thinking_effort: db_thread.thinking_effort, + speed: db_thread.speed, project, action_log, updated_at: db_thread.updated_at, @@ -1230,6 +1224,9 @@ impl Thread { profile: Some(self.profile_id.clone()), imported: self.imported, subagent_context: self.subagent_context.clone(), + speed: self.speed, + thinking_enabled: self.thinking_enabled, + thinking_effort: self.thinking_effort.clone(), }; cx.background_spawn(async move { @@ -1318,6 +1315,15 @@ impl Thread { cx.notify(); } + pub fn speed(&self) -> Option { + self.speed + } + + pub fn set_speed(&mut self, speed: Speed, cx: &mut Context) { + self.speed = Some(speed); + cx.notify(); + } + pub fn last_message(&self) -> Option { if let Some(message) = self.pending_message.clone() { Some(Message::Agent(message)) @@ -2485,6 +2491,7 @@ impl Thread { temperature: AgentSettings::temperature_for_model(model, cx), thinking_allowed: self.thinking_enabled, thinking_effort: self.thinking_effort.clone(), + speed: self.speed(), }; log::debug!("Completion request built successfully"); diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index d5526b0953cb4342fcbf3b13a883385dfcf609ea..3769355bc8d3495f614ccd6787bb3a33d58e8f2f 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -133,6 +133,9 @@ mod tests { profile: None, imported: false, subagent_context: None, + speed: None, + thinking_enabled: false, + thinking_effort: None, } } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 1eca5a12c12cfc5e96faa83239735a1a1c9522cd..967b53bd200e6dc8e863a86602b2ac5f590406e2 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -160,6 
+160,8 @@ actions!( CycleThinkingEffort, /// Toggles the thinking effort selector menu open or closed. ToggleThinkingEffortMenu, + /// Toggles fast mode for models that support it. + ToggleFastMode, ] ); diff --git a/crates/agent_ui/src/buffer_codegen.rs b/crates/agent_ui/src/buffer_codegen.rs index 0376fda47e0b20820e19cf9cc2b09493b06898b8..4f7bf084b7e96a14e6ecaafb04adfdbb6712e574 100644 --- a/crates/agent_ui/src/buffer_codegen.rs +++ b/crates/agent_ui/src/buffer_codegen.rs @@ -547,6 +547,7 @@ impl CodegenAlternative { messages, thinking_allowed: false, thinking_effort: None, + speed: None, } })) } @@ -626,6 +627,7 @@ impl CodegenAlternative { messages: vec![request_message], thinking_allowed: false, thinking_effort: None, + speed: None, } })) } diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index 8d38a15544f193e6b8a7aa458a24720d19163cd5..df06ed2bae7f77cfb366f3499097ab8c43bdf78c 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -75,8 +75,8 @@ use crate::{ ClearMessageQueue, CycleFavoriteModels, CycleModeSelector, CycleThinkingEffort, EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAddContextMenu, OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, RemoveFirstQueuedMessage, - SelectPermissionGranularity, SendImmediately, SendNextQueuedMessage, ToggleProfileSelector, - ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject, + SelectPermissionGranularity, SendImmediately, SendNextQueuedMessage, ToggleFastMode, + ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject, }; const STOPWATCH_THRESHOLD: Duration = Duration::from_secs(30); diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index ac605c7990359c90d172d083388be39476cd9656..499b11e5c08bd9b2c811e4cf5119bf7f71663c4b 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ 
b/crates/agent_ui/src/connection_view/thread_view.rs @@ -1,6 +1,6 @@ use cloud_api_types::{SubmitAgentThreadFeedbackBody, SubmitAgentThreadFeedbackCommentsBody}; use gpui::{Corner, List}; -use language_model::LanguageModelEffortLevel; +use language_model::{LanguageModelEffortLevel, Speed}; use settings::update_settings_file; use ui::{ButtonLike, SplitButton, SplitButtonStyle, Tab}; @@ -2526,6 +2526,7 @@ impl ThreadView { .gap_0p5() .child(self.render_add_context_button(cx)) .child(self.render_follow_toggle(cx)) + .children(self.render_fast_mode_control(cx)) .children(self.render_thinking_control(cx)), ) .child( @@ -2950,6 +2951,49 @@ impl ThreadView { } } + fn fast_mode_available(&self, cx: &Context) -> bool { + if !cx.is_staff() { + return false; + } + self.as_native_thread(cx) + .and_then(|thread| thread.read(cx).model()) + .map(|model| model.supports_fast_mode()) + .unwrap_or(false) + } + + fn render_fast_mode_control(&self, cx: &mut Context) -> Option { + if !self.fast_mode_available(cx) { + return None; + } + + let thread = self.as_native_thread(cx)?.read(cx); + + let (tooltip_label, color, icon) = if matches!(thread.speed(), Some(Speed::Fast)) { + ("Disable Fast Mode", Color::Muted, IconName::FastForward) + } else { + ( + "Enable Fast Mode", + Color::Custom(cx.theme().colors().icon_disabled.opacity(0.8)), + IconName::FastForwardOff, + ) + }; + + let focus_handle = self.message_editor.focus_handle(cx); + + Some( + IconButton::new("fast-mode", icon) + .icon_size(IconSize::Small) + .icon_color(color) + .tooltip(move |_, cx| { + Tooltip::for_action_in(tooltip_label, &ToggleFastMode, &focus_handle, cx) + }) + .on_click(cx.listener(move |this, _, _window, cx| { + this.toggle_fast_mode(cx); + })) + .into_any_element(), + ) + } + fn render_thinking_control(&self, cx: &mut Context) -> Option { let thread = self.as_native_thread(cx)?.read(cx); let model = thread.model()?; @@ -7089,6 +7133,24 @@ impl ThreadView { }); } + fn toggle_fast_mode(&mut self, cx: &mut Context) 
{ + if !self.fast_mode_available(cx) { + return; + } + let Some(thread) = self.as_native_thread(cx) else { + return; + }; + thread.update(cx, |thread, cx| { + thread.set_speed( + thread + .speed() + .map(|speed| speed.toggle()) + .unwrap_or(Speed::Fast), + cx, + ); + }); + } + fn cycle_thinking_effort(&mut self, cx: &mut Context) { let Some(thread) = self.as_native_thread(cx) else { return; @@ -7193,6 +7255,9 @@ impl Render for ThreadView { .on_action(cx.listener(Self::handle_select_permission_granularity)) .on_action(cx.listener(Self::open_permission_dropdown)) .on_action(cx.listener(Self::open_add_context_menu)) + .on_action(cx.listener(|this, _: &ToggleFastMode, _window, cx| { + this.toggle_fast_mode(cx); + })) .on_action(cx.listener(|this, _: &ToggleThinkingMode, _window, cx| { if let Some(thread) = this.as_native_thread(cx) { thread.update(cx, |thread, cx| { diff --git a/crates/agent_ui/src/terminal_inline_assistant.rs b/crates/agent_ui/src/terminal_inline_assistant.rs index 2d424c3e1a8ffd33c6933bd50991596bb07a44b2..3df3c1faaed9e02b659bc75b09257e81e96ebc03 100644 --- a/crates/agent_ui/src/terminal_inline_assistant.rs +++ b/crates/agent_ui/src/terminal_inline_assistant.rs @@ -276,6 +276,7 @@ impl TerminalInlineAssistant { temperature, thinking_allowed: false, thinking_effort: None, + speed: None, } })) } diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs index 56baf4b58fe9ac568ea22012234510ff617fab25..6bff2be4c15841de597309b626e768bbf79e880a 100644 --- a/crates/anthropic/src/anthropic.rs +++ b/crates/anthropic/src/anthropic.rs @@ -977,6 +977,8 @@ pub struct Request { #[serde(default, skip_serializing_if = "Vec::is_empty")] pub stop_sequences: Vec, #[serde(default, skip_serializing_if = "Option::is_none")] + pub speed: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] pub temperature: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub top_k: Option, @@ -984,6 +986,14 @@ pub struct Request { 
pub top_p: Option, } +#[derive(Debug, Default, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum Speed { + #[default] + Standard, + Fast, +} + #[derive(Debug, Serialize, Deserialize)] struct StreamingRequest { #[serde(flatten)] diff --git a/crates/assistant_text_thread/src/text_thread.rs b/crates/assistant_text_thread/src/text_thread.rs index 18b37808b936e354614f6681bbcb263b184f832c..34007868f9f128fa80f09f884ccbaf57ffd103c1 100644 --- a/crates/assistant_text_thread/src/text_thread.rs +++ b/crates/assistant_text_thread/src/text_thread.rs @@ -2275,6 +2275,7 @@ impl TextThread { temperature: model.and_then(|model| AgentSettings::temperature_for_model(model, cx)), thinking_allowed: true, thinking_effort: None, + speed: None, }; for message in self.messages(cx) { if message.status != MessageStatus::Done { diff --git a/crates/cloud_llm_client/src/cloud_llm_client.rs b/crates/cloud_llm_client/src/cloud_llm_client.rs index f69c279b24718b76fe2ae10f066d57324fe03461..62986b311e07c8b4439246e36c44318d23012e0b 100644 --- a/crates/cloud_llm_client/src/cloud_llm_client.rs +++ b/crates/cloud_llm_client/src/cloud_llm_client.rs @@ -302,6 +302,8 @@ pub struct LanguageModel { pub supports_tools: bool, pub supports_images: bool, pub supports_thinking: bool, + #[serde(default)] + pub supports_fast_mode: bool, pub supported_effort_levels: Vec, #[serde(default)] pub supports_streaming_tools: bool, diff --git a/crates/edit_prediction_cli/src/anthropic_client.rs b/crates/edit_prediction_cli/src/anthropic_client.rs index 784fa711b0058e3d2884460f6ca6f5300fc44a9a..869635c53a15e5c3f6cdaca7632a3e99f0b0bec1 100644 --- a/crates/edit_prediction_cli/src/anthropic_client.rs +++ b/crates/edit_prediction_cli/src/anthropic_client.rs @@ -50,6 +50,7 @@ impl PlainLlmClient { metadata: None, output_config: None, stop_sequences: Vec::new(), + speed: None, temperature: None, top_k: None, top_p: None, @@ -89,6 +90,7 @@ impl PlainLlmClient { metadata: None, output_config: None, 
stop_sequences: Vec::new(), + speed: None, temperature: None, top_k: None, top_p: None, @@ -578,6 +580,7 @@ impl BatchingLlmClient { temperature: None, top_k: None, top_p: None, + speed: None, }; let custom_id = format!("req_hash_{}", hash); diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index 59593578f1ffc512447f08fd728c6619943d6b6e..54e6ab0b925191c16885b8b8ed89369039c467f6 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -564,6 +564,7 @@ impl ExampleInstance { stop: Vec::new(), thinking_allowed: true, thinking_effort: None, + speed: None, }; let model = model.clone(); diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 1c8c09d7fdeaa51b8780f29aa13028355864924f..b042d66ce9ac5c45af2e5701da2d83db3c3ab907 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -2786,6 +2786,7 @@ impl GitPanel { temperature, thinking_allowed: false, thinking_effort: None, + speed: None, }; let stream = model.stream_completion_text(request, cx); diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index d6356f831ea9bbbaec5313da1a5b56f101471411..5738d763fcf7ff50b67f5a77acb918250a537124 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -112,6 +112,8 @@ pub enum IconName { ExpandUp, ExpandVertical, Eye, + FastForward, + FastForwardOff, File, FileCode, FileDiff, diff --git a/crates/language_model/src/language_model.rs b/crates/language_model/src/language_model.rs index 313a7a3b4d94726a2e6619eddd0fd14e5e4c30e4..c403774499c9dcb384e93cf19367dc28e336aa60 100644 --- a/crates/language_model/src/language_model.rs +++ b/crates/language_model/src/language_model.rs @@ -613,6 +613,10 @@ pub trait LanguageModel: Send + Sync { false } + fn supports_fast_mode(&self) -> bool { + false + } + /// Returns the list of supported effort levels that can be used when thinking. 
fn supported_effort_levels(&self) -> Vec { Vec::new() diff --git a/crates/language_model/src/request.rs b/crates/language_model/src/request.rs index cb2f6a27de65739bb684626ce5bd985a187bf28f..9be3002deae758ee99432842a31e3b90754ada0f 100644 --- a/crates/language_model/src/request.rs +++ b/crates/language_model/src/request.rs @@ -453,6 +453,33 @@ pub struct LanguageModelRequest { pub temperature: Option, pub thinking_allowed: bool, pub thinking_effort: Option, + pub speed: Option, +} + +#[derive(Clone, Copy, Default, Debug, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +pub enum Speed { + #[default] + Standard, + Fast, +} + +impl Speed { + pub fn toggle(self) -> Self { + match self { + Speed::Standard => Speed::Fast, + Speed::Fast => Speed::Standard, + } + } +} + +impl From for anthropic::Speed { + fn from(speed: Speed) -> Self { + match speed { + Speed::Standard => anthropic::Speed::Standard, + Speed::Fast => anthropic::Speed::Fast, + } + } } #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] diff --git a/crates/language_models/src/provider/anthropic.rs b/crates/language_models/src/provider/anthropic.rs index 5b7ad62e0e66977465502d61f3db3707274a9718..d3bd129248406211e43e69fc5880310a9dedbc97 100644 --- a/crates/language_models/src/provider/anthropic.rs +++ b/crates/language_models/src/provider/anthropic.rs @@ -725,6 +725,7 @@ pub fn into_anthropic( metadata: None, output_config: None, stop_sequences: Vec::new(), + speed: request.speed.map(From::from), temperature: request.temperature.or(Some(default_temperature)), top_k: None, top_p: None, @@ -1105,6 +1106,7 @@ mod tests { tool_choice: None, thinking_allowed: true, thinking_effort: None, + speed: None, }; let anthropic_request = into_anthropic( @@ -1167,6 +1169,7 @@ mod tests { tools: vec![], tool_choice: None, thinking_allowed: true, + speed: None, }; request.messages.push(LanguageModelRequestMessage { role: Role::Assistant, diff --git a/crates/language_models/src/provider/cloud.rs 
b/crates/language_models/src/provider/cloud.rs index f822b89916a60c32b5f076580f960d47c6a1463c..19009013bf84ad9751e9ed0de2d3338b279a258e 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -571,6 +571,10 @@ impl LanguageModel for CloudLanguageModel { self.model.supports_thinking } + fn supports_fast_mode(&self) -> bool { + self.model.supports_fast_mode + } + fn supported_effort_levels(&self) -> Vec { self.model .supported_effort_levels diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index 55ca0e526243dbbcb9504ea3948b192d79a02da1..4363430f865de63ed5fec0d6b40b085d9413fc2a 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -930,6 +930,7 @@ fn into_copilot_responses( temperature, thinking_allowed: _, thinking_effort: _, + speed: _, } = request; let mut input_items: Vec = Vec::new(); diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 3cc583ddde1cb03a4fd312b36f4358c0fbf3b4c1..02d46dcaa7ce7acc76d85c93cad610a7d2489bf0 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -883,6 +883,7 @@ mod tests { stop: vec![], thinking_allowed: true, thinking_effort: None, + speed: Default::default(), }; let (mistral_request, affinity) = @@ -919,6 +920,7 @@ mod tests { stop: vec![], thinking_allowed: true, thinking_effort: None, + speed: None, }; let (mistral_request, _) = into_mistral(request, mistral::Model::Pixtral12BLatest, None); diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 40cc67098a76d0430f597feb8f1045859863486a..7fb65df0a534c7600f7315fd85d7adda0d66314a 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -555,6 +555,7 @@ 
pub fn into_open_ai_response( temperature, thinking_allowed: _, thinking_effort: _, + speed: _, } = request; let mut input_items = Vec::new(); @@ -1435,6 +1436,7 @@ mod tests { temperature: None, thinking_allowed: true, thinking_effort: None, + speed: None, }; // Validate that all models are supported by tiktoken-rs @@ -1573,6 +1575,7 @@ mod tests { temperature: None, thinking_allowed: false, thinking_effort: None, + speed: None, }; let response = into_open_ai_response( diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index b3aa0301f204e97e6b1acda2a5cff4479b51c590..a89657e29680ccfd759fe63efcc837d883ef7590 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -1106,6 +1106,7 @@ impl RulesLibrary { temperature: None, thinking_allowed: true, thinking_effort: None, + speed: None, }, cx, ) From da2bed1930d1e0b3bfaa7b27a96170543f9629c4 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Thu, 26 Feb 2026 21:43:34 +0100 Subject: [PATCH 135/548] agent: Rename run_turn to send in subagent handle (#50242) Align better with naming in thread. 
I grabbed the wrong function name :D Release Notes: - N/A --- crates/agent/src/agent.rs | 2 +- crates/agent/src/tests/mod.rs | 2 +- crates/agent/src/thread.rs | 2 +- crates/agent/src/tools/spawn_agent_tool.rs | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 7bf0468d3a65a619a70efd1e7e67f301402ad20c..e110f9c0514e2a030b632872d1df4e3a66973c97 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -1733,7 +1733,7 @@ impl SubagentHandle for NativeSubagentHandle { self.session_id.clone() } - fn run_turn(&self, message: String, cx: &AsyncApp) -> Task> { + fn send(&self, message: String, cx: &AsyncApp) -> Task> { let thread = self.subagent_thread.clone(); let acp_thread = self.acp_thread.clone(); let subagent_session_id = self.session_id.clone(); diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 3643704802d673a5b18075c7edbc684b68578219..5262414631c7f1f329a7de941424e0a0dfa8b1b9 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -167,7 +167,7 @@ impl SubagentHandle for FakeSubagentHandle { self.session_id.clone() } - fn run_turn(&self, _message: String, cx: &AsyncApp) -> Task> { + fn send(&self, _message: String, cx: &AsyncApp) -> Task> { let task = self.wait_for_summary_task.clone(); cx.background_spawn(async move { Ok(task.await) }) } diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 9fccf441a54bb1c3705ad1db6b6cc1ac079e38ff..63e180e7a9686991ba67e813c51b65bcc5a8bedf 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -606,7 +606,7 @@ pub trait TerminalHandle { pub trait SubagentHandle { fn id(&self) -> acp::SessionId; - fn run_turn(&self, message: String, cx: &AsyncApp) -> Task>; + fn send(&self, message: String, cx: &AsyncApp) -> Task>; } pub trait ThreadEnvironment { diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs 
index 8c97b222a901744d77429cba15d03686e31fbde2..f46e85ce26d9194047ef62223393db0ac30f0f4b 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -125,7 +125,7 @@ impl AgentTool for SpawnAgentTool { Ok((subagent, subagent_session_id)) })?; - match subagent.run_turn(input.message, cx).await { + match subagent.send(input.message, cx).await { Ok(output) => { event_stream.update_fields( acp::ToolCallUpdateFields::new().content(vec![output.clone().into()]), From cbbcb1e10165e11df3e23d0defb1d58ec954b277 Mon Sep 17 00:00:00 2001 From: Nikhil Pandey Date: Thu, 26 Feb 2026 17:03:46 -0500 Subject: [PATCH 136/548] acp: Fix stale ACP reasoning-effort options when model switch keeps same config IDs (#50246) ## Problem Zed's ACP config-options UI was only rebuilding selectors when the set of config option IDs changed. For many model switches, the IDs stay the same (`mode`, `model`, `reasoning_effort`) while the valid values for `reasoning_effort` change by model. As a result, the picker could show stale values (for example, missing `xhigh` on `gpt-5.3-codex`, or showing extra `xhigh` after switching away). This is especially problematic for providers like Copilot that expose multiple agents/models with different reasoning-level capabilities. ## Fix Rebuild ACP config-option selectors on every `config_option_update`, not only when config IDs change. This refreshes cached picker entries whenever model-specific option values change, even if option IDs are unchanged. ## User Impact - Reasoning-effort picker now reflects the selected model immediately on Claude <-> GPT transitions. - Prevents stale or invalid effort choices in ACP sessions. ## Validation - Manual validation: switch across models with different reasoning-effort sets and confirm picker updates immediately. - Local `cargo check -p agent_ui` remains blocked by unrelated pre-existing `livekit-protocol` compile errors in this checkout. 
## Files Changed - `crates/agent_ui/src/acp/config_options.rs` Release Notes: - acp: Fix for config selectors not always being refreshed --------- Co-authored-by: Ben Brandt --- crates/agent_ui/src/config_options.rs | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/crates/agent_ui/src/config_options.rs b/crates/agent_ui/src/config_options.rs index 387069cd1671fa811ad3933d943f5d691d848b37..458411d4d3af3f1c85dc57a1e940515e8aabb23a 100644 --- a/crates/agent_ui/src/config_options.rs +++ b/crates/agent_ui/src/config_options.rs @@ -49,7 +49,7 @@ impl ConfigOptionsView { if let Some(mut rx) = rx { while let Ok(()) = rx.recv().await { this.update_in(cx, |this, window, cx| { - this.refresh_selectors_if_needed(window, cx); + this.rebuild_selectors(window, cx); cx.notify(); }) .log_err(); @@ -184,15 +184,10 @@ impl ConfigOptionsView { .collect() } - fn refresh_selectors_if_needed(&mut self, window: &mut Window, cx: &mut Context) { - let current_ids = Self::config_option_ids(&self.config_options); - if current_ids != self.config_option_ids { - self.config_option_ids = current_ids; - self.rebuild_selectors(window, cx); - } - } - fn rebuild_selectors(&mut self, window: &mut Window, cx: &mut Context) { + // Config option updates can mutate option values for existing IDs (for example, + // reasoning levels after a model switch). Rebuild to refresh cached picker entries. 
+ self.config_option_ids = Self::config_option_ids(&self.config_options); self.selectors = Self::build_selectors( &self.config_options, &self.agent_server, From f1c5ed324b95aafa3a44d3e8b06f93f27c7d6f61 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 26 Feb 2026 14:32:48 -0800 Subject: [PATCH 137/548] Add folder_paths for project grouping (#50249) Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/agent/src/agent.rs | 16 ++- crates/agent/src/db.rs | 119 +++++++++++++++--- crates/agent/src/thread_store.rs | 76 +++++++++-- crates/agent_ui/src/agent_panel.rs | 2 +- .../src/connection_view/thread_view.rs | 2 +- crates/{workspace => util}/src/path_list.rs | 16 ++- crates/util/src/util.rs | 1 + crates/workspace/src/workspace.rs | 6 +- crates/zed/src/main.rs | 7 +- 9 files changed, 214 insertions(+), 31 deletions(-) rename crates/{workspace => util}/src/path_list.rs (92%) diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index e110f9c0514e2a030b632872d1df4e3a66973c97..85b943da4bb65b038100b2b842d81bc34662325d 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -51,6 +51,7 @@ use std::path::{Path, PathBuf}; use std::rc::Rc; use std::sync::Arc; use util::ResultExt; +use util::path_list::PathList; use util::rel_path::RelPath; #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -848,13 +849,26 @@ impl NativeAgent { let Some(session) = self.sessions.get_mut(&id) else { return; }; + + let folder_paths = PathList::new( + &self + .project + .read(cx) + .visible_worktrees(cx) + .map(|worktree| worktree.read(cx).abs_path().to_path_buf()) + .collect::>(), + ); + let thread_store = 
self.thread_store.clone(); session.pending_save = cx.spawn(async move |_, cx| { let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else { return; }; let db_thread = db_thread.await; - database.save_thread(id, db_thread).await.log_err(); + database + .save_thread(id, db_thread, folder_paths) + .await + .log_err(); thread_store.update(cx, |store, cx| store.reload(cx)); }); } diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index 7dba2f078adac47b951dcec9dd30883fdea618ad..5a14e920e52c18fb6341e09fa9f747b3c5019f1d 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -18,6 +18,7 @@ use sqlez::{ }; use std::sync::Arc; use ui::{App, SharedString}; +use util::path_list::PathList; use zed_env_vars::ZED_STATELESS; pub type DbMessage = crate::Message; @@ -31,6 +32,9 @@ pub struct DbThreadMetadata { #[serde(alias = "summary")] pub title: SharedString, pub updated_at: DateTime, + /// The workspace folder paths this thread was created against, sorted + /// lexicographically. Used for grouping threads by project in the sidebar. + pub folder_paths: PathList, } #[derive(Debug, Serialize, Deserialize)] @@ -382,6 +386,14 @@ impl ThreadsDatabase { s().ok(); } + if let Ok(mut s) = connection.exec(indoc! 
{" + ALTER TABLE threads ADD COLUMN folder_paths TEXT; + ALTER TABLE threads ADD COLUMN folder_paths_order TEXT; + "}) + { + s().ok(); + } + let db = Self { executor, connection: Arc::new(Mutex::new(connection)), @@ -394,6 +406,7 @@ impl ThreadsDatabase { connection: &Arc>, id: acp::SessionId, thread: DbThread, + folder_paths: &PathList, ) -> Result<()> { const COMPRESSION_LEVEL: i32 = 3; @@ -410,6 +423,16 @@ impl ThreadsDatabase { .subagent_context .as_ref() .map(|ctx| ctx.parent_thread_id.0.clone()); + let serialized_folder_paths = folder_paths.serialize(); + let (folder_paths_str, folder_paths_order_str): (Option, Option) = + if folder_paths.is_empty() { + (None, None) + } else { + ( + Some(serialized_folder_paths.paths), + Some(serialized_folder_paths.order), + ) + }; let json_data = serde_json::to_string(&SerializedThread { thread, version: DbThread::VERSION, @@ -421,11 +444,20 @@ impl ThreadsDatabase { let data_type = DataType::Zstd; let data = compressed; - let mut insert = connection.exec_bound::<(Arc, Option>, String, String, DataType, Vec)>(indoc! {" - INSERT OR REPLACE INTO threads (id, parent_id, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?, ?) + let mut insert = connection.exec_bound::<(Arc, Option>, Option, Option, String, String, DataType, Vec)>(indoc! {" + INSERT OR REPLACE INTO threads (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, data_type, data) VALUES (?, ?, ?, ?, ?, ?, ?, ?) "})?; - insert((id.0, parent_id, title, updated_at, data_type, data))?; + insert(( + id.0, + parent_id, + folder_paths_str, + folder_paths_order_str, + title, + updated_at, + data_type, + data, + ))?; Ok(()) } @@ -437,19 +469,28 @@ impl ThreadsDatabase { let connection = connection.lock(); let mut select = connection - .select_bound::<(), (Arc, Option>, String, String)>(indoc! 
{" - SELECT id, parent_id, summary, updated_at FROM threads ORDER BY updated_at DESC + .select_bound::<(), (Arc, Option>, Option, Option, String, String)>(indoc! {" + SELECT id, parent_id, folder_paths, folder_paths_order, summary, updated_at FROM threads ORDER BY updated_at DESC "})?; let rows = select(())?; let mut threads = Vec::new(); - for (id, parent_id, summary, updated_at) in rows { + for (id, parent_id, folder_paths, folder_paths_order, summary, updated_at) in rows { + let folder_paths = folder_paths + .map(|paths| { + PathList::deserialize(&util::path_list::SerializedPathList { + paths, + order: folder_paths_order.unwrap_or_default(), + }) + }) + .unwrap_or_default(); threads.push(DbThreadMetadata { id: acp::SessionId::new(id), parent_session_id: parent_id.map(acp::SessionId::new), title: summary.into(), updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc), + folder_paths, }); } @@ -483,11 +524,16 @@ impl ThreadsDatabase { }) } - pub fn save_thread(&self, id: acp::SessionId, thread: DbThread) -> Task> { + pub fn save_thread( + &self, + id: acp::SessionId, + thread: DbThread, + folder_paths: PathList, + ) -> Task> { let connection = self.connection.clone(); self.executor - .spawn(async move { Self::save_thread_sync(&connection, id, thread) }) + .spawn(async move { Self::save_thread_sync(&connection, id, thread, &folder_paths) }) } pub fn delete_thread(&self, id: acp::SessionId) -> Task> { @@ -606,11 +652,11 @@ mod tests { ); database - .save_thread(older_id.clone(), older_thread) + .save_thread(older_id.clone(), older_thread, PathList::default()) .await .unwrap(); database - .save_thread(newer_id.clone(), newer_thread) + .save_thread(newer_id.clone(), newer_thread, PathList::default()) .await .unwrap(); @@ -635,11 +681,11 @@ mod tests { ); database - .save_thread(thread_id.clone(), original_thread) + .save_thread(thread_id.clone(), original_thread, PathList::default()) .await .unwrap(); database - .save_thread(thread_id.clone(), 
updated_thread) + .save_thread(thread_id.clone(), updated_thread, PathList::default()) .await .unwrap(); @@ -686,7 +732,7 @@ mod tests { }); database - .save_thread(child_id.clone(), child_thread) + .save_thread(child_id.clone(), child_thread, PathList::default()) .await .unwrap(); @@ -714,7 +760,7 @@ mod tests { ); database - .save_thread(thread_id.clone(), thread) + .save_thread(thread_id.clone(), thread, PathList::default()) .await .unwrap(); @@ -729,4 +775,49 @@ mod tests { "Regular threads should have no subagent_context" ); } + + #[gpui::test] + async fn test_folder_paths_roundtrip(cx: &mut TestAppContext) { + let database = ThreadsDatabase::new(cx.executor()).unwrap(); + + let thread_id = session_id("folder-thread"); + let thread = make_thread( + "Folder Thread", + Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap(), + ); + + let folder_paths = PathList::new(&[ + std::path::PathBuf::from("/home/user/project-a"), + std::path::PathBuf::from("/home/user/project-b"), + ]); + + database + .save_thread(thread_id.clone(), thread, folder_paths.clone()) + .await + .unwrap(); + + let threads = database.list_threads().await.unwrap(); + assert_eq!(threads.len(), 1); + assert_eq!(threads[0].folder_paths, folder_paths); + } + + #[gpui::test] + async fn test_folder_paths_empty_when_not_set(cx: &mut TestAppContext) { + let database = ThreadsDatabase::new(cx.executor()).unwrap(); + + let thread_id = session_id("no-folder-thread"); + let thread = make_thread( + "No Folder Thread", + Utc.with_ymd_and_hms(2024, 6, 15, 12, 0, 0).unwrap(), + ); + + database + .save_thread(thread_id.clone(), thread, PathList::default()) + .await + .unwrap(); + + let threads = database.list_threads().await.unwrap(); + assert_eq!(threads.len(), 1); + assert!(threads[0].folder_paths.is_empty()); + } } diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index 3769355bc8d3495f614ccd6787bb3a33d58e8f2f..5cdce12125da8f7d26677388169e899f94b7e7f1 100644 --- 
a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -2,6 +2,7 @@ use crate::{DbThread, DbThreadMetadata, ThreadsDatabase}; use agent_client_protocol as acp; use anyhow::{Result, anyhow}; use gpui::{App, Context, Entity, Global, Task, prelude::*}; +use util::path_list::PathList; struct GlobalThreadStore(Entity); @@ -49,12 +50,13 @@ impl ThreadStore { &mut self, id: acp::SessionId, thread: crate::DbThread, + folder_paths: PathList, cx: &mut Context, ) -> Task> { let database_future = ThreadsDatabase::connect(cx); cx.spawn(async move |this, cx| { let database = database_future.await.map_err(|err| anyhow!(err))?; - database.save_thread(id, thread).await?; + database.save_thread(id, thread, folder_paths).await?; this.update(cx, |this, cx| this.reload(cx)) }) } @@ -106,6 +108,13 @@ impl ThreadStore { pub fn entries(&self) -> impl Iterator + '_ { self.threads.iter().cloned() } + + /// Returns threads whose folder_paths match the given paths exactly. + pub fn threads_for_paths(&self, paths: &PathList) -> impl Iterator { + self.threads + .iter() + .filter(move |thread| &thread.folder_paths == paths) + } } #[cfg(test)] @@ -157,12 +166,12 @@ mod tests { ); let save_older = thread_store.update(cx, |store, cx| { - store.save_thread(older_id.clone(), older_thread, cx) + store.save_thread(older_id.clone(), older_thread, PathList::default(), cx) }); save_older.await.unwrap(); let save_newer = thread_store.update(cx, |store, cx| { - store.save_thread(newer_id.clone(), newer_thread, cx) + store.save_thread(newer_id.clone(), newer_thread, PathList::default(), cx) }); save_newer.await.unwrap(); @@ -185,8 +194,9 @@ mod tests { Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), ); - let save_task = - thread_store.update(cx, |store, cx| store.save_thread(thread_id, thread, cx)); + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread(thread_id, thread, PathList::default(), cx) + }); save_task.await.unwrap(); cx.run_until_parked(); @@ -217,11 
+227,11 @@ mod tests { ); let save_first = thread_store.update(cx, |store, cx| { - store.save_thread(first_id.clone(), first_thread, cx) + store.save_thread(first_id.clone(), first_thread, PathList::default(), cx) }); save_first.await.unwrap(); let save_second = thread_store.update(cx, |store, cx| { - store.save_thread(second_id.clone(), second_thread, cx) + store.save_thread(second_id.clone(), second_thread, PathList::default(), cx) }); save_second.await.unwrap(); cx.run_until_parked(); @@ -254,11 +264,11 @@ mod tests { ); let save_first = thread_store.update(cx, |store, cx| { - store.save_thread(first_id.clone(), first_thread, cx) + store.save_thread(first_id.clone(), first_thread, PathList::default(), cx) }); save_first.await.unwrap(); let save_second = thread_store.update(cx, |store, cx| { - store.save_thread(second_id.clone(), second_thread, cx) + store.save_thread(second_id.clone(), second_thread, PathList::default(), cx) }); save_second.await.unwrap(); cx.run_until_parked(); @@ -268,7 +278,7 @@ mod tests { Utc.with_ymd_and_hms(2024, 1, 3, 0, 0, 0).unwrap(), ); let update_task = thread_store.update(cx, |store, cx| { - store.save_thread(first_id.clone(), updated_first, cx) + store.save_thread(first_id.clone(), updated_first, PathList::default(), cx) }); update_task.await.unwrap(); cx.run_until_parked(); @@ -278,4 +288,50 @@ mod tests { assert_eq!(entries[0].id, first_id); assert_eq!(entries[1].id, second_id); } + + #[gpui::test] + async fn test_threads_for_paths_filters_correctly(cx: &mut TestAppContext) { + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + cx.run_until_parked(); + + let project_a_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-a")]); + let project_b_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-b")]); + + let thread_a = make_thread( + "Thread in A", + Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), + ); + let thread_b = make_thread( + "Thread in B", + Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 
0).unwrap(), + ); + let thread_a_id = session_id("thread-a"); + let thread_b_id = session_id("thread-b"); + + let save_a = thread_store.update(cx, |store, cx| { + store.save_thread(thread_a_id.clone(), thread_a, project_a_paths.clone(), cx) + }); + save_a.await.unwrap(); + + let save_b = thread_store.update(cx, |store, cx| { + store.save_thread(thread_b_id.clone(), thread_b, project_b_paths.clone(), cx) + }); + save_b.await.unwrap(); + + cx.run_until_parked(); + + thread_store.read_with(cx, |store, _cx| { + let a_threads: Vec<_> = store.threads_for_paths(&project_a_paths).collect(); + assert_eq!(a_threads.len(), 1); + assert_eq!(a_threads[0].id, thread_a_id); + + let b_threads: Vec<_> = store.threads_for_paths(&project_b_paths).collect(); + assert_eq!(b_threads.len(), 1); + assert_eq!(b_threads[0].id, thread_b_id); + + let nonexistent = PathList::new(&[std::path::PathBuf::from("/nonexistent")]); + let no_threads: Vec<_> = store.threads_for_paths(&nonexistent).collect(); + assert!(no_threads.is_empty()); + }); + } } diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 5269e3f1b8d03d16577e4aaeea0c258140853cb5..7097e5be156eb33382a1a0f47c1b4256c84ce9b1 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -1461,7 +1461,7 @@ impl AgentPanel { cx.spawn_in(window, async move |this, cx| { thread_store .update(&mut cx.clone(), |store, cx| { - store.save_thread(session_id.clone(), db_thread, cx) + store.save_thread(session_id.clone(), db_thread, Default::default(), cx) }) .await?; diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 499b11e5c08bd9b2c811e4cf5119bf7f71663c4b..9578a0752b45ea48477f4fab7935f670f84c25d5 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -1536,7 +1536,7 @@ impl ThreadView { thread_store .update(&mut cx.clone(), |store, cx| { - 
store.save_thread(session_id.clone(), db_thread, cx) + store.save_thread(session_id.clone(), db_thread, Default::default(), cx) }) .await?; diff --git a/crates/workspace/src/path_list.rs b/crates/util/src/path_list.rs similarity index 92% rename from crates/workspace/src/path_list.rs rename to crates/util/src/path_list.rs index 035f9e44fcce46527faa0c1053b7a6bb09aae0c8..1f923769780de2ae7f1dc18d3334020960ff3bb6 100644 --- a/crates/workspace/src/path_list.rs +++ b/crates/util/src/path_list.rs @@ -3,8 +3,9 @@ use std::{ sync::Arc, }; +use crate::paths::SanitizedPath; use itertools::Itertools; -use util::paths::SanitizedPath; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; /// A list of absolute paths, in a specific order. /// @@ -118,6 +119,19 @@ impl PathList { } } +impl Serialize for PathList { + fn serialize(&self, serializer: S) -> Result { + self.paths.serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for PathList { + fn deserialize>(deserializer: D) -> Result { + let paths: Vec = Vec::deserialize(deserializer)?; + Ok(PathList::new(&paths)) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/util/src/util.rs b/crates/util/src/util.rs index 86d26aee884da5f708fec14b5a3c09dccfa7f5f3..4f129ef6d529aff0991b86882e5e60b6ad837d5c 100644 --- a/crates/util/src/util.rs +++ b/crates/util/src/util.rs @@ -2,6 +2,7 @@ pub mod archive; pub mod command; pub mod fs; pub mod markdown; +pub mod path_list; pub mod paths; pub mod process; pub mod redact; diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index bcc6f2ccc26c967537e5c9069ae3c8da7e0a1402..cde04d987a015982006d283c17ee82ed9b7a7cb2 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -7,7 +7,9 @@ mod multi_workspace; pub mod notifications; pub mod pane; pub mod pane_group; -mod path_list; +pub mod path_list { + pub use util::path_list::{PathList, SerializedPathList}; +} mod persistence; pub mod searchable; mod 
security_modal; @@ -28,7 +30,7 @@ pub use multi_workspace::{ NextWorkspaceInWindow, PreviousWorkspaceInWindow, Sidebar, SidebarEvent, SidebarHandle, ToggleWorkspaceSidebar, }; -pub use path_list::PathList; +pub use path_list::{PathList, SerializedPathList}; pub use toast_layer::{ToastAction, ToastLayer, ToastView}; use anyhow::{Context as _, Result, anyhow}; diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 95ff6f03b1b7902e254c5e405c5d8b50e1f48773..f429c32df79b6a1a62a82832e69d412800544e8a 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -951,7 +951,12 @@ fn handle_open_request(request: OpenRequest, app_state: Arc, cx: &mut thread_store .update(&mut cx.clone(), |store, cx| { - store.save_thread(save_session_id.clone(), db_thread, cx) + store.save_thread( + save_session_id.clone(), + db_thread, + Default::default(), + cx, + ) }) .await?; From 0859cce2e5686e1ae28b7b37507ed1a30eee9085 Mon Sep 17 00:00:00 2001 From: dancer <144584931+dancer@users.noreply.github.com> Date: Thu, 26 Feb 2026 14:54:44 -0800 Subject: [PATCH 138/548] Add Vercel AI gateway provider integration (#50207) ## Summary - add a new `ai_gateway` language model provider in zed using the openai-compatible chat completions path - register ai gateway in provider wiring, settings schema, and default settings - add vercel ai gateway icon and provider list entry - parse ai gateway model capabilities from tags (`tool-use`, `vision`) and supported parameters - clean up ai gateway oidc auth failures into a concise actionable error message Release Notes: - Added Vercel AI Gateway as a new LLM provider in Zed. 
--- assets/icons/ai_vercel.svg | 3 + assets/settings/default.json | 3 + crates/icons/src/icons.rs | 1 + crates/language_models/src/language_models.rs | 8 + crates/language_models/src/provider.rs | 1 + .../src/provider/vercel_ai_gateway.rs | 706 ++++++++++++++++++ crates/language_models/src/settings.rs | 8 +- crates/settings_content/src/agent.rs | 1 + crates/settings_content/src/language_model.rs | 20 + docs/src/ai/llm-providers.md | 27 +- 10 files changed, 776 insertions(+), 2 deletions(-) create mode 100644 assets/icons/ai_vercel.svg create mode 100644 crates/language_models/src/provider/vercel_ai_gateway.rs diff --git a/assets/icons/ai_vercel.svg b/assets/icons/ai_vercel.svg new file mode 100644 index 0000000000000000000000000000000000000000..c6cc5796f724e713437c4866053380cf2e14d511 --- /dev/null +++ b/assets/icons/ai_vercel.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/settings/default.json b/assets/settings/default.json index cb1e30bbee45ba06de8866fe3b8b6f0ab38b61da..8f724f59b66486b6477fc19155d78c7dd89d33c8 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -2226,6 +2226,9 @@ "vercel": { "api_url": "https://api.v0.dev/v1", }, + "vercel_ai_gateway": { + "api_url": "https://ai-gateway.vercel.sh/v1", + }, "x_ai": { "api_url": "https://api.x.ai/v1", }, diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 5738d763fcf7ff50b67f5a77acb918250a537124..a8a4e47cd0046fa995b10bb5e91b8884d70cdd6d 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -23,6 +23,7 @@ pub enum IconName { AiOpenAi, AiOpenAiCompat, AiOpenRouter, + AiVercel, AiVZero, AiXAi, AiZed, diff --git a/crates/language_models/src/language_models.rs b/crates/language_models/src/language_models.rs index 37d4ca5ddd4e5c1e7a0202c88c012d18b018cd4f..f22ea00c9e801e120bf057a06683487bc4deb22a 100644 --- a/crates/language_models/src/language_models.rs +++ b/crates/language_models/src/language_models.rs @@ -25,6 +25,7 @@ use 
crate::provider::open_ai::OpenAiLanguageModelProvider; use crate::provider::open_ai_compatible::OpenAiCompatibleLanguageModelProvider; use crate::provider::open_router::OpenRouterLanguageModelProvider; use crate::provider::vercel::VercelLanguageModelProvider; +use crate::provider::vercel_ai_gateway::VercelAiGatewayLanguageModelProvider; use crate::provider::x_ai::XAiLanguageModelProvider; pub use crate::settings::*; @@ -208,6 +209,13 @@ fn register_language_model_providers( Arc::new(VercelLanguageModelProvider::new(client.http_client(), cx)), cx, ); + registry.register_provider( + Arc::new(VercelAiGatewayLanguageModelProvider::new( + client.http_client(), + cx, + )), + cx, + ); registry.register_provider( Arc::new(XAiLanguageModelProvider::new(client.http_client(), cx)), cx, diff --git a/crates/language_models/src/provider.rs b/crates/language_models/src/provider.rs index 6e63a5f5745afce2a21f19002706c628360d7792..27f43e37f5be343c3f80201c013e96d858bb00de 100644 --- a/crates/language_models/src/provider.rs +++ b/crates/language_models/src/provider.rs @@ -12,4 +12,5 @@ pub mod open_ai_compatible; pub mod open_router; mod util; pub mod vercel; +pub mod vercel_ai_gateway; pub mod x_ai; diff --git a/crates/language_models/src/provider/vercel_ai_gateway.rs b/crates/language_models/src/provider/vercel_ai_gateway.rs new file mode 100644 index 0000000000000000000000000000000000000000..69c54e624b9e7289abaefbe7ab654d73df385b62 --- /dev/null +++ b/crates/language_models/src/provider/vercel_ai_gateway.rs @@ -0,0 +1,706 @@ +use anyhow::Result; +use collections::BTreeMap; +use futures::{AsyncReadExt, FutureExt, StreamExt, future::BoxFuture}; +use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window}; +use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest, http}; +use language_model::{ + ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError, + LanguageModelCompletionEvent, LanguageModelId, 
LanguageModelName, LanguageModelProvider, + LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState, + LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolSchemaFormat, RateLimiter, + env_var, +}; +use open_ai::ResponseStreamEvent; +use serde::Deserialize; +pub use settings::OpenAiCompatibleModelCapabilities as ModelCapabilities; +pub use settings::VercelAiGatewayAvailableModel as AvailableModel; +use settings::{Settings, SettingsStore}; +use std::sync::{Arc, LazyLock}; +use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*}; +use ui_input::InputField; +use util::ResultExt; + +const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("vercel_ai_gateway"); +const PROVIDER_NAME: LanguageModelProviderName = + LanguageModelProviderName::new("Vercel AI Gateway"); + +const API_URL: &str = "https://ai-gateway.vercel.sh/v1"; +const API_KEY_ENV_VAR_NAME: &str = "VERCEL_AI_GATEWAY_API_KEY"; +static API_KEY_ENV_VAR: LazyLock = env_var!(API_KEY_ENV_VAR_NAME); + +#[derive(Default, Clone, Debug, PartialEq)] +pub struct VercelAiGatewaySettings { + pub api_url: String, + pub available_models: Vec, +} + +pub struct VercelAiGatewayLanguageModelProvider { + http_client: Arc, + state: Entity, +} + +pub struct State { + api_key_state: ApiKeyState, + http_client: Arc, + available_models: Vec, + fetch_models_task: Option>>, +} + +impl State { + fn is_authenticated(&self) -> bool { + self.api_key_state.has_key() + } + + fn set_api_key(&mut self, api_key: Option, cx: &mut Context) -> Task> { + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + self.api_key_state + .store(api_url, api_key, |this| &mut this.api_key_state, cx) + } + + fn authenticate(&mut self, cx: &mut Context) -> Task> { + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + let task = self + .api_key_state + .load_if_needed(api_url, |this| &mut this.api_key_state, cx); + + cx.spawn(async move |this, cx| { + let result = 
task.await; + this.update(cx, |this, cx| this.restart_fetch_models_task(cx)) + .ok(); + result + }) + } + + fn fetch_models( + &mut self, + cx: &mut Context, + ) -> Task> { + let http_client = self.http_client.clone(); + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + let api_key = self.api_key_state.key(&api_url); + cx.spawn(async move |this, cx| { + let models = list_models(http_client.as_ref(), &api_url, api_key.as_deref()).await?; + this.update(cx, |this, cx| { + this.available_models = models; + cx.notify(); + }) + .map_err(|e| LanguageModelCompletionError::Other(e))?; + Ok(()) + }) + } + + fn restart_fetch_models_task(&mut self, cx: &mut Context) { + if self.is_authenticated() { + let task = self.fetch_models(cx); + self.fetch_models_task.replace(task); + } else { + self.available_models = Vec::new(); + } + } +} + +impl VercelAiGatewayLanguageModelProvider { + pub fn new(http_client: Arc, cx: &mut App) -> Self { + let state = cx.new(|cx| { + cx.observe_global::({ + let mut last_settings = VercelAiGatewayLanguageModelProvider::settings(cx).clone(); + move |this: &mut State, cx| { + let current_settings = VercelAiGatewayLanguageModelProvider::settings(cx); + if current_settings != &last_settings { + last_settings = current_settings.clone(); + this.authenticate(cx).detach(); + cx.notify(); + } + } + }) + .detach(); + State { + api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()), + http_client: http_client.clone(), + available_models: Vec::new(), + fetch_models_task: None, + } + }); + + Self { http_client, state } + } + + fn settings(cx: &App) -> &VercelAiGatewaySettings { + &crate::AllLanguageModelSettings::get_global(cx).vercel_ai_gateway + } + + fn api_url(cx: &App) -> SharedString { + let api_url = &Self::settings(cx).api_url; + if api_url.is_empty() { + API_URL.into() + } else { + SharedString::new(api_url.as_str()) + } + } + + fn default_available_model() -> AvailableModel { + AvailableModel { + name: 
"openai/gpt-5.3-codex".to_string(), + display_name: Some("GPT 5.3 Codex".to_string()), + max_tokens: 400_000, + max_output_tokens: Some(128_000), + max_completion_tokens: None, + capabilities: ModelCapabilities::default(), + } + } + + fn create_language_model(&self, model: AvailableModel) -> Arc { + Arc::new(VercelAiGatewayLanguageModel { + id: LanguageModelId::from(model.name.clone()), + model, + state: self.state.clone(), + http_client: self.http_client.clone(), + request_limiter: RateLimiter::new(4), + }) + } +} + +impl LanguageModelProviderState for VercelAiGatewayLanguageModelProvider { + type ObservableEntity = State; + + fn observable_entity(&self) -> Option> { + Some(self.state.clone()) + } +} + +impl LanguageModelProvider for VercelAiGatewayLanguageModelProvider { + fn id(&self) -> LanguageModelProviderId { + PROVIDER_ID + } + + fn name(&self) -> LanguageModelProviderName { + PROVIDER_NAME + } + + fn icon(&self) -> IconOrSvg { + IconOrSvg::Icon(IconName::AiVercel) + } + + fn default_model(&self, _cx: &App) -> Option> { + Some(self.create_language_model(Self::default_available_model())) + } + + fn default_fast_model(&self, _cx: &App) -> Option> { + None + } + + fn provided_models(&self, cx: &App) -> Vec> { + let mut models = BTreeMap::default(); + + let default_model = Self::default_available_model(); + models.insert(default_model.name.clone(), default_model); + + for model in self.state.read(cx).available_models.clone() { + models.insert(model.name.clone(), model); + } + + for model in &Self::settings(cx).available_models { + models.insert(model.name.clone(), model.clone()); + } + + models + .into_values() + .map(|model| self.create_language_model(model)) + .collect() + } + + fn is_authenticated(&self, cx: &App) -> bool { + self.state.read(cx).is_authenticated() + } + + fn authenticate(&self, cx: &mut App) -> Task> { + self.state.update(cx, |state, cx| state.authenticate(cx)) + } + + fn configuration_view( + &self, + _target_agent: 
language_model::ConfigurationViewTargetAgent, + window: &mut Window, + cx: &mut App, + ) -> AnyView { + cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx)) + .into() + } + + fn reset_credentials(&self, cx: &mut App) -> Task> { + self.state + .update(cx, |state, cx| state.set_api_key(None, cx)) + } +} + +pub struct VercelAiGatewayLanguageModel { + id: LanguageModelId, + model: AvailableModel, + state: Entity, + http_client: Arc, + request_limiter: RateLimiter, +} + +impl VercelAiGatewayLanguageModel { + fn stream_open_ai( + &self, + request: open_ai::Request, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result< + futures::stream::BoxStream<'static, Result>, + LanguageModelCompletionError, + >, + > { + let http_client = self.http_client.clone(); + let (api_key, api_url) = self.state.read_with(cx, |state, cx| { + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + (state.api_key_state.key(&api_url), api_url) + }); + + let future = self.request_limiter.stream(async move { + let provider = PROVIDER_NAME; + let Some(api_key) = api_key else { + return Err(LanguageModelCompletionError::NoApiKey { provider }); + }; + let request = open_ai::stream_completion( + http_client.as_ref(), + provider.0.as_str(), + &api_url, + &api_key, + request, + ); + let response = request.await.map_err(map_open_ai_error)?; + Ok(response) + }); + + async move { Ok(future.await?.boxed()) }.boxed() + } +} + +fn map_open_ai_error(error: open_ai::RequestError) -> LanguageModelCompletionError { + match error { + open_ai::RequestError::HttpResponseError { + status_code, + body, + headers, + .. 
+ } => { + let retry_after = headers + .get(http::header::RETRY_AFTER) + .and_then(|value| value.to_str().ok()?.parse::().ok()) + .map(std::time::Duration::from_secs); + + LanguageModelCompletionError::from_http_status( + PROVIDER_NAME, + status_code, + extract_error_message(&body), + retry_after, + ) + } + open_ai::RequestError::Other(error) => LanguageModelCompletionError::Other(error), + } +} + +fn extract_error_message(body: &str) -> String { + let json = match serde_json::from_str::(body) { + Ok(json) => json, + Err(_) => return body.to_string(), + }; + + let message = json + .get("error") + .and_then(|value| { + value + .get("message") + .and_then(serde_json::Value::as_str) + .or_else(|| value.as_str()) + }) + .or_else(|| json.get("message").and_then(serde_json::Value::as_str)) + .map(ToString::to_string) + .unwrap_or_else(|| body.to_string()); + + clean_error_message(&message) +} + +fn clean_error_message(message: &str) -> String { + let lower = message.to_lowercase(); + + if lower.contains("vercel_oidc_token") && lower.contains("oidc token") { + return "Authentication failed for Vercel AI Gateway. Use a Vercel AI Gateway key (vck_...).\nCreate or manage keys in Vercel AI Gateway console.\nIf this persists, regenerate the key and update it in Vercel AI Gateway provider settings in Zed.".to_string(); + } + + if lower.contains("invalid api key") || lower.contains("invalid_api_key") { + return "Authentication failed for Vercel AI Gateway. 
Check that your Vercel AI Gateway key starts with vck_ and is active.".to_string(); + } + + message.to_string() +} + +fn has_tag(tags: &[String], expected: &str) -> bool { + tags.iter() + .any(|tag| tag.trim().eq_ignore_ascii_case(expected)) +} + +impl LanguageModel for VercelAiGatewayLanguageModel { + fn id(&self) -> LanguageModelId { + self.id.clone() + } + + fn name(&self) -> LanguageModelName { + LanguageModelName::from( + self.model + .display_name + .clone() + .unwrap_or_else(|| self.model.name.clone()), + ) + } + + fn provider_id(&self) -> LanguageModelProviderId { + PROVIDER_ID + } + + fn provider_name(&self) -> LanguageModelProviderName { + PROVIDER_NAME + } + + fn supports_tools(&self) -> bool { + self.model.capabilities.tools + } + + fn tool_input_format(&self) -> LanguageModelToolSchemaFormat { + LanguageModelToolSchemaFormat::JsonSchemaSubset + } + + fn supports_images(&self) -> bool { + self.model.capabilities.images + } + + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { + match choice { + LanguageModelToolChoice::Auto => self.model.capabilities.tools, + LanguageModelToolChoice::Any => self.model.capabilities.tools, + LanguageModelToolChoice::None => true, + } + } + + fn supports_split_token_display(&self) -> bool { + true + } + + fn telemetry_id(&self) -> String { + format!("vercel_ai_gateway/{}", self.model.name) + } + + fn max_token_count(&self) -> u64 { + self.model.max_tokens + } + + fn max_output_tokens(&self) -> Option { + self.model.max_output_tokens + } + + fn count_tokens( + &self, + request: LanguageModelRequest, + cx: &App, + ) -> BoxFuture<'static, Result> { + let max_token_count = self.max_token_count(); + cx.background_spawn(async move { + let messages = crate::provider::open_ai::collect_tiktoken_messages(request); + let model = if max_token_count >= 100_000 { + "gpt-4o" + } else { + "gpt-4" + }; + tiktoken_rs::num_tokens_from_messages(model, &messages).map(|tokens| tokens as u64) + }) + .boxed() + } + + fn 
stream_completion( + &self, + request: LanguageModelRequest, + cx: &AsyncApp, + ) -> BoxFuture< + 'static, + Result< + futures::stream::BoxStream< + 'static, + Result, + >, + LanguageModelCompletionError, + >, + > { + let request = crate::provider::open_ai::into_open_ai( + request, + &self.model.name, + self.model.capabilities.parallel_tool_calls, + self.model.capabilities.prompt_cache_key, + self.max_output_tokens(), + None, + ); + let completions = self.stream_open_ai(request, cx); + async move { + let mapper = crate::provider::open_ai::OpenAiEventMapper::new(); + Ok(mapper.map_stream(completions.await?).boxed()) + } + .boxed() + } +} + +#[derive(Deserialize)] +struct ModelsResponse { + data: Vec, +} + +#[derive(Deserialize)] +struct ApiModel { + id: String, + name: Option, + context_window: Option, + max_tokens: Option, + #[serde(default)] + r#type: Option, + #[serde(default)] + supported_parameters: Vec, + #[serde(default)] + tags: Vec, + architecture: Option, +} + +#[derive(Deserialize)] +struct ApiModelArchitecture { + #[serde(default)] + input_modalities: Vec, +} + +async fn list_models( + client: &dyn HttpClient, + api_url: &str, + api_key: Option<&str>, +) -> Result, LanguageModelCompletionError> { + let uri = format!("{api_url}/models?include_mappings=true"); + let mut request_builder = HttpRequest::builder() + .method(Method::GET) + .uri(uri) + .header("Accept", "application/json"); + if let Some(api_key) = api_key { + request_builder = request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + let request = request_builder + .body(AsyncBody::default()) + .map_err(|error| LanguageModelCompletionError::BuildRequestBody { + provider: PROVIDER_NAME, + error, + })?; + let mut response = + client + .send(request) + .await + .map_err(|error| LanguageModelCompletionError::HttpSend { + provider: PROVIDER_NAME, + error, + })?; + + let mut body = String::new(); + response + .body_mut() + .read_to_string(&mut body) + .await + .map_err(|error| 
LanguageModelCompletionError::ApiReadResponseError { + provider: PROVIDER_NAME, + error, + })?; + + if !response.status().is_success() { + return Err(LanguageModelCompletionError::from_http_status( + PROVIDER_NAME, + response.status(), + extract_error_message(&body), + None, + )); + } + + let response: ModelsResponse = serde_json::from_str(&body).map_err(|error| { + LanguageModelCompletionError::DeserializeResponse { + provider: PROVIDER_NAME, + error, + } + })?; + + let mut models = Vec::new(); + for model in response.data { + if let Some(model_type) = model.r#type.as_deref() + && model_type != "language" + { + continue; + } + let supports_tools = model + .supported_parameters + .iter() + .any(|parameter| parameter == "tools") + || has_tag(&model.tags, "tool-use") + || has_tag(&model.tags, "tools"); + let supports_images = model.architecture.is_some_and(|architecture| { + architecture + .input_modalities + .iter() + .any(|modality| modality == "image") + }) || has_tag(&model.tags, "vision") + || has_tag(&model.tags, "image-input"); + let parallel_tool_calls = model + .supported_parameters + .iter() + .any(|parameter| parameter == "parallel_tool_calls"); + let prompt_cache_key = model + .supported_parameters + .iter() + .any(|parameter| parameter == "prompt_cache_key" || parameter == "cache_control"); + models.push(AvailableModel { + name: model.id.clone(), + display_name: model.name.or(Some(model.id)), + max_tokens: model.context_window.or(model.max_tokens).unwrap_or(128_000), + max_output_tokens: model.max_tokens, + max_completion_tokens: None, + capabilities: ModelCapabilities { + tools: supports_tools, + images: supports_images, + parallel_tool_calls, + prompt_cache_key, + chat_completions: true, + }, + }); + } + + Ok(models) +} + +struct ConfigurationView { + api_key_editor: Entity, + state: Entity, + load_credentials_task: Option>, +} + +impl ConfigurationView { + fn new(state: Entity, window: &mut Window, cx: &mut Context) -> Self { + let api_key_editor = + 
cx.new(|cx| InputField::new(window, cx, "vck_000000000000000000000000000")); + + cx.observe(&state, |_, _, cx| cx.notify()).detach(); + + let load_credentials_task = Some(cx.spawn_in(window, { + let state = state.clone(); + async move |this, cx| { + if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) { + let _ = task.await; + } + this.update(cx, |this, cx| { + this.load_credentials_task = None; + cx.notify(); + }) + .log_err(); + } + })); + + Self { + api_key_editor, + state, + load_credentials_task, + } + } + + fn save_api_key(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context) { + let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string(); + if api_key.is_empty() { + return; + } + + self.api_key_editor + .update(cx, |editor, cx| editor.set_text("", window, cx)); + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(Some(api_key), cx)) + .await + }) + .detach_and_log_err(cx); + } + + fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context) { + self.api_key_editor + .update(cx, |editor, cx| editor.set_text("", window, cx)); + + let state = self.state.clone(); + cx.spawn_in(window, async move |_, cx| { + state + .update(cx, |state, cx| state.set_api_key(None, cx)) + .await + }) + .detach_and_log_err(cx); + } + + fn should_render_editor(&self, cx: &Context) -> bool { + !self.state.read(cx).is_authenticated() + } +} + +impl Render for ConfigurationView { + fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { + let env_var_set = self.state.read(cx).api_key_state.is_from_env_var(); + let configured_card_label = if env_var_set { + format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable") + } else { + let api_url = VercelAiGatewayLanguageModelProvider::api_url(cx); + if api_url == API_URL { + "API key configured".to_string() + } else { + format!("API key configured for {}", api_url) + } + }; + + 
if self.load_credentials_task.is_some() { + div().child(Label::new("Loading credentials...")).into_any() + } else if self.should_render_editor(cx) { + v_flex() + .size_full() + .on_action(cx.listener(Self::save_api_key)) + .child(Label::new( + "To use Zed's agent with Vercel AI Gateway, you need to add an API key. Follow these steps:", + )) + .child( + List::new() + .child( + ListBulletItem::new("") + .child(Label::new("Create an API key in")) + .child(ButtonLink::new( + "Vercel AI Gateway's console", + "https://vercel.com/d?to=%2F%5Bteam%5D%2F%7E%2Fai%2Fapi-keys&title=Go+to+AI+Gateway", + )), + ) + .child(ListBulletItem::new( + "Paste your API key below and hit enter to start using the assistant", + )), + ) + .child(self.api_key_editor.clone()) + .child( + Label::new(format!( + "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed.", + )) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any_element() + } else { + ConfiguredApiCard::new(configured_card_label) + .disabled(env_var_set) + .when(env_var_set, |this| { + this.tooltip_label(format!("To reset your API key, unset the {API_KEY_ENV_VAR_NAME} environment variable.")) + }) + .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx))) + .into_any_element() + } + } +} diff --git a/crates/language_models/src/settings.rs b/crates/language_models/src/settings.rs index 512ea05b0c6cfb7d91b39beb8aafb0de7916a78e..7466a337f636abcd8ad70343dfd64a825a7fb6a7 100644 --- a/crates/language_models/src/settings.rs +++ b/crates/language_models/src/settings.rs @@ -8,7 +8,7 @@ use crate::provider::{ deepseek::DeepSeekSettings, google::GoogleSettings, lmstudio::LmStudioSettings, mistral::MistralSettings, ollama::OllamaSettings, open_ai::OpenAiSettings, open_ai_compatible::OpenAiCompatibleSettings, open_router::OpenRouterSettings, - vercel::VercelSettings, x_ai::XAiSettings, + vercel::VercelSettings, vercel_ai_gateway::VercelAiGatewaySettings, x_ai::XAiSettings, }; 
#[derive(Debug, RegisterSetting)] @@ -24,6 +24,7 @@ pub struct AllLanguageModelSettings { pub openai: OpenAiSettings, pub openai_compatible: HashMap, OpenAiCompatibleSettings>, pub vercel: VercelSettings, + pub vercel_ai_gateway: VercelAiGatewaySettings, pub x_ai: XAiSettings, pub zed_dot_dev: ZedDotDevSettings, } @@ -44,6 +45,7 @@ impl settings::Settings for AllLanguageModelSettings { let openai = language_models.openai.unwrap(); let openai_compatible = language_models.openai_compatible.unwrap(); let vercel = language_models.vercel.unwrap(); + let vercel_ai_gateway = language_models.vercel_ai_gateway.unwrap(); let x_ai = language_models.x_ai.unwrap(); let zed_dot_dev = language_models.zed_dot_dev.unwrap(); Self { @@ -107,6 +109,10 @@ impl settings::Settings for AllLanguageModelSettings { api_url: vercel.api_url.unwrap(), available_models: vercel.available_models.unwrap_or_default(), }, + vercel_ai_gateway: VercelAiGatewaySettings { + api_url: vercel_ai_gateway.api_url.unwrap(), + available_models: vercel_ai_gateway.available_models.unwrap_or_default(), + }, x_ai: XAiSettings { api_url: x_ai.api_url.unwrap(), available_models: x_ai.available_models.unwrap_or_default(), diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index 02aa26156de3a66a160e2f0da25b3e57088aa8a3..87e117b8b0bbdd9a789bae18c3f9dce98a6f1bc0 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -290,6 +290,7 @@ impl JsonSchema for LanguageModelProviderSetting { "openai", "openrouter", "vercel", + "vercel_ai_gateway", "x_ai", "zed.dev" ] diff --git a/crates/settings_content/src/language_model.rs b/crates/settings_content/src/language_model.rs index 4d5e89f9ab7d1e647e82d22767ec2a9b91b80d6d..6af419119d819931f3ad826ff416f1b47c89824f 100644 --- a/crates/settings_content/src/language_model.rs +++ b/crates/settings_content/src/language_model.rs @@ -20,6 +20,7 @@ pub struct AllLanguageModelSettingsContent { pub openai: Option, 
pub openai_compatible: Option, OpenAiCompatibleSettingsContent>>, pub vercel: Option, + pub vercel_ai_gateway: Option, pub x_ai: Option, #[serde(rename = "zed.dev")] pub zed_dot_dev: Option, @@ -301,6 +302,25 @@ pub struct VercelAvailableModel { pub max_completion_tokens: Option, } +#[with_fallible_options] +#[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] +pub struct VercelAiGatewaySettingsContent { + pub api_url: Option, + pub available_models: Option>, +} + +#[with_fallible_options] +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)] +pub struct VercelAiGatewayAvailableModel { + pub name: String, + pub display_name: Option, + pub max_tokens: u64, + pub max_output_tokens: Option, + pub max_completion_tokens: Option, + #[serde(default)] + pub capabilities: OpenAiCompatibleModelCapabilities, +} + #[with_fallible_options] #[derive(Default, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] pub struct GoogleSettingsContent { diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index 8c2dc976fcccfefcf75b4d0dcb71bad353af147f..3a32bd96e73d9df427897798681f203c4ceb2273 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -1,6 +1,6 @@ --- title: LLM Providers - Use Your Own API Keys in Zed -description: Bring your own API keys to Zed. Set up Anthropic, OpenAI, Google AI, Ollama, DeepSeek, Mistral, OpenRouter, and more. +description: Bring your own API keys to Zed. Set up Anthropic, OpenAI, Google AI, Ollama, DeepSeek, Mistral, OpenRouter, Vercel AI Gateway, and more. 
--- # LLM Providers @@ -32,6 +32,7 @@ Zed supports these providers with your own API keys: - [OpenAI](#openai) - [OpenAI API Compatible](#openai-api-compatible) - [OpenRouter](#openrouter) +- [Vercel AI Gateway](#vercel-ai-gateway) - [Vercel](#vercel-v0) - [xAI](#xai) @@ -738,6 +739,30 @@ Example adding routing preferences to a model: These routing controls let you fine‑tune cost, capability, and reliability trade‑offs without changing the model name you select in the UI. +### Vercel AI Gateway {#vercel-ai-gateway} + +[Vercel AI Gateway](https://vercel.com/ai-gateway) provides access to many models through a single OpenAI-compatible endpoint. + +1. Create an API key from your [Vercel AI Gateway keys page](https://vercel.com/d?to=%2F%5Bteam%5D%2F%7E%2Fai%2Fapi-keys&title=Go+to+AI+Gateway) +2. Open the settings view (`agent: open settings`) and go to the **Vercel AI Gateway** section +3. Enter your Vercel AI Gateway API key + +The Vercel AI Gateway API key will be saved in your keychain. + +Zed will also use the `VERCEL_AI_GATEWAY_API_KEY` environment variable if it's defined. + +You can also set a custom endpoint for Vercel AI Gateway in your settings file: + +```json [settings] +{ + "language_models": { + "vercel_ai_gateway": { + "api_url": "https://ai-gateway.vercel.sh/v1" + } + } +} +``` + ### Vercel v0 {#vercel-v0} [Vercel v0](https://v0.app/docs/api/model) is a model for generating full-stack apps, with framework-aware completions for stacks like Next.js and Vercel. From 703bc3698840a67a3984e4a80eefdaac6ce29dc1 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Fri, 27 Feb 2026 00:27:24 +0100 Subject: [PATCH 139/548] Revert "outline: Refactor outline render_item to reuse existing TextStyle (#49166)" (#50258) This reverts commit 69e5ff7c76faa888ac71ff1d83cd335fb183b065. 
Release Notes: - N/A --- crates/outline/src/outline.rs | 30 ++++++++++++++------- crates/outline_panel/src/outline_panel.rs | 33 ++++++++++++----------- 2 files changed, 38 insertions(+), 25 deletions(-) diff --git a/crates/outline/src/outline.rs b/crates/outline/src/outline.rs index d66036a31d8a931a652a8ec0ca9019e5cdcaa7b9..454f6f0b578ce25785f0a356251c8af64776772f 100644 --- a/crates/outline/src/outline.rs +++ b/crates/outline/src/outline.rs @@ -10,7 +10,8 @@ use editor::{MultiBufferOffset, RowHighlightOptions, SelectionEffects}; use fuzzy::StringMatch; use gpui::{ App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, HighlightStyle, - ParentElement, Point, Render, Styled, StyledText, Task, WeakEntity, Window, div, rems, + ParentElement, Point, Render, Styled, StyledText, Task, TextStyle, WeakEntity, Window, div, + rems, }; use language::{Outline, OutlineItem}; use ordered_float::OrderedFloat; @@ -406,7 +407,7 @@ pub fn render_item( outline_item: &OutlineItem, match_ranges: impl IntoIterator>, cx: &App, -) -> impl IntoElement { +) -> StyledText { let highlight_style = HighlightStyle { background_color: Some(cx.theme().colors().text_accent.alpha(0.3)), ..Default::default() @@ -414,19 +415,28 @@ pub fn render_item( let custom_highlights = match_ranges .into_iter() .map(|range| (range, highlight_style)); + + let settings = ThemeSettings::get_global(cx); + + // TODO: We probably shouldn't need to build a whole new text style here + // but I'm not sure how to get the current one and modify it. + // Before this change TextStyle::default() was used here, which was giving us the wrong font and text color. 
+ let text_style = TextStyle { + color: cx.theme().colors().text, + font_family: settings.buffer_font.family.clone(), + font_features: settings.buffer_font.features.clone(), + font_fallbacks: settings.buffer_font.fallbacks.clone(), + font_size: settings.buffer_font_size(cx).into(), + font_weight: settings.buffer_font.weight, + line_height: relative(1.), + ..Default::default() + }; let highlights = gpui::combine_highlights( custom_highlights, outline_item.highlight_ranges.iter().cloned(), ); - let settings = ThemeSettings::get_global(cx); - - div() - .text_color(cx.theme().colors().text) - .font(settings.buffer_font.clone()) - .text_size(settings.buffer_font_size(cx)) - .line_height(relative(1.)) - .child(StyledText::new(outline_item.text.clone()).with_highlights(highlights)) + StyledText::new(outline_item.text.clone()).with_default_highlights(&text_style, highlights) } #[cfg(test)] diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index 91b0f2e6d45d87170c5bb8ecda47df3e1a64626e..445f63fa1cdc38cb358cf033cc49f404aa6e6d94 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -2618,21 +2618,24 @@ impl OutlinePanel { } else { &search_matches }; - let outline_item = OutlineItem { - depth, - annotation_range: None, - range: search_data.context_range.clone(), - text: search_data.context_text.clone(), - source_range_for_text: search_data.context_range.clone(), - highlight_ranges: search_data - .highlights_data - .get() - .cloned() - .unwrap_or_default(), - name_ranges: search_data.search_match_indices.clone(), - body_range: Some(search_data.context_range.clone()), - }; - let label_element = outline::render_item(&outline_item, match_ranges.iter().cloned(), cx); + let label_element = outline::render_item( + &OutlineItem { + depth, + annotation_range: None, + range: search_data.context_range.clone(), + text: search_data.context_text.clone(), + source_range_for_text: 
search_data.context_range.clone(), + highlight_ranges: search_data + .highlights_data + .get() + .cloned() + .unwrap_or_default(), + name_ranges: search_data.search_match_indices.clone(), + body_range: Some(search_data.context_range.clone()), + }, + match_ranges.iter().cloned(), + cx, + ); let truncated_contents_label = || Label::new(TRUNCATED_CONTEXT_MARK); let entire_label = h_flex() .justify_center() From d4e89f9587f66ca277fc662dbeb45324d91952bb Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 26 Feb 2026 15:36:29 -0800 Subject: [PATCH 140/548] Add edit prediction evals that test related excerpt usage (#50256) I've also fixed a race condition with the programmatic context retrieval in the CLI, which was causing no excerpts to be fetched for the Rust examples. Release Notes: - N/A --- .../evals/vscode--log-object-property.md | 56 +++++++ .../evals/zed--add-eprintln.md | 54 ++++--- .../evals/zed--change-match-arm.md | 68 +++++++++ .../src/retrieve_context.rs | 143 ++++++++---------- 4 files changed, 213 insertions(+), 108 deletions(-) create mode 100644 crates/edit_prediction_cli/evals/vscode--log-object-property.md create mode 100644 crates/edit_prediction_cli/evals/zed--change-match-arm.md diff --git a/crates/edit_prediction_cli/evals/vscode--log-object-property.md b/crates/edit_prediction_cli/evals/vscode--log-object-property.md new file mode 100644 index 0000000000000000000000000000000000000000..1c60b84f0107c54ea8bd89084dccbfdf785fb932 --- /dev/null +++ b/crates/edit_prediction_cli/evals/vscode--log-object-property.md @@ -0,0 +1,56 @@ ++++ +repository_url = "https://github.com/microsoft/vscode" +revision = "e28a92fc1fbe9de11eca2f8ad19899334bff8525" ++++ + +This prediction requires the model to see the `IDiffComputationResult` type definition. 
+ +## Edit History + +```diff +--- a/src/vs/editor/browser/widget/diffEditorWidget.ts ++++ b/src/vs/editor/browser/widget/diffEditorWidget.ts +@@ -1117,6 +1117,7 @@ + && currentModifiedModel === this._modifiedEditor.getModel() + ) { + this._setState(editorBrowser.DiffEditorState.DiffComputed); ++ console.log("did quit:") + this._diffComputationResult = result; + this._updateDecorationsRunner.schedule(); + this._onDidUpdateDiff.fire(); +``` + +## Cursor Position + +```src/vs/editor/browser/widget/diffEditorWidget.ts + if (currentToken === this._diffComputationToken + && currentOriginalModel === this._originalEditor.getModel() + && currentModifiedModel === this._modifiedEditor.getModel() + ) { + this._setState(editorBrowser.DiffEditorState.DiffComputed); + console.log("did quit:") + // ^[CURSOR_POSITION] + this._diffComputationResult = result; + this._updateDecorationsRunner.schedule(); + this._onDidUpdateDiff.fire(); + } +``` + +## Expected Patch + +```diff +--- a/src/vs/editor/browser/widget/diffEditorWidget.ts ++++ b/src/vs/editor/browser/widget/diffEditorWidget.ts +@@ -1115,10 +1115,10 @@ + if (currentToken === this._diffComputationToken + && currentOriginalModel === this._originalEditor.getModel() + && currentModifiedModel === this._modifiedEditor.getModel() + ) { + this._setState(editorBrowser.DiffEditorState.DiffComputed); +- console.log("did quit:") ++ console.log("did quit:", result.quitEarly) + this._diffComputationResult = result; + this._updateDecorationsRunner.schedule(); + this._onDidUpdateDiff.fire(); + } +``` diff --git a/crates/edit_prediction_cli/evals/zed--add-eprintln.md b/crates/edit_prediction_cli/evals/zed--add-eprintln.md index d4252810b5f97df0991de3015c19e12138e8a27b..467bfd5151996bc98d00145bfebef62f89c5e37e 100644 --- a/crates/edit_prediction_cli/evals/zed--add-eprintln.md +++ b/crates/edit_prediction_cli/evals/zed--add-eprintln.md @@ -1,43 +1,37 @@ +++ repository_url = "git@github.com:zed-industries/zed" -revision = 
"780a87dd98f26816876d12e2728933b17faca78d" +revision = "b7090c9fae7390a82021b994994c0f587744d96c" +++ +This example shows the model's preference for making conservative predictions, and ability to place +the cursor within the predicted output. + ## Edit History ```diff --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs -@@ -206,6 +206,7 @@ - self.select_next_edit(&Default::default(), window, cx); - self.confirm(&Default::default(), window, cx); - +@@ -144,7 +144,7 @@ + fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) { + epr - cx.notify(); - } - + let next_index = self + .ep_store + .read(cx) ``` ## Cursor Position ```crates/edit_prediction_ui/src/rate_prediction_modal.rs - let current_completion = self - .active_prediction - .as_ref() - .map(|completion| completion.prediction.clone()); - self.select_completion(current_completion, false, window, cx); - self.select_next_edit(&Default::default(), window, cx); - self.confirm(&Default::default(), window, cx); - + fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) { epr // ^[CURSOR_POSITION] - cx.notify(); - } - - pub fn thumbs_down_active( - &mut self, - _: &ThumbsDownActivePrediction, - window: &mut Window, + let next_index = self + .ep_store + .read(cx) + .shown_predictions() + .skip(self.selected_index) + .enumerate() + .skip(1) // Skip straight to the next item ``` ## Expected Patch @@ -45,12 +39,16 @@ revision = "780a87dd98f26816876d12e2728933b17faca78d" ```diff --- a/crates/edit_prediction_ui/src/rate_prediction_modal.rs +++ b/crates/edit_prediction_ui/src/rate_prediction_modal.rs -@@ -201,16 +201,16 @@ - self.confirm(&Default::default(), window, cx); - +@@ -144,14 +144,14 @@ + fn select_next_edit(&mut self, _: &NextEdit, _: &mut Window, cx: &mut Context) { - epr + eprintln!(""); # ^[CURSOR_POSITION] - cx.notify(); - } + let next_index = self + .ep_store + .read(cx) + 
.shown_predictions() + .skip(self.selected_index) + .enumerate() + .skip(1) // Skip straight to the next item ``` diff --git a/crates/edit_prediction_cli/evals/zed--change-match-arm.md b/crates/edit_prediction_cli/evals/zed--change-match-arm.md new file mode 100644 index 0000000000000000000000000000000000000000..042e2730cc352d9c90739a3fe3ea20438755896b --- /dev/null +++ b/crates/edit_prediction_cli/evals/zed--change-match-arm.md @@ -0,0 +1,68 @@ ++++ +repository_url = "git@github.com:zed-industries/zed" +revision = "be5763632dccb33470ca233c36ccd9e5e790e3b2" ++++ + +This prediction requires the model to see the `project::Event` enum. + +## Edit History + +```diff +--- a/crates/edit_prediction/src/edit_prediction.rs ++++ b/crates/edit_prediction/src/edit_prediction.rs +@@ -1035,7 +1035,7 @@ + project_state.recent_paths.push_front(path); + } + } +- project::Event::DiagnosticsUpdated { .. } => { ++ project::Event::Disk { .. } => { + if cx.has_flag::() { + self.refresh_prediction_from_diagnostics( + project, +``` + +## Cursor Position + +```crates/edit_prediction/src/edit_prediction.rs + { + project_state.recent_paths.remove(ix); + } + project_state.recent_paths.push_front(path); + } + } + project::Event::Disk { .. } => { + // ^[CURSOR_POSITION] + if cx.has_flag::() { + self.refresh_prediction_from_diagnostics( + project, +``` + +## Expected Patch + +```diff +--- a/crates/edit_prediction/src/edit_prediction.rs ++++ b/crates/edit_prediction/src/edit_prediction.rs +@@ -1032,10 +1032,10 @@ + project_state.recent_paths.push_front(path); + } + } +- project::Event::Disk { .. } => { ++ project::Event::DiskBasedDiagnosticsFinished { .. } => { + if cx.has_flag::() { + self.refresh_prediction_from_diagnostics( + project, +``` + +```diff +--- a/crates/edit_prediction/src/edit_prediction.rs ++++ b/crates/edit_prediction/src/edit_prediction.rs +@@ -1032,10 +1032,10 @@ + project_state.recent_paths.push_front(path); + } + } +- project::Event::Disk { .. 
} => { ++ project::Event::DiskBasedDiagnosticsStarted { .. } => { + if cx.has_flag::() { + self.refresh_prediction_from_diagnostics( + project, +``` diff --git a/crates/edit_prediction_cli/src/retrieve_context.rs b/crates/edit_prediction_cli/src/retrieve_context.rs index 18ee3c1b0ec1456b02bb145c98e669b777048385..a5fb00b39a67a15a7afcced897b4d109f1f3406f 100644 --- a/crates/edit_prediction_cli/src/retrieve_context.rs +++ b/crates/edit_prediction_cli/src/retrieve_context.rs @@ -85,46 +85,79 @@ async fn wait_for_language_servers_to_start( ) -> anyhow::Result<()> { let lsp_store = project.read_with(cx, |project, _| project.lsp_store()); - let (language_server_ids, mut starting_language_server_ids) = - buffer.update(cx, |buffer, cx| { - lsp_store.update(cx, |lsp_store, cx| { - let ids = lsp_store.language_servers_for_local_buffer(buffer, cx); - let starting_ids = ids - .iter() - .copied() - .filter(|id| !lsp_store.language_server_statuses.contains_key(&id)) - .collect::>(); - (ids, starting_ids) - }) + // Determine which servers exist for this buffer, and which are still starting. 
+ let mut servers_pending_start = HashSet::default(); + let mut servers_pending_diagnostics = HashSet::default(); + buffer.update(cx, |buffer, cx| { + lsp_store.update(cx, |lsp_store, cx| { + let ids = lsp_store.language_servers_for_local_buffer(buffer, cx); + for &id in &ids { + match lsp_store.language_server_statuses.get(&id) { + None => { + servers_pending_start.insert(id); + servers_pending_diagnostics.insert(id); + } + Some(status) if status.has_pending_diagnostic_updates => { + servers_pending_diagnostics.insert(id); + } + Some(_) => {} + } + } }); + }); - step_progress.set_substatus(format!("waiting for {} LSPs", language_server_ids.len())); + step_progress.set_substatus(format!( + "waiting for {} LSPs", + servers_pending_diagnostics.len() + )); - let timeout_duration = if starting_language_server_ids.is_empty() { + let timeout_duration = if servers_pending_start.is_empty() { Duration::from_secs(30) } else { Duration::from_secs(60 * 5) }; - let timeout = cx.background_executor().timer(timeout_duration).shared(); - let (mut tx, mut rx) = mpsc::channel(language_server_ids.len()); - let added_subscription = cx.subscribe(project, { + let (mut started_tx, mut started_rx) = mpsc::channel(servers_pending_start.len().max(1)); + let (mut diag_tx, mut diag_rx) = mpsc::channel(servers_pending_diagnostics.len().max(1)); + let subscriptions = [cx.subscribe(&lsp_store, { let step_progress = step_progress.clone(); - move |_, event, _| match event { - project::Event::LanguageServerAdded(language_server_id, name, _) => { + move |lsp_store, event, cx| match event { + project::LspStoreEvent::LanguageServerAdded(id, name, _) => { step_progress.set_substatus(format!("LSP started: {}", name)); - tx.try_send(*language_server_id).ok(); + started_tx.try_send(*id).ok(); + } + project::LspStoreEvent::DiskBasedDiagnosticsFinished { language_server_id } => { + let name = lsp_store + .read(cx) + .language_server_adapter_for_id(*language_server_id) + .unwrap() + .name(); + 
step_progress.set_substatus(format!("LSP idle: {}", name)); + diag_tx.try_send(*language_server_id).ok(); + } + project::LspStoreEvent::LanguageServerUpdate { + message: + client::proto::update_language_server::Variant::WorkProgress( + client::proto::LspWorkProgress { + message: Some(message), + .. + }, + ), + .. + } => { + step_progress.set_substatus(message.clone()); } _ => {} } - }); + })]; - while !starting_language_server_ids.is_empty() { + // Phase 1: wait for all servers to start. + while !servers_pending_start.is_empty() { futures::select! { - language_server_id = rx.next() => { - if let Some(id) = language_server_id { - starting_language_server_ids.remove(&id); + id = started_rx.next() => { + if let Some(id) = id { + servers_pending_start.remove(&id); } }, _ = timeout.clone().fuse() => { @@ -133,67 +166,17 @@ async fn wait_for_language_servers_to_start( } } - drop(added_subscription); - - let (mut tx, mut rx) = mpsc::channel(language_server_ids.len()); - let subscriptions = [ - cx.subscribe(&lsp_store, { - let step_progress = step_progress.clone(); - move |_, event, _| { - if let project::LspStoreEvent::LanguageServerUpdate { - message: - client::proto::update_language_server::Variant::WorkProgress( - client::proto::LspWorkProgress { - message: Some(message), - .. - }, - ), - .. - } = event - { - step_progress.set_substatus(message.clone()); - } - } - }), - cx.subscribe(project, { - let step_progress = step_progress.clone(); - let lsp_store = lsp_store.clone(); - move |_, event, cx| match event { - project::Event::DiskBasedDiagnosticsFinished { language_server_id } => { - let lsp_store = lsp_store.read(cx); - let name = lsp_store - .language_server_adapter_for_id(*language_server_id) - .unwrap() - .name(); - step_progress.set_substatus(format!("LSP idle: {}", name)); - tx.try_send(*language_server_id).ok(); - } - _ => {} - } - }), - ]; - + // Save the buffer so the server sees the current content and kicks off diagnostics. 
project .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) .await?; - let mut pending_language_server_ids = lsp_store.read_with(cx, |lsp_store, _| { - language_server_ids - .iter() - .copied() - .filter(|id| { - lsp_store - .language_server_statuses - .get(id) - .is_some_and(|status| status.has_pending_diagnostic_updates) - }) - .collect::>() - }); - while !pending_language_server_ids.is_empty() { + // Phase 2: wait for all servers to finish their diagnostic pass. + while !servers_pending_diagnostics.is_empty() { futures::select! { - language_server_id = rx.next() => { - if let Some(id) = language_server_id { - pending_language_server_ids.remove(&id); + id = diag_rx.next() => { + if let Some(id) = id { + servers_pending_diagnostics.remove(&id); } }, _ = timeout.clone().fuse() => { From 9ff0b0206f27d5d523bdfb536163b8929ac4aae7 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Thu, 26 Feb 2026 17:07:37 -0800 Subject: [PATCH 141/548] Include optional model version with EP acceptance and rejection messages (#50262) Release Notes: - N/A --- .../cloud_llm_client/src/cloud_llm_client.rs | 4 ++ .../cloud_llm_client/src/predict_edits_v3.rs | 2 + crates/edit_prediction/src/edit_prediction.rs | 21 ++++++++++- .../src/edit_prediction_tests.rs | 37 ++++++++++++++----- crates/edit_prediction/src/fim.rs | 1 + crates/edit_prediction/src/mercury.rs | 1 + crates/edit_prediction/src/prediction.rs | 4 ++ crates/edit_prediction/src/sweep_ai.rs | 1 + crates/edit_prediction/src/zeta.rs | 19 ++++++---- 9 files changed, 72 insertions(+), 18 deletions(-) diff --git a/crates/cloud_llm_client/src/cloud_llm_client.rs b/crates/cloud_llm_client/src/cloud_llm_client.rs index 62986b311e07c8b4439246e36c44318d23012e0b..9ed82365ea910dd910226f70e242d68388b41796 100644 --- a/crates/cloud_llm_client/src/cloud_llm_client.rs +++ b/crates/cloud_llm_client/src/cloud_llm_client.rs @@ -142,6 +142,8 @@ pub struct PredictEditsResponse { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct 
AcceptEditPredictionBody { pub request_id: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub model_version: Option, } #[derive(Debug, Clone, Deserialize)] @@ -160,6 +162,8 @@ pub struct EditPredictionRejection { #[serde(default)] pub reason: EditPredictionRejectReason, pub was_shown: bool, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub model_version: Option, } #[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, PartialEq)] diff --git a/crates/cloud_llm_client/src/predict_edits_v3.rs b/crates/cloud_llm_client/src/predict_edits_v3.rs index d0b53ca18e8c74ec2588bff14c5130e3381f9444..5002c1a770ec1955d2a96c97098867f20f9bd05d 100644 --- a/crates/cloud_llm_client/src/predict_edits_v3.rs +++ b/crates/cloud_llm_client/src/predict_edits_v3.rs @@ -33,6 +33,8 @@ pub struct PredictEditsV3Response { /// this range to extract the old text from its local excerpt for /// diffing, rather than relying on its own format-derived range. pub editable_range: Range, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub model_version: Option, } #[derive(Debug, Deserialize, Serialize)] diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 78f42db2120b45f04dbf83c5e706a42163ee8067..836b4a477f62e2da6674568d0a7a1ccfc2b603cf 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -361,6 +361,7 @@ impl ProjectState { prediction_id, EditPredictionRejectReason::Canceled, false, + None, cx, ); }) @@ -1394,7 +1395,14 @@ impl EditPredictionStore { if let Some(project_state) = self.projects.get_mut(&project.entity_id()) { project_state.pending_predictions.clear(); if let Some(prediction) = project_state.current_prediction.take() { - self.reject_prediction(prediction.prediction.id, reason, prediction.was_shown, cx); + let model_version = prediction.prediction.model_version.clone(); + self.reject_prediction( + 
prediction.prediction.id, + reason, + prediction.was_shown, + model_version, + cx, + ); } }; } @@ -1453,6 +1461,7 @@ impl EditPredictionStore { prediction_id: EditPredictionId, reason: EditPredictionRejectReason, was_shown: bool, + model_version: Option, cx: &App, ) { match self.edit_prediction_model { @@ -1467,6 +1476,7 @@ impl EditPredictionStore { request_id: prediction_id.to_string(), reason, was_shown, + model_version, }) .log_err(); } @@ -1812,6 +1822,7 @@ impl EditPredictionStore { new_prediction.prediction.id, EditPredictionRejectReason::CurrentPreferred, false, + new_prediction.prediction.model_version, cx, ); None @@ -1821,7 +1832,13 @@ impl EditPredictionStore { } } Err(reject_reason) => { - this.reject_prediction(prediction_result.id, reject_reason, false, cx); + this.reject_prediction( + prediction_result.id, + reject_reason, + false, + None, + cx, + ); None } } diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index b0468e3c5610b8f618631be6707c74c4eaa451e5..abe522494fc8962a995313ffb1a57b8672c22ca4 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -897,7 +897,8 @@ async fn test_empty_prediction(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: id, reason: EditPredictionRejectReason::Empty, - was_shown: false + was_shown: false, + model_version: None, }] ); } @@ -957,7 +958,8 @@ async fn test_interpolated_empty(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: id, reason: EditPredictionRejectReason::InterpolatedEmpty, - was_shown: false + was_shown: false, + model_version: None, }] ); } @@ -1049,7 +1051,8 @@ async fn test_replace_current(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: first_id, reason: EditPredictionRejectReason::Replaced, - was_shown: false + was_shown: false, + model_version: None, }] ); } @@ -1143,7 +1146,8 @@ async fn 
test_current_preferred(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: second_id, reason: EditPredictionRejectReason::CurrentPreferred, - was_shown: false + was_shown: false, + model_version: None, }] ); } @@ -1234,7 +1238,8 @@ async fn test_cancel_earlier_pending_requests(cx: &mut TestAppContext) { &[EditPredictionRejection { request_id: first_id, reason: EditPredictionRejectReason::Canceled, - was_shown: false + was_shown: false, + model_version: None, }] ); } @@ -1364,12 +1369,14 @@ async fn test_cancel_second_on_third_request(cx: &mut TestAppContext) { EditPredictionRejection { request_id: cancelled_id, reason: EditPredictionRejectReason::Canceled, - was_shown: false + was_shown: false, + model_version: None, }, EditPredictionRejection { request_id: first_id, reason: EditPredictionRejectReason::Replaced, - was_shown: false + was_shown: false, + model_version: None, } ] ); @@ -1485,12 +1492,14 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionId("test-1".into()), EditPredictionRejectReason::Discarded, false, + None, cx, ); ep_store.reject_prediction( EditPredictionId("test-2".into()), EditPredictionRejectReason::Canceled, true, + None, cx, ); }); @@ -1508,7 +1517,8 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionRejection { request_id: "test-1".to_string(), reason: EditPredictionRejectReason::Discarded, - was_shown: false + was_shown: false, + model_version: None, } ); assert_eq!( @@ -1516,7 +1526,8 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionRejection { request_id: "test-2".to_string(), reason: EditPredictionRejectReason::Canceled, - was_shown: true + was_shown: true, + model_version: None, } ); @@ -1527,6 +1538,7 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionId(format!("batch-{}", i).into()), EditPredictionRejectReason::Discarded, false, + None, cx, ); } @@ -1558,6 +1570,7 @@ async fn test_rejections_flushing(cx: &mut 
TestAppContext) { EditPredictionId("retry-1".into()), EditPredictionRejectReason::Discarded, false, + None, cx, ); }); @@ -1577,6 +1590,7 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { EditPredictionId("retry-2".into()), EditPredictionRejectReason::Discarded, false, + None, cx, ); }); @@ -1700,6 +1714,7 @@ fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> Predi request_id: Uuid::new_v4().to_string(), editable_range, output: new_excerpt, + model_version: None, } } @@ -1708,6 +1723,7 @@ fn empty_response() -> PredictEditsV3Response { request_id: Uuid::new_v4().to_string(), editable_range: 0..0, output: String::new(), + model_version: None, } } @@ -1837,6 +1853,7 @@ async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) { }, buffer_snapshotted_at: Instant::now(), response_received_at: Instant::now(), + model_version: None, }; cx.update(|cx| { @@ -2034,6 +2051,7 @@ async fn test_edit_prediction_no_spurious_trailing_newline(cx: &mut TestAppConte request_id: Uuid::new_v4().to_string(), output: "hello world\n".to_string(), editable_range: 0..excerpt_length, + model_version: None, }; respond_tx.send(response).unwrap(); @@ -2138,6 +2156,7 @@ async fn make_test_ep_store( request_id: format!("request-{next_request_id}"), editable_range: 0..req.input.cursor_excerpt.len(), output: completion_response.lock().clone(), + model_version: None, }) .unwrap() .into(), diff --git a/crates/edit_prediction/src/fim.rs b/crates/edit_prediction/src/fim.rs index 7ba6c6bef77c5b2229d1b3a4072e8070e5c4a6f1..dda008133d3726f5e7ba32ec05c770878d16585f 100644 --- a/crates/edit_prediction/src/fim.rs +++ b/crates/edit_prediction/src/fim.rs @@ -141,6 +141,7 @@ pub fn request_prediction( output.buffer_snapshotted_at, output.response_received_at, output.inputs, + None, cx, ) .await, diff --git a/crates/edit_prediction/src/mercury.rs b/crates/edit_prediction/src/mercury.rs index 
4187881639d8c363582f7a2c7603f2bb51e09fa7..f3adba55e620e77ffd7bb12b0e950fd4d3f011fc 100644 --- a/crates/edit_prediction/src/mercury.rs +++ b/crates/edit_prediction/src/mercury.rs @@ -218,6 +218,7 @@ impl Mercury { buffer_snapshotted_at, response_received_at, inputs, + None, cx, ) .await, diff --git a/crates/edit_prediction/src/prediction.rs b/crates/edit_prediction/src/prediction.rs index 750b1a435ae4a7a281ef41973e1f6d0d2158445e..9c17f29fe29bc711f6750cf6fe24586067bfc619 100644 --- a/crates/edit_prediction/src/prediction.rs +++ b/crates/edit_prediction/src/prediction.rs @@ -41,6 +41,7 @@ impl EditPredictionResult { buffer_snapshotted_at: Instant, response_received_at: Instant, inputs: ZetaPromptInput, + model_version: Option, cx: &mut AsyncApp, ) -> Self { if edits.is_empty() { @@ -79,6 +80,7 @@ impl EditPredictionResult { buffer: edited_buffer.clone(), buffer_snapshotted_at, response_received_at, + model_version, }), } } @@ -95,6 +97,7 @@ pub struct EditPrediction { pub buffer_snapshotted_at: Instant, pub response_received_at: Instant, pub inputs: zeta_prompt::ZetaPromptInput, + pub model_version: Option, } impl EditPrediction { @@ -150,6 +153,7 @@ mod tests { snapshot: cx.read(|cx| buffer.read(cx).snapshot()), buffer: buffer.clone(), edit_preview, + model_version: None, inputs: ZetaPromptInput { events: vec![], related_files: vec![], diff --git a/crates/edit_prediction/src/sweep_ai.rs b/crates/edit_prediction/src/sweep_ai.rs index 1253916487894d757c74293c21f4ace1c681cd11..5a9fcf0e6ce7bfa5476d6c48245068994178f7bc 100644 --- a/crates/edit_prediction/src/sweep_ai.rs +++ b/crates/edit_prediction/src/sweep_ai.rs @@ -303,6 +303,7 @@ impl SweepAi { buffer_snapshotted_at, response_received_at, inputs, + None, cx, ) .await, diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 41877d10d6e3ede2ad6055e7580400075533a265..25f9900dcba4a8f29f7e1268560bcbb40ded9778 100644 --- a/crates/edit_prediction/src/zeta.rs +++ 
b/crates/edit_prediction/src/zeta.rs @@ -125,7 +125,7 @@ pub fn request_prediction_with_zeta( log::trace!("Sending edit prediction request"); - let (request_id, output_text, usage) = if let Some(custom_settings) = + let (request_id, output_text, model_version, usage) = if let Some(custom_settings) = &custom_server_settings { let max_tokens = custom_settings.max_output_tokens * 4; @@ -158,7 +158,7 @@ pub fn request_prediction_with_zeta( let request_id = EditPredictionId(request_id.into()); let output_text = zeta1::clean_zeta1_model_output(&response_text); - (request_id, output_text, None) + (request_id, output_text, None, None) } else { let prompt = format_zeta_prompt(&prompt_input, zeta_version); let prefill = get_prefill(&prompt_input, zeta_version); @@ -188,7 +188,7 @@ pub fn request_prediction_with_zeta( Some(clean_zeta2_model_output(&output, zeta_version).to_string()) }; - (request_id, output_text, None) + (request_id, output_text, None, None) } } else if let Some(config) = &raw_config { let prompt = format_zeta_prompt(&prompt_input, config.format); @@ -225,7 +225,7 @@ pub fn request_prediction_with_zeta( clean_zeta2_model_output(&output, config.format).to_string() }); - (request_id, output_text, usage) + (request_id, output_text, None, usage) } else { // Use V3 endpoint - server handles model/version selection and suffix stripping let (response, usage) = EditPredictionStore::send_v3_request( @@ -244,8 +244,9 @@ pub fn request_prediction_with_zeta( Some(response.output) }; editable_range_in_excerpt = response.editable_range; + let model_version = response.model_version; - (request_id, output_text, usage) + (request_id, output_text, model_version, usage) }; let received_response_at = Instant::now(); @@ -253,7 +254,7 @@ pub fn request_prediction_with_zeta( log::trace!("Got edit prediction response"); let Some(mut output_text) = output_text else { - return Ok((Some((request_id, None)), usage)); + return Ok((Some((request_id, None, model_version)), usage)); }; // 
Client-side cursor marker processing (applies to both raw and v3 responses) @@ -311,6 +312,7 @@ pub fn request_prediction_with_zeta( full_context_offset_range, editable_range_in_buffer, )), + model_version, )), usage, )) @@ -318,7 +320,7 @@ pub fn request_prediction_with_zeta( }); cx.spawn(async move |this, cx| { - let Some((id, prediction)) = + let Some((id, prediction, model_version)) = EditPredictionStore::handle_api_response(&this, request_task.await, cx)? else { return Ok(None); @@ -392,6 +394,7 @@ pub fn request_prediction_with_zeta( buffer_snapshotted_at, received_response_at, inputs, + model_version, cx, ) .await, @@ -521,6 +524,7 @@ pub(crate) fn edit_prediction_accepted( } let request_id = current_prediction.prediction.id.to_string(); + let model_version = current_prediction.prediction.model_version; let require_auth = custom_accept_url.is_none(); let client = store.client.clone(); let llm_token = store.llm_token.clone(); @@ -540,6 +544,7 @@ pub(crate) fn edit_prediction_accepted( let req = builder.uri(url.as_ref()).body( serde_json::to_string(&AcceptEditPredictionBody { request_id: request_id.clone(), + model_version: model_version.clone(), })? .into(), ); From 1c39e192f1fa83a6d131d4f43d13ade53e8a424d Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Thu, 26 Feb 2026 22:14:40 -0500 Subject: [PATCH 142/548] languages: Reorder type identifier highlights in JavaScript (#49325) Move general type identifier rules before class-specific ones to ensure proper precedence in the syntax highlighting query. Closes #49226. 
Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed an issue where class names were not highlighted correctly in JavaScript files --- crates/languages/src/javascript/highlights.scm | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/languages/src/javascript/highlights.scm b/crates/languages/src/javascript/highlights.scm index 5561dc31d56d52e6b4d6f71c07137537953410f6..5fb31ce100b5884d99d3e941ce6fb67b69ff2cfd 100644 --- a/crates/languages/src/javascript/highlights.scm +++ b/crates/languages/src/javascript/highlights.scm @@ -120,15 +120,15 @@ ; Special identifiers ; +(type_identifier) @type +(predefined_type) @type.builtin + (class_declaration (type_identifier) @type.class) (extends_clause value: (identifier) @type.class) -(type_identifier) @type -(predefined_type) @type.builtin - ([ (identifier) (shorthand_property_identifier) From a217578649b5b35a3cddedd2dd3971e5635faf20 Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Fri, 27 Feb 2026 01:24:34 -0500 Subject: [PATCH 143/548] docs: Point to the right Bash extension repository (#50271) Closes #50261. 
Before you mark this PR as ready for review, make sure that you have: - ~[ ] Added a solid test coverage and/or screenshots from doing manual testing~ N/A - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- docs/src/languages/bash.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/src/languages/bash.md b/docs/src/languages/bash.md index ce398a7237dd3131ab0b5e0bac10ded14d62f218..c801b55054c9939f5e124aca76dc5e6b80f008d4 100644 --- a/docs/src/languages/bash.md +++ b/docs/src/languages/bash.md @@ -5,8 +5,7 @@ description: "Configure Bash language support in Zed, including language servers # Bash -Bash language support in Zed is provided by the community-maintained [Basher extension](https://github.com/d1y/bash.zed). -Report issues to: [https://github.com/d1y/bash.zed/issues](https://github.com/d1y/bash.zed/issues) +Bash support is available through the [Bash extension](https://github.com/zed-extensions/bash). - Tree-sitter: [tree-sitter/tree-sitter-bash](https://github.com/tree-sitter/tree-sitter-bash) - Language Server: [bash-lsp/bash-language-server](https://github.com/bash-lsp/bash-language-server) From f0535ddc3ce1027368ee469e1c0b03bb90ec60ad Mon Sep 17 00:00:00 2001 From: John Tur Date: Fri, 27 Feb 2026 01:31:20 -0500 Subject: [PATCH 144/548] Relax device limits for WGPU (#50270) Plus some minor cleanup. Release Notes: - Fixed GPU acceleration not working on certain Linux devices. 
--- crates/gpui_wgpu/src/wgpu_context.rs | 69 ++++++++------------------- crates/gpui_wgpu/src/wgpu_renderer.rs | 7 ++- crates/zlog/src/filter.rs | 2 - 3 files changed, 26 insertions(+), 52 deletions(-) diff --git a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index bcf0b93454ea64c45d1f453c1107a23e6a9cc962..3425aa905afdb30d8dde4d2ecf50161a50f779a0 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ b/crates/gpui_wgpu/src/wgpu_context.rs @@ -52,7 +52,8 @@ impl WgpuContext { adapter.get_info().backend ); - let (device, queue, dual_source_blending) = Self::create_device(&adapter)?; + let (device, queue, dual_source_blending) = + pollster::block_on(Self::create_device(&adapter))?; Ok(Self { instance, @@ -80,29 +81,36 @@ impl WgpuContext { }) .await .map_err(|e| anyhow::anyhow!("Failed to request GPU adapter: {e}"))?; - Self::create_context(instance, adapter).await - } - #[cfg(target_family = "wasm")] - async fn create_context( - instance: wgpu::Instance, - adapter: wgpu::Adapter, - ) -> anyhow::Result { log::info!( "Selected GPU adapter: {:?} ({:?})", adapter.get_info().name, adapter.get_info().backend ); - let dual_source_blending_available = adapter + let (device, queue, dual_source_blending) = Self::create_device(&adapter).await?; + + Ok(Self { + instance, + adapter, + device: Arc::new(device), + queue: Arc::new(queue), + dual_source_blending, + }) + } + + async fn create_device( + adapter: &wgpu::Adapter, + ) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> { + let dual_source_blending = adapter .features() .contains(wgpu::Features::DUAL_SOURCE_BLENDING); let mut required_features = wgpu::Features::empty(); - if dual_source_blending_available { + if dual_source_blending { required_features |= wgpu::Features::DUAL_SOURCE_BLENDING; } else { - log::info!( + log::warn!( "Dual-source blending not available on this GPU. \ Subpixel text antialiasing will be disabled." 
); @@ -112,7 +120,7 @@ impl WgpuContext { .request_device(&wgpu::DeviceDescriptor { label: Some("gpui_device"), required_features, - required_limits: wgpu::Limits::default(), + required_limits: wgpu::Limits::downlevel_defaults(), memory_hints: wgpu::MemoryHints::MemoryUsage, trace: wgpu::Trace::Off, experimental_features: wgpu::ExperimentalFeatures::disabled(), @@ -120,13 +128,7 @@ impl WgpuContext { .await .map_err(|e| anyhow::anyhow!("Failed to create wgpu device: {e}"))?; - Ok(Self { - instance, - adapter, - device: Arc::new(device), - queue: Arc::new(queue), - dual_source_blending: dual_source_blending_available, - }) + Ok((device, queue, dual_source_blending)) } #[cfg(not(target_family = "wasm"))] @@ -154,35 +156,6 @@ impl WgpuContext { Ok(()) } - #[cfg(not(target_family = "wasm"))] - fn create_device(adapter: &wgpu::Adapter) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> { - let dual_source_blending_available = adapter - .features() - .contains(wgpu::Features::DUAL_SOURCE_BLENDING); - - let mut required_features = wgpu::Features::empty(); - if dual_source_blending_available { - required_features |= wgpu::Features::DUAL_SOURCE_BLENDING; - } else { - log::warn!( - "Dual-source blending not available on this GPU. \ - Subpixel text antialiasing will be disabled." 
- ); - } - - let (device, queue) = pollster::block_on(adapter.request_device(&wgpu::DeviceDescriptor { - label: Some("gpui_device"), - required_features, - required_limits: wgpu::Limits::default(), - memory_hints: wgpu::MemoryHints::MemoryUsage, - trace: wgpu::Trace::Off, - experimental_features: wgpu::ExperimentalFeatures::disabled(), - })) - .map_err(|e| anyhow::anyhow!("Failed to create wgpu device: {e}"))?; - - Ok((device, queue, dual_source_blending_available)) - } - #[cfg(not(target_family = "wasm"))] async fn select_adapter( instance: &wgpu::Instance, diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 0f0a6c4544b8b46c82d35b5f8804accc3a943c53..489f354c691c280a5331e5a7765c9d626064eb9c 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -106,6 +106,7 @@ pub struct WgpuRenderer { path_globals_bind_group: wgpu::BindGroup, instance_buffer: wgpu::Buffer, instance_buffer_capacity: u64, + max_buffer_size: u64, storage_buffer_alignment: u64, path_intermediate_texture: wgpu::Texture, path_intermediate_view: wgpu::TextureView, @@ -285,6 +286,7 @@ impl WgpuRenderer { mapped_at_creation: false, }); + let max_buffer_size = device.limits().max_buffer_size; let storage_buffer_alignment = device.limits().min_storage_buffer_offset_alignment as u64; let initial_instance_buffer_capacity = 2 * 1024 * 1024; let instance_buffer = device.create_buffer(&wgpu::BufferDescriptor { @@ -375,6 +377,7 @@ impl WgpuRenderer { path_globals_bind_group, instance_buffer, instance_buffer_capacity: initial_instance_buffer_capacity, + max_buffer_size, storage_buffer_alignment, path_intermediate_texture, path_intermediate_view, @@ -1070,7 +1073,7 @@ impl WgpuRenderer { if overflow { drop(encoder); - if self.instance_buffer_capacity >= 256 * 1024 * 1024 { + if self.instance_buffer_capacity >= self.max_buffer_size { log::error!( "instance buffer size grew too large: {}", self.instance_buffer_capacity @@ -1379,7 
+1382,7 @@ impl WgpuRenderer { } fn grow_instance_buffer(&mut self) { - let new_capacity = self.instance_buffer_capacity * 2; + let new_capacity = (self.instance_buffer_capacity * 2).min(self.max_buffer_size); log::info!("increased instance buffer size to {}", new_capacity); self.instance_buffer = self.device.create_buffer(&wgpu::BufferDescriptor { label: Some("instance_buffer"), diff --git a/crates/zlog/src/filter.rs b/crates/zlog/src/filter.rs index c6e51fa40340b4aad7efb017c961ce8891ab776e..710ddf761eb6eb1d0c164522903a9525d12de2a4 100644 --- a/crates/zlog/src/filter.rs +++ b/crates/zlog/src/filter.rs @@ -38,8 +38,6 @@ const DEFAULT_FILTERS: &[(&str, log::LevelFilter)] = &[ #[cfg(any(target_os = "linux", target_os = "freebsd"))] ("zbus", log::LevelFilter::Warn), #[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))] - ("wgpu", log::LevelFilter::Warn), - #[cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))] ("naga::back::spv::writer", log::LevelFilter::Warn), // usvg prints a lot of warnings on rendering an SVG with partial errors, which // can happen a lot with the SVG preview From 7e582468db720adda54b46e11164edbe69b87142 Mon Sep 17 00:00:00 2001 From: John Tur Date: Fri, 27 Feb 2026 02:59:06 -0500 Subject: [PATCH 145/548] Improve GPU selection on Linux (#50274) Follow-up to https://github.com/zed-industries/zed/pull/50270 Release Notes: - N/A --- crates/gpui_wgpu/src/wgpu_context.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index 3425aa905afdb30d8dde4d2ecf50161a50f779a0..38a27d0623c821144a2b0ba4ed5cadaaceb03812 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ b/crates/gpui_wgpu/src/wgpu_context.rs @@ -75,7 +75,7 @@ impl WgpuContext { let adapter = instance .request_adapter(&wgpu::RequestAdapterOptions { - power_preference: wgpu::PowerPreference::None, + power_preference: 
wgpu::PowerPreference::HighPerformance, compatible_surface: None, force_fallback_adapter: false, }) @@ -214,7 +214,7 @@ impl WgpuContext { instance .request_adapter(&wgpu::RequestAdapterOptions { - power_preference: wgpu::PowerPreference::None, + power_preference: wgpu::PowerPreference::HighPerformance, compatible_surface, force_fallback_adapter: false, }) From 7f09bff76a51a337efe1bcac7a68ed517734f5f4 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 27 Feb 2026 10:18:03 +0100 Subject: [PATCH 146/548] gpui: Reduce amount of wasm cfgs in gpui (#50278) Release Notes: - N/A *or* Added/Fixed/Improved ... --- Cargo.lock | 2 +- crates/gpui/Cargo.toml | 3 +-- crates/gpui/src/app.rs | 11 +---------- crates/gpui/src/app/test_context.rs | 8 +------- crates/gpui/src/elements/img.rs | 9 --------- crates/gpui/src/executor.rs | 8 +------- crates/gpui/src/gpui.rs | 1 - crates/gpui/src/platform.rs | 2 +- crates/http_client/Cargo.toml | 10 ++++++---- crates/http_client/src/http_client.rs | 2 ++ crates/reqwest_client/Cargo.toml | 6 ++++-- crates/reqwest_client/src/reqwest_client.rs | 2 +- 12 files changed, 19 insertions(+), 45 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 85cf10d661d68a535cce85904d4ae9c3aedb651f..2d8b9858deb088f280e348f7f170fe720fd480b7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14242,13 +14242,13 @@ dependencies = [ "bytes 1.11.1", "futures 0.3.31", "gpui", + "gpui_util", "http_client", "http_client_tls", "log", "regex", "serde", "tokio", - "util", "zed-reqwest", ] diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index fbc8a10571d73ff34f8e37f4591b43c0fdaaab1f..4bd9510eac1710554f8eec52f22609db31c531ad 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -55,6 +55,7 @@ etagere = "0.2" futures.workspace = true futures-concurrency.workspace = true gpui_macros.workspace = true +http_client.workspace = true image.workspace = true inventory.workspace = true itertools.workspace = true @@ -103,8 +104,6 @@ web-time.workspace = true 
getrandom = { version = "0.3.4", features = ["wasm_js"] } uuid = { workspace = true, features = ["js"] } -[target.'cfg(not(target_family = "wasm"))'.dependencies] -http_client.workspace = true [target.'cfg(target_os = "macos")'.dependencies] block = "0.1" diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index dd81d9166d471632b62725f6ad1ce4faeca18c59..1bd5cd6b3c6a74ee840ac93b08554a82b1f050fa 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -27,7 +27,6 @@ use collections::{FxHashMap, FxHashSet, HashMap, VecDeque}; pub use context::*; pub use entity_map::*; use gpui_util::{ResultExt, debug_panic}; -#[cfg(not(target_family = "wasm"))] use http_client::{HttpClient, Url}; use smallvec::SmallVec; #[cfg(any(test, feature = "test-support"))] @@ -139,7 +138,6 @@ impl Application { Self(App::new_app( platform, Arc::new(()), - #[cfg(not(target_family = "wasm"))] Arc::new(NullHttpClient), )) } @@ -155,7 +153,6 @@ impl Application { } /// Sets the HTTP client for the application. 
- #[cfg(not(target_family = "wasm"))] pub fn with_http_client(self, http_client: Arc) -> Self { let mut context_lock = self.0.borrow_mut(); context_lock.http_client = http_client; @@ -585,7 +582,6 @@ pub struct App { pub(crate) loading_assets: FxHashMap<(TypeId, u64), Box>, asset_source: Arc, pub(crate) svg_renderer: SvgRenderer, - #[cfg(not(target_family = "wasm"))] http_client: Arc, pub(crate) globals_by_type: FxHashMap>, pub(crate) entities: EntityMap, @@ -642,7 +638,7 @@ impl App { pub(crate) fn new_app( platform: Rc, asset_source: Arc, - #[cfg(not(target_family = "wasm"))] http_client: Arc, + http_client: Arc, ) -> Rc { let background_executor = platform.background_executor(); let foreground_executor = platform.foreground_executor(); @@ -672,7 +668,6 @@ impl App { svg_renderer: SvgRenderer::new(asset_source.clone()), loading_assets: Default::default(), asset_source, - #[cfg(not(target_family = "wasm"))] http_client, globals_by_type: FxHashMap::default(), entities, @@ -1281,13 +1276,11 @@ impl App { } /// Returns the HTTP client for the application. - #[cfg(not(target_family = "wasm"))] pub fn http_client(&self) -> Arc { self.http_client.clone() } /// Sets the HTTP client for the application. 
- #[cfg(not(target_family = "wasm"))] pub fn set_http_client(&mut self, new_client: Arc) { self.http_client = new_client; } @@ -2512,10 +2505,8 @@ pub struct KeystrokeEvent { pub context_stack: Vec, } -#[cfg(not(target_family = "wasm"))] struct NullHttpClient; -#[cfg(not(target_family = "wasm"))] impl HttpClient for NullHttpClient { fn send( &self, diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 7ce7f22e3c3cfef7beb531ce9443a172397e2e0f..dd4f37ed2a561f4259b41241c7cf4c83790a2b2f 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -120,16 +120,10 @@ impl TestAppContext { let foreground_executor = ForegroundExecutor::new(arc_dispatcher); let platform = TestPlatform::new(background_executor.clone(), foreground_executor.clone()); let asset_source = Arc::new(()); - #[cfg(not(target_family = "wasm"))] let http_client = http_client::FakeHttpClient::with_404_response(); let text_system = Arc::new(TextSystem::new(platform.text_system())); - let app = App::new_app( - platform.clone(), - asset_source, - #[cfg(not(target_family = "wasm"))] - http_client, - ); + let app = App::new_app(platform.clone(), asset_source, http_client); app.borrow_mut().mode = GpuiMode::test(); Self { diff --git a/crates/gpui/src/elements/img.rs b/crates/gpui/src/elements/img.rs index 59dd9de5fdfadf66fba622da6921b468726f439c..875f9e6dc1cc7d248f9e70488e52480dcca53fa3 100644 --- a/crates/gpui/src/elements/img.rs +++ b/crates/gpui/src/elements/img.rs @@ -594,7 +594,6 @@ impl Asset for ImageAssetLoader { source: Self::Source, cx: &mut App, ) -> impl Future + Send + 'static { - #[cfg(not(target_family = "wasm"))] let client = cx.http_client(); // TODO: Can we make SVGs always rescale? 
// let scale_factor = cx.scale_factor(); @@ -603,7 +602,6 @@ impl Asset for ImageAssetLoader { async move { let bytes = match source.clone() { Resource::Path(uri) => fs::read(uri.as_ref())?, - #[cfg(not(target_family = "wasm"))] Resource::Uri(uri) => { use anyhow::Context as _; use futures::AsyncReadExt as _; @@ -626,12 +624,6 @@ impl Asset for ImageAssetLoader { } body } - #[cfg(target_family = "wasm")] - Resource::Uri(_) => { - return Err(ImageCacheError::Other(Arc::new(anyhow::anyhow!( - "Uri resources are not supported on wasm" - )))); - } Resource::Embedded(path) => { let data = asset_source.load(&path).ok().flatten(); if let Some(data) = data { @@ -722,7 +714,6 @@ pub enum ImageCacheError { #[error("IO error: {0}")] Io(Arc), /// An error that occurred while processing an image. - #[cfg(not(target_family = "wasm"))] #[error("unexpected http status for {uri}: {status}, body: {body}")] BadStatus { /// The URI of the image. diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index d48be9dc30811cd5728fc07081c1d11d3405ec95..31c1ed80b92efb5dfa9ead6dcaf9050fe68ea399 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -564,15 +564,9 @@ mod test { let platform = TestPlatform::new(background_executor.clone(), foreground_executor); let asset_source = Arc::new(()); - #[cfg(not(target_family = "wasm"))] let http_client = http_client::FakeHttpClient::with_404_response(); - let app = App::new_app( - platform, - asset_source, - #[cfg(not(target_family = "wasm"))] - http_client, - ); + let app = App::new_app(platform, asset_source, http_client); (dispatcher, background_executor, app) } diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index 6e592655162471e5501030152a11bf67f3744578..af3fb51ce51f7df570a8e28faad23018ed7dc778 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -88,7 +88,6 @@ pub use geometry::*; pub use global::*; pub use gpui_macros::{AppContext, IntoElement, Render, VisualContext, 
register_action, test}; pub use gpui_util::arc_cow::ArcCow; -#[cfg(not(target_family = "wasm"))] pub use http_client; pub use input::*; pub use inspector::*; diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 885e031cba3020b16fc6391a52bbcf49e9022707..a6714ff250f2f854c51d30bfea5e2e5911ce60ee 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -229,7 +229,7 @@ pub trait Platform: 'static { } /// A handle to a platform's display, e.g. a monitor or laptop screen. -pub trait PlatformDisplay: Send + Sync + Debug { +pub trait PlatformDisplay: Debug { /// Get the ID for this display fn id(&self) -> DisplayId; diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index 177f8639ca1a5d75bd0130979f4d550e3622a1b4..6273d773d8c4651fd292555e18d2a2462e6358df 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -19,8 +19,6 @@ doctest = true [dependencies] anyhow.workspace = true async-compression.workspace = true -async-fs.workspace = true -async-tar.workspace = true bytes.workspace = true derive_more.workspace = true futures.workspace = true @@ -31,7 +29,11 @@ parking_lot.workspace = true serde.workspace = true serde_json.workspace = true serde_urlencoded.workspace = true -sha2.workspace = true -tempfile.workspace = true url.workspace = true + +[target.'cfg(not(target_family = "wasm"))'.dependencies] util.workspace = true +async-fs.workspace = true +async-tar.workspace = true +sha2.workspace = true +tempfile.workspace = true diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 1182ef74ca3d59a2d59419e185ff5bd673c5d505..5cf25a8277872ba3c6d502565e8057623b267d42 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -1,5 +1,7 @@ mod async_body; +#[cfg(not(target_family = "wasm"))] pub mod github; +#[cfg(not(target_family = "wasm"))] pub mod github_download; pub use anyhow::{Result, anyhow}; diff 
--git a/crates/reqwest_client/Cargo.toml b/crates/reqwest_client/Cargo.toml index 2f23ed3072f4d21d1ff053cb829931ae407f6d5b..41fcd1f5d2f8ca1c78b0a2261a7c48566999e0de 100644 --- a/crates/reqwest_client/Cargo.toml +++ b/crates/reqwest_client/Cargo.toml @@ -20,13 +20,15 @@ anyhow.workspace = true bytes.workspace = true futures.workspace = true http_client.workspace = true -http_client_tls.workspace = true serde.workspace = true log.workspace = true tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } regex.workspace = true reqwest.workspace = true -util.workspace = true +gpui_util.workspace = true + +[target.'cfg(not(target_family = "wasm"))'.dependencies] +http_client_tls.workspace = true [dev-dependencies] gpui.workspace = true diff --git a/crates/reqwest_client/src/reqwest_client.rs b/crates/reqwest_client/src/reqwest_client.rs index 7c8ab84bd40fa76075a8cd377e942a5c73094b22..3239a48904a6c4e71c5b94a7de1b78426da7c51d 100644 --- a/crates/reqwest_client/src/reqwest_client.rs +++ b/crates/reqwest_client/src/reqwest_client.rs @@ -2,7 +2,7 @@ use std::error::Error; use std::sync::{LazyLock, OnceLock}; use std::{borrow::Cow, mem, pin::Pin, task::Poll, time::Duration}; -use util::defer; +use gpui_util::defer; use anyhow::anyhow; use bytes::{BufMut, Bytes, BytesMut}; From 365e4157b71e1faaf63f6a4d87db1116ccd30f2d Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 27 Feb 2026 07:04:39 -0300 Subject: [PATCH 147/548] agent_ui: Add more improvements to the subagent UI (#50260) This PR includes the following adjustments: - Add divider in the full screen view to expose what part of a subagent's output is actually sent to the parent model - Auto-expand the subagent card if there's only one running on a turn - Adjust errors display within the subagent card Release Notes: - N/A --- crates/acp_thread/src/acp_thread.rs | 17 +- crates/agent_ui/src/agent_diff.rs | 2 +- crates/agent_ui/src/connection_view.rs | 9 +- 
.../src/connection_view/thread_view.rs | 235 +++++++++++------- 4 files changed, 174 insertions(+), 89 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index fea3236e1697e3af189da2e6a0f14d70a6f1c6f6..be681a846f7963950370095f50095160649d1fcd 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -102,6 +102,7 @@ impl UserMessage { pub struct AssistantMessage { pub chunks: Vec, pub indented: bool, + pub is_subagent_output: bool, } impl AssistantMessage { @@ -983,7 +984,7 @@ pub enum AcpThreadEvent { ToolAuthorizationReceived(acp::ToolCallId), Retry(RetryStatus), SubagentSpawned(acp::SessionId), - Stopped, + Stopped(acp::StopReason), Error, LoadError(LoadError), PromptCapabilitiesUpdated, @@ -1425,6 +1426,7 @@ impl AcpThread { && let AgentThreadEntry::AssistantMessage(AssistantMessage { chunks, indented: existing_indented, + is_subagent_output: _, }) = last_entry && *existing_indented == indented { @@ -1456,6 +1458,7 @@ impl AcpThread { AgentThreadEntry::AssistantMessage(AssistantMessage { chunks: vec![chunk], indented, + is_subagent_output: false, }), cx, ); @@ -2033,7 +2036,7 @@ impl AcpThread { } } - cx.emit(AcpThreadEvent::Stopped); + cx.emit(AcpThreadEvent::Stopped(r.stop_reason)); Ok(Some(r)) } Err(e) => { @@ -2549,6 +2552,16 @@ impl AcpThread { self.terminals.insert(terminal_id.clone(), entity.clone()); entity } + + pub fn mark_as_subagent_output(&mut self, cx: &mut Context) { + for entry in self.entries.iter_mut().rev() { + if let AgentThreadEntry::AssistantMessage(assistant_message) = entry { + assistant_message.is_subagent_output = true; + cx.notify(); + return; + } + } + } } fn markdown_for_raw_output( diff --git a/crates/agent_ui/src/agent_diff.rs b/crates/agent_ui/src/agent_diff.rs index b02af97881cff92714641b7f4e3fd10601e0685f..8fa68b0c510c086d7c6e224b24675e6f19344b82 100644 --- a/crates/agent_ui/src/agent_diff.rs +++ b/crates/agent_ui/src/agent_diff.rs @@ -1403,7 
+1403,7 @@ impl AgentDiff { self.update_reviewing_editors(workspace, window, cx); } } - AcpThreadEvent::Stopped => { + AcpThreadEvent::Stopped(_) => { self.update_reviewing_editors(workspace, window, cx); } AcpThreadEvent::Error | AcpThreadEvent::LoadError(_) | AcpThreadEvent::Refusal => { diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index df06ed2bae7f77cfb366f3499097ab8c43bdf78c..f5efa8aa2834829630bd60dd3ef012a92a33cb17 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -182,7 +182,7 @@ impl Conversation { | AcpThreadEvent::EntriesRemoved(_) | AcpThreadEvent::Retry(_) | AcpThreadEvent::SubagentSpawned(_) - | AcpThreadEvent::Stopped + | AcpThreadEvent::Stopped(_) | AcpThreadEvent::Error | AcpThreadEvent::LoadError(_) | AcpThreadEvent::PromptCapabilitiesUpdated @@ -1190,13 +1190,18 @@ impl ConnectionView { }); } } - AcpThreadEvent::Stopped => { + AcpThreadEvent::Stopped(stop_reason) => { if let Some(active) = self.thread_view(&thread_id) { active.update(cx, |active, _cx| { active.thread_retry_status.take(); }); } if is_subagent { + if *stop_reason == acp::StopReason::EndTurn { + thread.update(cx, |thread, cx| { + thread.mark_as_subagent_output(cx); + }); + } return; } diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 9578a0752b45ea48477f4fab7935f670f84c25d5..777a54312e8d4c35a100c6c1f7e5ac446613c4b9 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -3711,6 +3711,7 @@ impl ThreadView { AgentThreadEntry::AssistantMessage(AssistantMessage { chunks, indented: _, + is_subagent_output: _, }) => { let mut is_blank = true; let is_last = entry_ix + 1 == total_entries; @@ -3783,6 +3784,42 @@ impl ThreadView { .into_any(), }; + let is_subagent_output = self.is_subagent() + && matches!(entry, AgentThreadEntry::AssistantMessage(msg) if 
msg.is_subagent_output); + + let primary = if is_subagent_output { + v_flex() + .w_full() + .child( + h_flex() + .id("subagent_output") + .px_5() + .py_1() + .gap_2() + .child(Divider::horizontal()) + .child( + h_flex() + .gap_1() + .child( + Icon::new(IconName::ForwardArrowUp) + .color(Color::Muted) + .size(IconSize::Small), + ) + .child( + Label::new("Subagent Output") + .size(LabelSize::Custom(self.tool_name_font_size())) + .color(Color::Muted), + ), + ) + .child(Divider::horizontal()) + .tooltip(Tooltip::text("Everything below this line was sent as output from this subagent to the main agent.")), + ) + .child(primary) + .into_any_element() + } else { + primary + }; + let primary = if is_indented { let line_top = if is_first_indented { rems_from_px(-12.0) @@ -6397,12 +6434,9 @@ impl ThreadView { let session_id = thread.read(cx).session_id().clone(); this.when(is_expanded, |this| { this.child(self.render_subagent_expanded_content( - active_session_id, - entry_ix, thread_view, is_running, tool_call, - focus_handle, window, cx, )) @@ -6442,12 +6476,9 @@ impl ThreadView { fn render_subagent_expanded_content( &self, - active_session_id: &acp::SessionId, - entry_ix: usize, thread_view: &Entity, is_running: bool, tool_call: &ToolCall, - focus_handle: &FocusHandle, window: &Window, cx: &Context, ) -> impl IntoElement { @@ -6456,103 +6487,139 @@ impl ThreadView { let subagent_view = thread_view.read(cx); let session_id = subagent_view.thread.read(cx).session_id().clone(); - let base_container = || { - div() - .id(format!("subagent-content-{}", session_id)) - .relative() - .w_full() - .h_56() - .border_t_1() - .border_color(self.tool_card_border_color(cx)) - .overflow_hidden() - }; + let is_canceled_or_failed = matches!( + tool_call.status, + ToolCallStatus::Canceled | ToolCallStatus::Failed | ToolCallStatus::Rejected + ); let editor_bg = cx.theme().colors().editor_background; - let overlay = || { + let overlay = { div() .absolute() .inset_0() .size_full() 
.bg(linear_gradient( 180., - linear_color_stop(editor_bg, 0.), + linear_color_stop(editor_bg.opacity(0.5), 0.), linear_color_stop(editor_bg.opacity(0.), 0.1), )) .block_mouse_except_scroll() }; - let show_thread_entries = is_running || tool_call.content.is_empty(); - - if show_thread_entries { - let scroll_handle = self - .subagent_scroll_handles - .borrow_mut() - .entry(session_id.clone()) - .or_default() - .clone(); - if is_running { - scroll_handle.scroll_to_bottom(); - } + let entries = subagent_view.thread.read(cx).entries(); + let total_entries = entries.len(); + let start_ix = total_entries.saturating_sub(MAX_PREVIEW_ENTRIES); - let entries = subagent_view.thread.read(cx).entries(); - let total_entries = entries.len(); - let start_ix = total_entries.saturating_sub(MAX_PREVIEW_ENTRIES); + let scroll_handle = self + .subagent_scroll_handles + .borrow_mut() + .entry(session_id.clone()) + .or_default() + .clone(); + if is_running { + scroll_handle.scroll_to_bottom(); + } - let rendered_entries: Vec = entries[start_ix..] - .iter() - .enumerate() - .map(|(i, entry)| { - let actual_ix = start_ix + i; - subagent_view.render_entry(actual_ix, total_entries + 1, entry, window, cx) - }) - .collect(); + let rendered_entries: Vec = entries[start_ix..] 
+ .iter() + .enumerate() + .map(|(i, entry)| { + let actual_ix = start_ix + i; + subagent_view.render_entry(actual_ix, total_entries + 1, entry, window, cx) + }) + .collect(); - base_container() - .child( - div() - .id(format!("subagent-entries-{}", session_id)) - .size_full() - .track_scroll(&scroll_handle) - .pb_1() - .children(rendered_entries), - ) - .child(overlay()) - .into_any_element() - } else { - base_container() - .child( - v_flex() - .id(format!("subagent-done-content-{}", session_id)) - .size_full() - .justify_end() - .children(tool_call.content.iter().enumerate().map( - |(content_ix, content)| { - div().p_2().child(self.render_tool_call_content( - active_session_id, - entry_ix, - content, - content_ix, - tool_call, - true, - false, - matches!( - tool_call.status, - ToolCallStatus::Failed - | ToolCallStatus::Rejected - | ToolCallStatus::Canceled - ), - focus_handle, - window, - cx, - )) - }, - )), - ) - .child(overlay()) - .into_any_element() + let error_message = + self.subagent_error_message(subagent_view, &tool_call.status, tool_call, cx); + + let parent_thread = self.thread.read(cx); + let mut started_subagent_count = 0usize; + let mut turn_has_our_call = false; + for entry in parent_thread.entries().iter() { + match entry { + AgentThreadEntry::UserMessage(_) => { + if turn_has_our_call { + break; + } + started_subagent_count = 0; + turn_has_our_call = false; + } + AgentThreadEntry::ToolCall(tc) + if tc.is_subagent() && !matches!(tc.status, ToolCallStatus::Pending) => + { + started_subagent_count += 1; + if tc.id == tool_call.id { + turn_has_our_call = true; + } + } + _ => {} + } } + + v_flex() + .relative() + .w_full() + .border_t_1() + .when(is_canceled_or_failed, |this| this.border_dashed()) + .border_color(self.tool_card_border_color(cx)) + .overflow_hidden() + .child( + div() + .id(format!("subagent-entries-{}", session_id)) + .flex_1() + .min_h_0() + .pb_1() + .overflow_hidden() + .track_scroll(&scroll_handle) + .children(rendered_entries), + 
) + .when_some(error_message, |this, message| { + this.child( + Callout::new() + .severity(Severity::Error) + .icon(IconName::XCircle) + .title(message), + ) + }) + .when(started_subagent_count > 1, |this| { + this.h_56().child(overlay) + }) + .into_any_element() } + fn subagent_error_message( + &self, + subagent_view: &ThreadView, + status: &ToolCallStatus, + tool_call: &ToolCall, + cx: &App, + ) -> Option { + if matches!(status, ToolCallStatus::Canceled | ToolCallStatus::Rejected) { + return None; + } + + subagent_view + .thread_error + .as_ref() + .and_then(|e| match e { + ThreadError::Refusal => Some("The agent refused to respond to this prompt.".into()), + ThreadError::Other { message, .. } => Some(message.clone()), + ThreadError::PaymentRequired | ThreadError::AuthenticationRequired(_) => None, + }) + .or_else(|| { + tool_call.content.iter().find_map(|content| { + if let ToolCallContent::ContentBlock(block) = content { + if let acp_thread::ContentBlock::Markdown { markdown } = block { + let source = markdown.read(cx).source().to_string(); + if !source.is_empty() { + return Some(SharedString::from(source)); + } + } + } + None + }) + }) + } fn render_rules_item(&self, cx: &Context) -> Option { let project_context = self .as_native_thread(cx)? 
From 0fad478607e397c1bb4c9f38806b4cf97275e742 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Fri, 27 Feb 2026 16:20:27 +0530 Subject: [PATCH 148/548] markdown_preview: Fix mermaid diagrams failing to render with empty subgraphs (#50280) Upgrade mermaid-rs-renderer to 9d8360d9cea10dc4bc86d7b8012cc6e9656e6cf2 Release Notes: - N/A --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2d8b9858deb088f280e348f7f170fe720fd480b7..06def4875c2517965381840faabaab45126fdea2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10296,7 +10296,7 @@ dependencies = [ [[package]] name = "mermaid-rs-renderer" version = "0.2.0" -source = "git+https://github.com/zed-industries/mermaid-rs-renderer?branch=fix-font-family-xml-escaping#d91961aa90bc7b0c09c87a13c91d48e2f05c468d" +source = "git+https://github.com/zed-industries/mermaid-rs-renderer?rev=9d8360d9cea10dc4bc86d7b8012cc6e9656e6cf2#9d8360d9cea10dc4bc86d7b8012cc6e9656e6cf2" dependencies = [ "anyhow", "fontdb 0.16.2", diff --git a/Cargo.toml b/Cargo.toml index ac80f187e6ffc16a95753e83ae7a333c6bc9ffdb..cb388e08a5b0c403a15dea8ebb8d0905cbcea316 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -370,7 +370,7 @@ markdown_preview = { path = "crates/markdown_preview" } svg_preview = { path = "crates/svg_preview" } media = { path = "crates/media" } menu = { path = "crates/menu" } -mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", branch = "fix-font-family-xml-escaping", default-features = false } +mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", rev = "9d8360d9cea10dc4bc86d7b8012cc6e9656e6cf2", default-features = false } migrator = { path = "crates/migrator" } mistral = { path = "crates/mistral" } multi_buffer = { path = "crates/multi_buffer" } From 96abd034a91deab95b68883d09c2ff564edce823 Mon Sep 17 00:00:00 2001 From: Wuji Chen Date: Fri, 27 Feb 2026 19:28:34 +0800 Subject: [PATCH 149/548] Fix 
Cmd+click navigating to file instead of definition (#49012) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary - Tighten `link_pattern_file_candidates` regex from `\(([^)]*)\)` to `]\(([^)]*)\)` so only Markdown link syntax `[title](path)` triggers path extraction from parentheses - Prevents function call arguments like `do_work(file2)` from being incorrectly resolved as file paths, which preempted LSP go-to-definition Closes #48938 ## Test plan - [x] `cargo test -p editor hover_links` — all 12 tests pass - [x] New unit tests verify: function calls don't extract arguments as file candidates; Markdown links still extract correctly 🤖 Generated with [Claude Code](https://claude.com/claude-code) Release Notes: - Fixed Cmd+click navigating to file instead of definition in certain cases Co-authored-by: Claude Opus 4.6 --- crates/editor/src/hover_links.rs | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index a2f56f625d9553e81c9de4abbe21451982cfd17e..d4877a5f1986685bea37f243edf4ac8bbdfdf9f5 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -673,7 +673,7 @@ pub(crate) async fn find_file( // (literally, [LinkTitle](link_file.txt)) as a candidate. fn link_pattern_file_candidates(candidate: &str) -> Vec<(String, Range)> { static MD_LINK_REGEX: LazyLock = - LazyLock::new(|| Regex::new(r"\(([^)]*)\)").expect("Failed to create REGEX")); + LazyLock::new(|| Regex::new(r"]\(([^)]*)\)").expect("Failed to create REGEX")); let candidate_len = candidate.len(); @@ -1444,14 +1444,26 @@ mod tests { candidates, vec!["LinkTitle](link\\ _file.txt)", "link\\ _file.txt",] ); - // - // Square brackets not strictly necessary + // Parentheses without preceding `]` should not extract inner content, + // to avoid matching function calls like `do_work(file2)` as file paths. 
let candidates: Vec = link_pattern_file_candidates("(link_file.txt)") .into_iter() .map(|(c, _)| c) .collect(); + assert_eq!(candidates, vec!["(link_file.txt)"]); - assert_eq!(candidates, vec!["(link_file.txt)", "link_file.txt",]); + let candidates: Vec = link_pattern_file_candidates("do_work(file2);") + .into_iter() + .map(|(c, _)| c) + .collect(); + assert_eq!(candidates, vec!["do_work(file2);"]); + + // Markdown links should still extract the path + let candidates: Vec = link_pattern_file_candidates("](readme.md)") + .into_iter() + .map(|(c, _)| c) + .collect(); + assert_eq!(candidates, vec!["](readme.md)", "readme.md"]); // No nesting let candidates: Vec = From 511be9a3ffa032da6bab82ddfdd2e492c68298e3 Mon Sep 17 00:00:00 2001 From: Lena <241371603+zelenenka@users.noreply.github.com> Date: Fri, 27 Feb 2026 12:56:39 +0100 Subject: [PATCH 150/548] Fix version reporting for duplicates bot (#50286) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Obviously™, we add the bot-commented issues onto the tracking board at the time of the issues getting closed, which is very much not the same time as when the bot commented. We could very well be adding the issue with a v1 bot comment to the board when v2 is already deployed. This commit stops messing up those stats. 
Release Notes: - N/A --- ...ithub-track-duplicate-bot-effectiveness.py | 53 +++++++++++++------ 1 file changed, 36 insertions(+), 17 deletions(-) diff --git a/script/github-track-duplicate-bot-effectiveness.py b/script/github-track-duplicate-bot-effectiveness.py index ca1ec5a9165bb9264dac1ad3fba7345a12d90f55..1ae62f36463d6059bed628b81c04aedcec792eac 100644 --- a/script/github-track-duplicate-bot-effectiveness.py +++ b/script/github-track-duplicate-bot-effectiveness.py @@ -24,6 +24,7 @@ import functools import os import re import sys +from datetime import datetime, timezone import requests @@ -39,10 +40,22 @@ BOT_START_DATE = "2026-02-18" NEEDS_TRIAGE_LABEL = "state:needs triage" DEFAULT_PROJECT_NUMBER = 76 VALID_CLOSED_AS_VALUES = {"duplicate", "not_planned", "completed"} -# Bump this when the duplicate-detection bot's behavior changes in a way that -# could affect outcome rates (e.g. prompt rewrites, model swaps, candidate -# filtering changes). Don't bump for unrelated changes like comment formatting. -BOT_VERSION = "v2" +# Add a new tuple when you deploy a new version of the bot that you want to +# keep track of (e.g. the prompt gets a rewrite or the model gets swapped). +# Newest first, please. The datetime is for the deployment time (merge to main). +BOT_VERSION_TIMELINE = [ + ("v2", datetime(2026, 2, 26, 14, 9, tzinfo=timezone.utc)), + ("v1", datetime(2026, 2, 18, tzinfo=timezone.utc)), +] + + +def bot_version_for_time(date_string): + """Return the bot version that was active at the given ISO 8601 timestamp.""" + timestamp = datetime.fromisoformat(date_string.replace("Z", "+00:00")) + for version, deployed in BOT_VERSION_TIMELINE: + if timestamp >= deployed: + return version + return BOT_VERSION_TIMELINE[-1][0] def github_api_get(path, params=None): @@ -82,10 +95,10 @@ def fetch_issue(issue_number): } -def get_bot_duplicate_comment(issue_number): - """Get the bot's duplicate-detection comment body from an issue. 
+def get_bot_comment_with_time(issue_number): + """Get the bot's duplicate-detection comment and its timestamp from an issue. - Returns the comment body if found, else None. + Returns {"body": str, "created_at": str} if found, else None. """ comments_path = f"/repos/{REPO_OWNER}/{REPO_NAME}/issues/{issue_number}/comments" page = 1 @@ -94,7 +107,7 @@ def get_bot_duplicate_comment(issue_number): author = (comment.get("user") or {}).get("login", "") body = comment.get("body", "") if author == BOT_LOGIN and body.startswith(BOT_COMMENT_PREFIX): - return body + return {"body": body, "created_at": comment.get("created_at", "")} page += 1 return None @@ -265,7 +278,7 @@ def set_field_value(item_id, field_name, value): ) -def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="Auto-classified", notes=None): +def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="Auto-classified", notes=None, bot_comment_time=None): """Add an issue to the project board (or update it if already there), setting field values.""" item_id = find_project_item(issue_node_id) if item_id: @@ -283,7 +296,8 @@ def add_or_update_project_item(issue_node_id, outcome, closed_as=None, status="A if notes: set_field_value(item_id, "Notes", notes) - set_field_value(item_id, "Bot version", BOT_VERSION) + if bot_comment_time: + set_field_value(item_id, "Bot version", bot_version_for_time(bot_comment_time)) return item_id @@ -302,14 +316,14 @@ def classify_closed(issue_number, closer_login, state_reason): print(f" Skipping: author '{author}' is a staff member") return - bot_comment = get_bot_duplicate_comment(issue_number) + bot_comment = get_bot_comment_with_time(issue_number) bot_commented = bot_comment is not None print(f" Bot commented: {bot_commented}") closer_is_author = closer_login == author if bot_commented and closer_is_author: - classify_as_success(issue, state_reason) + classify_as_success(issue, bot_comment, state_reason) elif bot_commented and not 
closer_is_author: # Only authors, staff, and triagers can close issues, so # a non-author closer is always someone with elevated permissions. @@ -320,7 +334,7 @@ def classify_closed(issue_number, closer_login, state_reason): print(" Skipping: no bot comment and not closed as duplicate") -def classify_as_success(issue, state_reason): +def classify_as_success(issue, bot_comment, state_reason): """Author closed their own issue after the bot commented.""" if state_reason == "duplicate": status = "Auto-classified" @@ -340,6 +354,7 @@ def classify_as_success(issue, state_reason): closed_as=state_reason, status=status, notes=notes, + bot_comment_time=bot_comment["created_at"], ) @@ -356,12 +371,13 @@ def classify_non_author_closed(issue, bot_comment, state_reason): closed_as=state_reason, status="Needs review", notes=notes, + bot_comment_time=bot_comment["created_at"], ) def classify_as_assist(issue, bot_comment): """Staff member closed as duplicate after the bot commented. Check if the dup matches.""" - suggested = parse_suggested_issues(bot_comment) + suggested = parse_suggested_issues(bot_comment["body"]) original = None try: original = get_closed_as_duplicate_of(issue["number"]) @@ -388,7 +404,8 @@ def classify_as_assist(issue, bot_comment): print(f" -> Possible Assist, needs review ({notes})") add_or_update_project_item( - issue["node_id"], outcome="Assist", closed_as="duplicate", status=status, notes=notes) + issue["node_id"], outcome="Assist", closed_as="duplicate", status=status, notes=notes, + bot_comment_time=bot_comment["created_at"]) def classify_as_missed_opportunity(issue): @@ -425,16 +442,18 @@ def classify_open(): f"type is {type_name}" if type_name not in ("Bug", "Crash") else f"author {author} is staff" if is_staff_member(author) else "already on the board" if find_project_item(node_id) - else "no bot duplicate comment found" if not get_bot_duplicate_comment(number) + else "no bot duplicate comment found" if not (bot_comment := 
get_bot_comment_with_time(number)) else None ) + if skip_reason: print(f" #{number}: skipping, {skip_reason}") skipped += 1 continue print(f" #{number}: adding as Noise") - add_or_update_project_item(node_id, outcome="Noise", status="Auto-classified") + add_or_update_project_item(node_id, outcome="Noise", status="Auto-classified", + bot_comment_time=bot_comment["created_at"]) added += 1 except Exception as error: # broad catch: one issue failing shouldn't stop the sweep print(f" #{number}: error processing issue, skipping: {error}") From 78878e514e72c80477d2ac6bda73f1e568f35407 Mon Sep 17 00:00:00 2001 From: Lena <241371603+zelenenka@users.noreply.github.com> Date: Fri, 27 Feb 2026 13:16:58 +0100 Subject: [PATCH 151/548] Decrease review needs of the duplicate bot (#50289) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Turns out you can't query the duplicate issue for the id of the original/canonical issue, even though GitHub UI displays “Closed as duplicate of ”. Oh well. For the bot performance tracking all that matters is whether any of its suggestions were the actual originals, so we're working around the API limitation by querying the suggested issues instead. Release Notes: - N/A --- ...ithub-track-duplicate-bot-effectiveness.py | 97 ++++++++++--------- 1 file changed, 52 insertions(+), 45 deletions(-) diff --git a/script/github-track-duplicate-bot-effectiveness.py b/script/github-track-duplicate-bot-effectiveness.py index 1ae62f36463d6059bed628b81c04aedcec792eac..18bad6bbdabc6d6f6dc91c42ddf56e1115dc55c5 100644 --- a/script/github-track-duplicate-bot-effectiveness.py +++ b/script/github-track-duplicate-bot-effectiveness.py @@ -117,8 +117,8 @@ def parse_suggested_issues(comment_body): return [int(match) for match in re.findall(r"^- #(\d+)", comment_body, re.MULTILINE)] -def github_api_graphql(query, variables=None): - """Execute a GitHub GraphQL query. 
Raises on errors.""" +def github_api_graphql(query, variables=None, partial_errors_ok=False): + """Execute a GitHub GraphQL query. Raises on errors unless partial_errors_ok is set.""" response = requests.post( GRAPHQL_URL, headers=GITHUB_HEADERS, @@ -127,43 +127,51 @@ def github_api_graphql(query, variables=None): response.raise_for_status() data = response.json() if "errors" in data: - raise RuntimeError(f"GraphQL errors: {data['errors']}") + if not partial_errors_ok or "data" not in data: + raise RuntimeError(f"GraphQL errors: {data['errors']}") + print(f" GraphQL partial errors (ignored): {data['errors']}") return data["data"] -def get_closed_as_duplicate_of(issue_number): - """Get the issue number this issue was closed as a duplicate of. +def find_canonical_among(duplicate_number, candidates): + """Check if any candidate issue has duplicate_number marked as a duplicate. - Uses the timeline to find the most recent MarkedAsDuplicateEvent. - Returns the original issue number, or None. + The MarkedAsDuplicateEvent lives on the canonical issue's timeline, not the + duplicate's. So to find which canonical issue our duplicate was closed against, + we check each candidate's timeline for a MarkedAsDuplicateEvent whose + `duplicate` field matches our issue. - Note: not all "closed as duplicate" issues have a MarkedAsDuplicateEvent. - If the closer used the "Close as duplicate" button without separately - marking the duplicate relationship, no event is created and this returns - None. The caller handles this by flagging the item for manual review. + Returns the matching canonical issue number, or None. """ + if not candidates: + return None + data = github_api_graphql( """ - query($owner: String!, $repo: String!, $number: Int!) { + query($owner: String!, $repo: String!, $numbers: [Int!]!) { repository(owner: $owner, name: $repo) { - issue(number: $number) { - timelineItems(last: 10, itemTypes: [MARKED_AS_DUPLICATE_EVENT]) { - nodes { - ... 
on MarkedAsDuplicateEvent { - canonical { ... on Issue { number } } - } - } - } - } + PLACEHOLDER } } - """, - {"owner": REPO_OWNER, "repo": REPO_NAME, "number": issue_number}, + """.replace("PLACEHOLDER", "\n ".join( + f'issue_{number}: issue(number: {number}) {{' + f' timelineItems(last: 50, itemTypes: [MARKED_AS_DUPLICATE_EVENT]) {{' + f' nodes {{ ... on MarkedAsDuplicateEvent {{ duplicate {{ ... on Issue {{ number }} }} }} }} }} }}' + for number in candidates + )), + {"owner": REPO_OWNER, "repo": REPO_NAME, "numbers": list(candidates)}, + partial_errors_ok=True, ) - nodes = data["repository"]["issue"]["timelineItems"]["nodes"] - for node in reversed(nodes): - if original := (node.get("canonical") or {}).get("number"): - return original + + repo = data["repository"] + for candidate in candidates: + issue_data = repo.get(f"issue_{candidate}") + if not issue_data: + continue + for node in issue_data["timelineItems"]["nodes"]: + dup_number = (node.get("duplicate") or {}).get("number") + if dup_number == duplicate_number: + return candidate return None @@ -378,29 +386,28 @@ def classify_non_author_closed(issue, bot_comment, state_reason): def classify_as_assist(issue, bot_comment): """Staff member closed as duplicate after the bot commented. 
Check if the dup matches.""" suggested = parse_suggested_issues(bot_comment["body"]) + if not suggested: + print(" -> Assist, needs review (could not parse bot suggestions)") + add_or_update_project_item( + issue["node_id"], outcome="Assist", closed_as="duplicate", + status="Needs review", notes="Could not parse bot suggestions", + bot_comment_time=bot_comment["created_at"]) + return + original = None try: - original = get_closed_as_duplicate_of(issue["number"]) + original = find_canonical_among(issue["number"], suggested) except (requests.RequestException, RuntimeError) as error: - print(f" Warning: failed to get the original-for the duplicate issue: {error}") - - if original and suggested: - if original in suggested: - status = "Auto-classified" - notes = None - print(f" -> Assist (original #{original} matches bot suggestion)") - else: - status = "Needs review" - suggested_str = ", ".join(f"#{number}" for number in suggested) - notes = f"Bot suggested {suggested_str}; closed as dup of #{original}" - print(f" -> Possible Assist, needs review ({notes})") + print(f" Warning: failed to query candidate timelines: {error}") + + if original: + status = "Auto-classified" + notes = None + print(f" -> Assist (original #{original} matches bot suggestion)") else: - # couldn't determine original or no suggestions parsed status = "Needs review" - if not original: - notes = "Could not determine original issue from timeline" - else: - notes = f"Closed as dup of #{original}; could not parse bot suggestions" + suggested_str = ", ".join(f"#{number}" for number in suggested) + notes = f"Bot suggested {suggested_str}; none matched as canonical" print(f" -> Possible Assist, needs review ({notes})") add_or_update_project_item( From c9425f2a904d9bc5855e53fac8dd66dff7cdffda Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 27 Feb 2026 13:24:57 +0100 Subject: [PATCH 152/548] agent: Stream `new_text` in `StreamingEditFileTool` (#50240) We now stream the new text into the buffer as 
soon as we receive partial chunks of `new_text`. This is pretty much a full re-write of the way streaming worked, which is now much closer to how the edit agent works: - `ToolEditParser` buffers chunks as they stream in, and emits relevant events (`OldTextChunk`,`NewTextChunk`, ...) that we use to power the `EditSession` pipeline. - `EditSession::process_events` takes care of consuming these events and applying the edits incrementally as chunks stream in. `EditPipeline` maintains the underlying state machine for each edit. - We handle whitespace mismatches similar to the edit agent, the code is shared by moving that logic to `reindent.rs` Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/agent/src/edit_agent.rs | 89 +- crates/agent/src/edit_agent/reindent.rs | 214 ++ crates/agent/src/tools.rs | 1 + .../src/tools/streaming_edit_file_tool.rs | 1955 ++++++++--------- crates/agent/src/tools/tool_edit_parser.rs | 941 ++++++++ 5 files changed, 2098 insertions(+), 1102 deletions(-) create mode 100644 crates/agent/src/edit_agent/reindent.rs create mode 100644 crates/agent/src/tools/tool_edit_parser.rs diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index 288a3178f3c4501ae9de65d19624b66cbda2548d..ef95eee07378438686aff688fdaf2d7fa98e036b 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -2,6 +2,7 @@ mod create_file_parser; mod edit_parser; #[cfg(test)] mod evals; +pub mod reindent; pub mod streaming_fuzzy_matcher; use crate::{Template, Templates}; @@ -24,9 +25,10 @@ use language_model::{ LanguageModelToolChoice, MessageContent, Role, }; use project::{AgentLocation, Project}; 
+use reindent::{IndentDelta, Reindenter}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use std::{cmp, iter, mem, ops::Range, pin::Pin, sync::Arc, task::Poll}; +use std::{mem, ops::Range, pin::Pin, sync::Arc, task::Poll}; use streaming_diff::{CharOperation, StreamingDiff}; use streaming_fuzzy_matcher::StreamingFuzzyMatcher; @@ -553,15 +555,8 @@ impl EditAgent { let compute_edits = cx.background_spawn(async move { let buffer_start_indent = snapshot .line_indent_for_row(snapshot.offset_to_point(resolved_old_text.range.start).row); - let indent_delta = if buffer_start_indent.tabs > 0 { - IndentDelta::Tabs( - buffer_start_indent.tabs as isize - resolved_old_text.indent.tabs as isize, - ) - } else { - IndentDelta::Spaces( - buffer_start_indent.spaces as isize - resolved_old_text.indent.spaces as isize, - ) - }; + let indent_delta = + reindent::compute_indent_delta(buffer_start_indent, resolved_old_text.indent); let old_text = snapshot .text_for_range(resolved_old_text.range.clone()) @@ -608,8 +603,7 @@ impl EditAgent { delta: IndentDelta, mut stream: impl Unpin + Stream>, ) -> impl Stream> { - let mut buffer = String::new(); - let mut in_leading_whitespace = true; + let mut reindenter = Reindenter::new(delta); let mut done = false; futures::stream::poll_fn(move |cx| { while !done { @@ -622,55 +616,10 @@ impl EditAgent { _ => return Poll::Ready(None), }; - buffer.push_str(&chunk); - - let mut indented_new_text = String::new(); - let mut start_ix = 0; - let mut newlines = buffer.match_indices('\n').peekable(); - loop { - let (line_end, is_pending_line) = match newlines.next() { - Some((ix, _)) => (ix, false), - None => (buffer.len(), true), - }; - let line = &buffer[start_ix..line_end]; - - if in_leading_whitespace { - if let Some(non_whitespace_ix) = line.find(|c| delta.character() != c) { - // We found a non-whitespace character, adjust - // indentation based on the delta. 
- let new_indent_len = - cmp::max(0, non_whitespace_ix as isize + delta.len()) as usize; - indented_new_text - .extend(iter::repeat(delta.character()).take(new_indent_len)); - indented_new_text.push_str(&line[non_whitespace_ix..]); - in_leading_whitespace = false; - } else if is_pending_line { - // We're still in leading whitespace and this line is incomplete. - // Stop processing until we receive more input. - break; - } else { - // This line is entirely whitespace. Push it without indentation. - indented_new_text.push_str(line); - } - } else { - indented_new_text.push_str(line); - } - - if is_pending_line { - start_ix = line_end; - break; - } else { - in_leading_whitespace = true; - indented_new_text.push('\n'); - start_ix = line_end + 1; - } - } - buffer.replace_range(..start_ix, ""); - + let mut indented_new_text = reindenter.push(&chunk); // This was the last chunk, push all the buffered content as-is. if is_last_chunk { - indented_new_text.push_str(&buffer); - buffer.clear(); + indented_new_text.push_str(&reindenter.finish()); done = true; } @@ -761,28 +710,6 @@ struct ResolvedOldText { indent: LineIndent, } -#[derive(Copy, Clone, Debug)] -enum IndentDelta { - Spaces(isize), - Tabs(isize), -} - -impl IndentDelta { - fn character(&self) -> char { - match self { - IndentDelta::Spaces(_) => ' ', - IndentDelta::Tabs(_) => '\t', - } - } - - fn len(&self) -> isize { - match self { - IndentDelta::Spaces(n) => *n, - IndentDelta::Tabs(n) => *n, - } - } -} - #[cfg(test)] mod tests { use super::*; diff --git a/crates/agent/src/edit_agent/reindent.rs b/crates/agent/src/edit_agent/reindent.rs new file mode 100644 index 0000000000000000000000000000000000000000..7f08749e475f6acfcf63013abd9139574112e4b5 --- /dev/null +++ b/crates/agent/src/edit_agent/reindent.rs @@ -0,0 +1,214 @@ +use language::LineIndent; +use std::{cmp, iter}; + +#[derive(Copy, Clone, Debug)] +pub enum IndentDelta { + Spaces(isize), + Tabs(isize), +} + +impl IndentDelta { + pub fn character(&self) -> char 
{ + match self { + IndentDelta::Spaces(_) => ' ', + IndentDelta::Tabs(_) => '\t', + } + } + + pub fn len(&self) -> isize { + match self { + IndentDelta::Spaces(n) => *n, + IndentDelta::Tabs(n) => *n, + } + } +} + +pub fn compute_indent_delta(buffer_indent: LineIndent, query_indent: LineIndent) -> IndentDelta { + if buffer_indent.tabs > 0 { + IndentDelta::Tabs(buffer_indent.tabs as isize - query_indent.tabs as isize) + } else { + IndentDelta::Spaces(buffer_indent.spaces as isize - query_indent.spaces as isize) + } +} + +/// Synchronous re-indentation adapter. Buffers incomplete lines and applies +/// an `IndentDelta` to each line's leading whitespace before emitting it. +pub struct Reindenter { + delta: IndentDelta, + buffer: String, + in_leading_whitespace: bool, +} + +impl Reindenter { + pub fn new(delta: IndentDelta) -> Self { + Self { + delta, + buffer: String::new(), + in_leading_whitespace: true, + } + } + + /// Feed a chunk of text and return the re-indented portion that is + /// ready to emit. Incomplete trailing lines are buffered internally. + pub fn push(&mut self, chunk: &str) -> String { + self.buffer.push_str(chunk); + self.drain(false) + } + + /// Flush any remaining buffered content (call when the stream is done). + pub fn finish(&mut self) -> String { + self.drain(true) + } + + fn drain(&mut self, is_final: bool) -> String { + let mut indented = String::new(); + let mut start_ix = 0; + let mut newlines = self.buffer.match_indices('\n'); + loop { + let (line_end, is_pending_line) = match newlines.next() { + Some((ix, _)) => (ix, false), + None => (self.buffer.len(), true), + }; + let line = &self.buffer[start_ix..line_end]; + + if self.in_leading_whitespace { + if let Some(non_whitespace_ix) = line.find(|c| self.delta.character() != c) { + // We found a non-whitespace character, adjust indentation + // based on the delta. 
+ let new_indent_len = + cmp::max(0, non_whitespace_ix as isize + self.delta.len()) as usize; + indented.extend(iter::repeat(self.delta.character()).take(new_indent_len)); + indented.push_str(&line[non_whitespace_ix..]); + self.in_leading_whitespace = false; + } else if is_pending_line && !is_final { + // We're still in leading whitespace and this line is incomplete. + // Stop processing until we receive more input. + break; + } else { + // This line is entirely whitespace. Push it without indentation. + indented.push_str(line); + } + } else { + indented.push_str(line); + } + + if is_pending_line { + start_ix = line_end; + break; + } else { + self.in_leading_whitespace = true; + indented.push('\n'); + start_ix = line_end + 1; + } + } + self.buffer.replace_range(..start_ix, ""); + if is_final { + indented.push_str(&self.buffer); + self.buffer.clear(); + } + indented + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_indent_single_chunk() { + let mut r = Reindenter::new(IndentDelta::Spaces(2)); + let out = r.push(" abc\n def\n ghi"); + // All three lines are emitted: "ghi" starts with spaces but + // contains non-whitespace, so it's processed immediately. + assert_eq!(out, " abc\n def\n ghi"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_outdent_tabs() { + let mut r = Reindenter::new(IndentDelta::Tabs(-2)); + let out = r.push("\t\t\t\tabc\n\t\tdef\n\t\t\t\t\t\tghi"); + assert_eq!(out, "\t\tabc\ndef\n\t\t\t\tghi"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_incremental_chunks() { + let mut r = Reindenter::new(IndentDelta::Spaces(2)); + // Feed " ab" — the `a` is non-whitespace, so the line is + // processed immediately even without a trailing newline. + let out = r.push(" ab"); + assert_eq!(out, " ab"); + // Feed "c\n" — appended to the already-processed line (no longer + // in leading whitespace). 
+ let out = r.push("c\n"); + assert_eq!(out, "c\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_zero_delta() { + let mut r = Reindenter::new(IndentDelta::Spaces(0)); + let out = r.push(" hello\n world\n"); + assert_eq!(out, " hello\n world\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_clamp_negative_indent() { + let mut r = Reindenter::new(IndentDelta::Spaces(-10)); + let out = r.push(" abc\n"); + // max(0, 2 - 10) = 0, so no leading spaces. + assert_eq!(out, "abc\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_whitespace_only_lines() { + let mut r = Reindenter::new(IndentDelta::Spaces(2)); + let out = r.push(" \n code\n"); + // First line is all whitespace — emitted verbatim. Second line is indented. + assert_eq!(out, " \n code\n"); + let out = r.finish(); + assert_eq!(out, ""); + } + + #[test] + fn test_compute_indent_delta_spaces() { + let buffer = LineIndent { + tabs: 0, + spaces: 8, + line_blank: false, + }; + let query = LineIndent { + tabs: 0, + spaces: 4, + line_blank: false, + }; + let delta = compute_indent_delta(buffer, query); + assert_eq!(delta.len(), 4); + assert_eq!(delta.character(), ' '); + } + + #[test] + fn test_compute_indent_delta_tabs() { + let buffer = LineIndent { + tabs: 2, + spaces: 0, + line_blank: false, + }; + let query = LineIndent { + tabs: 3, + spaces: 0, + line_blank: false, + }; + let delta = compute_indent_delta(buffer, query); + assert_eq!(delta.len(), -1); + assert_eq!(delta.character(), '\t'); + } +} diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs index b2724801befc7459ad37494d298819f4b7ca6b27..446472e0c459aa15fa57bb8b49178b08e6781d11 100644 --- a/crates/agent/src/tools.rs +++ b/crates/agent/src/tools.rs @@ -17,6 +17,7 @@ mod save_file_tool; mod spawn_agent_tool; mod streaming_edit_file_tool; mod terminal_tool; +mod tool_edit_parser; mod tool_permissions; mod web_search_tool; diff --git 
a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 20dfe0ab18aa05e6b90125f1c50a1b8a66ab25f9..2658e372d77044b60648d8fab39e458f02dba23d 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -1,13 +1,17 @@ use super::edit_file_tool::EditFileTool; use super::restore_file_from_disk_tool::RestoreFileFromDiskTool; use super::save_file_tool::SaveFileTool; +use super::tool_edit_parser::{ToolEditEvent, ToolEditParser}; use crate::{ AgentTool, Thread, ToolCallEventStream, ToolInput, - edit_agent::streaming_fuzzy_matcher::StreamingFuzzyMatcher, + edit_agent::{ + reindent::{Reindenter, compute_indent_delta}, + streaming_fuzzy_matcher::StreamingFuzzyMatcher, + }, }; use acp_thread::Diff; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result}; use collections::HashSet; use futures::FutureExt as _; use gpui::{App, AppContext, AsyncApp, Entity, Task, WeakEntity}; @@ -15,16 +19,15 @@ use language::language_settings::{self, FormatOnSave}; use language::{Buffer, LanguageRegistry}; use language_model::LanguageModelToolResultContent; use project::lsp_store::{FormatTrigger, LspFormatTarget}; -use project::{Project, ProjectPath}; +use project::{AgentLocation, Project, ProjectPath}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -use std::ops::Range; use std::path::PathBuf; use std::sync::Arc; -use text::{BufferSnapshot, ToOffset as _}; +use streaming_diff::{CharOperation, StreamingDiff}; use ui::SharedString; use util::rel_path::RelPath; -use util::{Deferred, ResultExt, debug_panic}; +use util::{Deferred, ResultExt}; const DEFAULT_UI_TEXT: &str = "Editing file"; @@ -70,14 +73,13 @@ pub struct StreamingEditFileToolInput { pub path: String, /// The mode of operation on the file. 
Possible values: - /// - 'create': Create a new file if it doesn't exist. Requires 'content' field. - /// - 'overwrite': Replace the entire contents of an existing file. Requires 'content' field. + /// - 'write': Replace the entire contents of the file. If the file doesn't exist, it will be created. Requires 'content' field. /// - 'edit': Make granular edits to an existing file. Requires 'edits' field. /// /// When a file already exists or you just created it, prefer editing it as opposed to recreating it from scratch. pub mode: StreamingEditFileMode, - /// The complete content for the new file (required for 'create' and 'overwrite' modes). + /// The complete content for the new file (required for 'write' mode). /// This field should contain the entire file content. #[serde(default, skip_serializing_if = "Option::is_none")] pub content: Option, @@ -85,23 +87,22 @@ pub struct StreamingEditFileToolInput { /// List of edit operations to apply sequentially (required for 'edit' mode). /// Each edit finds `old_text` in the file and replaces it with `new_text`. #[serde(default, skip_serializing_if = "Option::is_none")] - pub edits: Option>, + pub edits: Option>, } #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum StreamingEditFileMode { - /// Create a new file if it doesn't exist - Create, - /// Replace the entire contents of an existing file - Overwrite, + /// Overwrite the file with new content (replacing any existing content). + /// If the file does not exist, it will be created. + Write, /// Make granular edits to an existing file Edit, } /// A single edit operation that replaces old text with new text #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] -pub struct EditOperation { +pub struct Edit { /// The exact text to find in the file. This will be matched using fuzzy matching /// to handle minor differences in whitespace or formatting. 
pub old_text: String, @@ -118,271 +119,328 @@ struct StreamingEditFileToolPartialInput { #[serde(default)] mode: Option, #[serde(default)] - #[allow(dead_code)] content: Option, #[serde(default)] - edits: Option>, + edits: Option>, } #[derive(Default, Debug, Deserialize)] -struct PartialEditOperation { +pub struct PartialEdit { #[serde(default)] - old_text: Option, + pub old_text: Option, #[serde(default)] - new_text: Option, + pub new_text: Option, } -enum StreamingEditState { - Idle, - BufferResolved { - abs_path: PathBuf, - buffer: Entity, +#[derive(Debug, Serialize, Deserialize)] +#[serde(untagged)] +pub enum StreamingEditFileToolOutput { + Success { + #[serde(alias = "original_path")] + input_path: PathBuf, + new_text: String, old_text: Arc, - diff: Entity, - mode: StreamingEditFileMode, - last_content_len: usize, - edit_state: IncrementalEditState, - _finalize_diff_guard: Deferred>, + #[serde(default)] + diff: String, + }, + Error { + error: String, }, } -#[derive(Default)] -struct IncrementalEditState { - in_progress_matcher: Option, - last_old_text_len: usize, - applied_ranges: Vec>, +impl StreamingEditFileToolOutput { + pub fn error(error: impl Into) -> Self { + Self::Error { + error: error.into(), + } + } +} + +impl std::fmt::Display for StreamingEditFileToolOutput { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + StreamingEditFileToolOutput::Success { + diff, input_path, .. 
+ } => { + if diff.is_empty() { + write!(f, "No edits were made.") + } else { + write!( + f, + "Edited {}:\n\n```diff\n{diff}\n```", + input_path.display() + ) + } + } + StreamingEditFileToolOutput::Error { error } => write!(f, "{error}"), + } + } } -impl IncrementalEditState { - fn applied_count(&self) -> usize { - self.applied_ranges.len() +impl From for LanguageModelToolResultContent { + fn from(output: StreamingEditFileToolOutput) -> Self { + output.to_string().into() } } -impl StreamingEditState { - async fn finalize( - &mut self, - input: StreamingEditFileToolInput, - tool: &StreamingEditFileTool, +pub struct StreamingEditFileTool { + thread: WeakEntity, + language_registry: Arc, + project: Entity, +} + +impl StreamingEditFileTool { + pub fn new( + project: Entity, + thread: WeakEntity, + language_registry: Arc, + ) -> Self { + Self { + project, + thread, + language_registry, + } + } + + fn authorize( + &self, + path: &PathBuf, + description: &str, event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result { - let remaining_edits_start_ix = match self { - StreamingEditState::Idle => { - *self = Self::transition_to_buffer_resolved( - &input.path, - &input.display_description, - input.mode.clone(), - tool, - event_stream, - cx, - ) - .await?; - 0 - } - StreamingEditState::BufferResolved { edit_state, .. } => edit_state.applied_count(), - }; + cx: &mut App, + ) -> Task> { + super::tool_permissions::authorize_file_edit( + EditFileTool::NAME, + path, + description, + &self.thread, + event_stream, + cx, + ) + } - let StreamingEditState::BufferResolved { - buffer, - old_text, - diff, - abs_path, - .. - } = self - else { - debug_panic!("Invalid state"); - return Ok(StreamingEditFileToolOutput::Error { - error: "Internal error. 
Try to apply the edits again".to_string(), - }); - }; + fn set_agent_location(&self, buffer: WeakEntity, position: text::Anchor, cx: &mut App) { + self.project.update(cx, |project, cx| { + project.set_agent_location(Some(AgentLocation { buffer, position }), cx); + }); + } +} - let result: anyhow::Result = async { - let action_log = tool - .thread - .read_with(cx, |thread, _cx| thread.action_log().clone())?; +impl AgentTool for StreamingEditFileTool { + type Input = StreamingEditFileToolInput; + type Output = StreamingEditFileToolOutput; - match input.mode { - StreamingEditFileMode::Create | StreamingEditFileMode::Overwrite => { - action_log.update(cx, |log, cx| { - log.buffer_created(buffer.clone(), cx); - }); - let content = input.content.ok_or_else(|| { - anyhow!("'content' field is required for create and overwrite modes") - })?; - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit([(0..buffer.len(), content.as_str())], None, cx); - }); - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - }); - } - StreamingEditFileMode::Edit => { - let edits = input - .edits - .ok_or_else(|| anyhow!("'edits' field is required for edit mode"))?; - - let remaining_edits = &edits[remaining_edits_start_ix..]; - apply_edits( - &buffer, - &action_log, - remaining_edits, - &diff, - event_stream, - &abs_path, - cx, - )?; - } - } + const NAME: &'static str = "streaming_edit_file"; - let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { - let settings = language_settings::language_settings( - buffer.language().map(|l| l.name()), - buffer.file(), - cx, - ); - settings.format_on_save != FormatOnSave::Off - }); + fn supports_input_streaming() -> bool { + true + } - if format_on_save_enabled { - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); + fn kind() -> acp::ToolKind { + acp::ToolKind::Edit + } - let format_task = tool.project.update(cx, |project, cx| { - project.format( - 
HashSet::from_iter([buffer.clone()]), - LspFormatTarget::Buffers, - false, - FormatTrigger::Save, - cx, - ) - }); - futures::select! { - result = format_task.fuse() => { result.log_err(); }, - _ = event_stream.cancelled_by_user().fuse() => { - anyhow::bail!("Edit cancelled by user"); + fn initial_title( + &self, + input: Result, + cx: &mut App, + ) -> SharedString { + match input { + Ok(input) => self + .project + .read(cx) + .find_project_path(&input.path, cx) + .and_then(|project_path| { + self.project + .read(cx) + .short_full_path_for_project_path(&project_path, cx) + }) + .unwrap_or(input.path) + .into(), + Err(raw_input) => { + if let Some(input) = + serde_json::from_value::(raw_input).ok() + { + let path = input.path.unwrap_or_default(); + let path = path.trim(); + if !path.is_empty() { + return self + .project + .read(cx) + .find_project_path(&path, cx) + .and_then(|project_path| { + self.project + .read(cx) + .short_full_path_for_project_path(&project_path, cx) + }) + .unwrap_or_else(|| path.to_string()) + .into(); } - }; - } - let save_task = tool - .project - .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)); - futures::select! 
{ - result = save_task.fuse() => { result?; }, - _ = event_stream.cancelled_by_user().fuse() => { - anyhow::bail!("Edit cancelled by user"); + let description = input.display_description.unwrap_or_default(); + let description = description.trim(); + if !description.is_empty() { + return description.to_string().into(); + } } - }; - - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - tool.thread.update(cx, |thread, _| { - thread - .file_read_times - .insert(abs_path.to_path_buf(), new_mtime); - })?; + DEFAULT_UI_TEXT.into() } + } + } - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let (new_text, unified_diff) = cx - .background_spawn({ - let new_snapshot = new_snapshot.clone(); - let old_text = old_text.clone(); - async move { - let new_text = new_snapshot.text(); - let diff = language::unified_diff(&old_text, &new_text); - (new_text, diff) + fn run( + self: Arc, + mut input: ToolInput, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + cx.spawn(async move |cx: &mut AsyncApp| { + let mut state: Option = None; + loop { + futures::select! 
{ + partial = input.recv_partial().fuse() => { + let Some(partial_value) = partial else { break }; + if let Ok(parsed) = serde_json::from_value::(partial_value) { + if state.is_none() && let Some(path_str) = &parsed.path + && let Some(display_description) = &parsed.display_description + && let Some(mode) = parsed.mode.clone() { + state = Some( + EditSession::new( + path_str, + display_description, + mode, + &self, + &event_stream, + cx, + ) + .await?, + ); + } + + if let Some(state) = &mut state { + state.process(parsed, &self, &event_stream, cx)?; + } + } } - }) - .await; + _ = event_stream.cancelled_by_user().fuse() => { + return Err(StreamingEditFileToolOutput::error("Edit cancelled by user")); + } + } + } + let full_input = + input + .recv() + .await + .map_err(|e| StreamingEditFileToolOutput::error(format!("Failed to receive tool input: {e}")))?; - let output = StreamingEditFileToolOutput::Success { - input_path: PathBuf::from(input.path), - new_text, - old_text: old_text.clone(), - diff: unified_diff, + let mut state = if let Some(state) = state { + state + } else { + EditSession::new( + &full_input.path, + &full_input.display_description, + full_input.mode.clone(), + &self, + &event_stream, + cx, + ) + .await? 
}; - Ok(output) - } - .await; - result.map_err(|e| StreamingEditFileToolOutput::Error { - error: e.to_string(), + state.finalize(full_input, &self, &event_stream, cx).await }) } - async fn process( - &mut self, - partial: StreamingEditFileToolPartialInput, - tool: &StreamingEditFileTool, - event_stream: &ToolCallEventStream, - cx: &mut AsyncApp, - ) -> Result<(), StreamingEditFileToolOutput> { - match self { - Self::Idle => { - if let Some(path_str) = partial.path - && let Some(display_description) = partial.display_description - && let Some(mode) = partial.mode - { - *self = Self::transition_to_buffer_resolved( - &path_str, - &display_description, - mode, - tool, - event_stream, + fn replay( + &self, + _input: Self::Input, + output: Self::Output, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Result<()> { + match output { + StreamingEditFileToolOutput::Success { + input_path, + old_text, + new_text, + .. + } => { + event_stream.update_diff(cx.new(|cx| { + Diff::finalized( + input_path.to_string_lossy().into_owned(), + Some(old_text.to_string()), + new_text, + self.language_registry.clone(), cx, ) - .await?; - } + })); + Ok(()) } - Self::BufferResolved { - abs_path, - buffer, - edit_state, - diff, - mode, - last_content_len, - .. - } => match mode { - StreamingEditFileMode::Create | StreamingEditFileMode::Overwrite => { - if let Some(content) = &partial.content { - Self::process_streaming_content( - buffer, - diff, - last_content_len, - content, - cx, - )?; - } - } - StreamingEditFileMode::Edit => { - if let Some(edits) = partial.edits { - Self::process_streaming_edits( - buffer, - diff, - edit_state, - &edits, - abs_path, - tool, - event_stream, - cx, - )?; - } - } - }, + StreamingEditFileToolOutput::Error { .. 
} => Ok(()), + } + } +} + +pub struct EditSession { + abs_path: PathBuf, + buffer: Entity, + old_text: Arc, + diff: Entity, + mode: StreamingEditFileMode, + parser: ToolEditParser, + pipeline: EditPipeline, + _finalize_diff_guard: Deferred>, +} + +struct EditPipeline { + edits: Vec, + content_written: bool, +} + +enum EditPipelineEntry { + ResolvingOldText { + matcher: StreamingFuzzyMatcher, + }, + StreamingNewText { + streaming_diff: StreamingDiff, + edit_cursor: usize, + reindenter: Reindenter, + original_snapshot: text::BufferSnapshot, + }, + Done, +} + +impl EditPipeline { + fn new() -> Self { + Self { + edits: Vec::new(), + content_written: false, } - Ok(()) } - async fn transition_to_buffer_resolved( + fn ensure_resolving_old_text( + &mut self, + edit_index: usize, + buffer: &Entity, + cx: &mut AsyncApp, + ) { + while self.edits.len() <= edit_index { + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); + self.edits.push(EditPipelineEntry::ResolvingOldText { + matcher: StreamingFuzzyMatcher::new(snapshot), + }); + } + } +} + +/// Compute the `LineIndent` of the first line in a set of query lines. 
+fn query_first_line_indent(query_lines: &[String]) -> text::LineIndent { + let first_line = query_lines.first().map(|s| s.as_str()).unwrap_or(""); + text::LineIndent::from_iter(first_line.chars()) +} + +impl EditSession { + async fn new( path_str: &str, display_description: &str, mode: StreamingEditFileMode, @@ -393,15 +451,13 @@ impl StreamingEditState { let path = PathBuf::from(path_str); let project_path = cx .update(|cx| resolve_path(mode.clone(), &path, &tool.project, cx)) - .map_err(|e| StreamingEditFileToolOutput::Error { - error: e.to_string(), - })?; + .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx)) else { - return Err(StreamingEditFileToolOutput::Error { - error: format!("File '{path_str}' does not exist"), - }); + return Err(StreamingEditFileToolOutput::error(format!( + "Worktree at '{path_str}' does not exist" + ))); }; event_stream.update_fields( @@ -410,17 +466,13 @@ impl StreamingEditState { cx.update(|cx| tool.authorize(&path, &display_description, event_stream, cx)) .await - .map_err(|e| StreamingEditFileToolOutput::Error { - error: e.to_string(), - })?; + .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; let buffer = tool .project .update(cx, |project, cx| project.open_buffer(project_path, cx)) .await - .map_err(|e| StreamingEditFileToolOutput::Error { - error: e.to_string(), - })?; + .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; @@ -434,6 +486,14 @@ impl StreamingEditState { } }) as Box); + tool.thread + .update(cx, |thread, cx| { + thread + .action_log() + .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)) + }) + .ok(); + let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let old_text = cx .background_spawn({ @@ -442,205 +502,497 @@ impl StreamingEditState { }) .await; - Ok(Self::BufferResolved { + Ok(Self { abs_path, 
buffer, old_text, diff, mode, - last_content_len: 0, - edit_state: IncrementalEditState::default(), + parser: ToolEditParser::default(), + pipeline: EditPipeline::new(), _finalize_diff_guard: finalize_diff_guard, }) } - fn process_streaming_content( - buffer: &Entity, - diff: &Entity, - last_content_len: &mut usize, - content: &str, + async fn finalize( + &mut self, + input: StreamingEditFileToolInput, + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, cx: &mut AsyncApp, - ) -> Result<(), StreamingEditFileToolOutput> { - let new_len = content.len(); - if new_len > *last_content_len { - let new_chunk = &content[*last_content_len..]; - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - // On the first update, replace the entire buffer (handles Overwrite - // clearing existing content). For Create the buffer is already empty - // so 0..0 is a no-op range prefix. - let insert_at = if *last_content_len == 0 { - 0..buffer.len() - } else { - let len = buffer.len(); - len..len - }; - buffer.edit([(insert_at, new_chunk)], None, cx); + ) -> Result { + let Self { + buffer, + old_text, + diff, + abs_path, + parser, + pipeline, + .. 
+ } = self; + + let action_log = tool + .thread + .read_with(cx, |thread, _cx| thread.action_log().clone()) + .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; + + match input.mode { + StreamingEditFileMode::Write => { + action_log.update(cx, |log, cx| { + log.buffer_created(buffer.clone(), cx); }); + let content = input.content.ok_or_else(|| { + StreamingEditFileToolOutput::error("'content' field is required for write mode") + })?; + + let events = parser.finalize_content(&content); + Self::process_events( + &events, + buffer, + diff, + pipeline, + abs_path, + tool, + event_stream, + cx, + )?; + } + StreamingEditFileMode::Edit => { + let edits = input.edits.ok_or_else(|| { + StreamingEditFileToolOutput::error("'edits' field is required for edit mode") + })?; + + let final_edits = edits + .into_iter() + .map(|e| Edit { + old_text: e.old_text, + new_text: e.new_text, + }) + .collect::>(); + let events = parser.finalize_edits(&final_edits); + Self::process_events( + &events, + buffer, + diff, + pipeline, + abs_path, + tool, + event_stream, + cx, + )?; + } + } + + let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { + let settings = language_settings::language_settings( + buffer.language().map(|l| l.name()), + buffer.file(), + cx, + ); + settings.format_on_save != FormatOnSave::Off + }); + + if format_on_save_enabled { + action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); }); - *last_content_len = new_len; - let anchor_range = buffer.read_with(cx, |buffer, _cx| { - buffer.anchor_range_between(0..buffer.len()) + let format_task = tool.project.update(cx, |project, cx| { + project.format( + HashSet::from_iter([buffer.clone()]), + LspFormatTarget::Buffers, + false, + FormatTrigger::Save, + cx, + ) }); - diff.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + futures::select! 
{ + result = format_task.fuse() => { result.log_err(); }, + _ = event_stream.cancelled_by_user().fuse() => { + return Err(StreamingEditFileToolOutput::error("Edit cancelled by user")); + } + }; + } + + let save_task = tool + .project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)); + futures::select! { + result = save_task.fuse() => { result.map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; }, + _ = event_stream.cancelled_by_user().fuse() => { + return Err(StreamingEditFileToolOutput::error("Edit cancelled by user")); + } + }; + + action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); + }); + + if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { + buffer.file().and_then(|file| file.disk_state().mtime()) + }) { + tool.thread + .update(cx, |thread, _| { + thread + .file_read_times + .insert(abs_path.to_path_buf(), new_mtime); + }) + .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; + } + + let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let (new_text, unified_diff) = cx + .background_spawn({ + let new_snapshot = new_snapshot.clone(); + let old_text = old_text.clone(); + async move { + let new_text = new_snapshot.text(); + let diff = language::unified_diff(&old_text, &new_text); + (new_text, diff) + } + }) + .await; + + let output = StreamingEditFileToolOutput::Success { + input_path: PathBuf::from(input.path), + new_text, + old_text: old_text.clone(), + diff: unified_diff, + }; + Ok(output) + } + + fn process( + &mut self, + partial: StreamingEditFileToolPartialInput, + tool: &StreamingEditFileTool, + event_stream: &ToolCallEventStream, + cx: &mut AsyncApp, + ) -> Result<(), StreamingEditFileToolOutput> { + match &self.mode { + StreamingEditFileMode::Write => { + if let Some(content) = &partial.content { + let events = self.parser.push_content(content); + Self::process_events( + &events, + &self.buffer, + &self.diff, + &mut self.pipeline, + &self.abs_path, + 
tool, + event_stream, + cx, + )?; + } + } + StreamingEditFileMode::Edit => { + if let Some(edits) = partial.edits { + let events = self.parser.push_edits(&edits); + Self::process_events( + &events, + &self.buffer, + &self.diff, + &mut self.pipeline, + &self.abs_path, + tool, + event_stream, + cx, + )?; + } + } } Ok(()) } - fn process_streaming_edits( + fn process_events( + events: &[ToolEditEvent], buffer: &Entity, diff: &Entity, - edit_state: &mut IncrementalEditState, - edits: &[PartialEditOperation], + pipeline: &mut EditPipeline, abs_path: &PathBuf, tool: &StreamingEditFileTool, event_stream: &ToolCallEventStream, cx: &mut AsyncApp, ) -> Result<(), StreamingEditFileToolOutput> { - if edits.is_empty() { - return Ok(()); - } + for event in events { + match event { + ToolEditEvent::ContentChunk { chunk } => { + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + let insert_at = if !pipeline.content_written && buffer.len() > 0 { + 0..buffer.len() + } else { + let len = buffer.len(); + len..len + }; + buffer.edit([(insert_at, chunk.as_str())], None, cx); + }); + let buffer_id = buffer.read(cx).remote_id(); + tool.set_agent_location( + buffer.downgrade(), + text::Anchor::max_for_buffer(buffer_id), + cx, + ); + }); + pipeline.content_written = true; + } - // Edits at indices applied_count..edits.len()-1 are newly complete - // (a subsequent edit exists, proving the LLM moved on). - // The last edit (edits.len()-1) is potentially still in progress. 
- let completed_count = edits.len().saturating_sub(1); + ToolEditEvent::OldTextChunk { + edit_index, + chunk, + done: false, + } => { + pipeline.ensure_resolving_old_text(*edit_index, buffer, cx); + + if let EditPipelineEntry::ResolvingOldText { matcher } = + &mut pipeline.edits[*edit_index] + { + if !chunk.is_empty() { + if let Some(match_range) = matcher.push(chunk, None) { + let anchor_range = buffer.read_with(cx, |buffer, _cx| { + buffer.anchor_range_between(match_range.clone()) + }); + diff.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + cx.update(|cx| { + let position = buffer.read(cx).anchor_before(match_range.end); + tool.set_agent_location(buffer.downgrade(), position, cx); + }); + } + } + } + } - // Apply newly-complete edits - while edit_state.applied_count() < completed_count { - let edit_index = edit_state.applied_count(); - let partial_edit = &edits[edit_index]; + ToolEditEvent::OldTextChunk { + edit_index, + chunk, + done: true, + } => { + pipeline.ensure_resolving_old_text(*edit_index, buffer, cx); + + let EditPipelineEntry::ResolvingOldText { matcher } = + &mut pipeline.edits[*edit_index] + else { + continue; + }; - let old_text = partial_edit.old_text.clone().ok_or_else(|| { - StreamingEditFileToolOutput::Error { - error: format!("Edit at index {} is missing old_text.", edit_index), - } - })?; - let new_text = partial_edit.new_text.clone().unwrap_or_default(); + if !chunk.is_empty() { + matcher.push(chunk, None); + } + let matches = matcher.finish(); + + if matches.is_empty() { + return Err(StreamingEditFileToolOutput::error(format!( + "Could not find matching text for edit at index {}. \ + The old_text did not match any content in the file. 
\ + Please read the file again to get the current content.", + edit_index, + ))); + } + if matches.len() > 1 { + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let lines = matches + .iter() + .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) + .collect::>() + .join(", "); + return Err(StreamingEditFileToolOutput::error(format!( + "Edit {} matched multiple locations in the file at lines: {}. \ + Please provide more context in old_text to uniquely \ + identify the location.", + edit_index, lines + ))); + } - edit_state.in_progress_matcher = None; - edit_state.last_old_text_len = 0; + let range = matches.into_iter().next().expect("checked len above"); - let edit_op = EditOperation { - old_text: old_text.clone(), - new_text: new_text.clone(), - }; + let anchor_range = buffer + .read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone())); + diff.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx)); + + let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - let action_log = tool - .thread - .read_with(cx, |thread, _cx| thread.action_log().clone()) - .ok(); + let line = snapshot.offset_to_point(range.start).row; + event_stream.update_fields( + ToolCallUpdateFields::new() + .locations(vec![ToolCallLocation::new(abs_path).line(Some(line))]), + ); + + let EditPipelineEntry::ResolvingOldText { matcher } = + &pipeline.edits[*edit_index] + else { + continue; + }; + let buffer_indent = + snapshot.line_indent_for_row(snapshot.offset_to_point(range.start).row); + let query_indent = query_first_line_indent(matcher.query_lines()); + let indent_delta = compute_indent_delta(buffer_indent, query_indent); + + let old_text_in_buffer = + snapshot.text_for_range(range.clone()).collect::(); + + let text_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); + pipeline.edits[*edit_index] = EditPipelineEntry::StreamingNewText { + streaming_diff: StreamingDiff::new(old_text_in_buffer), + edit_cursor: 
range.start, + reindenter: Reindenter::new(indent_delta), + original_snapshot: text_snapshot, + }; - // On the first edit, mark the buffer as read - if edit_state.applied_count() == 0 { - if let Some(action_log) = &action_log { - action_log.update(cx, |log, cx| { - log.buffer_read(buffer.clone(), cx); + cx.update(|cx| { + let position = buffer.read(cx).anchor_before(range.end); + tool.set_agent_location(buffer.downgrade(), position, cx); }); } - } - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - - let (range, new_text) = - match resolve_and_reveal_edit(buffer, diff, &snapshot, &edit_op, cx) { - Ok(resolved) => resolved, - Err(EditResolveError::NotFound) => { - return Err(StreamingEditFileToolOutput::Error { - error: format!( - "Could not find matching text for edit at index {}. \ - The old_text did not match any content in the file. \ - Please read the file again to get the current content.", - edit_index - ), - }); + ToolEditEvent::NewTextChunk { + edit_index, + chunk, + done: false, + } => { + if *edit_index >= pipeline.edits.len() { + continue; } - Err(EditResolveError::Ambiguous(ranges)) => { - let lines = ranges - .iter() - .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) - .collect::>() - .join(", "); - return Err(StreamingEditFileToolOutput::Error { - error: format!( - "Edit {} matched multiple locations in the file at lines: {}. \ - Please provide more context in old_text to uniquely \ - identify the location.", - edit_index, lines - ), - }); + let EditPipelineEntry::StreamingNewText { + streaming_diff, + edit_cursor, + reindenter, + original_snapshot, + .. 
+ } = &mut pipeline.edits[*edit_index] + else { + continue; + }; + + let reindented = reindenter.push(chunk); + if reindented.is_empty() { + continue; } - }; - for previous_range in &edit_state.applied_ranges { - let previous_start = previous_range.start.to_offset(&snapshot); - let previous_end = previous_range.end.to_offset(&snapshot); - if range.start < previous_end && previous_start < range.end { - let earlier_start_line = snapshot.offset_to_point(previous_start).row + 1; - let earlier_end_line = snapshot.offset_to_point(previous_end).row + 1; - let later_start_line = snapshot.offset_to_point(range.start).row + 1; - let later_end_line = snapshot.offset_to_point(range.end).row + 1; - return Err(StreamingEditFileToolOutput::Error { - error: format!( - "Conflicting edit ranges detected: lines {}-{} \ - conflicts with lines {}-{}. Conflicting edit \ - ranges are not allowed, as they would overwrite \ - each other.", - earlier_start_line, earlier_end_line, later_start_line, later_end_line, - ), + let char_ops = streaming_diff.push_new(&reindented); + Self::apply_char_operations( + &char_ops, + buffer, + original_snapshot, + edit_cursor, + cx, + ); + + let position = original_snapshot.anchor_before(*edit_cursor); + cx.update(|cx| { + tool.set_agent_location(buffer.downgrade(), position, cx); }); + + let action_log = tool + .thread + .read_with(cx, |thread, _cx| thread.action_log().clone()) + .ok(); + if let Some(action_log) = action_log { + action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); + }); + } } - } - let anchor_range = - buffer.read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone())); - edit_state.applied_ranges.push(anchor_range); + ToolEditEvent::NewTextChunk { + edit_index, + chunk, + done: true, + } => { + if *edit_index >= pipeline.edits.len() { + continue; + } - let line = snapshot.offset_to_point(range.start).row; - event_stream.update_fields( - ToolCallUpdateFields::new() - 
.locations(vec![ToolCallLocation::new(abs_path).line(Some(line))]), - ); + let EditPipelineEntry::StreamingNewText { + mut streaming_diff, + mut edit_cursor, + mut reindenter, + original_snapshot, + } = std::mem::replace( + &mut pipeline.edits[*edit_index], + EditPipelineEntry::Done, + ) + else { + continue; + }; - if let Some(action_log) = action_log { - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit([(range, new_text.as_str())], None, cx); - }); - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - }); - } - } + // Flush any remaining reindent buffer + final chunk. + let mut final_text = reindenter.push(chunk); + final_text.push_str(&reindenter.finish()); - // Feed the in-progress last edit's old_text to the matcher for live preview - if let Some(partial_edit) = edits.last() { - if let Some(old_text) = &partial_edit.old_text { - let old_text_len = old_text.len(); - if old_text_len > edit_state.last_old_text_len { - let new_chunk = &old_text[edit_state.last_old_text_len..]; + if !final_text.is_empty() { + let char_ops = streaming_diff.push_new(&final_text); + Self::apply_char_operations( + &char_ops, + buffer, + &original_snapshot, + &mut edit_cursor, + cx, + ); + } + + let remaining_ops = streaming_diff.finish(); + Self::apply_char_operations( + &remaining_ops, + buffer, + &original_snapshot, + &mut edit_cursor, + cx, + ); - let matcher = edit_state.in_progress_matcher.get_or_insert_with(|| { - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot()); - StreamingFuzzyMatcher::new(snapshot) + let position = original_snapshot.anchor_before(edit_cursor); + cx.update(|cx| { + tool.set_agent_location(buffer.downgrade(), position, cx); }); - if let Some(match_range) = matcher.push(new_chunk, None) { - let anchor_range = buffer.read_with(cx, |buffer, _cx| { - buffer.anchor_range_between(match_range.clone()) + let action_log = tool + .thread + .read_with(cx, |thread, _cx| thread.action_log().clone()) + 
.ok(); + if let Some(action_log) = action_log { + action_log.update(cx, |log, cx| { + log.buffer_edited(buffer.clone(), cx); }); - diff.update(cx, |card, cx| card.reveal_range(anchor_range, cx)); } - - edit_state.last_old_text_len = old_text_len; } } } - Ok(()) } + + fn apply_char_operations( + ops: &[CharOperation], + buffer: &Entity, + snapshot: &text::BufferSnapshot, + edit_cursor: &mut usize, + cx: &mut AsyncApp, + ) { + for op in ops { + match op { + CharOperation::Insert { text } => { + let anchor = snapshot.anchor_after(*edit_cursor); + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit([(anchor..anchor, text.as_str())], None, cx); + }); + }); + } + CharOperation::Delete { bytes } => { + let delete_end = *edit_cursor + bytes; + let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end); + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit([(anchor_range, "")], None, cx); + }); + }); + *edit_cursor = delete_end; + } + CharOperation::Keep { bytes } => { + *edit_cursor += bytes; + } + } + } + } } fn ensure_buffer_saved( @@ -670,396 +1022,40 @@ fn ensure_buffer_saved( if is_dirty { let message = match (has_save_tool, has_restore_tool) { (true, true) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (true, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ - If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." 
- } - (false, true) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ - If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ - If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." - } - (false, false) => { - "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ - then ask them to save or revert the file manually and inform you when it's ok to proceed." - } - }; - return Err(StreamingEditFileToolOutput::Error { - error: message.to_string(), - }); - } - - if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { - if current != last_read { - return Err(StreamingEditFileToolOutput::Error { - error: "The file has been modified since you last read it. \ - Please read the file again to get the current state before editing it." - .to_string(), - }); - } - } - - Ok(()) -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(untagged)] -pub enum StreamingEditFileToolOutput { - Success { - #[serde(alias = "original_path")] - input_path: PathBuf, - new_text: String, - old_text: Arc, - #[serde(default)] - diff: String, - }, - Error { - error: String, - }, -} - -impl std::fmt::Display for StreamingEditFileToolOutput { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - StreamingEditFileToolOutput::Success { - diff, input_path, .. 
- } => { - if diff.is_empty() { - write!(f, "No edits were made.") - } else { - write!( - f, - "Edited {}:\n\n```diff\n{diff}\n```", - input_path.display() - ) - } - } - StreamingEditFileToolOutput::Error { error } => write!(f, "{error}"), - } - } -} - -impl From for LanguageModelToolResultContent { - fn from(output: StreamingEditFileToolOutput) -> Self { - output.to_string().into() - } -} - -pub struct StreamingEditFileTool { - thread: WeakEntity, - language_registry: Arc, - project: Entity, -} - -impl StreamingEditFileTool { - pub fn new( - project: Entity, - thread: WeakEntity, - language_registry: Arc, - ) -> Self { - Self { - project, - thread, - language_registry, - } - } - - fn authorize( - &self, - path: &PathBuf, - description: &str, - event_stream: &ToolCallEventStream, - cx: &mut App, - ) -> Task> { - super::tool_permissions::authorize_file_edit( - EditFileTool::NAME, - path, - description, - &self.thread, - event_stream, - cx, - ) - } -} - -impl AgentTool for StreamingEditFileTool { - type Input = StreamingEditFileToolInput; - type Output = StreamingEditFileToolOutput; - - const NAME: &'static str = "streaming_edit_file"; - - fn supports_input_streaming() -> bool { - true - } - - fn kind() -> acp::ToolKind { - acp::ToolKind::Edit - } - - fn initial_title( - &self, - input: Result, - cx: &mut App, - ) -> SharedString { - match input { - Ok(input) => self - .project - .read(cx) - .find_project_path(&input.path, cx) - .and_then(|project_path| { - self.project - .read(cx) - .short_full_path_for_project_path(&project_path, cx) - }) - .unwrap_or(input.path) - .into(), - Err(raw_input) => { - if let Some(input) = - serde_json::from_value::(raw_input).ok() - { - let path = input.path.unwrap_or_default(); - let path = path.trim(); - if !path.is_empty() { - return self - .project - .read(cx) - .find_project_path(&path, cx) - .and_then(|project_path| { - self.project - .read(cx) - .short_full_path_for_project_path(&project_path, cx) - }) - .unwrap_or_else(|| 
path.to_string()) - .into(); - } - - let description = input.display_description.unwrap_or_default(); - let description = description.trim(); - if !description.is_empty() { - return description.to_string().into(); - } - } - - DEFAULT_UI_TEXT.into() - } - } - } - - fn run( - self: Arc, - mut input: ToolInput, - event_stream: ToolCallEventStream, - cx: &mut App, - ) -> Task> { - cx.spawn(async move |cx: &mut AsyncApp| { - let mut state = StreamingEditState::Idle; - loop { - futures::select! { - partial = input.recv_partial().fuse() => { - let Some(partial_value) = partial else { break }; - if let Ok(parsed) = serde_json::from_value::(partial_value) { - state.process(parsed, &self, &event_stream, cx).await?; - } - } - _ = event_stream.cancelled_by_user().fuse() => { - return Err(StreamingEditFileToolOutput::Error { - error: "Edit cancelled by user".to_string(), - }); - } - } - } - let full_input = - input - .recv() - .await - .map_err(|e| StreamingEditFileToolOutput::Error { - error: format!("Failed to receive tool input: {e}"), - })?; - - state.finalize(full_input, &self, &event_stream, cx).await - }) - } - - fn replay( - &self, - _input: Self::Input, - output: Self::Output, - event_stream: ToolCallEventStream, - cx: &mut App, - ) -> Result<()> { - match output { - StreamingEditFileToolOutput::Success { - input_path, - old_text, - new_text, - .. - } => { - event_stream.update_diff(cx.new(|cx| { - Diff::finalized( - input_path.to_string_lossy().into_owned(), - Some(old_text.to_string()), - new_text, - self.language_registry.clone(), - cx, - ) - })); - Ok(()) - } - StreamingEditFileToolOutput::Error { .. 
} => Ok(()), - } - } -} - -fn apply_edits( - buffer: &Entity, - action_log: &Entity, - edits: &[EditOperation], - diff: &Entity, - event_stream: &ToolCallEventStream, - abs_path: &PathBuf, - cx: &mut AsyncApp, -) -> Result<()> { - let mut failed_edits = Vec::new(); - let mut ambiguous_edits = Vec::new(); - let mut resolved_edits: Vec<(Range, String)> = Vec::new(); - - let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); - for (index, edit) in edits.iter().enumerate() { - match resolve_and_reveal_edit(buffer, diff, &snapshot, edit, cx) { - Ok((range, new_text)) => { - resolved_edits.push((range, new_text)); + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." } - Err(EditResolveError::NotFound) => { - failed_edits.push(index); + (true, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask for confirmation then use the save_file tool to save the file, then retry this edit. \ + If they want to discard them, ask the user to manually revert the file, then inform you when it's ok to proceed." } - Err(EditResolveError::Ambiguous(ranges)) => { - ambiguous_edits.push((index, ranges)); + (false, true) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes. \ + If they want to keep them, ask the user to manually save the file, then inform you when it's ok to proceed. \ + If they want to discard them, ask for confirmation then use the restore_file_from_disk tool to restore the on-disk contents, then retry this edit." 
} - } - } - - if !failed_edits.is_empty() { - let indices = failed_edits - .iter() - .map(|i| i.to_string()) - .collect::>() - .join(", "); - anyhow::bail!( - "Could not find matching text for edit(s) at index(es): {}. \ - The old_text did not match any content in the file. \ - Please read the file again to get the current content.", - indices - ); - } - - if !ambiguous_edits.is_empty() { - let details: Vec = ambiguous_edits - .iter() - .map(|(index, ranges)| { - let lines = ranges - .iter() - .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string()) - .collect::>() - .join(", "); - format!("edit {}: matches at lines {}", index, lines) - }) - .collect(); - anyhow::bail!( - "Some edits matched multiple locations in the file:\n{}. \ - Please provide more context in old_text to uniquely identify the location.", - details.join("\n") - ); - } - - let mut edits_sorted = resolved_edits; - edits_sorted.sort_by(|a, b| a.0.start.cmp(&b.0.start)); - - if let Some((first_range, _)) = edits_sorted.first() { - let line = snapshot.offset_to_point(first_range.start).row; - event_stream.update_fields( - ToolCallUpdateFields::new() - .locations(vec![ToolCallLocation::new(abs_path).line(Some(line))]), - ); + (false, false) => { + "This file has unsaved changes. Ask the user whether they want to keep or discard those changes, \ + then ask them to save or revert the file manually and inform you when it's ok to proceed." 
+ } + }; + return Err(StreamingEditFileToolOutput::error(message)); } - for window in edits_sorted.windows(2) { - if let [(earlier_range, _), (later_range, _)] = window - && (earlier_range.end > later_range.start || earlier_range.start == later_range.start) - { - let earlier_start_line = snapshot.offset_to_point(earlier_range.start).row + 1; - let earlier_end_line = snapshot.offset_to_point(earlier_range.end).row + 1; - let later_start_line = snapshot.offset_to_point(later_range.start).row + 1; - let later_end_line = snapshot.offset_to_point(later_range.end).row + 1; - anyhow::bail!( - "Conflicting edit ranges detected: lines {}-{} conflicts with lines {}-{}. \ - Conflicting edit ranges are not allowed, as they would overwrite each other.", - earlier_start_line, - earlier_end_line, - later_start_line, - later_end_line, - ); + if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) { + if current != last_read { + return Err(StreamingEditFileToolOutput::error( + "The file has been modified since you last read it. \ + Please read the file again to get the current state before editing it.", + )); } } - if !edits_sorted.is_empty() { - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit( - edits_sorted - .iter() - .map(|(range, new_text)| (range.clone(), new_text.as_str())), - None, - cx, - ); - }); - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - }); - } - Ok(()) } -enum EditResolveError { - NotFound, - Ambiguous(Vec>), -} - -/// Resolves an edit operation by finding matching text in the buffer, -/// reveals the matched range in the diff view, and returns the resolved -/// range and replacement text. 
-fn resolve_and_reveal_edit( - buffer: &Entity, - diff: &Entity, - snapshot: &BufferSnapshot, - edit: &EditOperation, - cx: &mut AsyncApp, -) -> std::result::Result<(Range, String), EditResolveError> { - let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone()); - matcher.push(&edit.old_text, None); - let matches = matcher.finish(); - if matches.is_empty() { - return Err(EditResolveError::NotFound); - } - if matches.len() > 1 { - return Err(EditResolveError::Ambiguous(matches)); - } - - let range = matches.into_iter().next().expect("checked len above"); - - let anchor_range = - buffer.read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone())); - diff.update(cx, |card, cx| card.reveal_range(anchor_range, cx)); - - Ok((range, edit.new_text.clone())) -} - fn resolve_path( mode: StreamingEditFileMode, path: &PathBuf, @@ -1069,7 +1065,7 @@ fn resolve_path( let project = project.read(cx); match mode { - StreamingEditFileMode::Edit | StreamingEditFileMode::Overwrite => { + StreamingEditFileMode::Edit => { let path = project .find_project_path(&path, cx) .context("Can't edit file: path not found")?; @@ -1081,13 +1077,12 @@ fn resolve_path( anyhow::ensure!(entry.is_file(), "Can't edit file: path is a directory"); Ok(path) } - - StreamingEditFileMode::Create => { - if let Some(path) = project.find_project_path(&path, cx) { - anyhow::ensure!( - project.entry_for_path(&path, cx).is_none(), - "Can't create file: file already exists" - ); + StreamingEditFileMode::Write => { + if let Some(path) = project.find_project_path(&path, cx) + && let Some(entry) = project.entry_for_path(&path, cx) + { + anyhow::ensure!(entry.is_file(), "Can't write to file: path is a directory"); + return Ok(path); } let parent_path = path.parent().context("Can't create file: incorrect path")?; @@ -1162,7 +1157,7 @@ mod tests { let input = StreamingEditFileToolInput { display_description: "Create new file".into(), path: "root/dir/new_file.txt".into(), - mode: 
StreamingEditFileMode::Create, + mode: StreamingEditFileMode::Write, content: Some("Hello, World!".into()), edits: None, }; @@ -1214,7 +1209,7 @@ mod tests { let input = StreamingEditFileToolInput { display_description: "Overwrite file".into(), path: "root/file.txt".into(), - mode: StreamingEditFileMode::Overwrite, + mode: StreamingEditFileMode::Write, content: Some("new content".into()), edits: None, }; @@ -1276,7 +1271,7 @@ mod tests { path: "root/file.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "line 2".into(), new_text: "modified line 2".into(), }]), @@ -1301,7 +1296,7 @@ mod tests { } #[gpui::test] - async fn test_streaming_edit_multiple_nonoverlapping_edits(cx: &mut TestAppContext) { + async fn test_streaming_edit_multiple_edits(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); @@ -1336,11 +1331,11 @@ mod tests { mode: StreamingEditFileMode::Edit, content: None, edits: Some(vec![ - EditOperation { + Edit { old_text: "line 5".into(), new_text: "modified line 5".into(), }, - EditOperation { + Edit { old_text: "line 1".into(), new_text: "modified line 1".into(), }, @@ -1404,11 +1399,11 @@ mod tests { mode: StreamingEditFileMode::Edit, content: None, edits: Some(vec![ - EditOperation { + Edit { old_text: "line 2".into(), new_text: "modified line 2".into(), }, - EditOperation { + Edit { old_text: "line 3".into(), new_text: "modified line 3".into(), }, @@ -1472,11 +1467,11 @@ mod tests { mode: StreamingEditFileMode::Edit, content: None, edits: Some(vec![ - EditOperation { + Edit { old_text: "line 1".into(), new_text: "modified line 1".into(), }, - EditOperation { + Edit { old_text: "line 5".into(), new_text: "modified line 5".into(), }, @@ -1533,7 +1528,7 @@ mod tests { path: "root/nonexistent_file.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: 
"foo".into(), new_text: "bar".into(), }]), @@ -1587,7 +1582,7 @@ mod tests { path: "root/file.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "nonexistent text that is not in the file".into(), new_text: "replacement".into(), }]), @@ -1614,79 +1609,6 @@ mod tests { ); } - #[gpui::test] - async fn test_streaming_edit_overlapping_edits_out_of_order(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - // Multi-line file so the line-based fuzzy matcher can resolve each edit. - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - // Edit A spans lines 3-4, edit B spans lines 2-3. They overlap on - // "line 3" and are given in descending file order so the ascending - // sort must reorder them before the pairwise overlap check can - // detect them correctly. 
- let result = cx - .update(|cx| { - let input = StreamingEditFileToolInput { - display_description: "Overlapping edits".into(), - path: "root/file.txt".into(), - mode: StreamingEditFileMode::Edit, - content: None, - edits: Some(vec![ - EditOperation { - old_text: "line 3\nline 4".into(), - new_text: "SECOND".into(), - }, - EditOperation { - old_text: "line 2\nline 3".into(), - new_text: "FIRST".into(), - }, - ]), - }; - Arc::new(StreamingEditFileTool::new( - project, - thread.downgrade(), - language_registry, - )) - .run( - ToolInput::resolved(input), - ToolCallEventStream::test().0, - cx, - ) - }) - .await; - - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { - panic!("expected error"); - }; - assert!( - error.contains("Conflicting edit ranges detected"), - "Expected 'Conflicting edit ranges detected' but got: {error}" - ); - } - #[gpui::test] async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) { init_test(cx); @@ -1809,7 +1731,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite" + "mode": "write" })); cx.run_until_parked(); @@ -1817,7 +1739,7 @@ mod tests { sender.send_final(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite", + "mode": "write", "content": "new content" })); @@ -2026,14 +1948,14 @@ mod tests { sender.send_partial(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create" + "mode": "write" })); cx.run_until_parked(); sender.send_partial(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create", + "mode": "write", "content": "Hello, " })); cx.run_until_parked(); @@ -2042,7 +1964,7 @@ mod tests { sender.send_final(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create", + "mode": "write", "content": "Hello, World!" 
})); @@ -2304,14 +2226,16 @@ mod tests { })); cx.run_until_parked(); - // Verify edit 1 applied + // Verify edit 1 fully applied. Edit 2's new_text is being + // streamed: "CCC" is inserted but the old "ccc" isn't deleted + // yet (StreamingDiff::finish runs when edit 3 marks edit 2 done). let buffer_text = project.update(cx, |project, cx| { let pp = project .find_project_path(&PathBuf::from("root/file.txt"), cx) .unwrap(); project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) }); - assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nccc\nddd\neee\n")); + assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCCccc\nddd\neee\n")); // Edit 3 appears — edit 2 is now complete and should be applied sender.send_partial(json!({ @@ -2326,14 +2250,15 @@ mod tests { })); cx.run_until_parked(); - // Verify edits 1 and 2 both applied + // Verify edits 1 and 2 fully applied. Edit 3's new_text is being + // streamed: "EEE" is inserted but old "eee" isn't deleted yet. let buffer_text = project.update(cx, |project, cx| { let pp = project .find_project_path(&PathBuf::from("root/file.txt"), cx) .unwrap(); project.get_open_buffer(&pp, cx).map(|b| b.read(cx).text()) }); - assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\neee\n")); + assert_eq!(buffer_text.as_deref(), Some("AAA\nbbb\nCCC\nddd\nEEEeee\n")); // Send final sender.send_final(json!({ @@ -2466,82 +2391,6 @@ mod tests { ); } - #[gpui::test] - async fn test_streaming_overlapping_edits_detected_naturally(cx: &mut TestAppContext) { - init_test(cx); - - let fs = project::FakeFs::new(cx.executor()); - fs.insert_tree( - "/root", - json!({ - "file.txt": "line 1\nline 2\nline 3\n" - }), - ) - .await; - let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; - let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = 
Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - crate::Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - - let (sender, input) = ToolInput::::test(); - let (event_stream, _receiver) = ToolCallEventStream::test(); - - let tool = Arc::new(StreamingEditFileTool::new( - project.clone(), - thread.downgrade(), - language_registry, - )); - - let task = cx.update(|cx| tool.run(input, event_stream, cx)); - - // Setup - sender.send_partial(json!({ - "display_description": "Overlapping edits", - "path": "root/file.txt", - "mode": "edit" - })); - cx.run_until_parked(); - - // Edit 1 targets "line 1\nline 2" and replaces it. - // Edit 2 targets "line 2\nline 3" — but after edit 1 is applied, - // "line 2" has been removed so this should fail to match. - // Edit 3 exists to make edit 2 "complete" during streaming. - sender.send_partial(json!({ - "display_description": "Overlapping edits", - "path": "root/file.txt", - "mode": "edit", - "edits": [ - {"old_text": "line 1\nline 2", "new_text": "REPLACED"}, - {"old_text": "line 2\nline 3", "new_text": "ALSO REPLACED"}, - {"old_text": "line 3", "new_text": "DUMMY"} - ] - })); - cx.run_until_parked(); - - // Edit 1 was applied, edit 2 should fail since "line 2" no longer exists - drop(sender); - - let result = task.await; - let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else { - panic!("expected error"); - }; - assert!( - error.contains("Could not find matching text for edit at index 1"), - "Expected overlapping edit to fail naturally, got: {error}" - ); - } - #[gpui::test] async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) { init_test(cx); @@ -2590,7 +2439,10 @@ mod tests { })); cx.run_until_parked(); - // Buffer should NOT be modified — the single edit is still in-progress + // The edit's old_text and new_text both arrived in one partial, so + // the old_text 
is resolved and new_text is being streamed via + // StreamingDiff. The buffer reflects the in-progress diff (new text + // inserted, old text not yet fully removed until finalization). let buffer_text = project.update(cx, |project, cx| { let pp = project .find_project_path(&PathBuf::from("root/file.txt"), cx) @@ -2599,8 +2451,8 @@ mod tests { }); assert_eq!( buffer_text.as_deref(), - Some("hello world\n"), - "Single in-progress edit should not be applied during streaming" + Some("goodbye worldhello world\n"), + "In-progress streaming diff: new text inserted, old text not yet removed" ); // Send final — the edit is applied during finalization @@ -2795,12 +2647,12 @@ mod tests { sender.send_partial(json!({ "display_description": "Create", "path": "root/dir/new.txt", - "mode": "create" + "mode": "write" })); sender.send_final(json!({ "display_description": "Create", "path": "root/dir/new.txt", - "mode": "create", + "mode": "write", "content": "streamed content" })); @@ -2813,7 +2665,7 @@ mod tests { #[gpui::test] async fn test_streaming_resolve_path_for_creating_file(cx: &mut TestAppContext) { - let mode = StreamingEditFileMode::Create; + let mode = StreamingEditFileMode::Write; let result = test_resolve_path(&mode, "root/new.txt", cx); assert_resolved_path_eq(result.await, rel_path("new.txt")); @@ -2825,9 +2677,12 @@ mod tests { assert_resolved_path_eq(result.await, rel_path("dir/new.txt")); let result = test_resolve_path(&mode, "root/dir/subdir/existing.txt", cx); + assert_resolved_path_eq(result.await, rel_path("dir/subdir/existing.txt")); + + let result = test_resolve_path(&mode, "root/dir/subdir", cx); assert_eq!( result.await.unwrap_err().to_string(), - "Can't create file: file already exists" + "Can't write to file: path is a directory" ); let result = test_resolve_path(&mode, "root/dir/nonexistent_dir/new.txt", cx); @@ -3003,14 +2858,14 @@ mod tests { sender.send_partial(json!({ "display_description": "Create main function", "path": "root/src/main.rs", - 
"mode": "overwrite" + "mode": "write" })); cx.run_until_parked(); sender.send_final(json!({ "display_description": "Create main function", "path": "root/src/main.rs", - "mode": "overwrite", + "mode": "write", "content": UNFORMATTED_CONTENT })); @@ -3060,14 +2915,14 @@ mod tests { sender.send_partial(json!({ "display_description": "Update main function", "path": "root/src/main.rs", - "mode": "overwrite" + "mode": "write" })); cx.run_until_parked(); sender.send_final(json!({ "display_description": "Update main function", "path": "root/src/main.rs", - "mode": "overwrite", + "mode": "write", "content": UNFORMATTED_CONTENT })); @@ -3136,7 +2991,7 @@ mod tests { let input = StreamingEditFileToolInput { display_description: "Create main function".into(), path: "root/src/main.rs".into(), - mode: StreamingEditFileMode::Overwrite, + mode: StreamingEditFileMode::Write, content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), edits: None, }; @@ -3183,7 +3038,7 @@ mod tests { let input = StreamingEditFileToolInput { display_description: "Update main function".into(), path: "root/src/main.rs".into(), - mode: StreamingEditFileMode::Overwrite, + mode: StreamingEditFileMode::Write, content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()), edits: None, }; @@ -3904,11 +3759,7 @@ mod tests { language_registry, )); - let modes = vec![ - StreamingEditFileMode::Edit, - StreamingEditFileMode::Create, - StreamingEditFileMode::Overwrite, - ]; + let modes = vec![StreamingEditFileMode::Edit, StreamingEditFileMode::Write]; for _mode in modes { // Test .zed path with different modes @@ -4061,7 +3912,7 @@ mod tests { ToolInput::resolved(StreamingEditFileToolInput { display_description: "Edit file".into(), path: path!("/main.rs").into(), - mode: StreamingEditFileMode::Overwrite, + mode: StreamingEditFileMode::Write, content: Some("new content".into()), edits: None, }), @@ -4090,7 +3941,7 @@ mod tests { ToolInput::resolved(StreamingEditFileToolInput { display_description: "Edit file".into(), path: 
path!("/main.rs").into(), - mode: StreamingEditFileMode::Overwrite, + mode: StreamingEditFileMode::Write, content: Some("dropped content".into()), edits: None, }), @@ -4171,7 +4022,7 @@ mod tests { path: "root/test.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "original content".into(), new_text: "modified content".into(), }]), @@ -4196,7 +4047,7 @@ mod tests { path: "root/test.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "modified content".into(), new_text: "further modified content".into(), }]), @@ -4305,7 +4156,7 @@ mod tests { path: "root/test.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "externally modified content".into(), new_text: "new content".into(), }]), @@ -4409,7 +4260,7 @@ mod tests { path: "root/test.txt".into(), mode: StreamingEditFileMode::Edit, content: None, - edits: Some(vec![EditOperation { + edits: Some(vec![Edit { old_text: "original content".into(), new_text: "new content".into(), }]), @@ -4441,15 +4292,14 @@ mod tests { } #[gpui::test] - async fn test_streaming_overlapping_edits_detected_early(cx: &mut TestAppContext) { + async fn test_streaming_overlapping_edits_resolved_sequentially(cx: &mut TestAppContext) { init_test(cx); let fs = project::FakeFs::new(cx.executor()); - // The file content is crafted so that edit 1's replacement still - // contains the old_text of edit 2 as a contiguous substring. - // Without early overlap detection, edit 2 would silently match - // inside the already-modified region and corrupt the file instead - // of producing a clear "Conflicting edit ranges" error. + // Edit 1's replacement introduces text that contains edit 2's + // old_text as a substring. 
Because edits resolve sequentially + // against the current buffer, edit 2 finds a unique match in + // the modified buffer and succeeds. fs.insert_tree( "/root", json!({ @@ -4492,17 +4342,10 @@ mod tests { })); cx.run_until_parked(); - // Edit 1 targets "bbb\nccc" (lines 2-3) and replaces it with - // text that preserves "ccc\nddd" as a contiguous substring in the - // buffer — so edit 2's old_text will still match after edit 1 is - // applied. - // - // Edit 2 targets "ccc\nddd" (lines 3-4), overlapping with edit 1 on - // line 3 ("ccc"). After edit 1 runs, the buffer becomes: - // "aaa\nXXX\nccc\nddd\nddd\neee\n" - // and "ccc\nddd" is still present, so edit 2 would silently - // succeed without early overlap detection. - // + // Edit 1 replaces "bbb\nccc" with "XXX\nccc\nddd", so the + // buffer becomes "aaa\nXXX\nccc\nddd\nddd\neee\n". + // Edit 2's old_text "ccc\nddd" matches the first occurrence + // in the modified buffer and replaces it with "ZZZ". // Edit 3 exists only to mark edit 2 as "complete" during streaming. sender.send_partial(json!({ "display_description": "Overlapping edits", @@ -4529,23 +4372,10 @@ mod tests { })); let result = task.await; - // We expect a "Conflicting edit ranges" error. Currently the overlap - // goes undetected during streaming and the file gets silently - // corrupted, so this assertion will fail until we add early overlap - // detection. - match result { - Err(StreamingEditFileToolOutput::Error { error }) - if error.contains("Conflicting edit ranges") => {} - Err(StreamingEditFileToolOutput::Error { error }) => { - panic!("Expected 'Conflicting edit ranges' error, got different error: {error}"); - } - Ok(output) => { - panic!("Expected 'Conflicting edit ranges' error, but got success: {output}"); - } - Err(other) => { - panic!("Expected 'Conflicting edit ranges' error, got unexpected output: {other}"); - } - } + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "aaa\nXXX\nZZZ\nddd\nDUMMY\n"); } #[gpui::test] @@ -4585,7 +4415,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create" + "mode": "write" })); cx.run_until_parked(); @@ -4593,7 +4423,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create", + "mode": "write", "content": "line 1\n" })); cx.run_until_parked(); @@ -4611,7 +4441,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create", + "mode": "write", "content": "line 1\nline 2\n" })); cx.run_until_parked(); @@ -4621,7 +4451,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create", + "mode": "write", "content": "line 1\nline 2\nline 3\n" })); cx.run_until_parked(); @@ -4634,7 +4464,7 @@ mod tests { sender.send_final(json!({ "display_description": "Create new file", "path": "root/dir/new_file.txt", - "mode": "create", + "mode": "write", "content": "line 1\nline 2\nline 3\n" })); @@ -4688,7 +4518,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite" + "mode": "write" })); cx.run_until_parked(); @@ -4706,7 +4536,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite", + "mode": "write", "content": "new line 1\n" })); cx.run_until_parked(); @@ -4720,7 +4550,7 @@ mod tests { sender.send_final(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite", + "mode": "write", "content": "new line 1\nnew line 2\n" })); @@ -4781,7 +4611,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Overwrite file", "path": 
"root/file.txt", - "mode": "overwrite" + "mode": "write" })); cx.run_until_parked(); @@ -4799,7 +4629,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite", + "mode": "write", "content": "new line 1\n" })); cx.run_until_parked(); @@ -4809,7 +4639,7 @@ mod tests { sender.send_partial(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite", + "mode": "write", "content": "new line 1\nnew line 2\n" })); cx.run_until_parked(); @@ -4822,7 +4652,7 @@ mod tests { sender.send_final(json!({ "display_description": "Overwrite file", "path": "root/file.txt", - "mode": "overwrite", + "mode": "write", "content": "new line 1\nnew line 2\nnew line 3\n" })); @@ -4837,6 +4667,89 @@ mod tests { assert_eq!(*old_text, "old line 1\nold line 2\nold line 3\n"); } + #[gpui::test] + async fn test_streaming_edit_json_fixer_escape_corruption(cx: &mut TestAppContext) { + init_test(cx); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "file.txt": "hello\nworld\nfoo\n" + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let model = Arc::new(FakeLanguageModel::default()); + let thread = cx.new(|cx| { + crate::Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + Some(model), + cx, + ) + }); + + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + + let tool = Arc::new(StreamingEditFileTool::new( + project.clone(), + thread.downgrade(), + language_registry, + )); + + let task = cx.update(|cx| tool.run(input, event_stream, cx)); + + sender.send_partial(json!({ 
+ "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit" + })); + cx.run_until_parked(); + + // Simulate JSON fixer producing a literal backslash when the LLM + // stream cuts in the middle of a \n escape sequence. + // The old_text "hello\nworld" would be streamed as: + // partial 1: old_text = "hello\\" (fixer closes incomplete \n as \\) + // partial 2: old_text = "hello\nworld" (fixer corrected the escape) + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\\"}] + })); + cx.run_until_parked(); + + // Now the fixer corrects it to the real newline. + sender.send_partial(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\nworld"}] + })); + cx.run_until_parked(); + + // Send final. + sender.send_final(json!({ + "display_description": "Edit", + "path": "root/file.txt", + "mode": "edit", + "edits": [{"old_text": "hello\nworld", "new_text": "HELLO\nWORLD"}] + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "HELLO\nWORLD\nfoo\n"); + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); diff --git a/crates/agent/src/tools/tool_edit_parser.rs b/crates/agent/src/tools/tool_edit_parser.rs new file mode 100644 index 0000000000000000000000000000000000000000..86259db916f49c07bbecc63625a93a9ebb955539 --- /dev/null +++ b/crates/agent/src/tools/tool_edit_parser.rs @@ -0,0 +1,941 @@ +use smallvec::SmallVec; + +use crate::{Edit, PartialEdit}; + +/// Events emitted by `ToolEditParser` as tool call input streams in. +#[derive(Debug, PartialEq, Eq)] +pub enum ToolEditEvent { + /// A chunk of `old_text` for an edit operation. 
+ OldTextChunk { + edit_index: usize, + chunk: String, + done: bool, + }, + /// A chunk of `new_text` for an edit operation. + NewTextChunk { + edit_index: usize, + chunk: String, + done: bool, + }, + /// A chunk of content for write/overwrite mode. + ContentChunk { chunk: String }, +} + +/// Tracks the streaming state of a single edit to detect deltas. +#[derive(Default, Debug)] +struct EditStreamState { + old_text_emitted_len: usize, + old_text_done: bool, + new_text_emitted_len: usize, + new_text_done: bool, +} + +/// Converts incrementally-growing tool call JSON into a stream of chunk events. +/// +/// The tool call streaming infrastructure delivers partial JSON objects where +/// string fields grow over time. This parser compares consecutive partials, +/// computes the deltas, and emits `ToolEditEvent`s that downstream pipeline +/// stages (`StreamingFuzzyMatcher` for old_text, `StreamingDiff` for new_text) +/// can consume incrementally. +/// +/// Because partial JSON comes through a fixer (`partial-json-fixer`) that +/// closes incomplete escape sequences, a string can temporarily contain wrong +/// trailing characters (e.g. a literal `\` instead of `\n`). We handle this +/// by holding back trailing backslash characters in non-finalized chunks: if +/// a partial string ends with `\` (0x5C), that byte is not emitted until the +/// next partial confirms or corrects it. This avoids feeding corrupted bytes +/// to downstream consumers. +#[derive(Default, Debug)] +pub struct ToolEditParser { + edit_states: Vec, + content_emitted_len: usize, +} + +impl ToolEditParser { + /// Push a new set of partial edits (from edit mode) and return any events. + /// + /// Each call should pass the *entire current* edits array as seen in the + /// latest partial input. The parser will diff it against its internal state + /// to produce only the new events. 
+ pub fn push_edits(&mut self, edits: &[PartialEdit]) -> SmallVec<[ToolEditEvent; 4]> { + let mut events = SmallVec::new(); + + for (index, partial) in edits.iter().enumerate() { + if index >= self.edit_states.len() { + // A new edit appeared — finalize the previous one if there was one. + if let Some(previous) = self.finalize_previous_edit(index) { + events.extend(previous); + } + self.edit_states.push(EditStreamState::default()); + } + + let state = &mut self.edit_states[index]; + + // Process old_text changes. + if let Some(old_text) = &partial.old_text + && !state.old_text_done + { + if partial.new_text.is_some() { + // new_text appeared, so old_text is done — emit everything. + let start = state.old_text_emitted_len.min(old_text.len()); + let chunk = old_text[start..].to_string(); + state.old_text_done = true; + state.old_text_emitted_len = old_text.len(); + events.push(ToolEditEvent::OldTextChunk { + edit_index: index, + chunk, + done: true, + }); + } else { + let safe_end = safe_emit_end(old_text); + if safe_end > state.old_text_emitted_len { + let chunk = old_text[state.old_text_emitted_len..safe_end].to_string(); + state.old_text_emitted_len = safe_end; + events.push(ToolEditEvent::OldTextChunk { + edit_index: index, + chunk, + done: false, + }); + } + } + } + + // Process new_text changes. + if let Some(new_text) = &partial.new_text + && !state.new_text_done + { + let safe_end = safe_emit_end(new_text); + if safe_end > state.new_text_emitted_len { + let chunk = new_text[state.new_text_emitted_len..safe_end].to_string(); + state.new_text_emitted_len = safe_end; + events.push(ToolEditEvent::NewTextChunk { + edit_index: index, + chunk, + done: false, + }); + } + } + } + + events + } + + /// Push new content and return any events. + /// + /// Each call should pass the *entire current* content string. The parser + /// will diff it against its internal state to emit only the new chunk. 
+ pub fn push_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> { + let mut events = SmallVec::new(); + + let safe_end = safe_emit_end(content); + if safe_end > self.content_emitted_len { + let chunk = content[self.content_emitted_len..safe_end].to_string(); + self.content_emitted_len = safe_end; + events.push(ToolEditEvent::ContentChunk { chunk }); + } + + events + } + + /// Finalize all edits with the complete input. This emits `done: true` + /// events for any in-progress old_text or new_text that hasn't been + /// finalized yet. + /// + /// `final_edits` should be the fully deserialized final edits array. The + /// parser compares against its tracked state and emits any remaining deltas + /// with `done: true`. + pub fn finalize_edits(&mut self, edits: &[Edit]) -> SmallVec<[ToolEditEvent; 4]> { + let mut events = SmallVec::new(); + + for (index, edit) in edits.iter().enumerate() { + if index >= self.edit_states.len() { + // This edit was never seen in partials — emit it fully. + if let Some(previous) = self.finalize_previous_edit(index) { + events.extend(previous); + } + self.edit_states.push(EditStreamState::default()); + } + + let state = &mut self.edit_states[index]; + + if !state.old_text_done { + let start = state.old_text_emitted_len.min(edit.old_text.len()); + let chunk = edit.old_text[start..].to_string(); + state.old_text_done = true; + state.old_text_emitted_len = edit.old_text.len(); + events.push(ToolEditEvent::OldTextChunk { + edit_index: index, + chunk, + done: true, + }); + } + + if !state.new_text_done { + let start = state.new_text_emitted_len.min(edit.new_text.len()); + let chunk = edit.new_text[start..].to_string(); + state.new_text_done = true; + state.new_text_emitted_len = edit.new_text.len(); + events.push(ToolEditEvent::NewTextChunk { + edit_index: index, + chunk, + done: true, + }); + } + } + + events + } + + /// Finalize content with the complete input. 
+ pub fn finalize_content(&mut self, content: &str) -> SmallVec<[ToolEditEvent; 1]> { + let mut events = SmallVec::new(); + + let start = self.content_emitted_len.min(content.len()); + if content.len() > start { + let chunk = content[start..].to_string(); + self.content_emitted_len = content.len(); + events.push(ToolEditEvent::ContentChunk { chunk }); + } + + events + } + + /// When a new edit appears at `index`, finalize the edit at `index - 1` + /// by emitting a `NewTextChunk { done: true }` if it hasn't been finalized. + fn finalize_previous_edit(&mut self, new_index: usize) -> Option> { + if new_index == 0 || self.edit_states.is_empty() { + return None; + } + + let previous_index = new_index - 1; + if previous_index >= self.edit_states.len() { + return None; + } + + let state = &mut self.edit_states[previous_index]; + let mut events = SmallVec::new(); + + // If old_text was never finalized, finalize it now with an empty done chunk. + if !state.old_text_done { + state.old_text_done = true; + events.push(ToolEditEvent::OldTextChunk { + edit_index: previous_index, + chunk: String::new(), + done: true, + }); + } + + // Emit a done event for new_text if not already finalized. + if !state.new_text_done { + state.new_text_done = true; + events.push(ToolEditEvent::NewTextChunk { + edit_index: previous_index, + chunk: String::new(), + done: true, + }); + } + + Some(events) + } +} + +/// Returns the byte position up to which it is safe to emit from a partial +/// string. If the string ends with a backslash (`\`, 0x5C), that byte is +/// held back because it may be an artifact of the partial JSON fixer closing +/// an incomplete escape sequence (e.g. turning a half-received `\n` into `\\`). +/// The next partial will reveal the correct character. 
+fn safe_emit_end(text: &str) -> usize { + if text.as_bytes().last() == Some(&b'\\') { + text.len() - 1 + } else { + text.len() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_single_edit_streamed_incrementally() { + let mut parser = ToolEditParser::default(); + + // old_text arrives in chunks: "hell" → "hello w" → "hello world" + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hell".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "hell".into(), + done: false, + }] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello w".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "o w".into(), + done: false, + }] + ); + + // new_text appears → old_text finalizes + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello world".into()), + new_text: Some("good".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "orld".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "good".into(), + done: false, + }, + ] + ); + + // new_text grows + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello world".into()), + new_text: Some("goodbye world".into()), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "bye world".into(), + done: false, + }] + ); + + // Finalize + let events = parser.finalize_edits(&[Edit { + old_text: "hello world".into(), + new_text: "goodbye world".into(), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }] + ); + } + + #[test] + fn test_multiple_edits_sequential() { + let mut parser = ToolEditParser::default(); + + // First edit streams in + let events = parser.push_edits(&[PartialEdit 
{ + old_text: Some("first old".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "first old".into(), + done: false, + }] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("first old".into()), + new_text: Some("first new".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "first new".into(), + done: false, + }, + ] + ); + + // Second edit appears → first edit's new_text is finalized + let events = parser.push_edits(&[ + PartialEdit { + old_text: Some("first old".into()), + new_text: Some("first new".into()), + }, + PartialEdit { + old_text: Some("second".into()), + new_text: None, + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }, + ToolEditEvent::OldTextChunk { + edit_index: 1, + chunk: "second".into(), + done: false, + }, + ] + ); + + // Finalize everything + let events = parser.finalize_edits(&[ + Edit { + old_text: "first old".into(), + new_text: "first new".into(), + }, + Edit { + old_text: "second old".into(), + new_text: "second new".into(), + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 1, + chunk: " old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 1, + chunk: "second new".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_content_streamed_incrementally() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_content("hello"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: "hello".into(), + }] + ); + + let events = parser.push_content("hello world"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: " world".into(), + }] + ); + + // No change + let events 
= parser.push_content("hello world"); + assert!(events.is_empty()); + + let events = parser.push_content("hello world!"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { chunk: "!".into() }] + ); + + // Finalize with no additional content + let events = parser.finalize_content("hello world!"); + assert!(events.is_empty()); + } + + #[test] + fn test_finalize_content_with_remaining() { + let mut parser = ToolEditParser::default(); + + parser.push_content("partial"); + let events = parser.finalize_content("partial content here"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: " content here".into(), + }] + ); + } + + #[test] + fn test_content_trailing_backslash_held_back() { + let mut parser = ToolEditParser::default(); + + // Partial JSON fixer turns incomplete \n into \\ (literal backslash). + // The trailing backslash is held back. + let events = parser.push_content("hello,\\"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: "hello,".into(), + }] + ); + + // Next partial corrects the escape to an actual newline. + // The held-back byte was wrong; the correct newline is emitted. + let events = parser.push_content("hello,\n"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { chunk: "\n".into() }] + ); + + // Normal growth. + let events = parser.push_content("hello,\nworld"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { + chunk: "world".into(), + }] + ); + } + + #[test] + fn test_content_finalize_with_trailing_backslash() { + let mut parser = ToolEditParser::default(); + + // Stream a partial with a fixer-corrupted trailing backslash. + // The backslash is held back. + parser.push_content("abc\\"); + + // Finalize reveals the correct character. 
+ let events = parser.finalize_content("abc\n"); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::ContentChunk { chunk: "\n".into() }] + ); + } + + #[test] + fn test_no_partials_direct_finalize() { + let mut parser = ToolEditParser::default(); + + let events = parser.finalize_edits(&[Edit { + old_text: "old".into(), + new_text: "new".into(), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "new".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_no_partials_direct_finalize_multiple() { + let mut parser = ToolEditParser::default(); + + let events = parser.finalize_edits(&[ + Edit { + old_text: "first old".into(), + new_text: "first new".into(), + }, + Edit { + old_text: "second old".into(), + new_text: "second new".into(), + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "first old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "first new".into(), + done: true, + }, + ToolEditEvent::OldTextChunk { + edit_index: 1, + chunk: "second old".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 1, + chunk: "second new".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_old_text_no_growth() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("same".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "same".into(), + done: false, + }] + ); + + // Same old_text, no new_text → no events + let events = parser.push_edits(&[PartialEdit { + old_text: Some("same".into()), + new_text: None, + }]); + assert!(events.is_empty()); + } + + #[test] + fn test_old_text_none_then_appears() { + let mut parser = ToolEditParser::default(); + + // Edit exists but 
old_text is None (field hasn't arrived yet) + let events = parser.push_edits(&[PartialEdit { + old_text: None, + new_text: None, + }]); + assert!(events.is_empty()); + + // old_text appears + let events = parser.push_edits(&[PartialEdit { + old_text: Some("text".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "text".into(), + done: false, + }] + ); + } + + #[test] + fn test_empty_old_text_with_new_text() { + let mut parser = ToolEditParser::default(); + + // old_text is empty, new_text appears immediately + let events = parser.push_edits(&[PartialEdit { + old_text: Some("".into()), + new_text: Some("inserted".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "inserted".into(), + done: false, + }, + ] + ); + } + + #[test] + fn test_three_edits_streamed() { + let mut parser = ToolEditParser::default(); + + // Stream first edit + parser.push_edits(&[PartialEdit { + old_text: Some("a".into()), + new_text: Some("A".into()), + }]); + + // Second edit appears + parser.push_edits(&[ + PartialEdit { + old_text: Some("a".into()), + new_text: Some("A".into()), + }, + PartialEdit { + old_text: Some("b".into()), + new_text: Some("B".into()), + }, + ]); + + // Third edit appears + let events = parser.push_edits(&[ + PartialEdit { + old_text: Some("a".into()), + new_text: Some("A".into()), + }, + PartialEdit { + old_text: Some("b".into()), + new_text: Some("B".into()), + }, + PartialEdit { + old_text: Some("c".into()), + new_text: None, + }, + ]); + + // Should finalize edit 1 (index=1) and start edit 2 (index=2) + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::NewTextChunk { + edit_index: 1, + chunk: "".into(), + done: true, + }, + ToolEditEvent::OldTextChunk { + edit_index: 2, + chunk: "c".into(), + done: false, + }, + ] + ); + + // Finalize 
+ let events = parser.finalize_edits(&[ + Edit { + old_text: "a".into(), + new_text: "A".into(), + }, + Edit { + old_text: "b".into(), + new_text: "B".into(), + }, + Edit { + old_text: "c".into(), + new_text: "C".into(), + }, + ]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 2, + chunk: "".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 2, + chunk: "C".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_finalize_with_unseen_old_text() { + let mut parser = ToolEditParser::default(); + + // Only saw partial old_text, never saw new_text in partials + parser.push_edits(&[PartialEdit { + old_text: Some("partial".into()), + new_text: None, + }]); + + let events = parser.finalize_edits(&[Edit { + old_text: "partial old text".into(), + new_text: "replacement".into(), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: " old text".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "replacement".into(), + done: true, + }, + ] + ); + } + + #[test] + fn test_finalize_with_partially_seen_new_text() { + let mut parser = ToolEditParser::default(); + + parser.push_edits(&[PartialEdit { + old_text: Some("old".into()), + new_text: Some("partial".into()), + }]); + + let events = parser.finalize_edits(&[Edit { + old_text: "old".into(), + new_text: "partial new text".into(), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: " new text".into(), + done: true, + }] + ); + } + + #[test] + fn test_repeated_pushes_with_no_change() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("stable".into()), + new_text: Some("also stable".into()), + }]); + assert_eq!(events.len(), 2); // old done + new chunk + + // Push the exact same data again + let events = parser.push_edits(&[PartialEdit { + old_text: 
Some("stable".into()), + new_text: Some("also stable".into()), + }]); + assert!(events.is_empty()); + + // And again + let events = parser.push_edits(&[PartialEdit { + old_text: Some("stable".into()), + new_text: Some("also stable".into()), + }]); + assert!(events.is_empty()); + } + + #[test] + fn test_old_text_trailing_backslash_held_back() { + let mut parser = ToolEditParser::default(); + + // Partial-json-fixer produces a literal backslash when the JSON stream + // cuts in the middle of an escape sequence like \n. The parser holds + // back the trailing backslash instead of emitting it. + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello,\\".into()), // fixer closed incomplete \n as \\ + new_text: None, + }]); + // The trailing `\` is held back — only "hello," is emitted. + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "hello,".into(), + done: false, + }] + ); + + // Next partial: the fixer corrects the escape to \n. + // The held-back byte was wrong, but we never emitted it. Now the + // correct newline at that position is emitted normally. + let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello,\n".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "\n".into(), + done: false, + }] + ); + + // Continue normally. 
+ let events = parser.push_edits(&[PartialEdit { + old_text: Some("hello,\nworld".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "world".into(), + done: false, + }] + ); + } + + #[test] + fn test_multiline_old_and_new_text() { + let mut parser = ToolEditParser::default(); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("line1\nline2".into()), + new_text: None, + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "line1\nline2".into(), + done: false, + }] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("line1\nline2\nline3".into()), + new_text: Some("LINE1\n".into()), + }]); + assert_eq!( + events.as_slice(), + &[ + ToolEditEvent::OldTextChunk { + edit_index: 0, + chunk: "\nline3".into(), + done: true, + }, + ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "LINE1\n".into(), + done: false, + }, + ] + ); + + let events = parser.push_edits(&[PartialEdit { + old_text: Some("line1\nline2\nline3".into()), + new_text: Some("LINE1\nLINE2\nLINE3".into()), + }]); + assert_eq!( + events.as_slice(), + &[ToolEditEvent::NewTextChunk { + edit_index: 0, + chunk: "LINE2\nLINE3".into(), + done: false, + }] + ); + } +} From a759000b167ad148e29541ac35b004eaad8bbfb5 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Fri, 27 Feb 2026 14:35:30 +0200 Subject: [PATCH 153/548] ep: Heuristic to apply patches to files without trailing newline (#50291) This is a workaround for the fact that ep patches don't have the '\No newline at end of file` marker. 
Release Notes: - N/A --- crates/edit_prediction/src/udiff.rs | 261 ++++++++++++++++++++++++++-- 1 file changed, 247 insertions(+), 14 deletions(-) diff --git a/crates/edit_prediction/src/udiff.rs b/crates/edit_prediction/src/udiff.rs index e1e475c85526befb5549571cf7b7a2e1ef10c3d8..14be1991d34e985067f5ad8729fd7ac8485211db 100644 --- a/crates/edit_prediction/src/udiff.rs +++ b/crates/edit_prediction/src/udiff.rs @@ -266,6 +266,66 @@ pub fn strip_diff_metadata(diff: &str) -> String { result } +/// Find all byte offsets where `hunk.context` occurs as a substring of `text`. +/// +/// If no exact matches are found and the context ends with `'\n'` but `text` +/// does not, retries without the trailing newline, accepting only a match at +/// the very end of `text`. When this fallback fires, the hunk's context is +/// trimmed and its edit ranges are clamped so that downstream code doesn't +/// index past the end of the matched region. This handles diffs that are +/// missing a `\ No newline at end of file` marker: the parser always appends +/// `'\n'` via `writeln!`, so the context can have a trailing newline that +/// doesn't exist in the source text. 
+fn find_context_candidates(text: &str, hunk: &mut Hunk) -> Vec { + let candidates: Vec = text + .match_indices(&hunk.context) + .map(|(offset, _)| offset) + .collect(); + + if !candidates.is_empty() { + return candidates; + } + + if hunk.context.ends_with('\n') && !hunk.context.is_empty() { + let old_len = hunk.context.len(); + hunk.context.pop(); + let new_len = hunk.context.len(); + + if !hunk.context.is_empty() { + let candidates: Vec = text + .match_indices(&hunk.context) + .filter(|(offset, _)| offset + new_len == text.len()) + .map(|(offset, _)| offset) + .collect(); + + if !candidates.is_empty() { + for edit in &mut hunk.edits { + let touched_phantom = edit.range.end > new_len; + edit.range.start = edit.range.start.min(new_len); + edit.range.end = edit.range.end.min(new_len); + if touched_phantom { + // The replacement text was also written with a + // trailing '\n' that corresponds to the phantom + // newline we just removed from the context. + if edit.text.ends_with('\n') { + edit.text.pop(); + } + } + } + return candidates; + } + + // Restore if fallback didn't help either. + hunk.context.push('\n'); + debug_assert_eq!(hunk.context.len(), old_len); + } else { + hunk.context.push('\n'); + } + } + + Vec::new() +} + /// Given multiple candidate offsets where context matches, use line numbers to disambiguate. /// Returns the offset that matches the expected line, or None if no match or no line number available. fn disambiguate_by_line_number( @@ -305,15 +365,11 @@ pub fn apply_diff_to_string_with_hunk_offset( while let Some(event) = diff.next().context("Failed to parse diff")? 
{ match event { DiffEvent::Hunk { - hunk, + mut hunk, path: _, status: _, } => { - // Find all matches of the context in the text - let candidates: Vec = text - .match_indices(&hunk.context) - .map(|(offset, _)| offset) - .collect(); + let candidates = find_context_candidates(&text, &mut hunk); let hunk_offset = disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { @@ -348,7 +404,7 @@ pub fn edits_for_diff(content: &str, diff_str: &str) -> Result while let Some(event) = diff.next()? { match event { DiffEvent::Hunk { - hunk, + mut hunk, path: _, status: _, } => { @@ -356,11 +412,7 @@ pub fn edits_for_diff(content: &str, diff_str: &str) -> Result return Ok(Vec::new()); } - // Find all matches of the context in the content - let candidates: Vec = content - .match_indices(&hunk.context) - .map(|(offset, _)| offset) - .collect(); + let candidates = find_context_candidates(content, &mut hunk); let Some(context_offset) = disambiguate_by_line_number(&candidates, hunk.start_line, &|offset| { @@ -611,7 +663,7 @@ impl<'a> DiffParser<'a> { } fn resolve_hunk_edits_in_buffer( - hunk: Hunk, + mut hunk: Hunk, buffer: &TextBufferSnapshot, ranges: &[Range], status: FileStatus, @@ -623,7 +675,7 @@ fn resolve_hunk_edits_in_buffer( for range in ranges { let range = range.to_offset(buffer); let text = buffer.text_for_range(range.clone()).collect::(); - for (ix, _) in text.match_indices(&hunk.context) { + for ix in find_context_candidates(&text, &mut hunk) { candidates.push(range.start + ix); } } @@ -1513,4 +1565,185 @@ mod tests { "#} ); } + + #[test] + fn test_apply_diff_to_string_no_trailing_newline() { + // Text without trailing newline; diff generated without + // `\ No newline at end of file` marker. + let text = "line1\nline2\nline3"; + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3"); + } + + #[test] + fn test_apply_diff_to_string_trailing_newline_present() { + // When text has a trailing newline, exact matching still works and + // the fallback is never needed. + let text = "line1\nline2\nline3\n"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + line1 + -line2 + +replaced + line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nreplaced\nline3\n"); + } + + #[test] + fn test_apply_diff_to_string_deletion_at_end_no_trailing_newline() { + // Deletion of the last line when text has no trailing newline. + // The edit range must be clamped so it doesn't index past the + // end of the text. + let text = "line1\nline2\nline3"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,2 @@ + line1 + line2 + -line3 + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "line1\nline2\n"); + } + + #[test] + fn test_apply_diff_to_string_replace_last_line_no_trailing_newline() { + // Replace the last line when text has no trailing newline. + let text = "aaa\nbbb\nccc"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + aaa + bbb + -ccc + +ddd + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "aaa\nbbb\nddd"); + } + + #[test] + fn test_apply_diff_to_string_multibyte_no_trailing_newline() { + // Multi-byte UTF-8 characters near the end; ensures char boundary + // safety when the fallback clamps edit ranges. + let text = "hello\n세계"; + let diff = indoc! 
{" + --- a/file.txt + +++ b/file.txt + @@ -1,2 +1,2 @@ + hello + -세계 + +world + "}; + + let result = apply_diff_to_string(diff, text).unwrap(); + assert_eq!(result, "hello\nworld"); + } + + #[test] + fn test_find_context_candidates_no_false_positive_mid_text() { + // The stripped fallback must only match at the end of text, not in + // the middle where a real newline exists. + let text = "aaa\nbbb\nccc\n"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // Exact match at offset 4 — the fallback is not used. + assert_eq!(candidates, vec![4]); + } + + #[test] + fn test_find_context_candidates_fallback_at_end() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "bbb\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![4]); + // Context should be stripped. + assert_eq!(hunk.context, "bbb"); + } + + #[test] + fn test_find_context_candidates_no_fallback_mid_text() { + // "bbb" appears mid-text followed by a newline, so the exact + // match succeeds. Verify the stripped fallback doesn't produce a + // second, spurious candidate. + let text = "aaa\nbbb\nccc"; + let mut hunk = Hunk { + context: "bbb\nccc\n".into(), + edits: vec![], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + // No exact match (text ends without newline after "ccc"), but the + // stripped context "bbb\nccc" matches at offset 4, which is the end. 
+ assert_eq!(candidates, vec![4]); + assert_eq!(hunk.context, "bbb\nccc"); + } + + #[test] + fn test_find_context_candidates_clamps_edit_ranges() { + let text = "aaa\nbbb"; + let mut hunk = Hunk { + context: "aaa\nbbb\n".into(), + edits: vec![Edit { + range: 4..8, // "bbb\n" — end points at the trailing \n + text: "ccc\n".into(), + }], + start_line: None, + }; + + let candidates = find_context_candidates(text, &mut hunk); + assert_eq!(candidates, vec![0]); + // Edit range end should be clamped to 7 (new context length). + assert_eq!(hunk.edits[0].range, 4..7); + } + + #[test] + fn test_edits_for_diff_no_trailing_newline() { + let content = "foo\nbar\nbaz"; + let diff = indoc! {" + --- a/file.txt + +++ b/file.txt + @@ -1,3 +1,3 @@ + foo + -bar + +qux + baz + "}; + + let result = edits_for_diff(content, diff).unwrap(); + assert_eq!(result.len(), 1); + let (range, text) = &result[0]; + assert_eq!(&content[range.clone()], "bar"); + assert_eq!(text, "qux"); + } } From c5f83f570cf2e8de2c8efca7bbe3f5dee2ee987b Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Fri, 27 Feb 2026 13:37:57 +0100 Subject: [PATCH 154/548] agent_ui: Registry follow-ups (#50287) Fixes a few things: - Don't filter out any agents from the registry - Sort the agents case-insensitively - Allow removing custom agents from the settings page as well Release Notes: - N/A --- crates/agent_ui/src/agent_configuration.rs | 29 +++++++++++++++++++++- crates/agent_ui/src/agent_registry_ui.rs | 20 ++++++--------- 2 files changed, 36 insertions(+), 13 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 9126d289c94563e99d9bda2212bda5259e9e4fa3..aa316ba7c5efe5f679764cd7d4626a1f1310e4c6 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -1140,7 +1140,34 @@ impl AgentConfiguration { })), ) } - ExternalAgentSource::Custom => None, + ExternalAgentSource::Custom => { + let fs = self.fs.clone(); + 
Some( + IconButton::new( + SharedString::from(format!("uninstall-{}", id)), + IconName::Trash, + ) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Remove Custom Agent")) + .on_click(cx.listener(move |_, _, _window, cx| { + let agent_name = agent_server_name.clone(); + update_settings_file(fs.clone(), cx, move |settings, _| { + let Some(agent_servers) = settings.agent_servers.as_mut() else { + return; + }; + if let Some(entry) = agent_servers.get(agent_name.0.as_ref()) + && matches!( + entry, + settings::CustomAgentServerSettings::Custom { .. } + ) + { + agent_servers.remove(agent_name.0.as_ref()); + } + }); + })), + ) + } }; h_flex() diff --git a/crates/agent_ui/src/agent_registry_ui.rs b/crates/agent_ui/src/agent_registry_ui.rs index 45361b9ee26d287a233c02c25f7fba8fd0de37f6..44d5bb20fb77c18447afbe985695cee08a646558 100644 --- a/crates/agent_ui/src/agent_registry_ui.rs +++ b/crates/agent_ui/src/agent_registry_ui.rs @@ -24,10 +24,6 @@ use workspace::{ item::{Item, ItemEvent}, }; -/// Registry IDs for built-in agents that Zed already provides first-class support for. -/// These are filtered out of the ACP Agent Registry UI to avoid showing duplicates. 
-const BUILT_IN_REGISTRY_IDS: [&str; 4] = ["claude-acp", "claude-code-acp", "codex-acp", "gemini"]; - #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum RegistryFilter { All, @@ -162,8 +158,14 @@ impl AgentRegistryPage { self.registry_agents.sort_by(|left, right| { left.name() .as_ref() - .cmp(right.name().as_ref()) - .then_with(|| left.id().as_ref().cmp(right.id().as_ref())) + .to_lowercase() + .cmp(&right.name().as_ref().to_lowercase()) + .then_with(|| { + left.id() + .as_ref() + .to_lowercase() + .cmp(&right.id().as_ref().to_lowercase()) + }) }); self.filter_registry_agents(cx); } @@ -215,12 +217,6 @@ impl AgentRegistryPage { .iter() .enumerate() .filter(|(_, agent)| { - // Filter out built-in agents since they already appear in the main - // agent configuration UI and don't need to be installed from the registry. - if BUILT_IN_REGISTRY_IDS.contains(&agent.id().as_ref()) { - return false; - } - let matches_search = search.as_ref().is_none_or(|query| { let query = query.as_str(); agent.id().as_ref().to_lowercase().contains(query) From 5ed538f49c54ca464bb9d1e59446060a3a925668 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 27 Feb 2026 13:49:45 +0100 Subject: [PATCH 155/548] Format Tree-sitter queries with `ts_query_ls` (#50138) Release Notes: - N/A --- crates/languages/src/bash/brackets.scm | 74 +- crates/languages/src/bash/highlights.scm | 27 +- crates/languages/src/bash/indents.scm | 21 +- crates/languages/src/bash/injections.scm | 3 +- crates/languages/src/bash/overrides.scm | 1 + crates/languages/src/bash/redactions.scm | 2 +- crates/languages/src/bash/runnables.scm | 8 +- crates/languages/src/bash/textobjects.scm | 2 +- crates/languages/src/c/brackets.scm | 21 +- crates/languages/src/c/highlights.scm | 8 +- crates/languages/src/c/imports.scm | 12 +- crates/languages/src/c/indents.scm | 16 +- crates/languages/src/c/injections.scm | 11 +- crates/languages/src/c/outline.scm | 138 +- crates/languages/src/c/overrides.scm | 1 + 
crates/languages/src/c/runnables.scm | 12 +- crates/languages/src/c/textobjects.scm | 39 +- crates/languages/src/cpp/brackets.scm | 25 +- crates/languages/src/cpp/highlights.scm | 33 +- crates/languages/src/cpp/imports.scm | 9 +- crates/languages/src/cpp/indents.scm | 32 +- crates/languages/src/cpp/injections.scm | 11 +- crates/languages/src/cpp/outline.scm | 319 ++-- crates/languages/src/cpp/overrides.scm | 1 + crates/languages/src/cpp/textobjects.scm | 49 +- crates/languages/src/css/brackets.scm | 21 +- crates/languages/src/css/highlights.scm | 32 +- crates/languages/src/css/indents.scm | 4 +- crates/languages/src/css/injections.scm | 3 +- crates/languages/src/css/outline.scm | 24 +- crates/languages/src/css/overrides.scm | 1 + crates/languages/src/css/textobjects.scm | 41 +- crates/languages/src/diff/highlights.scm | 8 +- crates/languages/src/diff/injections.scm | 2 +- crates/languages/src/gitcommit/highlights.scm | 28 +- crates/languages/src/gitcommit/injections.scm | 7 +- crates/languages/src/go/brackets.scm | 25 +- crates/languages/src/go/debugger.scm | 42 +- crates/languages/src/go/highlights.scm | 8 +- crates/languages/src/go/imports.scm | 20 +- crates/languages/src/go/indents.scm | 20 +- crates/languages/src/go/injections.scm | 1357 ++++++++--------- crates/languages/src/go/outline.scm | 94 +- crates/languages/src/go/overrides.scm | 1 + crates/languages/src/go/runnables.scm | 407 ++--- crates/languages/src/go/textobjects.scm | 31 +- crates/languages/src/gomod/highlights.scm | 4 +- crates/languages/src/gomod/injections.scm | 2 +- crates/languages/src/gomod/structure.scm | 30 +- crates/languages/src/gowork/highlights.scm | 4 +- crates/languages/src/gowork/injections.scm | 2 +- crates/languages/src/javascript/brackets.scm | 38 +- crates/languages/src/javascript/debugger.scm | 52 +- .../languages/src/javascript/highlights.scm | 180 ++- crates/languages/src/javascript/imports.scm | 24 +- crates/languages/src/javascript/indents.scm | 38 +- 
.../languages/src/javascript/injections.scm | 162 +- crates/languages/src/javascript/outline.scm | 386 ++--- crates/languages/src/javascript/overrides.scm | 3 +- crates/languages/src/javascript/runnables.scm | 80 +- .../languages/src/javascript/textobjects.scm | 120 +- crates/languages/src/jsdoc/brackets.scm | 7 +- crates/languages/src/jsdoc/highlights.scm | 2 + crates/languages/src/json/brackets.scm | 12 +- crates/languages/src/json/highlights.scm | 1 + crates/languages/src/json/indents.scm | 7 +- crates/languages/src/json/outline.scm | 3 +- crates/languages/src/json/redactions.scm | 15 +- crates/languages/src/json/runnables.scm | 32 +- crates/languages/src/jsonc/brackets.scm | 12 +- crates/languages/src/jsonc/highlights.scm | 1 + crates/languages/src/jsonc/indents.scm | 7 +- crates/languages/src/jsonc/injections.scm | 2 +- crates/languages/src/jsonc/outline.scm | 3 +- crates/languages/src/jsonc/overrides.scm | 1 + crates/languages/src/jsonc/redactions.scm | 15 +- .../src/markdown-inline/highlights.scm | 17 +- crates/languages/src/markdown/brackets.scm | 31 +- crates/languages/src/markdown/highlights.scm | 21 +- crates/languages/src/markdown/indents.scm | 3 +- crates/languages/src/markdown/injections.scm | 8 +- crates/languages/src/markdown/outline.scm | 7 +- crates/languages/src/markdown/textobjects.scm | 4 +- crates/languages/src/python/brackets.scm | 16 +- crates/languages/src/python/debugger.scm | 102 +- crates/languages/src/python/highlights.scm | 137 +- crates/languages/src/python/imports.scm | 64 +- crates/languages/src/python/indents.scm | 26 +- crates/languages/src/python/outline.scm | 11 +- crates/languages/src/python/overrides.scm | 1 + crates/languages/src/python/runnables.scm | 223 ++- crates/languages/src/regex/brackets.scm | 11 +- crates/languages/src/regex/highlights.scm | 3 +- crates/languages/src/rust/brackets.scm | 30 +- crates/languages/src/rust/debugger.scm | 87 +- crates/languages/src/rust/highlights.scm | 104 +- 
crates/languages/src/rust/imports.scm | 30 +- crates/languages/src/rust/indents.scm | 34 +- crates/languages/src/rust/injections.scm | 97 +- crates/languages/src/rust/outline.scm | 94 +- crates/languages/src/rust/overrides.scm | 1 + crates/languages/src/rust/runnables.scm | 153 +- crates/languages/src/rust/textobjects.scm | 81 +- crates/languages/src/tsx/brackets.scm | 46 +- crates/languages/src/tsx/debugger.scm | 56 +- crates/languages/src/tsx/highlights.scm | 201 +-- crates/languages/src/tsx/imports.scm | 24 +- crates/languages/src/tsx/indents.scm | 38 +- crates/languages/src/tsx/injections.scm | 165 +- crates/languages/src/tsx/outline.scm | 391 ++--- crates/languages/src/tsx/overrides.scm | 10 +- crates/languages/src/tsx/runnables.scm | 80 +- crates/languages/src/tsx/textobjects.scm | 162 +- crates/languages/src/typescript/brackets.scm | 30 +- crates/languages/src/typescript/debugger.scm | 52 +- .../languages/src/typescript/highlights.scm | 216 ++- crates/languages/src/typescript/imports.scm | 33 +- crates/languages/src/typescript/indents.scm | 39 +- .../languages/src/typescript/injections.scm | 232 +-- crates/languages/src/typescript/outline.scm | 391 ++--- crates/languages/src/typescript/overrides.scm | 10 +- crates/languages/src/typescript/runnables.scm | 140 +- .../languages/src/typescript/textobjects.scm | 162 +- crates/languages/src/yaml/brackets.scm | 17 +- crates/languages/src/yaml/highlights.scm | 32 +- crates/languages/src/yaml/injections.scm | 29 +- crates/languages/src/yaml/outline.scm | 14 +- crates/languages/src/yaml/overrides.scm | 1 + crates/languages/src/yaml/redactions.scm | 3 +- .../src/zed-keybind-context/brackets.scm | 3 +- extensions/glsl/languages/glsl/brackets.scm | 11 +- extensions/glsl/languages/glsl/highlights.scm | 62 +- extensions/html/languages/html/highlights.scm | 5 + extensions/html/languages/html/indents.scm | 7 +- extensions/html/languages/html/injections.scm | 19 +- extensions/html/languages/html/overrides.scm | 1 + 
.../proto/languages/proto/highlights.scm | 6 +- extensions/proto/languages/proto/indents.scm | 14 +- extensions/proto/languages/proto/outline.scm | 24 +- .../proto/languages/proto/textobjects.scm | 18 +- .../languages/gleam/highlights.scm | 66 +- .../languages/gleam/indents.scm | 14 +- .../languages/gleam/outline.scm | 36 +- 143 files changed, 4774 insertions(+), 3778 deletions(-) diff --git a/crates/languages/src/bash/brackets.scm b/crates/languages/src/bash/brackets.scm index 88a2a1b67f602afb4e7de21a0ec0a523d33e37ee..aba1fa2b35735d4380761ea6e1360305556072b3 100644 --- a/crates/languages/src/bash/brackets.scm +++ b/crates/languages/src/bash/brackets.scm @@ -1,12 +1,62 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) -(("do" @open "done" @close) (#set! newline.only) (#set! rainbow.exclude)) -((case_statement ("in" @open "esac" @close)) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement (elif_clause ("then" @open)) (else_clause ("else" @close))) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement (else_clause ("else" @open)) "fi" @close) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement ("then" @open) (elif_clause ("elif" @close))) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement ("then" @open) (else_clause ("else" @close))) (#set! newline.only) (#set! rainbow.exclude)) -((if_statement ("then" @open "fi" @close)) (#set! newline.only) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +(("do" @open + "done" @close) + (#set! newline.only) + (#set! rainbow.exclude)) + +((case_statement + ("in" @open + "esac" @close)) + (#set! newline.only) + (#set! 
rainbow.exclude)) + +((if_statement + (elif_clause + "then" @open) + (else_clause + "else" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + (else_clause + "else" @open) + "fi" @close) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + "then" @open + (elif_clause + "elif" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + "then" @open + (else_clause + "else" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) + +((if_statement + ("then" @open + "fi" @close)) + (#set! newline.only) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/bash/highlights.scm b/crates/languages/src/bash/highlights.scm index 4a8d7eaf345b147270302b5ba8f20c975494766e..bc1c3b7ec1159f6d19cdf20ab36e0a02db076c66 100644 --- a/crates/languages/src/bash/highlights.scm +++ b/crates/languages/src/bash/highlights.scm @@ -43,13 +43,17 @@ (comment) @keyword.directive) (#match? @keyword.directive "^#![ \t]*/")) -(function_definition name: (word) @function) -(command_name (word) @function) +(function_definition + name: (word) @function) + +(command_name + (word) @function) (command argument: [ (word) @variable.parameter - (_ (word) @variable.parameter) + (_ + (word) @variable.parameter) ]) [ @@ -65,7 +69,6 @@ (expansion) ] @embedded - [ "$" "&&" @@ -89,9 +92,7 @@ (test_operator) @keyword.operator -[ - ";" -] @punctuation.delimiter +";" @punctuation.delimiter [ "(" @@ -104,6 +105,7 @@ (simple_expansion "$" @punctuation.special) + (expansion "${" @punctuation.special "}" @punctuation.special) @embedded @@ -112,10 +114,11 @@ "$(" @punctuation.special ")" @punctuation.special) -( - (command (_) @constant) - (#match? @constant "^-") -) +((command + (_) @constant) + (#match? 
@constant "^-")) + +(case_item + value: (_) @string.regex) -(case_item value: (_) @string.regex) (special_variable_name) @variable.special diff --git a/crates/languages/src/bash/indents.scm b/crates/languages/src/bash/indents.scm index 468fc595e56e2616547dc3e752318cd89df4a363..25a0dc20fd7fff62cd355d20917260e8e781e90e 100644 --- a/crates/languages/src/bash/indents.scm +++ b/crates/languages/src/bash/indents.scm @@ -1,12 +1,27 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent (function_definition) @start.function + (if_statement) @start.if + (elif_clause) @start.elif + (else_clause) @start.else + (for_statement) @start.for + (while_statement) @start.while + (case_statement) @start.case + (case_item) @start.case_item diff --git a/crates/languages/src/bash/injections.scm b/crates/languages/src/bash/injections.scm index 9117c713b98fdd2896b13e4949a77c6489b9ee36..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/bash/injections.scm +++ b/crates/languages/src/bash/injections.scm @@ -1,3 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! 
injection.language "comment")) diff --git a/crates/languages/src/bash/overrides.scm b/crates/languages/src/bash/overrides.scm index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..544e9876f8ea8f1d676ee21731fdcb30fc7163ec 100644 --- a/crates/languages/src/bash/overrides.scm +++ b/crates/languages/src/bash/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string) @string diff --git a/crates/languages/src/bash/redactions.scm b/crates/languages/src/bash/redactions.scm index 000cb042a573112a7d3c46f56862ba4119fdfdf3..5c2c83aa666e31ae23b7e54f966638f41f98244e 100644 --- a/crates/languages/src/bash/redactions.scm +++ b/crates/languages/src/bash/redactions.scm @@ -1,2 +1,2 @@ (variable_assignment - value: (_) @redact) + value: (_) @redact) diff --git a/crates/languages/src/bash/runnables.scm b/crates/languages/src/bash/runnables.scm index c88e549347b4d4897c43d22d24550f3904d8c5d1..3856495422dcd84b9c3619d34778e2183aae8498 100644 --- a/crates/languages/src/bash/runnables.scm +++ b/crates/languages/src/bash/runnables.scm @@ -1,5 +1,5 @@ ; Run bash scripts -( - (program . (_) @run) @_bash-script - (#set! tag bash-script) -) +((program + . + (_) @run) @_bash-script + (#set! 
tag bash-script)) diff --git a/crates/languages/src/bash/textobjects.scm b/crates/languages/src/bash/textobjects.scm index cca2f7d9e9e4a876984a602ee308ad7270b684dc..9a5e4853ee711abbc7407185a6da19b0c9cc3fef 100644 --- a/crates/languages/src/bash/textobjects.scm +++ b/crates/languages/src/bash/textobjects.scm @@ -2,6 +2,6 @@ body: (_ "{" (_)* @function.inside - "}" )) @function.around + "}")) @function.around (comment) @comment.around diff --git a/crates/languages/src/c/brackets.scm b/crates/languages/src/c/brackets.scm index 2149bddc6c9a7ec04667d03da75580b676e12a28..313d212a5eb28d006775781576d50e359be675a2 100644 --- a/crates/languages/src/c/brackets.scm +++ b/crates/languages/src/c/brackets.scm @@ -1,5 +1,16 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/c/highlights.scm b/crates/languages/src/c/highlights.scm index e426bd4f9048a96c09aef297f95c420c9ec21458..dc5a3bd99937eb3cd1a3af6efb7124aebc4008f1 100644 --- a/crates/languages/src/c/highlights.scm +++ b/crates/languages/src/c/highlights.scm @@ -116,19 +116,23 @@ (identifier) @variable ((identifier) @constant - (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) + (#match? 
@constant "^_*[A-Z][A-Z\\d_]*$")) (call_expression function: (identifier) @function) + (call_expression function: (field_expression field: (field_identifier) @function)) + (function_declarator declarator: (identifier) @function) + (preproc_function_def name: (identifier) @function.special) (field_identifier) @property + (statement_identifier) @label [ @@ -139,6 +143,7 @@ ; GNU __attribute__ (attribute_specifier) @attribute + (attribute_specifier (argument_list (identifier) @attribute)) @@ -146,5 +151,6 @@ ; C23 [[attributes]] (attribute prefix: (identifier) @attribute) + (attribute name: (identifier) @attribute) diff --git a/crates/languages/src/c/imports.scm b/crates/languages/src/c/imports.scm index c3c2c9e68c4503d323d039f9c042d9501b5e4126..2aaab2106f5422db426876a7fa65c9674fe93174 100644 --- a/crates/languages/src/c/imports.scm +++ b/crates/languages/src/c/imports.scm @@ -1,7 +1,7 @@ (preproc_include - path: [ - ( - (system_lib_string) @source @wildcard - (#strip! @source "[<>]")) - (string_literal (string_content) @source @wildcard) - ]) @import + path: [ + ((system_lib_string) @source @wildcard + (#strip! @source "[<>]")) + (string_literal + (string_content) @source @wildcard) + ]) @import diff --git a/crates/languages/src/c/indents.scm b/crates/languages/src/c/indents.scm index b6d3c3c3bf7d1a05fd90667e42418bf9a389f8fb..0b55631e5ca6cdfca377f5bf4018d751cdf31bf4 100644 --- a/crates/languages/src/c/indents.scm +++ b/crates/languages/src/c/indents.scm @@ -9,15 +9,25 @@ (else_clause) ] @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent ((comment) @indent - (#match? @indent "^/\\*")) + (#match? 
@indent "^/\\*")) (if_statement) @start.if + (for_statement) @start.for + (while_statement) @start.while + (do_statement) @start.do + (switch_statement) @start.switch + (else_clause) @start.else diff --git a/crates/languages/src/c/injections.scm b/crates/languages/src/c/injections.scm index 9ec3cf1f780123426f681ad758179b81697e59c5..010c697f08adec1d196833b4de492027a80960a4 100644 --- a/crates/languages/src/c/injections.scm +++ b/crates/languages/src/c/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ((comment) @injection.content (#match? @injection.content "^(///|//!|/\\*\\*|/\\*!)(.*)") @@ -8,9 +7,9 @@ (#set! injection.include-children)) (preproc_def - value: (preproc_arg) @injection.content - (#set! injection.language "c")) + value: (preproc_arg) @injection.content + (#set! injection.language "c")) (preproc_function_def - value: (preproc_arg) @injection.content - (#set! injection.language "c")) + value: (preproc_arg) @injection.content + (#set! 
injection.language "c")) diff --git a/crates/languages/src/c/outline.scm b/crates/languages/src/c/outline.scm index efd911836cb718d698460adfe2d91d19cb976b30..abc9608343826545b9ebfd5f915d6352943911f6 100644 --- a/crates/languages/src/c/outline.scm +++ b/crates/languages/src/c/outline.scm @@ -1,91 +1,89 @@ (preproc_def - "#define" @context - name: (_) @name) @item + "#define" @context + name: (_) @name) @item (preproc_function_def - "#define" @context - name: (_) @name - parameters: (preproc_params - "(" @context - ")" @context)) @item + "#define" @context + name: (_) @name + parameters: (preproc_params + "(" @context + ")" @context)) @item (struct_specifier - "struct" @context - name: (_) @name) @item + "struct" @context + name: (_) @name) @item (union_specifier - "union" @context - name: (_) @name) @item + "union" @context + name: (_) @name) @item (enum_specifier - "enum" @context - name: (_) @name) @item + "enum" @context + name: (_) @name) @item (enumerator - name: (_) @name) @item + name: (_) @name) @item (field_declaration - type: (_) @context - declarator: (field_identifier) @name) @item + type: (_) @context + declarator: (field_identifier) @name) @item (type_definition - "typedef" @context - declarator: (_) @name) @item + "typedef" @context + declarator: (_) @name) @item (declaration - (type_qualifier)? @context - type: (_)? @context - declarator: [ - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - ] -) @item + (type_qualifier)? @context + type: (_)? 
@context + declarator: [ + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + ]) @item (function_definition - (type_qualifier)? @context - type: (_)? @context - declarator: [ - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - ] -) @item + (type_qualifier)? @context + type: (_)? 
@context + declarator: [ + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + ]) @item (comment) @annotation diff --git a/crates/languages/src/c/overrides.scm b/crates/languages/src/c/overrides.scm index 36473eb300fd01370e1947873435a821e2d6417a..7c4cf69697200efa1cedd59b895d5ebd064ce486 100644 --- a/crates/languages/src/c/overrides.scm +++ b/crates/languages/src/c/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string_literal) @string diff --git a/crates/languages/src/c/runnables.scm b/crates/languages/src/c/runnables.scm index 5a203f5d7a6eea3ab831a1b4281a74d3795ca74f..50c5ef5b71b4df5d0735a6a5019e9aee5a19f083 100644 --- a/crates/languages/src/c/runnables.scm +++ b/crates/languages/src/c/runnables.scm @@ -1,10 +1,6 @@ ; Tag the main function -( - (function_definition - declarator: (function_declarator - declarator: (identifier) @run - ) - ) @_c-main +((function_definition + declarator: (function_declarator + declarator: (identifier) @run)) @_c-main (#eq? @run "main") - (#set! tag c-main) -) + (#set! 
tag c-main)) diff --git a/crates/languages/src/c/textobjects.scm b/crates/languages/src/c/textobjects.scm index e29f508b701c8ee22eec27af47d899d446e67860..fd5ec0b49b7484a8ef2cbb7cb321f7020bdaeff8 100644 --- a/crates/languages/src/c/textobjects.scm +++ b/crates/languages/src/c/textobjects.scm @@ -1,31 +1,34 @@ (declaration - declarator: (function_declarator)) @function.around + declarator: (function_declarator)) @function.around (function_definition - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (preproc_function_def - value: (_) @function.inside) @function.around + value: (_) @function.inside) @function.around (comment) @comment.around (struct_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (enum_specifier - body: (_ - "{" - [(_) ","?]* @class.inside - "}")) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (union_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around diff --git a/crates/languages/src/cpp/brackets.scm b/crates/languages/src/cpp/brackets.scm index 9eaebba332861ef716902b3827d4940b71f37221..e0330c9b1f2ebdd45480c54e9053503a6b6f611b 100644 --- a/crates/languages/src/cpp/brackets.scm +++ b/crates/languages/src/cpp/brackets.scm @@ -1,6 +1,19 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/cpp/highlights.scm b/crates/languages/src/cpp/highlights.scm index dbb79e69b04e351ca231b45b21507e305b2cabf5..e074707d05dec638a1be9ea840c31f47537c438a 100644 --- a/crates/languages/src/cpp/highlights.scm +++ b/crates/languages/src/cpp/highlights.scm @@ -1,13 +1,15 @@ (identifier) @variable + (field_identifier) @property + (namespace_identifier) @namespace (concept_definition - name: (identifier) @concept) + name: (identifier) @concept) (requires_clause - constraint: (template_type - name: (type_identifier) @concept)) + constraint: (template_type + name: (type_identifier) @concept)) (module_name (identifier) @module) @@ -83,18 +85,23 @@ (operator_name "<=>" @operator.spaceship) -(destructor_name (identifier) @function) +(destructor_name + (identifier) @function) ((namespace_identifier) @type - (#match? @type "^[A-Z]")) + (#match? @type "^[A-Z]")) (auto) @type + (type_identifier) @type + type: (primitive_type) @type.builtin + (sized_type_specifier) @type.builtin ; GNU __attribute__ (attribute_specifier) @attribute + (attribute_specifier (argument_list (identifier) @attribute)) @@ -102,15 +109,18 @@ type: (primitive_type) @type.builtin ; C++11 [[attributes]] (attribute prefix: (identifier) @attribute) + (attribute name: (identifier) @attribute) ((identifier) @constant.builtin - (#match? @constant.builtin "^_*[A-Z][A-Z\\d_]*$")) + (#match? 
@constant.builtin "^_*[A-Z][A-Z\\d_]*$")) (statement_identifier) @label + (this) @variable.builtin -("static_assert") @function.builtin + +"static_assert" @function.builtin [ "alignas" @@ -197,7 +207,7 @@ type: (primitive_type) @type.builtin [ (null) - ("nullptr") + "nullptr" ] @constant.builtin (number_literal) @number @@ -285,5 +295,8 @@ type: (primitive_type) @type.builtin (binary_expression operator: "<=>" @operator.spaceship) -(conditional_expression ":" @operator) -(user_defined_literal (literal_suffix) @operator) +(conditional_expression + ":" @operator) + +(user_defined_literal + (literal_suffix) @operator) diff --git a/crates/languages/src/cpp/imports.scm b/crates/languages/src/cpp/imports.scm index a4ef817a80dbcd44336bdd8cd681587662aad435..43adde711b5352ef0d92566d4bdde91a847319b8 100644 --- a/crates/languages/src/cpp/imports.scm +++ b/crates/languages/src/cpp/imports.scm @@ -1,5 +1,6 @@ (preproc_include - path: [ - ((system_lib_string) @source @wildcard) - (string_literal (string_content) @source @wildcard) - ]) @import + path: [ + (system_lib_string) @source @wildcard + (string_literal + (string_content) @source @wildcard) + ]) @import diff --git a/crates/languages/src/cpp/indents.scm b/crates/languages/src/cpp/indents.scm index 985ebda6ffe679f479804d667db011587eacb2f9..0b55631e5ca6cdfca377f5bf4018d751cdf31bf4 100644 --- a/crates/languages/src/cpp/indents.scm +++ b/crates/languages/src/cpp/indents.scm @@ -1,23 +1,33 @@ [ - (field_expression) - (assignment_expression) - (init_declarator) - (if_statement) - (for_statement) - (while_statement) - (do_statement) - (else_clause) + (field_expression) + (assignment_expression) + (init_declarator) + (if_statement) + (for_statement) + (while_statement) + (do_statement) + (else_clause) ] @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent ((comment) @indent - (#match? @indent "^/\\*")) + (#match? 
@indent "^/\\*")) (if_statement) @start.if + (for_statement) @start.for + (while_statement) @start.while + (do_statement) @start.do + (switch_statement) @start.switch + (else_clause) @start.else diff --git a/crates/languages/src/cpp/injections.scm b/crates/languages/src/cpp/injections.scm index 60c6ea7b63eb6dcb7e1bae02c66045266c0b6cd5..0f622d4edbada60d162e14260dfb1d05423cd503 100644 --- a/crates/languages/src/cpp/injections.scm +++ b/crates/languages/src/cpp/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ((comment) @injection.content (#match? @injection.content "^(///|//!|/\\*\\*|/\\*!)(.*)") @@ -8,12 +7,12 @@ (#set! injection.include-children)) (preproc_def - value: (preproc_arg) @injection.content - (#set! injection.language "c++")) + value: (preproc_arg) @injection.content + (#set! injection.language "c++")) (preproc_function_def - value: (preproc_arg) @injection.content - (#set! injection.language "c++")) + value: (preproc_arg) @injection.content + (#set! injection.language "c++")) (raw_string_literal delimiter: (raw_string_delimiter) @injection.language diff --git a/crates/languages/src/cpp/outline.scm b/crates/languages/src/cpp/outline.scm index 75be97a916dca9f10b044ee9dc01eca09e6372ec..041ff7d1b02ec0be14aead872c5436b2c897e125 100644 --- a/crates/languages/src/cpp/outline.scm +++ b/crates/languages/src/cpp/outline.scm @@ -1,186 +1,195 @@ (preproc_def - "#define" @context - name: (_) @name) @item + "#define" @context + name: (_) @name) @item (preproc_function_def - "#define" @context - name: (_) @name - parameters: (preproc_params - "(" @context - ")" @context)) @item + "#define" @context + name: (_) @name + parameters: (preproc_params + "(" @context + ")" @context)) @item (namespace_definition - "inline"? @context - "namespace" @context - name: (_) @name) @item + "inline"? 
@context + "namespace" @context + name: (_) @name) @item (type_definition - "typedef" @context - declarator: (_) @name) @item + "typedef" @context + declarator: (_) @name) @item (struct_specifier - "struct" @context - name: (_) @name) @item + "struct" @context + name: (_) @name) @item (class_specifier - "class" @context - name: (_) @name) @item + "class" @context + name: (_) @name) @item (enum_specifier - "enum" @context - [ - "class" - "struct" - ]? @context - name: (_) @name) @item + "enum" @context + [ + "class" + "struct" + ]? @context + name: (_) @name) @item (union_specifier - "union" @context - name: (_) @name) @item + "union" @context + name: (_) @name) @item (enumerator - name: (_) @name) @item + name: (_) @name) @item (concept_definition - "concept" @context - name: (_) @name) @item + "concept" @context + name: (_) @name) @item (declaration - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_) @context - declarator: [ - ; The declaration may define multiple variables, using @item on the - ; declarator so that they get distinct ranges. - (init_declarator - declarator: (_) @item @name) - (identifier) @item @name - ] @item) + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_) @context + declarator: [ + ; The declaration may define multiple variables, using @item on the + ; declarator so that they get distinct ranges. + (init_declarator + declarator: (_) @item @name) + (identifier) @item @name + ] @item) (function_definition - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_)? 
@context - declarator: [ - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - (reference_declarator - ["&" "&&"] @context - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - ] - (type_qualifier)? @context) @item + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_)? @context + declarator: [ + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + (reference_declarator + [ + "&" + "&&" + ] @context + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + ] + (type_qualifier)? @context) @item (declaration - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_)? 
@context - declarator: [ - (field_identifier) @name - (pointer_declarator - "*" @context - declarator: (field_identifier) @name) - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - (reference_declarator - ["&" "&&"] @context - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - ] - (type_qualifier)? @context) @item + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_)? @context + declarator: [ + (field_identifier) @name + (pointer_declarator + "*" @context + declarator: (field_identifier) @name) + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + (reference_declarator + [ + "&" + "&&" + ] @context + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + ] + (type_qualifier)? 
@context) @item (field_declaration - [ - (storage_class_specifier) - (type_qualifier) - ]* @context - type: (_) @context - declarator: [ - (field_identifier) @name - (pointer_declarator - "*" @context - declarator: (field_identifier) @name) - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) - (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) - (pointer_declarator - "*" @context - declarator: (pointer_declarator - "*" @context - declarator: (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)))) - (reference_declarator - ["&" "&&"] @context - (function_declarator - declarator: (_) @name - parameters: (parameter_list - "(" @context - ")" @context))) + [ + (storage_class_specifier) + (type_qualifier) + ]* @context + type: (_) @context + declarator: [ + (field_identifier) @name + (pointer_declarator + "*" @context + declarator: (field_identifier) @name) + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) + (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) + (pointer_declarator + "*" @context + declarator: (pointer_declarator + "*" @context + declarator: (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)))) + (reference_declarator + [ + "&" + "&&" + ] @context + (function_declarator + declarator: (_) @name + parameters: (parameter_list + "(" @context + ")" @context))) ; Fields declarations may define multiple fields, and so @item is on the ; declarator so they each get distinct ranges. - ] @item - (type_qualifier)? @context) + ] @item + (type_qualifier)? 
@context) (comment) @annotation diff --git a/crates/languages/src/cpp/overrides.scm b/crates/languages/src/cpp/overrides.scm index 36473eb300fd01370e1947873435a821e2d6417a..7c4cf69697200efa1cedd59b895d5ebd064ce486 100644 --- a/crates/languages/src/cpp/overrides.scm +++ b/crates/languages/src/cpp/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string_literal) @string diff --git a/crates/languages/src/cpp/textobjects.scm b/crates/languages/src/cpp/textobjects.scm index 027185a0cfab7b71f3dcd6a5d5507445e2778d34..61260cd814689aef68ca785132929963eb12d54f 100644 --- a/crates/languages/src/cpp/textobjects.scm +++ b/crates/languages/src/cpp/textobjects.scm @@ -1,37 +1,44 @@ (declaration - declarator: (function_declarator)) @function.around + declarator: (function_declarator)) @function.around (function_definition - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (preproc_function_def - value: (_) @function.inside) @function.around + value: (_) @function.inside) @function.around (comment) @comment.around (struct_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (enum_specifier - body: (_ - "{" - [(_) ","?]* @class.inside - "}")) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (union_specifier - body: (_ - "{" - (_)* @class.inside - "}")) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (class_specifier body: (_ - "{" - [(_) ":"? ";"?]* @class.inside - "}"?)) @class.around + "{" + [ + (_) + ":"? + ";"? 
+ ]* @class.inside + "}"?)) @class.around diff --git a/crates/languages/src/css/brackets.scm b/crates/languages/src/css/brackets.scm index 2149bddc6c9a7ec04667d03da75580b676e12a28..313d212a5eb28d006775781576d50e359be675a2 100644 --- a/crates/languages/src/css/brackets.scm +++ b/crates/languages/src/css/brackets.scm @@ -1,5 +1,16 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/css/highlights.scm b/crates/languages/src/css/highlights.scm index 8fbb9f47d2bcdde1a3b20a184885efb5382557a8..b9d708b661b221544fb58a767981d868d33cb9f7 100644 --- a/crates/languages/src/css/highlights.scm +++ b/crates/languages/src/css/highlights.scm @@ -30,14 +30,24 @@ ] @keyword.operator (id_name) @selector.id + (class_name) @selector.class (namespace_name) @namespace -(namespace_selector (tag_name) @namespace "|") + +(namespace_selector + (tag_name) @namespace + "|") (attribute_name) @attribute -(pseudo_element_selector "::" (tag_name) @selector.pseudo) -(pseudo_class_selector ":" (class_name) @selector.pseudo) + +(pseudo_element_selector + "::" + (tag_name) @selector.pseudo) + +(pseudo_class_selector + ":" + (class_name) @selector.pseudo) [ (feature_name) @@ -58,13 +68,11 @@ (parenthesized_query (keyword_query) @property) -( - [ - (property_name) - (plain_value) - ] @variable - (#match? @variable "^--") -) +([ + (property_name) + (plain_value) +] @variable + (#match? 
@variable "^--")) [ "@media" @@ -80,6 +88,7 @@ ] @keyword (string_value) @string + (color_value) @string.special [ @@ -97,7 +106,8 @@ ";" ] @punctuation.delimiter -(id_selector "#" @punctuation.delimiter) +(id_selector + "#" @punctuation.delimiter) [ "{" diff --git a/crates/languages/src/css/indents.scm b/crates/languages/src/css/indents.scm index e9754690920500f55e611f981e46d0365560eb4f..a768bb040790087fa905c09a436e81c923db240a 100644 --- a/crates/languages/src/css/indents.scm +++ b/crates/languages/src/css/indents.scm @@ -1 +1,3 @@ -(_ "{" "}" @end) @indent +(_ + "{" + "}" @end) @indent diff --git a/crates/languages/src/css/injections.scm b/crates/languages/src/css/injections.scm index 9117c713b98fdd2896b13e4949a77c6489b9ee36..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/css/injections.scm +++ b/crates/languages/src/css/injections.scm @@ -1,3 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) diff --git a/crates/languages/src/css/outline.scm b/crates/languages/src/css/outline.scm index 645616f9056d0cc9e85232e94f5d0666809884c6..6e6e9d3a03c7efd4e7d1814e74705ba3c34e20a1 100644 --- a/crates/languages/src/css/outline.scm +++ b/crates/languages/src/css/outline.scm @@ -1,18 +1,16 @@ (stylesheet - (import_statement - "@import" @context - ((string_value) @name)) @item) - + (import_statement + "@import" @context + (string_value) @name) @item) (rule_set - (selectors - . - (_) @name - ("," @name (_) @name)* - )) @item + (selectors + . 
+ (_) @name + ("," @name + (_) @name)*)) @item (media_statement - "@media" @context - (_) @name - (block) -) @item + "@media" @context + (_) @name + (block)) @item diff --git a/crates/languages/src/css/overrides.scm b/crates/languages/src/css/overrides.scm index e5eade479723c33894b6165085603631bdfe8c64..7ca202fd7bc3db34dd71d5ae7893efe853101ced 100644 --- a/crates/languages/src/css/overrides.scm +++ b/crates/languages/src/css/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string_value) @string diff --git a/crates/languages/src/css/textobjects.scm b/crates/languages/src/css/textobjects.scm index c9c6207b851e6b4c5908ce7d664616798db27f3b..88ae6bb8423feec432de6e168507233c1f293b09 100644 --- a/crates/languages/src/css/textobjects.scm +++ b/crates/languages/src/css/textobjects.scm @@ -1,30 +1,31 @@ (comment) @comment.around (rule_set - (block ( - "{" - (_)* @function.inside - "}" ))) @function.around + (block + ("{" + (_)* @function.inside + "}"))) @function.around + (keyframe_block - (block ( - "{" - (_)* @function.inside - "}" ))) @function.around + (block + ("{" + (_)* @function.inside + "}"))) @function.around (media_statement - (block ( - "{" - (_)* @class.inside - "}" ))) @class.around + (block + ("{" + (_)* @class.inside + "}"))) @class.around (supports_statement - (block ( - "{" - (_)* @class.inside - "}" ))) @class.around + (block + ("{" + (_)* @class.inside + "}"))) @class.around (keyframes_statement - (keyframe_block_list ( - "{" - (_)* @class.inside - "}" ))) @class.around + (keyframe_block_list + ("{" + (_)* @class.inside + "}"))) @class.around diff --git a/crates/languages/src/diff/highlights.scm b/crates/languages/src/diff/highlights.scm index 4a344389032b9ff12f7c00e42adffb00721737e1..a2e33190f154d6a210572dbb066000dca6f30455 100644 --- a/crates/languages/src/diff/highlights.scm +++ b/crates/languages/src/diff/highlights.scm @@ -4,14 +4,14 @@ (addition) (new_file) ] @string -;; TODO: This should eventually be `@diff.plus` with a fallback of 
`@string` +; TODO: This should eventually be `@diff.plus` with a fallback of `@string` [ (deletion) (old_file) ] @keyword -;; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword` +; TODO: This should eventually be `@diff.minus` with a fallback of `@keyword` (commit) @constant (location) @attribute @@ -22,7 +22,7 @@ (mode) @number -([ +[ ".." "+" "++" @@ -32,7 +32,7 @@ "--" "---" "----" -] @punctuation.special) +] @punctuation.special [ (binary_change) diff --git a/crates/languages/src/diff/injections.scm b/crates/languages/src/diff/injections.scm index 01e833d1e31d480b66a558bdfb8f07b2f0cdbc46..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/diff/injections.scm +++ b/crates/languages/src/diff/injections.scm @@ -1,2 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment")) + (#set! injection.language "comment")) diff --git a/crates/languages/src/gitcommit/highlights.scm b/crates/languages/src/gitcommit/highlights.scm index 8670a6615aa49b8062a1ef2375884bf298b1df76..750a400f2b147c459d1a7932cd476286dc8189fc 100644 --- a/crates/languages/src/gitcommit/highlights.scm +++ b/crates/languages/src/gitcommit/highlights.scm @@ -1,18 +1,36 @@ (subject) @markup.heading + (path) @string.special.path + (branch) @string.special.symbol + (commit) @constant + (item) @markup.link.url + (header) @tag + (comment) @comment -(change kind: "new file" @diff.plus) -(change kind: "deleted" @diff.minus) -(change kind: "modified" @diff.delta) -(change kind: "renamed" @diff.delta.moved) +(change + kind: "new file" @diff.plus) + +(change + kind: "deleted" @diff.minus) + +(change + kind: "modified" @diff.delta) + +(change + kind: "renamed" @diff.delta.moved) (trailer key: (trailer_key) @variable.other.member value: (trailer_value) @string) -[":" "=" "->" (scissors)] @punctuation.delimiter +[ + ":" + "=" + "->" + (scissors) +] @punctuation.delimiter diff --git a/crates/languages/src/gitcommit/injections.scm 
b/crates/languages/src/gitcommit/injections.scm index 8fb9b459679489be7588d1ab9b6d53e40ea10c60..07c2dd95ca69642b15a7a778ab7e0caad47586cb 100644 --- a/crates/languages/src/gitcommit/injections.scm +++ b/crates/languages/src/gitcommit/injections.scm @@ -1,9 +1,8 @@ ((comment) @content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ((scissors) @content - (#set! "language" "diff")) + (#set! "language" "diff")) ((rebase_command) @content - (#set! "language" "git_rebase")) + (#set! "language" "git_rebase")) diff --git a/crates/languages/src/go/brackets.scm b/crates/languages/src/go/brackets.scm index 05fb1d7f9219889d652bbdbb294ca45e72cc9c05..6bee4099173ee83cc03e4f1d24d7000d102880fb 100644 --- a/crates/languages/src/go/brackets.scm +++ b/crates/languages/src/go/brackets.scm @@ -1,6 +1,19 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) -((rune_literal) @open @close (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +((rune_literal) @open @close + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/go/debugger.scm b/crates/languages/src/go/debugger.scm index f22b91f938e1159fa9bfec99f5000976766faf06..306b0448a7d817040562152b39d410100b207f1a 100644 --- a/crates/languages/src/go/debugger.scm +++ b/crates/languages/src/go/debugger.scm @@ -1,26 +1,44 @@ -(parameter_declaration (identifier) @debug-variable) +(parameter_declaration + (identifier) @debug-variable) -(short_var_declaration (expression_list (identifier) @debug-variable)) +(short_var_declaration + (expression_list + (identifier) @debug-variable)) -(var_declaration (var_spec (identifier) @debug-variable)) +(var_declaration + (var_spec + (identifier) @debug-variable)) -(const_declaration (const_spec (identifier) @debug-variable)) +(const_declaration + (const_spec + (identifier) @debug-variable)) -(assignment_statement (expression_list (identifier) @debug-variable)) +(assignment_statement + (expression_list + (identifier) @debug-variable)) -(binary_expression (identifier) @debug-variable +(binary_expression + (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(call_expression (argument_list (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]"))) +(call_expression + (argument_list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]"))) -(return_statement (expression_list (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]"))) +(return_statement + (expression_list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]"))) -(range_clause (expression_list (identifier) @debug-variable)) +(range_clause + (expression_list + (identifier) @debug-variable)) -(parenthesized_expression (identifier) @debug-variable +(parenthesized_expression + (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) (block) @debug-scope + (function_declaration) @debug-scope diff --git a/crates/languages/src/go/highlights.scm b/crates/languages/src/go/highlights.scm index 15a512d6b7e359bf7290aee9d433f1ae7be352ec..670b4f05a961e35d3826c294d061ea7757fd1c0f 100644 --- a/crates/languages/src/go/highlights.scm +++ b/crates/languages/src/go/highlights.scm @@ -1,10 +1,12 @@ (identifier) @variable (type_identifier) @type + (type_spec name: (type_identifier) @type.definition) (field_identifier) @property + (package_identifier) @namespace (label_name) @label @@ -26,6 +28,7 @@ (method_declaration name: (field_identifier) @function.method) + (method_elem name: (field_identifier) @function.method) @@ -144,8 +147,7 @@ ; Go directives ((comment) @preproc - (#match? @preproc "^//go:")) + (#match? @preproc "^//go:")) ((comment) @preproc - (#match? @preproc "^// \\+build")) - + (#match? @preproc "^// \\+build")) diff --git a/crates/languages/src/go/imports.scm b/crates/languages/src/go/imports.scm index 7f0ff2d46e6a271d4258d23f46cc942830e2c6f9..23e480c10b20b76c6724df29a550e627c2aee799 100644 --- a/crates/languages/src/go/imports.scm +++ b/crates/languages/src/go/imports.scm @@ -1,14 +1,12 @@ (import_spec - name: [ - (dot) - (package_identifier) - ] - path: (interpreted_string_literal - (interpreted_string_literal_content) @namespace) -) @wildcard @import + name: [ + (dot) + (package_identifier) + ] + path: (interpreted_string_literal + (interpreted_string_literal_content) @namespace)) @wildcard @import (import_spec - !name - path: (interpreted_string_literal - (interpreted_string_literal_content) @namespace) -) @wildcard @import + !name + path: (interpreted_string_literal + (interpreted_string_literal_content) @namespace)) @wildcard @import diff --git a/crates/languages/src/go/indents.scm b/crates/languages/src/go/indents.scm index abbb72eb379d5fbb52267a633c60def07895a081..21e8cf7abbc1420ba94063a7ae6655ec0daa9baa 100644 --- a/crates/languages/src/go/indents.scm +++ 
b/crates/languages/src/go/indents.scm @@ -1,9 +1,17 @@ [ - (assignment_statement) - (call_expression) - (selector_expression) + (assignment_statement) + (call_expression) + (selector_expression) ] @indent -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/crates/languages/src/go/injections.scm b/crates/languages/src/go/injections.scm index 58583f4d22c7db8016397d8e47cd817b7c240764..73cf0bd352de0213f9a0d1efff300039f52a0697 100644 --- a/crates/languages/src/go/injections.scm +++ b/crates/languages/src/go/injections.scm @@ -1,7 +1,6 @@ ; Refer to https://github.com/nvim-treesitter/nvim-treesitter/blob/master/queries/go/injections.scm#L4C1-L16C41 ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) (call_expression (selector_expression) @_function @@ -14,722 +13,718 @@ (raw_string_literal) (interpreted_string_literal) ] @injection.content - (#set! injection.language "regex") - )) + (#set! 
injection.language "regex"))) ; INJECT SQL -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] (#match? @_comment "^\\/\\*\\s*sql\\s*\\*\\/$") - (#set! injection.language "sql") -) + (#set! 
injection.language "sql")) ; INJECT JSON -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*json\\s*\\*\\/") ; /* json */ or /*json*/ - (#set! injection.language "json") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*json\\s*\\*\\/") + ; /* json */ or /*json*/ + (#set! 
injection.language "json")) ; INJECT YAML -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*yaml\\s*\\*\\/") ; /* yaml */ or /*yaml*/ - (#set! injection.language "yaml") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*yaml\\s*\\*\\/") + ; /* yaml */ or /*yaml*/ + (#set! 
injection.language "yaml")) ; INJECT XML -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*xml\\s*\\*\\/") ; /* xml */ or /*xml*/ - (#set! injection.language "xml") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*xml\\s*\\*\\/") + ; /* xml */ or /*xml*/ + (#set! 
injection.language "xml")) ; INJECT HTML -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*html\\s*\\*\\/") ; /* html */ or /*html*/ - (#set! injection.language "html") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*html\\s*\\*\\/") + ; /* html */ or /*html*/ + (#set! 
injection.language "html")) ; INJECT JS -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*js\\s*\\*\\/") ; /* js */ or /*js*/ - (#set! injection.language "javascript") -) - + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*js\\s*\\*\\/") + ; /* js */ or /*js*/ + (#set! 
injection.language "javascript")) ; INJECT CSS -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*css\\s*\\*\\/") ; /* css */ or /*css*/ - (#set! injection.language "css") -) - + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*css\\s*\\*\\/") + ; /* css */ or /*css*/ + (#set! 
injection.language "css")) ; INJECT LUA -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*lua\\s*\\*\\/") ; /* lua */ or /*lua*/ - (#set! injection.language "lua") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*lua\\s*\\*\\/") + ; /* lua */ or /*lua*/ + (#set! 
injection.language "lua")) ; INJECT BASH -( - [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (composite_literal + body: (literal_value + (keyed_element (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - 
(short_var_declaration - left: (expression_list) - ":=" + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])))) + (expression_statement + (call_expression + (argument_list (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (composite_literal - body: (literal_value - (keyed_element - (comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )))) - - (expression_statement - (call_expression - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ))) - ] - (#match? @_comment "^\\/\\*\\s*bash\\s*\\*\\/") ; /* bash */ or /*bash*/ - (#set! injection.language "bash") -) + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]))) +] + (#match? @_comment "^\\/\\*\\s*bash\\s*\\*\\/") + ; /* bash */ or /*bash*/ + (#set! 
injection.language "bash")) ; INJECT CSV -( +([ + (const_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (var_spec + name: (identifier) + "=" + (comment) @_comment + value: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (assignment_statement + left: (expression_list) + "=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (short_var_declaration + left: (expression_list) + ":=" + (comment) @_comment + right: (expression_list + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + ((comment) @_comment + value: (literal_element + [ + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ])) + (argument_list + (comment) @_comment [ - (const_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (var_spec - name: (identifier) - "=" - (comment) @_comment - value: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (assignment_statement - left: (expression_list) - "=" - (comment) @_comment - right: 
(expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (short_var_declaration - left: (expression_list) - ":=" - (comment) @_comment - right: (expression_list - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - ((comment) @_comment - value: (literal_element - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - )) - - (argument_list - (comment) @_comment - [ - (interpreted_string_literal (interpreted_string_literal_content) @injection.content) - (raw_string_literal (raw_string_literal_content) @injection.content) - ] - ) - ] - (#match? @_comment "^\\/\\*\\s*csv\\s*\\*\\/") ; /* csv */ or /*csv */ - (#set! injection.language "csv") -) + (interpreted_string_literal + (interpreted_string_literal_content) @injection.content) + (raw_string_literal + (raw_string_literal_content) @injection.content) + ]) +] + (#match? @_comment "^\\/\\*\\s*csv\\s*\\*\\/") + ; /* csv */ or /*csv */ + (#set! 
injection.language "csv")) diff --git a/crates/languages/src/go/outline.scm b/crates/languages/src/go/outline.scm index c745f55aff7dcd4b3bfd802884db7a985c1387fa..da42904fab942635b1140b486dde0c25694147d3 100644 --- a/crates/languages/src/go/outline.scm +++ b/crates/languages/src/go/outline.scm @@ -1,67 +1,61 @@ (comment) @annotation (type_declaration - "type" @context - [ - (type_spec - name: (_) @name) @item - ( - "(" - (type_spec - name: (_) @name) @item - ")" - ) - ] -) + "type" @context + [ + (type_spec + name: (_) @name) @item + ("(" + (type_spec + name: (_) @name) @item + ")") + ]) (function_declaration - "func" @context - name: (identifier) @name - parameters: (parameter_list - "(" - ")")) @item + "func" @context + name: (identifier) @name + parameters: (parameter_list + "(" + ")")) @item (method_declaration - "func" @context - receiver: (parameter_list - "(" @context - (parameter_declaration - name: (_) @context - type: (_) @context) - ")" @context) - name: (field_identifier) @name - parameters: (parameter_list - "(" - ")")) @item + "func" @context + receiver: (parameter_list + "(" @context + (parameter_declaration + name: (_) @context + type: (_) @context) + ")" @context) + name: (field_identifier) @name + parameters: (parameter_list + "(" + ")")) @item (const_declaration - "const" @context - (const_spec - name: (identifier) @name) @item) + "const" @context + (const_spec + name: (identifier) @name) @item) (source_file - (var_declaration - "var" @context - [ - ; The declaration may define multiple variables, and so @item is on - ; the identifier so they get distinct ranges. - (var_spec - name: (identifier) @name @item) - (var_spec_list - (var_spec - name: (identifier) @name @item) - ) - ] - ) -) + (var_declaration + "var" @context + [ + ; The declaration may define multiple variables, and so @item is on + ; the identifier so they get distinct ranges. 
+ (var_spec + name: (identifier) @name @item) + (var_spec_list + (var_spec + name: (identifier) @name @item)) + ])) (method_elem - name: (_) @name - parameters: (parameter_list - "(" @context - ")" @context)) @item + name: (_) @name + parameters: (parameter_list + "(" @context + ")" @context)) @item ; Fields declarations may define multiple fields, and so @item is on the ; declarator so they each get distinct ranges. (field_declaration - name: (_) @name @item) + name: (_) @name @item) diff --git a/crates/languages/src/go/overrides.scm b/crates/languages/src/go/overrides.scm index aae1520301bbb2a04b04f930b747d290051bc9cc..7989c4271f0ec9f18a6f75315f01d13454fca7b9 100644 --- a/crates/languages/src/go/overrides.scm +++ b/crates/languages/src/go/overrides.scm @@ -1,4 +1,5 @@ (comment) @comment.inclusive + [ (interpreted_string_literal) (raw_string_literal) diff --git a/crates/languages/src/go/runnables.scm b/crates/languages/src/go/runnables.scm index 786a923566d433f20b042178df2609e993e6da15..d00be6e1d0db4b8fd97596002099525128458a7f 100644 --- a/crates/languages/src/go/runnables.scm +++ b/crates/languages/src/go/runnables.scm @@ -1,170 +1,118 @@ ; Functions names start with `Test` -( - ( - (function_declaration name: (_) @run - (#match? @run "^Test.*") - (#not-match? @run "^TestMain$")) - ) @_ - (#set! tag go-test) -) +(((function_declaration + name: (_) @run + (#match? @run "^Test.*") + (#not-match? @run "^TestMain$"))) @_ + (#set! tag go-test)) ; Suite test methods (testify/suite) -( - (method_declaration - receiver: (parameter_list - (parameter_declaration - type: [ - (pointer_type (type_identifier) @_suite_name) - (type_identifier) @_suite_name - ] - ) - ) - name: (field_identifier) @run @_subtest_name - (#match? @_subtest_name "^Test.*") - (#match? @_suite_name ".*Suite") - ) @_ - (#set! 
tag go-testify-suite) -) +((method_declaration + receiver: (parameter_list + (parameter_declaration + type: [ + (pointer_type + (type_identifier) @_suite_name) + (type_identifier) @_suite_name + ])) + name: (field_identifier) @run @_subtest_name + (#match? @_subtest_name "^Test.*") + (#match? @_suite_name ".*Suite")) @_ + (#set! tag go-testify-suite)) ; `go:generate` comments -( - ((comment) @_comment @run - (#match? @_comment "^//go:generate")) - (#set! tag go-generate) -) +(((comment) @_comment @run + (#match? @_comment "^//go:generate")) + (#set! tag go-generate)) ; `t.Run` -( - ( - (call_expression - function: ( - selector_expression - field: _ @run @_name - (#eq? @_name "Run") - ) - arguments: ( - argument_list - . - [ - (interpreted_string_literal) - (raw_string_literal) - ] @_subtest_name - . - (func_literal - parameters: ( - parameter_list - (parameter_declaration - name: (identifier) @_param_name - type: (pointer_type - (qualified_type - package: (package_identifier) @_pkg - name: (type_identifier) @_type - (#eq? @_pkg "testing") - (#eq? @_type "T") - ) - ) - ) - ) - ) @_second_argument - ) - ) - ) @_ - (#set! tag go-subtest) -) +(((call_expression + function: (selector_expression + field: _ @run @_name + (#eq? @_name "Run")) + arguments: (argument_list + . + [ + (interpreted_string_literal) + (raw_string_literal) + ] @_subtest_name + . + (func_literal + parameters: (parameter_list + (parameter_declaration + name: (identifier) @_param_name + type: (pointer_type + (qualified_type + package: (package_identifier) @_pkg + name: (type_identifier) @_type + (#eq? @_pkg "testing") + (#eq? @_type "T")))))) @_second_argument))) @_ + (#set! tag go-subtest)) ; Functions names start with `Example` -( - ( - (function_declaration name: (_) @run @_name - (#match? @_name "^Example.*")) - ) @_ - (#set! tag go-example) -) +(((function_declaration + name: (_) @run @_name + (#match? @_name "^Example.*"))) @_ + (#set! 
tag go-example)) ; Functions names start with `Benchmark` -( - ( - (function_declaration name: (_) @run @_name - (#match? @_name "^Benchmark.*")) - ) @_ - (#set! tag go-benchmark) -) +(((function_declaration + name: (_) @run @_name + (#match? @_name "^Benchmark.*"))) @_ + (#set! tag go-benchmark)) ; Functions names start with `Fuzz` -( - ( - (function_declaration name: (_) @run @_name - (#match? @_name "^Fuzz")) - ) @_ - (#set! tag go-fuzz) -) +(((function_declaration + name: (_) @run @_name + (#match? @_name "^Fuzz"))) @_ + (#set! tag go-fuzz)) ; go run -( - ( - (function_declaration name: (_) @run - (#eq? @run "main")) - ) @_ - (#set! tag go-main) -) +(((function_declaration + name: (_) @run + (#eq? @run "main"))) @_ + (#set! tag go-main)) ; Table test cases - slice and map with explicit variable -( - (short_var_declaration - left: (expression_list (identifier) @_collection_var) - right: (expression_list - (composite_literal - type: [ - (slice_type) - (map_type - key: (type_identifier) @_key_type - (#eq? @_key_type "string") - ) - ] - body: (literal_value - [ +((short_var_declaration + left: (expression_list + (identifier) @_collection_var) + right: (expression_list + (composite_literal + type: [ + (slice_type) + (map_type + key: (type_identifier) @_key_type + (#eq? 
@_key_type "string")) + ] + body: (literal_value + [ + (literal_element + (literal_value + (keyed_element + (literal_element + (identifier) @_field_name) + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])))) + (keyed_element (literal_element - (literal_value - (keyed_element - (literal_element - (identifier) @_field_name - ) - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ) - ) - (keyed_element - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ] - ) - ) - ) - ) + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])) + ])))) (for_statement (range_clause left: (expression_list [ - ( - (identifier) - (identifier) @_loop_var_inner - ) + ((identifier) + (identifier) @_loop_var_inner) (identifier) @_loop_var_outer - ] - ) + ]) right: (identifier) @_range_var - (#eq? @_range_var @_collection_var) - ) + (#eq? @_range_var @_collection_var)) body: (block (statement_list (expression_statement @@ -172,8 +120,7 @@ function: (selector_expression operand: (identifier) field: (field_identifier) @_run_method - (#eq? @_run_method "Run") - ) + (#eq? @_run_method "Run")) arguments: (argument_list . [ @@ -181,8 +128,7 @@ operand: (identifier) @_tc_var (#eq? @_tc_var @_loop_var_inner) field: (field_identifier) @_field_check - (#eq? @_field_check @_field_name) - ) + (#eq? @_field_check @_field_name)) (identifier) @_arg_var (#eq? @_arg_var @_loop_var_outer) ] @@ -195,113 +141,72 @@ package: (package_identifier) @_pkg name: (type_identifier) @_type (#eq? @_pkg "testing") - (#eq? @_type "T") - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) @_ - (#set! tag go-table-test-case) -) + (#eq? @_type "T")))))))))))) @_ + (#set! 
tag go-table-test-case)) ; Table test cases - slice and map declared right inside the loop without ; explicit variable -( - (for_statement - (range_clause - left: (expression_list +((for_statement + (range_clause + left: (expression_list + [ + ((identifier) + (identifier) @_loop_var_inner) + (identifier) @_loop_var_outer + ]) + right: (composite_literal + type: [ + (slice_type) + (map_type + key: (type_identifier) @_key_type + (#eq? @_key_type "string")) + ] + body: (literal_value [ - ( - (identifier) - (identifier) @_loop_var_inner - ) - (identifier) @_loop_var_outer - ] - ) - right: (composite_literal - type: [ - (slice_type) - (map_type - key: (type_identifier) @_key_type - (#eq? @_key_type "string") - ) - ] - body: (literal_value - [ + (literal_element + (literal_value + (keyed_element + (literal_element + (identifier) @_field_name) + (literal_element + [ + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])))) + (keyed_element (literal_element - (literal_value - (keyed_element - (literal_element - (identifier) @_field_name - ) - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ) - ) - (keyed_element - (literal_element - [ - (interpreted_string_literal) @run @_table_test_case_name - (raw_string_literal) @run @_table_test_case_name - ] - ) - ) - ] - ) - ) - ) - body: (block - (statement_list - (expression_statement - (call_expression - function: (selector_expression - operand: (identifier) - field: (field_identifier) @_run_method - (#eq? @_run_method "Run") - ) - arguments: (argument_list - . [ - (selector_expression - operand: (identifier) @_tc_var - (#eq? @_tc_var @_loop_var_inner) - field: (field_identifier) @_field_check - (#eq? @_field_check @_field_name) - ) - (identifier) @_arg_var - (#eq? @_arg_var @_loop_var_outer) - ] - . 
- (func_literal - parameters: (parameter_list - (parameter_declaration - type: (pointer_type - (qualified_type - package: (package_identifier) @_pkg - name: (type_identifier) @_type - (#eq? @_pkg "testing") - (#eq? @_type "T") - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) - ) @_ - (#set! tag go-table-test-case-without-explicit-variable) -) + (interpreted_string_literal) @run @_table_test_case_name + (raw_string_literal) @run @_table_test_case_name + ])) + ]))) + body: (block + (statement_list + (expression_statement + (call_expression + function: (selector_expression + operand: (identifier) + field: (field_identifier) @_run_method + (#eq? @_run_method "Run")) + arguments: (argument_list + . + [ + (selector_expression + operand: (identifier) @_tc_var + (#eq? @_tc_var @_loop_var_inner) + field: (field_identifier) @_field_check + (#eq? @_field_check @_field_name)) + (identifier) @_arg_var + (#eq? @_arg_var @_loop_var_outer) + ] + . + (func_literal + parameters: (parameter_list + (parameter_declaration + type: (pointer_type + (qualified_type + package: (package_identifier) @_pkg + name: (type_identifier) @_type + (#eq? @_pkg "testing") + (#eq? @_type "T")))))))))))) @_ + (#set! 
tag go-table-test-case-without-explicit-variable)) diff --git a/crates/languages/src/go/textobjects.scm b/crates/languages/src/go/textobjects.scm index eb4f3a00501021167c3c2b9136d5cef2f131878f..4e0a78991a4b1ca49f48b0c1c73c51ff5e002f50 100644 --- a/crates/languages/src/go/textobjects.scm +++ b/crates/languages/src/go/textobjects.scm @@ -1,24 +1,27 @@ (function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (method_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (type_declaration - (type_spec (struct_type (field_declaration_list ( - "{" - (_)* @class.inside - "}")?)))) @class.around + (type_spec + (struct_type + (field_declaration_list + ("{" + (_)* @class.inside + "}")?)))) @class.around (type_declaration - (type_spec (interface_type - (_)* @class.inside))) @class.around + (type_spec + (interface_type + (_)* @class.inside))) @class.around (type_declaration) @class.around diff --git a/crates/languages/src/gomod/highlights.scm b/crates/languages/src/gomod/highlights.scm index 03be1b5957160820033d93b35b39d4329b7890a6..f026035cb126382274e783ece2515148b6cffd73 100644 --- a/crates/languages/src/gomod/highlights.scm +++ b/crates/languages/src/gomod/highlights.scm @@ -15,6 +15,6 @@ (comment) @comment [ -(version) -(go_version) + (version) + (go_version) ] @string diff --git a/crates/languages/src/gomod/injections.scm b/crates/languages/src/gomod/injections.scm index 321c90add3710f35721daeb6b42abe38af094953..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/gomod/injections.scm +++ b/crates/languages/src/gomod/injections.scm @@ -1,2 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment")) + (#set! 
injection.language "comment")) diff --git a/crates/languages/src/gomod/structure.scm b/crates/languages/src/gomod/structure.scm index ce1bc9aa3ee0b1f77086103bad91825b5927005f..2da1b0d5e643d2235b9555c15cfe3624f14758f2 100644 --- a/crates/languages/src/gomod/structure.scm +++ b/crates/languages/src/gomod/structure.scm @@ -1,35 +1,29 @@ (require_directive "require" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (exclude_directive "exclude" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (module_directive "module" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (replace_directive "replace" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (retract_directive "retract" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) (ignore_directive "ignore" @structure.anchor - ("(") @structure.open - (")") @structure.close -) + "(" @structure.open + ")" @structure.close) diff --git a/crates/languages/src/gowork/highlights.scm b/crates/languages/src/gowork/highlights.scm index 9c84bcc4496394817190a86fa8cd4995b39475a2..b9d3d42e630c5c4f4eb877a330a15371ceb4d96a 100644 --- a/crates/languages/src/gowork/highlights.scm +++ b/crates/languages/src/gowork/highlights.scm @@ -9,6 +9,6 @@ (comment) @comment [ -(version) -(go_version) + (version) + (go_version) ] @string diff --git a/crates/languages/src/gowork/injections.scm b/crates/languages/src/gowork/injections.scm index 321c90add3710f35721daeb6b42abe38af094953..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/gowork/injections.scm +++ b/crates/languages/src/gowork/injections.scm @@ -1,2 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment")) + (#set! 
injection.language "comment")) diff --git a/crates/languages/src/javascript/brackets.scm b/crates/languages/src/javascript/brackets.scm index a16a6432692ec7b9e0e3d24151cb814fc11bd83d..69acbcd614e440d8e8e2010f1677e52cb651e15e 100644 --- a/crates/languages/src/javascript/brackets.scm +++ b/crates/languages/src/javascript/brackets.scm @@ -1,9 +1,29 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -("<" @open "/>" @close) -("" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +("<" @open + "/>" @close) + +("" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/javascript/debugger.scm b/crates/languages/src/javascript/debugger.scm index a99f194a4a4130210b47f8170fca039acc163411..8f384fd8ad9e07fea89972464e64b905086bf580 100644 --- a/crates/languages/src/javascript/debugger.scm +++ b/crates/languages/src/javascript/debugger.scm @@ -1,23 +1,51 @@ -(lexical_declaration (variable_declarator name: (identifier) @debug-variable)) +(lexical_declaration + (variable_declarator + name: (identifier) @debug-variable)) -(for_in_statement left: (identifier) @debug-variable) -(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable))) +(for_in_statement + left: (identifier) @debug-variable) -(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_expression right: (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) +(for_statement + initializer: (lexical_declaration + (variable_declarator + name: (identifier) @debug-variable))) -(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(unary_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(update_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(pair + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(member_expression + object: (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]")) (statement_block) @debug-scope + (program) @debug-scope diff --git a/crates/languages/src/javascript/highlights.scm b/crates/languages/src/javascript/highlights.scm index 5fb31ce100b5884d99d3e941ce6fb67b69ff2cfd..4824684177ae2fe8c5b2f1d582b4d443216b9519 100644 --- a/crates/languages/src/javascript/highlights.scm +++ b/crates/languages/src/javascript/highlights.scm @@ -1,56 +1,33 @@ ; Variables - (identifier) @variable (call_expression function: (member_expression object: (identifier) @type - (#any-of? - @type - "Promise" - "Array" - "Object" - "Map" - "Set" - "WeakMap" - "WeakSet" - "Date" - "Error" - "TypeError" - "RangeError" - "SyntaxError" - "ReferenceError" - "EvalError" - "URIError" - "RegExp" - "Function" - "Number" - "String" - "Boolean" - "Symbol" - "BigInt" - "Proxy" - "ArrayBuffer" - "DataView" - ) - ) -) + (#any-of? @type + "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError" + "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function" + "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView"))) ; Properties - (property_identifier) @property + (shorthand_property_identifier) @property + (shorthand_property_identifier_pattern) @property + (private_property_identifier) @property ; Function and method calls - (call_expression function: (identifier) @function) (call_expression function: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method)) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method)) (new_expression constructor: (identifier) @type) @@ -59,36 +36,58 @@ module: (identifier) @type) ; Function and method definitions - (function_expression name: (identifier) @function) + (function_declaration name: (identifier) @function) + (method_definition - name: [(property_identifier) (private_property_identifier)] @function.method) + name: [ + (property_identifier) + 
(private_property_identifier) + ] @function.method) + (method_definition - name: (property_identifier) @constructor - (#eq? @constructor "constructor")) + name: (property_identifier) @constructor + (#eq? @constructor "constructor")) (pair - key: [(property_identifier) (private_property_identifier)] @function.method - value: [(function_expression) (arrow_function)]) + key: [ + (property_identifier) + (private_property_identifier) + ] @function.method + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method) - right: [(function_expression) (arrow_function)]) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + right: [ + (function_expression) + (arrow_function) + ]) (variable_declarator name: (identifier) @function - value: [(function_expression) (arrow_function)]) + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (identifier) @function - right: [(function_expression) (arrow_function)]) + right: [ + (function_expression) + (arrow_function) + ]) ; Parameters - (required_parameter (identifier) @variable.parameter) @@ -121,6 +120,7 @@ ; Special identifiers ; (type_identifier) @type + (predefined_type) @type.builtin (class_declaration @@ -133,12 +133,12 @@ (identifier) (shorthand_property_identifier) (shorthand_property_identifier_pattern) - ] @constant - (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) +] @constant + (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) ; Literals - (this) @variable.special + (super) @variable.special [ @@ -163,11 +163,12 @@ (escape_sequence) @string.escape (regex) @string.regex + (regex_flags) @keyword.operator.regex + (number) @number ; Tokens - [ ";" "?." @@ -224,7 +225,8 @@ "..." 
] @operator -(regex "/" @string.regex) +(regex + "/" @string.regex) [ "(" @@ -233,14 +235,13 @@ "]" "{" "}" -] @punctuation.bracket +] @punctuation.bracket (ternary_expression [ "?" ":" - ] @operator -) + ] @operator) [ "abstract" @@ -310,7 +311,8 @@ "yield" ] @keyword.control -(switch_default "default" @keyword.control) +(switch_default + "default" @keyword.control) (template_substitution "${" @punctuation.special @@ -320,7 +322,8 @@ "<" @punctuation.bracket ">" @punctuation.bracket) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) ; JSX elements (jsx_opening_element @@ -328,36 +331,61 @@ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + property: (property_identifier) @type) + ]) + (jsx_closing_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + property: (property_identifier) @type) + ]) + (jsx_self_closing_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) - -(jsx_opening_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_self_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_attribute (property_identifier) @attribute.jsx) -(jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx) -(jsx_closing_element ([""]) @punctuation.bracket.jsx) -(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx) -(jsx_attribute "=" @punctuation.delimiter.jsx) + property: (property_identifier) @type) + ]) + +(jsx_opening_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_closing_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_self_closing_element + (identifier) @tag.jsx + (#match? 
@tag.jsx "^[a-z][^.]*$")) + +(jsx_attribute + (property_identifier) @attribute.jsx) + +(jsx_opening_element + ([ + "<" + ">" + ]) @punctuation.bracket.jsx) + +(jsx_closing_element + ([ + "" + ]) @punctuation.bracket.jsx) + +(jsx_self_closing_element + ([ + "<" + "/>" + ]) @punctuation.bracket.jsx) + +(jsx_attribute + "=" @punctuation.delimiter.jsx) + (jsx_text) @text.jsx + (html_character_reference) @string.special diff --git a/crates/languages/src/javascript/imports.scm b/crates/languages/src/javascript/imports.scm index e26b97aeef9cb62395e7030f3173208d79187bd6..0e688d53fb6ed639c55c1fa84917711d19c3108a 100644 --- a/crates/languages/src/javascript/imports.scm +++ b/crates/languages/src/javascript/imports.scm @@ -1,14 +1,16 @@ (import_statement - import_clause: (import_clause - [ - (identifier) @name - (named_imports - (import_specifier - name: (_) @name - alias: (_)? @alias)) - ]) - source: (string (string_fragment) @source)) @import + import_clause: (import_clause + [ + (identifier) @name + (named_imports + (import_specifier + name: (_) @name + alias: (_)? 
@alias)) + ]) + source: (string + (string_fragment) @source)) @import (import_statement - !import_clause - source: (string (string_fragment) @source @wildcard)) @import + !import_clause + source: (string + (string_fragment) @source @wildcard)) @import diff --git a/crates/languages/src/javascript/indents.scm b/crates/languages/src/javascript/indents.scm index 9897f3060eaf37891cf4563cebc93345112422f8..1e72160bca2f5fd04ce6d3bc7b02e9ab029eb018 100644 --- a/crates/languages/src/javascript/indents.scm +++ b/crates/languages/src/javascript/indents.scm @@ -1,20 +1,32 @@ [ - (call_expression) - (assignment_expression) - (member_expression) - (lexical_declaration) - (variable_declaration) - (assignment_expression) - (if_statement) - (for_statement) + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + (if_statement) + (for_statement) ] @indent -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent -(jsx_opening_element ">" @end) @indent +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent + +(jsx_opening_element + ">" @end) @indent (jsx_element (jsx_opening_element) @start diff --git a/crates/languages/src/javascript/injections.scm b/crates/languages/src/javascript/injections.scm index 244e025a6f5d62f1d3500fc35fc480b1baa2471e..8ccfc5028dea453013134c52db885d51ab2f673b 100644 --- a/crates/languages/src/javascript/injections.scm +++ b/crates/languages/src/javascript/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) (((comment) @_jsdoc_comment (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content @@ -10,119 +9,136 @@ (#set! injection.language "regex")) (call_expression - function: (identifier) @_name (#eq? 
@_name "css") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "css") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (member_expression - object: (identifier) @_obj (#eq? @_obj "styled") + object: (identifier) @_obj + (#eq? @_obj "styled") property: (property_identifier)) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (call_expression - function: (identifier) @_name (#eq? @_name "styled")) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "styled")) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression - function: (identifier) @_name (#eq? @_name "html") + function: (identifier) @_name + (#eq? @_name "html") arguments: (template_string) @injection.content - (#set! injection.language "html") -) + (#set! injection.language "html")) (call_expression - function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "javascript")) -) + function: (identifier) @_name + (#eq? @_name "js") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "javascript"))) (call_expression - function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "json")) -) + function: (identifier) @_name + (#eq? @_name "json") + arguments: (template_string + (string_fragment) @injection.content + (#set! 
injection.language "json"))) (call_expression - function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "sql")) -) + function: (identifier) @_name + (#eq? @_name "sql") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "sql"))) (call_expression - function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "typescript")) -) + function: (identifier) @_name + (#eq? @_name "ts") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "typescript"))) (call_expression - function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "yaml")) -) + function: (identifier) @_name + (#match? @_name "^ya?ml$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "yaml"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "graphql")) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "graphql"))) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql")))) (call_expression - function: (identifier) @_name(#match? @_name "^iso$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! 
injection.language "isograph"))) -) + function: (identifier) @_name + (#match? @_name "^iso$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "isograph")))) ; Parse the contents of strings and tagged template ; literals with leading ECMAScript comments: ; '/* html */' or '/*html*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") - (#set! injection.language "html") -) + (#set! injection.language "html")) ; '/* sql */' or '/*sql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") - (#set! injection.language "sql") -) + (#set! injection.language "sql")) ; '/* gql */' or '/*gql*/' ; '/* graphql */' or '/*graphql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") - (#set! injection.language "graphql") -) + (#set! injection.language "graphql")) ; '/* css */' or '/*css*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? 
@_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") - (#set! injection.language "css") -) + (#set! injection.language "css")) diff --git a/crates/languages/src/javascript/outline.scm b/crates/languages/src/javascript/outline.scm index 5f72103bc63bdfab73f7b858c01abe8d34317b22..7b8e4b2d46c9b88e6b719ceea5bb64eeb19af518 100644 --- a/crates/languages/src/javascript/outline.scm +++ b/crates/languages/src/javascript/outline.scm @@ -1,223 +1,269 @@ (internal_module - "namespace" @context - name: (_) @name) @item + "namespace" @context + name: (_) @name) @item (enum_declaration - "enum" @context - name: (_) @name) @item + "enum" @context + name: (_) @name) @item (function_declaration - "async"? @context - "function" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? @context + "function" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (generator_function_declaration - "async"? @context - "function" @context - "*" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? 
@context + "function" @context + "*" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (interface_declaration - "interface" @context - name: (_) @name) @item + "interface" @context + name: (_) @name) @item (program - (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item))) + (export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item))) ; Exported array destructuring (program - (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ]))))) + (export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ]))))) ; Exported object destructuring (program - (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)]))))) + (export_statement + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ]))))) (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + 
"const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Top-level array destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Top-level object destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (class_declaration - "class" @context - name: (_) @name) @item + "class" @context + name: (_) @name) @item ; Method definitions in classes (not in object literals) (class_body - (method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item) + (method_definition + [ + "get" + "set" + "async" + "*" + "readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item) ; Object literal methods 
(variable_declarator - value: (object - (method_definition - [ - "get" - "set" - "async" - "*" - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item)) + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) (public_field_definition - [ - "declare" - "readonly" - "abstract" - "static" - (accessibility_modifier) - ]* @context - name: (_) @name) @item + [ + "declare" + "readonly" + "abstract" + "static" + (accessibility_modifier) + ]* @context + name: (_) @name) @item ; Add support for (node:test, bun:test and Jest) runnable -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] @context - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#eq? @_property "each") - ) - arguments: ( - arguments . 
[ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#eq? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Object properties (pair - key: [ - (property_identifier) @name - (string (string_fragment) @name) - (number) @name - (computed_property_name) @name - ]) @item + key: [ + (property_identifier) @name + (string + (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item ; Nested variables in function bodies (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Nested array destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Nested object destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern value: (identifier) @name @item) - (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + 
(lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (comment) @annotation diff --git a/crates/languages/src/javascript/overrides.scm b/crates/languages/src/javascript/overrides.scm index 6dbbc88ef924c2cac65aaf9ff7e7dba87b99a359..4707e2a89fdd246de8d0152d6284e188caaf539e 100644 --- a/crates/languages/src/javascript/overrides.scm +++ b/crates/languages/src/javascript/overrides.scm @@ -2,7 +2,8 @@ (string) @string -(template_string (string_fragment) @string) +(template_string + (string_fragment) @string) (jsx_element) @element diff --git a/crates/languages/src/javascript/runnables.scm b/crates/languages/src/javascript/runnables.scm index c64aacb50e286b1aeeb1231f2745d4d5923a7c1d..b410fb4d8cadd879f657f20a4685cf3bf834ad86 100644 --- a/crates/languages/src/javascript/runnables.scm +++ b/crates/languages/src/javascript/runnables.scm @@ -1,46 +1,42 @@ ; Add support for (node:test, bun:test and Jest) runnable ; Function expression that has `it`, `test` or `describe` as the function name -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! 
tag js-test)) ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#eq? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#eq? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) diff --git a/crates/languages/src/javascript/textobjects.scm b/crates/languages/src/javascript/textobjects.scm index eace658e6b9847bcc651deedad2bc27cbfbf6975..f1cc9c9491e20320d193de5dec2a9c438cee5dcc 100644 --- a/crates/languages/src/javascript/textobjects.scm +++ b/crates/languages/src/javascript/textobjects.scm @@ -1,85 +1,91 @@ (comment)+ @comment.around (function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (method_definition - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (function_expression - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around ((arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) ; Arrow function in variable declaration - capture the full declaration ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) ]) @function.around ; Arrow function in variable declaration (captures body for expression-bodied arrows) ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) ]) @function.around ; Catch-all for arrow functions in other contexts (callbacks, etc.) ((arrow_function - body: (_) @function.inside) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) (generator_function - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (generator_function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (class_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? 
+ ]* @class.inside + "}")) @class.around (class - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around diff --git a/crates/languages/src/jsdoc/brackets.scm b/crates/languages/src/jsdoc/brackets.scm index 0e1bf5ca191bf7a319e2bcad6673d5cf0401380c..0f6ce4bf3d4c9c903d092fc669a416e83c44e82d 100644 --- a/crates/languages/src/jsdoc/brackets.scm +++ b/crates/languages/src/jsdoc/brackets.scm @@ -1,2 +1,5 @@ -("[" @open "]" @close) -("{" @open "}" @close) +("[" @open + "]" @close) + +("{" @open + "}" @close) diff --git a/crates/languages/src/jsdoc/highlights.scm b/crates/languages/src/jsdoc/highlights.scm index 581b5d8111fe25443de9951cfdddc8c277ad83ff..4b5657cb2d3fa6651e2e2b7eb495c095c0ae8482 100644 --- a/crates/languages/src/jsdoc/highlights.scm +++ b/crates/languages/src/jsdoc/highlights.scm @@ -1,3 +1,5 @@ (tag_name) @keyword.jsdoc + (type) @type.jsdoc + (identifier) @variable.jsdoc diff --git a/crates/languages/src/json/brackets.scm b/crates/languages/src/json/brackets.scm index cd5cdf328b3a04730d56ec0cb06c3802fe07c978..ac2e2ad37bfc6cd2e72323914f6975c5d3cdb60e 100644 --- a/crates/languages/src/json/brackets.scm +++ b/crates/languages/src/json/brackets.scm @@ -1,3 +1,9 @@ -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/json/highlights.scm b/crates/languages/src/json/highlights.scm index 1098320ccba78c143b43c7608b1d4e41ad5ec20d..f9b1c337358d26f08fe5b77b3a6e1a70b3f5b418 100644 --- a/crates/languages/src/json/highlights.scm +++ b/crates/languages/src/json/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string + (escape_sequence) @string.escape (pair diff --git a/crates/languages/src/json/indents.scm b/crates/languages/src/json/indents.scm index b7b2a2e7670f324730a64e15f1f59f37ac126270..63b015c2fe74dda013e201d88ebbfe06107def4a 100644 --- a/crates/languages/src/json/indents.scm +++ b/crates/languages/src/json/indents.scm @@ -1,2 +1,5 @@ -(array "]" @end) @indent -(object "}" @end) @indent +(array + "]" @end) @indent + +(object + "}" @end) @indent diff --git a/crates/languages/src/json/outline.scm b/crates/languages/src/json/outline.scm index 43e2743478b27e4430bf3ddf82e49023d3bad584..c7f988077767819128b6f028fbcf196dcf5a5678 100644 --- a/crates/languages/src/json/outline.scm +++ b/crates/languages/src/json/outline.scm @@ -1,2 +1,3 @@ (pair - key: (string (string_content) @name)) @item + key: (string + (string_content) @name)) @item diff --git a/crates/languages/src/json/redactions.scm b/crates/languages/src/json/redactions.scm index 7359637244ac5892c0d57b41e2ef11652a3d0890..c220d0c18b79e007a6de511099254c59214ace74 100644 --- a/crates/languages/src/json/redactions.scm +++ b/crates/languages/src/json/redactions.scm @@ -1,4 +1,11 @@ -(pair value: (number) @redact) -(pair value: (string) @redact) -(array (number) @redact) -(array (string) @redact) +(pair + value: (number) @redact) + +(pair + value: (string) @redact) + +(array + (number) @redact) + +(array + (string) @redact) diff --git a/crates/languages/src/json/runnables.scm b/crates/languages/src/json/runnables.scm index 2396f05a5722f422f46fda8bb09e8b4e25bdb794..a0d95d89b577bf3f5a22b3ff6cedcd7945b4881b 100644 --- a/crates/languages/src/json/runnables.scm +++ 
b/crates/languages/src/json/runnables.scm @@ -1,21 +1,13 @@ ; Add support `package.json` and `composer.json` script runnable - -( - (document - (object - (pair - key: (string - (string_content) @_name - (#eq? @_name "scripts") - ) - value: (object - (pair - key: (string (string_content) @run @script) - ) - ) - ) - ) - ) - (#set! tag package-script) - (#set! tag composer-script) -) +((document + (object + (pair + key: (string + (string_content) @_name + (#eq? @_name "scripts")) + value: (object + (pair + key: (string + (string_content) @run @script)))))) + (#set! tag package-script) + (#set! tag composer-script)) diff --git a/crates/languages/src/jsonc/brackets.scm b/crates/languages/src/jsonc/brackets.scm index cd5cdf328b3a04730d56ec0cb06c3802fe07c978..ac2e2ad37bfc6cd2e72323914f6975c5d3cdb60e 100644 --- a/crates/languages/src/jsonc/brackets.scm +++ b/crates/languages/src/jsonc/brackets.scm @@ -1,3 +1,9 @@ -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/jsonc/highlights.scm b/crates/languages/src/jsonc/highlights.scm index 1098320ccba78c143b43c7608b1d4e41ad5ec20d..f9b1c337358d26f08fe5b77b3a6e1a70b3f5b418 100644 --- a/crates/languages/src/jsonc/highlights.scm +++ b/crates/languages/src/jsonc/highlights.scm @@ -1,6 +1,7 @@ (comment) @comment (string) @string + (escape_sequence) @string.escape (pair diff --git a/crates/languages/src/jsonc/indents.scm b/crates/languages/src/jsonc/indents.scm index b7b2a2e7670f324730a64e15f1f59f37ac126270..63b015c2fe74dda013e201d88ebbfe06107def4a 100644 --- a/crates/languages/src/jsonc/indents.scm +++ b/crates/languages/src/jsonc/indents.scm @@ -1,2 +1,5 @@ -(array "]" @end) @indent -(object "}" @end) @indent +(array + "]" @end) @indent + +(object + "}" @end) @indent diff --git a/crates/languages/src/jsonc/injections.scm b/crates/languages/src/jsonc/injections.scm index 01e833d1e31d480b66a558bdfb8f07b2f0cdbc46..2f0e58eb6431515b86b6042e5828263341513e99 100644 --- a/crates/languages/src/jsonc/injections.scm +++ b/crates/languages/src/jsonc/injections.scm @@ -1,2 +1,2 @@ ((comment) @injection.content - (#set! injection.language "comment")) + (#set! 
injection.language "comment")) diff --git a/crates/languages/src/jsonc/outline.scm b/crates/languages/src/jsonc/outline.scm index 43e2743478b27e4430bf3ddf82e49023d3bad584..c7f988077767819128b6f028fbcf196dcf5a5678 100644 --- a/crates/languages/src/jsonc/outline.scm +++ b/crates/languages/src/jsonc/outline.scm @@ -1,2 +1,3 @@ (pair - key: (string (string_content) @name)) @item + key: (string + (string_content) @name)) @item diff --git a/crates/languages/src/jsonc/overrides.scm b/crates/languages/src/jsonc/overrides.scm index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..544e9876f8ea8f1d676ee21731fdcb30fc7163ec 100644 --- a/crates/languages/src/jsonc/overrides.scm +++ b/crates/languages/src/jsonc/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string) @string diff --git a/crates/languages/src/jsonc/redactions.scm b/crates/languages/src/jsonc/redactions.scm index 7359637244ac5892c0d57b41e2ef11652a3d0890..c220d0c18b79e007a6de511099254c59214ace74 100644 --- a/crates/languages/src/jsonc/redactions.scm +++ b/crates/languages/src/jsonc/redactions.scm @@ -1,4 +1,11 @@ -(pair value: (number) @redact) -(pair value: (string) @redact) -(array (number) @redact) -(array (string) @redact) +(pair + value: (number) @redact) + +(pair + value: (string) @redact) + +(array + (number) @redact) + +(array + (string) @redact) diff --git a/crates/languages/src/markdown-inline/highlights.scm b/crates/languages/src/markdown-inline/highlights.scm index 3c9f6fbcc340bd085466055c7b35551dd71b8c53..26c066ea0a0f6cc93073f6d525d44f2a6456fd49 100644 --- a/crates/languages/src/markdown-inline/highlights.scm +++ b/crates/languages/src/markdown-inline/highlights.scm @@ -1,6 +1,9 @@ (emphasis) @emphasis.markup + (strong_emphasis) @emphasis.strong.markup + (code_span) @text.literal.markup + (strikethrough) @strikethrough.markup [ @@ -13,8 +16,18 @@ (link_label) ] @link_text.markup -(inline_link ["(" ")"] @link_uri.markup) -(image ["(" ")"] @link_uri.markup) +(inline_link + [ + "(" + ")" + ] 
@link_uri.markup) + +(image + [ + "(" + ")" + ] @link_uri.markup) + [ (link_destination) (uri_autolink) diff --git a/crates/languages/src/markdown/brackets.scm b/crates/languages/src/markdown/brackets.scm index 172a2e7f723e3a170d80d19fa2f78fa334258105..5aaf93f63da3502c41b43027ee615592521c94ae 100644 --- a/crates/languages/src/markdown/brackets.scm +++ b/crates/languages/src/markdown/brackets.scm @@ -1,7 +1,24 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(((fenced_code_block_delimiter) @open (fenced_code_block_delimiter) @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(((fenced_code_block_delimiter) @open + (fenced_code_block_delimiter) @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/markdown/highlights.scm b/crates/languages/src/markdown/highlights.scm index 707bcc0816366f5cc875c9f1197b42a2363cab99..1a471a848dfe0c9457ab23ba9dbf3fd9e8438f7d 100644 --- a/crates/languages/src/markdown/highlights.scm +++ b/crates/languages/src/markdown/highlights.scm @@ -9,7 +9,9 @@ (setext_heading) (thematic_break) ] @title.markup -(setext_heading (paragraph) @title.markup) + +(setext_heading + (paragraph) @title.markup) [ (list_marker_plus) @@ -20,10 +22,18 @@ ] @punctuation.list_marker.markup (block_quote_marker) @punctuation.markup -(pipe_table_header "|" @punctuation.markup) -(pipe_table_row "|" @punctuation.markup) -(pipe_table_delimiter_row "|" @punctuation.markup) -(pipe_table_delimiter_cell "-" @punctuation.markup) + +(pipe_table_header + "|" @punctuation.markup) + +(pipe_table_row + "|" @punctuation.markup) + +(pipe_table_delimiter_row + "|" @punctuation.markup) + +(pipe_table_delimiter_cell + "-" @punctuation.markup) [ (fenced_code_block_delimiter) @@ -31,4 +41,5 @@ ] @punctuation.embedded.markup (link_reference_definition) @link_text.markup + (link_destination) @link_uri.markup diff --git a/crates/languages/src/markdown/indents.scm b/crates/languages/src/markdown/indents.scm index dc6dfa6118309c264e146a5af167327947fc6946..742100e3238b6dc7d456307762b2089bb780ac33 100644 --- a/crates/languages/src/markdown/indents.scm +++ b/crates/languages/src/markdown/indents.scm @@ -1,3 +1,4 @@ -(list (list_item) @indent) +(list + (list_item) @indent) (list_item) @start.list_item diff --git a/crates/languages/src/markdown/injections.scm b/crates/languages/src/markdown/injections.scm index f2b959dfdae9d5b0c11146c2f2e5509005a2fe5e..46717b28a97a2019f3bcd6b01815debccb3c3e30 100644 --- a/crates/languages/src/markdown/injections.scm +++ b/crates/languages/src/markdown/injections.scm @@ -4,11 +4,13 @@ (code_fence_content) @injection.content) ((inline) @injection.content - (#set! 
injection.language "markdown-inline")) + (#set! injection.language "markdown-inline")) ((html_block) @injection.content (#set! injection.language "html")) -((minus_metadata) @injection.content (#set! injection.language "yaml")) +((minus_metadata) @injection.content + (#set! injection.language "yaml")) -((plus_metadata) @injection.content (#set! injection.language "toml")) +((plus_metadata) @injection.content + (#set! injection.language "toml")) diff --git a/crates/languages/src/markdown/outline.scm b/crates/languages/src/markdown/outline.scm index dcca3db4d4cb920a7d9f939dc99197d139c3c2e6..a4d8c586dd991f4ada1b7cffa1b2319eb79a7973 100644 --- a/crates/languages/src/markdown/outline.scm +++ b/crates/languages/src/markdown/outline.scm @@ -1,3 +1,6 @@ (section - (atx_heading - . (_) @context . (_) @name)) @item + (atx_heading + . + (_) @context + . + (_) @name)) @item diff --git a/crates/languages/src/markdown/textobjects.scm b/crates/languages/src/markdown/textobjects.scm index e0f76c5365155687d6d53d38f222513b480a3aa7..c84914b2409dd53c27e22c33d8ca9771b699f48d 100644 --- a/crates/languages/src/markdown/textobjects.scm +++ b/crates/languages/src/markdown/textobjects.scm @@ -1,3 +1,3 @@ (section - (atx_heading) - (_)* @class.inside) @class.around + (atx_heading) + (_)* @class.inside) @class.around diff --git a/crates/languages/src/python/brackets.scm b/crates/languages/src/python/brackets.scm index 9e5b59788fc88fcb0830325417de50a9414828b8..5abcf6bdd43624f625e3c08444701fa67311c00f 100644 --- a/crates/languages/src/python/brackets.scm +++ b/crates/languages/src/python/brackets.scm @@ -1,4 +1,12 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -(((string_start) @open (string_end) @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(((string_start) @open + (string_end) @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/python/debugger.scm b/crates/languages/src/python/debugger.scm index 807d6e865d2f60637f60b397ccc1a61fe3360fa1..8c241f8cae0e4c1e2ea39311dd86fda2ba6978dc 100644 --- a/crates/languages/src/python/debugger.scm +++ b/crates/languages/src/python/debugger.scm @@ -1,43 +1,97 @@ (identifier) @debug-variable (#eq? @debug-variable "self") -(assignment left: (identifier) @debug-variable) -(assignment left: (pattern_list (identifier) @debug-variable)) -(assignment left: (tuple_pattern (identifier) @debug-variable)) +(assignment + left: (identifier) @debug-variable) -(augmented_assignment left: (identifier) @debug-variable) +(assignment + left: (pattern_list + (identifier) @debug-variable)) -(for_statement left: (identifier) @debug-variable) -(for_statement left: (pattern_list (identifier) @debug-variable)) -(for_statement left: (tuple_pattern (identifier) @debug-variable)) +(assignment + left: (tuple_pattern + (identifier) @debug-variable)) -(for_in_clause left: (identifier) @debug-variable) -(for_in_clause left: (pattern_list (identifier) @debug-variable)) -(for_in_clause left: (tuple_pattern (identifier) @debug-variable)) +(augmented_assignment + left: (identifier) @debug-variable) -(as_pattern (identifier) @debug-variable) +(for_statement + left: (identifier) @debug-variable) -(binary_operator left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_operator right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(comparison_operator (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_statement + left: (pattern_list + (identifier) @debug-variable)) -(list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(tuple (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(set (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) +(for_statement + left: (tuple_pattern + (identifier) @debug-variable)) -(subscript value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_in_clause + left: (identifier) @debug-variable) -(attribute object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_in_clause + left: (pattern_list + (identifier) @debug-variable)) -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_in_clause + left: (tuple_pattern + (identifier) @debug-variable)) -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(as_pattern + (identifier) @debug-variable) -(argument_list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_operator + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(if_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_operator + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(while_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(comparison_operator + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(list + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(tuple + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(set + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(subscript + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(attribute + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(argument_list + (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]")) + +(if_statement + condition: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(while_statement + condition: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) (block) @debug-scope + (module) @debug-scope diff --git a/crates/languages/src/python/highlights.scm b/crates/languages/src/python/highlights.scm index f15b3a0e2b03d9c913627b319aff9bca6bb8708e..87283aaa799a15ea188f3427b4277e9eaba517c1 100644 --- a/crates/languages/src/python/highlights.scm +++ b/crates/languages/src/python/highlights.scm @@ -1,6 +1,8 @@ ; Identifier naming conventions; these "soft conventions" should stay at the top of the file as they're often overridden (identifier) @variable -(attribute attribute: (identifier) @property) + +(attribute + attribute: (identifier) @property) ; CamelCase for classes ((identifier) @type.class @@ -10,45 +12,56 @@ ((identifier) @constant (#match? @constant "^_*[A-Z][A-Z0-9_]*$")) -(type (identifier) @type) -(generic_type (identifier) @type) +(type + (identifier) @type) + +(generic_type + (identifier) @type) + (comment) @comment + (string) @string + (escape_sequence) @string.escape ; Type alias -(type_alias_statement "type" @keyword) +(type_alias_statement + "type" @keyword) ; TypeVar with constraints in type parameters (type - (tuple (identifier) @type) -) + (tuple + (identifier) @type)) ; Forward references (type - (string) @type -) - + (string) @type) ; Function calls - (call - function: (attribute attribute: (identifier) @function.method.call)) + function: (attribute + attribute: (identifier) @function.method.call)) + (call function: (identifier) @function.call) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) + (decorator "@" @punctuation.special [ (identifier) @function.decorator - (attribute attribute: (identifier) @function.decorator) - (call function: (identifier) @function.decorator.call) - (call (attribute attribute: (identifier) 
@function.decorator.call)) + (attribute + attribute: (identifier) @function.decorator) + (call + function: (identifier) @function.decorator.call) + (call + (attribute + attribute: (identifier) @function.decorator.call)) ]) ; Function and class definitions - (function_definition name: (identifier) @function.definition) @@ -69,15 +82,15 @@ ; Function arguments (function_definition parameters: (parameters - [ - (identifier) @variable.parameter; Simple parameters + [ + (identifier) @variable.parameter ; Simple parameters (typed_parameter (identifier) @variable.parameter) ; Typed parameters (default_parameter name: (identifier) @variable.parameter) ; Default parameters (typed_default_parameter name: (identifier) @variable.parameter) ; Typed default parameters - ])) + ])) ; Keyword arguments (call @@ -86,28 +99,30 @@ name: (identifier) @function.kwargs))) ; Class definitions and calling: needs to come after the regex matching above - (class_definition name: (identifier) @type.class.definition) (class_definition superclasses: (argument_list - (identifier) @type.class.inheritance)) + (identifier) @type.class.inheritance)) (call function: (identifier) @type.class.call (#match? @type.class.call "^_*[A-Z][A-Za-z0-9_]*$")) ; Builtins - ((call function: (identifier) @function.builtin) - (#any-of? - @function.builtin - "abs" "all" "any" "ascii" "bin" "bool" "breakpoint" "bytearray" "bytes" "callable" "chr" "classmethod" "compile" "complex" "delattr" "dict" "dir" "divmod" "enumerate" "eval" "exec" "filter" "float" "format" "frozenset" "getattr" "globals" "hasattr" "hash" "help" "hex" "id" "input" "int" "isinstance" "issubclass" "iter" "len" "list" "locals" "map" "max" "memoryview" "min" "next" "object" "oct" "open" "ord" "pow" "print" "property" "range" "repr" "reversed" "round" "set" "setattr" "slice" "sorted" "staticmethod" "str" "sum" "super" "tuple" "type" "vars" "zip" "__import__")) + (#any-of? 
@function.builtin + "abs" "all" "any" "ascii" "bin" "bool" "breakpoint" "bytearray" "bytes" "callable" "chr" + "classmethod" "compile" "complex" "delattr" "dict" "dir" "divmod" "enumerate" "eval" "exec" + "filter" "float" "format" "frozenset" "getattr" "globals" "hasattr" "hash" "help" "hex" "id" + "input" "int" "isinstance" "issubclass" "iter" "len" "list" "locals" "map" "max" "memoryview" + "min" "next" "object" "oct" "open" "ord" "pow" "print" "property" "range" "repr" "reversed" + "round" "set" "setattr" "slice" "sorted" "staticmethod" "str" "sum" "super" "tuple" "type" + "vars" "zip" "__import__")) ; Literals - [ (true) (false) @@ -124,10 +139,11 @@ ] @number ; Self references - [ - (parameters (identifier) @variable.special) - (attribute (identifier) @variable.special) + (parameters + (identifier) @variable.special) + (attribute + (identifier) @variable.special) (#any-of? @variable.special "self" "cls") ] @@ -152,37 +168,57 @@ ; Docstrings. ([ - (expression_statement (assignment)) + (expression_statement + (assignment)) (type_alias_statement) ] -. (expression_statement (string) @string.doc)+) + . + (expression_statement + (string) @string.doc)+) (module - .(expression_statement (string) @string.doc)+) + . + (expression_statement + (string) @string.doc)+) (class_definition - body: (block .(expression_statement (string) @string.doc)+)) + body: (block + . + (expression_statement + (string) @string.doc)+)) (function_definition "async"? "def" name: (_) (parameters)? - body: (block .(expression_statement (string) @string.doc)+)) + body: (block + . + (expression_statement + (string) @string.doc)+)) (class_definition body: (block - . (comment) @comment* - . (expression_statement (string) @string.doc)+)) + . + (comment) @comment* + . + (expression_statement + (string) @string.doc)+)) (module - . (comment) @comment* - . (expression_statement (string) @string.doc)+) + . + (comment) @comment* + . 
+ (expression_statement + (string) @string.doc)+) (class_definition body: (block - (expression_statement (assignment)) - . (expression_statement (string) @string.doc)+)) + (expression_statement + (assignment)) + . + (expression_statement + (string) @string.doc)+)) (class_definition body: (block @@ -190,9 +226,11 @@ name: (identifier) @function.method.constructor (#eq? @function.method.constructor "__init__") body: (block - (expression_statement (assignment)) - . (expression_statement (string) @string.doc)+)))) - + (expression_statement + (assignment)) + . + (expression_statement + (string) @string.doc)+)))) [ "-" @@ -286,18 +324,23 @@ "lambda" ] @keyword.definition -(decorator (identifier) @attribute.builtin +(decorator + (identifier) @attribute.builtin (#any-of? @attribute.builtin "classmethod" "staticmethod" "property")) ; Builtin types as identifiers [ (call function: (identifier) @type.builtin) - (type (identifier) @type.builtin) - (generic_type (identifier) @type.builtin) + (type + (identifier) @type.builtin) + (generic_type + (identifier) @type.builtin) ; also check if type binary operator left identifier for union types (type (binary_operator left: (identifier) @type.builtin)) - (#any-of? @type.builtin "bool" "bytearray" "bytes" "complex" "dict" "float" "frozenset" "int" "list" "memoryview" "object" "range" "set" "slice" "str" "tuple") + (#any-of? @type.builtin + "bool" "bytearray" "bytes" "complex" "dict" "float" "frozenset" "int" "list" "memoryview" + "object" "range" "set" "slice" "str" "tuple") ] diff --git a/crates/languages/src/python/imports.scm b/crates/languages/src/python/imports.scm index 7a1e2b225b9e310098f316c29fe6b1a27634bf12..26538fee1b41df13f258c8b315cc5e266458efa1 100644 --- a/crates/languages/src/python/imports.scm +++ b/crates/languages/src/python/imports.scm @@ -1,32 +1,38 @@ (import_statement - name: [ - (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .) 
- (aliased_import - name: (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .)) - ]) @wildcard @import + name: [ + (dotted_name + ((identifier) @namespace + ".")* + (identifier) @namespace .) + (aliased_import + name: (dotted_name + ((identifier) @namespace + ".")* + (identifier) @namespace .)) + ]) @wildcard @import (import_from_statement - module_name: [ - (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .) - (relative_import - (dotted_name - ((identifier) @namespace ".")* - (identifier) @namespace .)?) - ] - (wildcard_import)? @wildcard - name: [ - (dotted_name - ((identifier) @namespace ".")* - (identifier) @name .) - (aliased_import - name: (dotted_name - ((identifier) @namespace ".")* - (identifier) @name .) - alias: (identifier) @alias) - ]?) @import + module_name: [ + (dotted_name + ((identifier) @namespace + ".")* + (identifier) @namespace .) + (relative_import + (dotted_name + ((identifier) @namespace + ".")* + (identifier) @namespace .)?) + ] + (wildcard_import)? @wildcard + name: [ + (dotted_name + ((identifier) @namespace + ".")* + (identifier) @name .) + (aliased_import + name: (dotted_name + ((identifier) @namespace + ".")* + (identifier) @name .) + alias: (identifier) @alias) + ]?) 
@import diff --git a/crates/languages/src/python/indents.scm b/crates/languages/src/python/indents.scm index 3d4c1cc9c4260d4e925cc373662ae5ca3b82e124..9361aa7158725b22e40040e7d730d2693c688c97 100644 --- a/crates/languages/src/python/indents.scm +++ b/crates/languages/src/python/indents.scm @@ -1,17 +1,37 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent (function_definition) @start.def + (class_definition) @start.class + (if_statement) @start.if + (for_statement) @start.for + (while_statement) @start.while + (with_statement) @start.with + (match_statement) @start.match + (try_statement) @start.try + (elif_clause) @start.elif + (else_clause) @start.else + (except_clause) @start.except + (finally_clause) @start.finally + (case_clause) @start.case diff --git a/crates/languages/src/python/outline.scm b/crates/languages/src/python/outline.scm index 70beb4e67585918ca1f893140ec79ee2428d47d7..c335eef46545fcc0b493e66c780b6ecf839bd791 100644 --- a/crates/languages/src/python/outline.scm +++ b/crates/languages/src/python/outline.scm @@ -1,11 +1,10 @@ (decorator) @annotation (class_definition - "class" @context - name: (identifier) @name - ) @item + "class" @context + name: (identifier) @name) @item (function_definition - "async"? @context - "def" @context - name: (_) @name) @item + "async"? 
@context + "def" @context + name: (_) @name) @item diff --git a/crates/languages/src/python/overrides.scm b/crates/languages/src/python/overrides.scm index 81fec9a5f57b28fc67b4781ec37df43559e21dc9..544e9876f8ea8f1d676ee21731fdcb30fc7163ec 100644 --- a/crates/languages/src/python/overrides.scm +++ b/crates/languages/src/python/overrides.scm @@ -1,2 +1,3 @@ (comment) @comment.inclusive + (string) @string diff --git a/crates/languages/src/python/runnables.scm b/crates/languages/src/python/runnables.scm index 1c7e76d73be2357c71ac2f3adf41b14db969e262..3480d4a81017605da2f7cd473595d339f5d522a8 100644 --- a/crates/languages/src/python/runnables.scm +++ b/crates/languages/src/python/runnables.scm @@ -1,151 +1,108 @@ ; subclasses of unittest.TestCase or TestCase -( - (class_definition - name: (identifier) @run @_unittest_class_name - superclasses: (argument_list - [(identifier) @_superclass - (attribute (identifier) @_superclass)] - ) - (#eq? @_superclass "TestCase") - ) @_python-unittest-class - (#set! tag python-unittest-class) - ) +((class_definition + name: (identifier) @run @_unittest_class_name + superclasses: (argument_list + [ + (identifier) @_superclass + (attribute + (identifier) @_superclass) + ]) + (#eq? @_superclass "TestCase")) @_python-unittest-class + (#set! tag python-unittest-class)) ; test methods whose names start with `test` in a TestCase -( - (class_definition - name: (identifier) @_unittest_class_name - superclasses: (argument_list - [(identifier) @_superclass - (attribute (identifier) @_superclass)] - ) - (#eq? @_superclass "TestCase") - body: (block - (function_definition - name: (identifier) @run @_unittest_method_name - (#match? @_unittest_method_name "^test.*") - ) @_python-unittest-method - (#set! tag python-unittest-method) - ) - ) - ) +(class_definition + name: (identifier) @_unittest_class_name + superclasses: (argument_list + [ + (identifier) @_superclass + (attribute + (identifier) @_superclass) + ]) + (#eq? 
@_superclass "TestCase") + body: (block + (function_definition + name: (identifier) @run @_unittest_method_name + (#match? @_unittest_method_name "^test.*")) @_python-unittest-method + (#set! tag python-unittest-method))) ; pytest functions -( - (module - (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) @_python-pytest-method - ) - (#set! tag python-pytest-method) - ) +((module + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_")) @_python-pytest-method) + (#set! tag python-pytest-method)) ; decorated pytest functions -( - (module - (decorated_definition - (decorator)+ @_decorator - definition: (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) - ) @_python-pytest-method - ) - (#set! tag python-pytest-method) - ) - +((module + (decorated_definition + (decorator)+ @_decorator + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_"))) @_python-pytest-method) + (#set! tag python-pytest-method)) ; pytest classes -( - (module - (class_definition - name: (identifier) @run @_pytest_class_name - (#match? @_pytest_class_name "^Test") - ) - (#set! tag python-pytest-class) - ) - ) - +(module + (class_definition + name: (identifier) @run @_pytest_class_name + (#match? @_pytest_class_name "^Test")) + (#set! tag python-pytest-class)) ; decorated pytest classes -( - (module - (decorated_definition - (decorator)+ @_decorator - definition: (class_definition - name: (identifier) @run @_pytest_class_name - (#match? @_pytest_class_name "^Test") - ) - ) - (#set! tag python-pytest-class) - ) - ) - +(module + (decorated_definition + (decorator)+ @_decorator + definition: (class_definition + name: (identifier) @run @_pytest_class_name + (#match? @_pytest_class_name "^Test"))) + (#set! 
tag python-pytest-class)) ; pytest class methods -( - (module - (class_definition - name: (identifier) @_pytest_class_name - (#match? @_pytest_class_name "^Test") - body: (block - [(decorated_definition - (decorator)+ @_decorator - definition: (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) - ) - (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test") - ) - ] @_python-pytest-method) - (#set! tag python-pytest-method) - ) - ) - ) +(module + (class_definition + name: (identifier) @_pytest_class_name + (#match? @_pytest_class_name "^Test") + body: (block + [ + (decorated_definition + (decorator)+ @_decorator + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test_"))) + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test")) + ] @_python-pytest-method) + (#set! tag python-pytest-method))) ; decorated pytest class methods -( - (module - (decorated_definition +(module + (decorated_definition + (decorator)+ @_decorator + definition: (class_definition + name: (identifier) @_pytest_class_name + (#match? @_pytest_class_name "^Test") + body: (block + [ + (decorated_definition (decorator)+ @_decorator - definition: (class_definition - name: (identifier) @_pytest_class_name - (#match? @_pytest_class_name "^Test") - body: (block - [(decorated_definition - (decorator)+ @_decorator - definition: (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test_") - ) - ) - (function_definition - name: (identifier) @run @_pytest_method_name - (#match? @_pytest_method_name "^test") - ) - ] @_python-pytest-method) - (#set! tag python-pytest-method) - ) - ) - ) - ) + definition: (function_definition + name: (identifier) @run @_pytest_method_name + (#match? 
@_pytest_method_name "^test_"))) + (function_definition + name: (identifier) @run @_pytest_method_name + (#match? @_pytest_method_name "^test")) + ] @_python-pytest-method) + (#set! tag python-pytest-method)))) ; module main method -( - (module - (if_statement - condition: (comparison_operator - (identifier) @run @_lhs - operators: "==" - (string) @_rhs - ) - (#eq? @_lhs "__name__") - (#match? @_rhs "^[\"']__main__[\"']$") - (#set! tag python-module-main-method) - ) - ) - ) +(module + (if_statement + condition: (comparison_operator + (identifier) @run @_lhs + operators: "==" + (string) @_rhs) + (#eq? @_lhs "__name__") + (#match? @_rhs "^[\"']__main__[\"']$") + (#set! tag python-module-main-method))) diff --git a/crates/languages/src/regex/brackets.scm b/crates/languages/src/regex/brackets.scm index 191fd9c084a52eced37428281971ff9e569a4932..3779d8514bdee9fed0abe1f14b98851754decd8c 100644 --- a/crates/languages/src/regex/brackets.scm +++ b/crates/languages/src/regex/brackets.scm @@ -1,3 +1,8 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) diff --git a/crates/languages/src/regex/highlights.scm b/crates/languages/src/regex/highlights.scm index b5adecf472941154ae84d2acb62fea218859bbea..b0df4b98be08214554dd58a1dcfd1aab0b06586b 100644 --- a/crates/languages/src/regex/highlights.scm +++ b/crates/languages/src/regex/highlights.scm @@ -51,5 +51,6 @@ (character_class [ "^" @operator.regex - (class_range "-" @operator.regex) + (class_range + "-" @operator.regex) ]) diff --git a/crates/languages/src/rust/brackets.scm b/crates/languages/src/rust/brackets.scm index 7a35adb10021c83b8e08e888187ab133c5313ad9..9d78bb11116a0cbff542c721596ec6f8fc92d0cb 100644 --- a/crates/languages/src/rust/brackets.scm +++ b/crates/languages/src/rust/brackets.scm @@ -1,7 +1,23 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -(closure_parameters 
"|" @open "|" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +(closure_parameters + "|" @open + "|" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) diff --git a/crates/languages/src/rust/debugger.scm b/crates/languages/src/rust/debugger.scm index 5347413f698083287b9bedd25f4732d24fbbf76e..3c7195796feb41a771ec8071d78bea60efb61fd9 100644 --- a/crates/languages/src/rust/debugger.scm +++ b/crates/languages/src/rust/debugger.scm @@ -1,50 +1,85 @@ (metavariable) @debug-variable -(parameter (identifier) @debug-variable) +(parameter + (identifier) @debug-variable) (self) @debug-variable -(static_item (identifier) @debug-variable) -(const_item (identifier) @debug-variable) +(static_item + (identifier) @debug-variable) -(let_declaration pattern: (identifier) @debug-variable) +(const_item + (identifier) @debug-variable) -(let_condition (identifier) @debug-variable) +(let_declaration + pattern: (identifier) @debug-variable) -(match_arm (identifier) @debug-variable) +(let_condition + (identifier) @debug-variable) -(for_expression (identifier) @debug-variable) +(match_arm + (identifier) @debug-variable) -(closure_parameters (identifier) @debug-variable) +(for_expression + (identifier) @debug-variable) -(assignment_expression (identifier) @debug-variable) +(closure_parameters + (identifier) @debug-variable) -(field_expression (identifier) @debug-variable) +(assignment_expression + (identifier) @debug-variable) -(binary_expression (identifier) @debug-variable +(field_expression + (identifier) @debug-variable) + +(binary_expression + (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(reference_expression (identifier) @debug-variable +(reference_expression + (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) -(array_expression (identifier) @debug-variable) -(tuple_expression (identifier) @debug-variable) -(return_expression (identifier) @debug-variable) -(await_expression (identifier) @debug-variable) -(try_expression (identifier) @debug-variable) -(index_expression (identifier) @debug-variable) -(range_expression (identifier) @debug-variable) -(unary_expression (identifier) @debug-variable) +(array_expression + (identifier) @debug-variable) + +(tuple_expression + (identifier) @debug-variable) + +(return_expression + (identifier) @debug-variable) + +(await_expression + (identifier) @debug-variable) + +(try_expression + (identifier) @debug-variable) + +(index_expression + (identifier) @debug-variable) + +(range_expression + (identifier) @debug-variable) + +(unary_expression + (identifier) @debug-variable) + +(if_expression + (identifier) @debug-variable) -(if_expression (identifier) @debug-variable) -(while_expression (identifier) @debug-variable) +(while_expression + (identifier) @debug-variable) -(parenthesized_expression (identifier) @debug-variable) +(parenthesized_expression + (identifier) @debug-variable) -(arguments (identifier) @debug-variable +(arguments + (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(macro_invocation (token_tree (identifier) @debug-variable - (#not-match? @debug-variable "^[A-Z]"))) +(macro_invocation + (token_tree + (identifier) @debug-variable + (#not-match? 
@debug-variable "^[A-Z]"))) (block) @debug-scope diff --git a/crates/languages/src/rust/highlights.scm b/crates/languages/src/rust/highlights.scm index 82008d701536177cbe7cab8d6fc6c82e0568e944..57e5ed3f704dcd70974b73e0a0d4e31253191048 100644 --- a/crates/languages/src/rust/highlights.scm +++ b/crates/languages/src/rust/highlights.scm @@ -1,17 +1,33 @@ (identifier) @variable + (metavariable) @variable + (type_identifier) @type + (fragment_specifier) @type + (primitive_type) @type.builtin + (self) @variable.special + (field_identifier) @property + (shorthand_field_identifier) @property -(trait_item name: (type_identifier) @type.interface) -(impl_item trait: (type_identifier) @type.interface) -(abstract_type trait: (type_identifier) @type.interface) -(dynamic_type trait: (type_identifier) @type.interface) -(trait_bounds (type_identifier) @type.interface) +(trait_item + name: (type_identifier) @type.interface) + +(impl_item + trait: (type_identifier) @type.interface) + +(abstract_type + trait: (type_identifier) @type.interface) + +(dynamic_type + trait: (type_identifier) @type.interface) + +(trait_bounds + (type_identifier) @type.interface) (call_expression function: [ @@ -31,8 +47,11 @@ field: (field_identifier) @function.method) ]) -(function_item name: (identifier) @function.definition) -(function_signature_item name: (identifier) @function.definition) +(function_item + name: (identifier) @function.definition) + +(function_signature_item + name: (identifier) @function.definition) (macro_invocation macro: [ @@ -48,17 +67,17 @@ name: (identifier) @function.special.definition) ; Identifier conventions - ; Assume uppercase names are types/enum-constructors ((identifier) @type - (#match? @type "^[A-Z]")) + (#match? @type "^[A-Z]")) ; Assume all-caps names are constants ((identifier) @constant - (#match? @constant "^_*[A-Z][A-Z\\d_]*$")) + (#match? 
@constant "^_*[A-Z][A-Z\\d_]*$")) ; Ensure enum variants are highlighted correctly regardless of naming convention -(enum_variant name: (identifier) @type) +(enum_variant + name: (identifier) @type) [ "(" @@ -81,9 +100,7 @@ "::" ] @punctuation.delimiter -[ - "#" -] @punctuation.special +"#" @punctuation.special [ "as" @@ -131,7 +148,7 @@ ] @keyword.control (for_expression - ("for" @keyword.control)) + "for" @keyword.control) [ (string_literal) @@ -154,8 +171,10 @@ ] @comment [ - (line_comment (doc_comment)) - (block_comment (doc_comment)) + (line_comment + (doc_comment)) + (block_comment + (doc_comment)) ] @comment.doc [ @@ -198,25 +217,44 @@ ] @operator ; Avoid highlighting these as operators when used in doc comments. -(unary_expression "!" @operator) +(unary_expression + "!" @operator) + operator: "/" @operator (lifetime "'" @lifetime (identifier) @lifetime) -(parameter (identifier) @variable.parameter) - -(attribute_item (attribute [ - (identifier) @attribute - (scoped_identifier name: (identifier) @attribute) - (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$")) - (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$")) -])) - -(inner_attribute_item (attribute [ - (identifier) @attribute - (scoped_identifier name: (identifier) @attribute) - (token_tree (identifier) @attribute (#match? @attribute "^[a-z\\d_]*$")) - (token_tree (identifier) @none "::" (#match? @none "^[a-z\\d_]*$")) -])) +(parameter + (identifier) @variable.parameter) + +(attribute_item + (attribute + [ + (identifier) @attribute + (scoped_identifier + name: (identifier) @attribute) + (token_tree + (identifier) @attribute + (#match? @attribute "^[a-z\\d_]*$")) + (token_tree + (identifier) @none + "::" + (#match? @none "^[a-z\\d_]*$")) + ])) + +(inner_attribute_item + (attribute + [ + (identifier) @attribute + (scoped_identifier + name: (identifier) @attribute) + (token_tree + (identifier) @attribute + (#match? 
@attribute "^[a-z\\d_]*$")) + (token_tree + (identifier) @none + "::" + (#match? @none "^[a-z\\d_]*$")) + ])) diff --git a/crates/languages/src/rust/imports.scm b/crates/languages/src/rust/imports.scm index 3ce6a4f073506dd4d27320a7fd5bb547927f9c1a..2c368523d63b9c6ae9494b1ab801192161fd7000 100644 --- a/crates/languages/src/rust/imports.scm +++ b/crates/languages/src/rust/imports.scm @@ -1,27 +1,29 @@ (use_declaration) @import (scoped_use_list - path: (_) @namespace - list: (_) @list) + path: (_) @namespace + list: (_) @list) (scoped_identifier - path: (_) @namespace - name: (identifier) @name) + path: (_) @namespace + name: (identifier) @name) -(use_list (identifier) @name) +(use_list + (identifier) @name) -(use_declaration (identifier) @name) +(use_declaration + (identifier) @name) (use_as_clause - path: (scoped_identifier - path: (_) @namespace - name: (_) @name) - alias: (_) @alias) + path: (scoped_identifier + path: (_) @namespace + name: (_) @name) + alias: (_) @alias) (use_as_clause - path: (identifier) @name - alias: (_) @alias) + path: (identifier) @name + alias: (_) @alias) (use_wildcard - (_)? @namespace - "*" @wildcard) + (_)? 
@namespace + "*" @wildcard) diff --git a/crates/languages/src/rust/indents.scm b/crates/languages/src/rust/indents.scm index 9ab6b029083fd5d8e3249916c00a5f90648eb3e2..b4ef2ebcd78016de1092e718385ab52a89273003 100644 --- a/crates/languages/src/rust/indents.scm +++ b/crates/languages/src/rust/indents.scm @@ -1,14 +1,26 @@ [ - ((where_clause) _ @end) - (field_expression) - (call_expression) - (assignment_expression) - (let_declaration) - (let_chain) - (await_expression) + ((where_clause) + _ @end) + (field_expression) + (call_expression) + (assignment_expression) + (let_declaration) + (let_chain) + (await_expression) ] @indent -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/crates/languages/src/rust/injections.scm b/crates/languages/src/rust/injections.scm index 88df78d793c5666492b0f3917d78b4210be5e094..89d839282d3388f450f9ebdb923167f0986f349c 100644 --- a/crates/languages/src/rust/injections.scm +++ b/crates/languages/src/rust/injections.scm @@ -1,64 +1,67 @@ ([ - (line_comment) - (block_comment) + (line_comment) + (block_comment) ] @injection.content - (#set! injection.language "comment")) + (#set! injection.language "comment")) (macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (#not-any-of? @_macro_name "view" "html") - (token_tree) @injection.content - (#set! injection.language "rust")) + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (#not-any-of? @_macro_name "view" "html") + (token_tree) @injection.content + (#set! 
injection.language "rust")) ; we need a better way for the leptos extension to declare that ; it wants to inject inside of rust, instead of modifying the rust ; injections to support leptos injections (macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (#any-of? @_macro_name "view" "html") - (token_tree) @injection.content - (#set! injection.language "rstml") - ) + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (#any-of? @_macro_name "view" "html") + (token_tree) @injection.content + (#set! injection.language "rstml")) (macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (#any-of? @_macro_name "sql") - (_) @injection.content - (#set! injection.language "sql") - ) + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (#any-of? @_macro_name "sql") + (_) @injection.content + (#set! injection.language "sql")) ; lazy_regex (macro_invocation - macro: [ - ((identifier) @_macro_name) - (scoped_identifier (identifier) @_macro_name .) - ] - (token_tree [ - (string_literal (string_content) @injection.content) - (raw_string_literal (string_content) @injection.content) + macro: [ + (identifier) @_macro_name + (scoped_identifier + (identifier) @_macro_name .) + ] + (token_tree + [ + (string_literal + (string_content) @injection.content) + (raw_string_literal + (string_content) @injection.content) ]) - (#set! injection.language "regex") - (#any-of? @_macro_name "regex" "bytes_regex") -) + (#set! injection.language "regex") + (#any-of? @_macro_name "regex" "bytes_regex")) (call_expression - function: (scoped_identifier) @_fn_path - arguments: (arguments - [ - (string_literal (string_content) @injection.content) - (raw_string_literal (string_content) @injection.content) - ] - ) - - (#match? @_fn_path ".*Regex(Builder)?::new") - (#set! 
injection.language "regex") -) + function: (scoped_identifier) @_fn_path + arguments: (arguments + [ + (string_literal + (string_content) @injection.content) + (raw_string_literal + (string_content) @injection.content) + ]) + (#match? @_fn_path ".*Regex(Builder)?::new") + (#set! injection.language "regex")) diff --git a/crates/languages/src/rust/outline.scm b/crates/languages/src/rust/outline.scm index a99f53dd2b3154aa3717f67fd683da4a8b57d31b..03ecb99facdc99cb0be8e2fb6bd4e177cb936b4e 100644 --- a/crates/languages/src/rust/outline.scm +++ b/crates/languages/src/rust/outline.scm @@ -1,73 +1,81 @@ (attribute_item) @annotation + (line_comment) @annotation (struct_item - (visibility_modifier)? @context - "struct" @context - name: (_) @name) @item + (visibility_modifier)? @context + "struct" @context + name: (_) @name) @item (enum_item - (visibility_modifier)? @context - "enum" @context - name: (_) @name) @item + (visibility_modifier)? @context + "enum" @context + name: (_) @name) @item (enum_variant - (visibility_modifier)? @context - name: (_) @name) @item + (visibility_modifier)? @context + name: (_) @name) @item (impl_item - "impl" @context - trait: (_)? @name - "for"? @context - type: (_) @name - body: (_ . "{" @open "}" @close .)) @item + "impl" @context + trait: (_)? @name + "for"? @context + type: (_) @name + body: (_ + . + "{" @open + "}" @close .)) @item (trait_item - (visibility_modifier)? @context - "trait" @context - name: (_) @name) @item + (visibility_modifier)? @context + "trait" @context + name: (_) @name) @item (function_item - (visibility_modifier)? @context - (function_modifiers)? @context - "fn" @context - name: (_) @name - body: (_ . "{" @open "}" @close .)) @item + (visibility_modifier)? @context + (function_modifiers)? @context + "fn" @context + name: (_) @name + body: (_ + . + "{" @open + "}" @close .)) @item (function_signature_item - (visibility_modifier)? @context - (function_modifiers)? 
@context - "fn" @context - name: (_) @name) @item + (visibility_modifier)? @context + (function_modifiers)? @context + "fn" @context + name: (_) @name) @item (macro_definition - . "macro_rules!" @context - name: (_) @name) @item + . + "macro_rules!" @context + name: (_) @name) @item (mod_item - (visibility_modifier)? @context - "mod" @context - name: (_) @name) @item + (visibility_modifier)? @context + "mod" @context + name: (_) @name) @item (type_item - (visibility_modifier)? @context - "type" @context - name: (_) @name) @item + (visibility_modifier)? @context + "type" @context + name: (_) @name) @item (associated_type - "type" @context - name: (_) @name) @item + "type" @context + name: (_) @name) @item (const_item - (visibility_modifier)? @context - "const" @context - name: (_) @name) @item + (visibility_modifier)? @context + "const" @context + name: (_) @name) @item (static_item - (visibility_modifier)? @context - "static" @context - name: (_) @name) @item + (visibility_modifier)? @context + "static" @context + name: (_) @name) @item (field_declaration - (visibility_modifier)? @context - name: (_) @name) @item + (visibility_modifier)? 
@context + name: (_) @name) @item diff --git a/crates/languages/src/rust/overrides.scm b/crates/languages/src/rust/overrides.scm index 91fa6139d387db97676cd32a84433b16f3c8e94e..039425a91d519b2b4b030a37ad9e71705833820e 100644 --- a/crates/languages/src/rust/overrides.scm +++ b/crates/languages/src/rust/overrides.scm @@ -2,6 +2,7 @@ (string_literal) (raw_string_literal) ] @string + [ (line_comment) (block_comment) diff --git a/crates/languages/src/rust/runnables.scm b/crates/languages/src/rust/runnables.scm index 7c1571614424161ec866f5fa2607ea55975500e2..ef7050397df586ebb96c2648ea3be282d246e5aa 100644 --- a/crates/languages/src/rust/runnables.scm +++ b/crates/languages/src/rust/runnables.scm @@ -1,92 +1,75 @@ ; Rust mod test -( - (attribute_item (attribute - ( - (identifier) @_attribute) - arguments: ( - (token_tree (identifier) @_test) - (#eq? @_test "test") - ) - ) - (#eq? @_attribute "cfg") - ) - . - (mod_item - name: (_) @run - ) - (#set! tag rust-mod-test) -) +((attribute_item + (attribute + (identifier) @_attribute + arguments: ((token_tree + (identifier) @_test) + (#eq? @_test "test"))) + (#eq? @_attribute "cfg")) + . + (mod_item + name: (_) @run) + (#set! tag rust-mod-test)) ; Rust test -( - ( - (attribute_item (attribute - [((identifier) @_attribute) - (scoped_identifier (identifier) @_attribute) - ]) - (#match? @_attribute "test") - ) @_start - . - (attribute_item) * - . - [(line_comment) (block_comment)] * - . - (function_item - name: (_) @run @_test_name - body: _ - ) @_end - ) - (#set! tag rust-test) -) +(((attribute_item + (attribute + [ + (identifier) @_attribute + (scoped_identifier + (identifier) @_attribute) + ]) + (#match? @_attribute "test")) @_start + . + (attribute_item)* + . + [ + (line_comment) + (block_comment) + ]* + . + (function_item + name: (_) @run @_test_name + body: _) @_end) + (#set! tag rust-test)) ; Rust doc test -( - ( - (line_comment) * - (line_comment - doc: (_) @_comment_content - ) @_start @run - (#match? 
@_comment_content "```") - . - (line_comment) * - . - (line_comment - doc: (_) @_end_comment_content - ) @_end_code_block - (#match? @_end_comment_content "```") - . - (line_comment) * - (attribute_item) * - . - [(function_item - name: (_) @_doc_test_name - body: _ - ) (function_signature_item - name: (_) @_doc_test_name - ) (struct_item - name: (_) @_doc_test_name - ) (enum_item - name: (_) @_doc_test_name - body: _ - ) ( - (attribute_item) ? - (macro_definition - name: (_) @_doc_test_name) - ) (mod_item - name: (_) @_doc_test_name - )] @_end - ) - (#set! tag rust-doc-test) -) +(((line_comment)* + (line_comment + doc: (_) @_comment_content) @_start @run + (#match? @_comment_content "```") + . + (line_comment)* + . + (line_comment + doc: (_) @_end_comment_content) @_end_code_block + (#match? @_end_comment_content "```") + . + (line_comment)* + (attribute_item)* + . + [ + (function_item + name: (_) @_doc_test_name + body: _) + (function_signature_item + name: (_) @_doc_test_name) + (struct_item + name: (_) @_doc_test_name) + (enum_item + name: (_) @_doc_test_name + body: _) + ((attribute_item)? + (macro_definition + name: (_) @_doc_test_name)) + (mod_item + name: (_) @_doc_test_name) + ] @_end) + (#set! tag rust-doc-test)) ; Rust main function -( - ( - (function_item - name: (_) @run - body: _ - ) @_rust_main_function_end - (#eq? @run "main") - ) - (#set! tag rust-main) -) +(((function_item + name: (_) @run + body: _) @_rust_main_function_end + (#eq? @run "main")) + (#set! 
tag rust-main)) diff --git a/crates/languages/src/rust/textobjects.scm b/crates/languages/src/rust/textobjects.scm index 4e7e7fa0cd1ba4393bc99998e38e940f751aef97..97a90a54f800942eb733a9bd494b6e56e191a3ec 100644 --- a/crates/languages/src/rust/textobjects.scm +++ b/crates/languages/src/rust/textobjects.scm @@ -2,50 +2,73 @@ (function_signature_item) @function.around (function_item - body: (_ - "{" - (_)* @function.inside - "}" )) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around ; classes (struct_item - body: (_ - ["{" "("]? - [(_) ","?]* @class.inside - ["}" ")"]? )) @class.around + body: (_ + [ + "{" + "(" + ]? + [ + (_) + ","? + ]* @class.inside + [ + "}" + ")" + ]?)) @class.around (enum_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (union_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (trait_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (impl_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (mod_item - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? 
+ ]* @class.inside + "}")) @class.around ; comments - (line_comment)+ @comment.around (block_comment) @comment.around diff --git a/crates/languages/src/tsx/brackets.scm b/crates/languages/src/tsx/brackets.scm index 0e98b78036b4b19fd63d812fa92d2416788764f4..d72fcb26005a0021907558bbbee7471cfeaec603 100644 --- a/crates/languages/src/tsx/brackets.scm +++ b/crates/languages/src/tsx/brackets.scm @@ -1,11 +1,35 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -("<" @open "/>" @close) -("" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) - -((jsx_element (jsx_opening_element) @open (jsx_closing_element) @close) (#set! newline.only) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +("<" @open + "/>" @close) + +("" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! rainbow.exclude)) + +((jsx_element + (jsx_opening_element) @open + (jsx_closing_element) @close) + (#set! newline.only) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/tsx/debugger.scm b/crates/languages/src/tsx/debugger.scm index 3e73dc839e4e5fc5ccc1654e96b327bc8181a2e8..5a6ab143d0dbed601534cc214bd017fcf5c29a41 100644 --- a/crates/languages/src/tsx/debugger.scm +++ b/crates/languages/src/tsx/debugger.scm @@ -1,25 +1,55 @@ -(lexical_declaration (variable_declarator name: (identifier) @debug-variable)) +(lexical_declaration + (variable_declarator + name: (identifier) @debug-variable)) -(for_in_statement left: (identifier) @debug-variable) -(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable))) +(for_in_statement + left: (identifier) @debug-variable) -(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_statement + initializer: (lexical_declaration + (variable_declarator + name: (identifier) @debug-variable))) -(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(unary_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(jsx_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(update_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(array (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(pair value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(jsx_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(pair + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(member_expression + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) (statement_block) @debug-scope + (program) @debug-scope diff --git a/crates/languages/src/tsx/highlights.scm b/crates/languages/src/tsx/highlights.scm index a96bf96281fd90a77a3411d1ad909f22c12ac0df..056956199ef26faea0c9fc09467f48c19a712b14 100644 --- a/crates/languages/src/tsx/highlights.scm +++ b/crates/languages/src/tsx/highlights.scm @@ -1,56 +1,33 @@ ; Variables - (identifier) @variable (call_expression function: (member_expression object: (identifier) @type - (#any-of? - @type - "Promise" - "Array" - "Object" - "Map" - "Set" - "WeakMap" - "WeakSet" - "Date" - "Error" - "TypeError" - "RangeError" - "SyntaxError" - "ReferenceError" - "EvalError" - "URIError" - "RegExp" - "Function" - "Number" - "String" - "Boolean" - "Symbol" - "BigInt" - "Proxy" - "ArrayBuffer" - "DataView" - ) - ) -) + (#any-of? 
@type + "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError" + "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function" + "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView"))) ; Properties - (property_identifier) @property + (shorthand_property_identifier) @property + (shorthand_property_identifier_pattern) @property + (private_property_identifier) @property ; Function and method calls - (call_expression function: (identifier) @function) (call_expression function: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method)) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method)) (new_expression constructor: (identifier) @type) @@ -59,36 +36,58 @@ module: (identifier) @type) ; Function and method definitions - (function_expression name: (identifier) @function) + (function_declaration name: (identifier) @function) + (method_definition - name: [(property_identifier) (private_property_identifier)] @function.method) + name: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + (method_definition - name: (property_identifier) @constructor - (#eq? @constructor "constructor")) + name: (property_identifier) @constructor + (#eq? 
@constructor "constructor")) (pair - key: [(property_identifier) (private_property_identifier)] @function.method - value: [(function_expression) (arrow_function)]) + key: [ + (property_identifier) + (private_property_identifier) + ] @function.method + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method) - right: [(function_expression) (arrow_function)]) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + right: [ + (function_expression) + (arrow_function) + ]) (variable_declarator name: (identifier) @function - value: [(function_expression) (arrow_function)]) + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (identifier) @function - right: [(function_expression) (arrow_function)]) + right: [ + (function_expression) + (arrow_function) + ]) ; Parameters - (required_parameter (identifier) @variable.parameter) @@ -122,9 +121,10 @@ name: (identifier) @variable.parameter) ; Special identifiers - (type_annotation) @type + (type_identifier) @type + (predefined_type) @type.builtin (type_alias_declaration @@ -153,12 +153,12 @@ (identifier) (shorthand_property_identifier) (shorthand_property_identifier_pattern) - ] @constant - (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) +] @constant + (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) ; Literals - (this) @variable.special + (super) @variable.special [ @@ -182,11 +182,12 @@ (escape_sequence) @string.escape (regex) @string.regex + (regex_flags) @keyword.operator.regex + (number) @number ; Tokens - [ ";" "?." @@ -244,7 +245,8 @@ "..." ] @operator -(regex "/" @string.regex) +(regex + "/" @string.regex) [ "(" @@ -253,14 +255,13 @@ "]" "{" "}" -] @punctuation.bracket +] @punctuation.bracket (ternary_expression [ "?" 
":" - ] @operator -) + ] @operator) ; Keywords [ @@ -334,7 +335,8 @@ "yield" ] @keyword.control -(switch_default "default" @keyword.control) +(switch_default + "default" @keyword.control) (template_substitution "${" @punctuation.special @@ -352,31 +354,32 @@ "<" @punctuation.bracket ">" @punctuation.bracket) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) (union_type - ("|") @punctuation.special) + "|" @punctuation.special) (intersection_type - ("&") @punctuation.special) + "&" @punctuation.special) (type_annotation - (":") @punctuation.special) + ":" @punctuation.special) (index_signature - (":") @punctuation.special) + ":" @punctuation.special) (type_predicate_annotation - (":") @punctuation.special) + ":" @punctuation.special) (public_field_definition - ("?") @punctuation.special) + "?" @punctuation.special) (property_signature - ("?") @punctuation.special) + "?" @punctuation.special) (method_signature - ("?") @punctuation.special) + "?" @punctuation.special) (optional_parameter ([ @@ -384,44 +387,66 @@ ":" ]) @punctuation.special) - - (jsx_opening_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + property: (property_identifier) @type) + ]) + (jsx_closing_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) + property: (property_identifier) @type) + ]) + (jsx_self_closing_element [ (identifier) @type (member_expression object: (identifier) @type - property: (property_identifier) @type - ) - ] -) - -(jsx_opening_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_closing_element (identifier) @tag.jsx (#match? @tag.jsx "^[a-z][^.]*$")) -(jsx_self_closing_element (identifier) @tag.jsx (#match? 
@tag.jsx "^[a-z][^.]*$")) - -(jsx_attribute (property_identifier) @attribute.jsx) -(jsx_opening_element (["<" ">"]) @punctuation.bracket.jsx) -(jsx_closing_element ([""]) @punctuation.bracket.jsx) -(jsx_self_closing_element (["<" "/>"]) @punctuation.bracket.jsx) -(jsx_attribute "=" @punctuation.delimiter.jsx) + property: (property_identifier) @type) + ]) + +(jsx_opening_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_closing_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_self_closing_element + (identifier) @tag.jsx + (#match? @tag.jsx "^[a-z][^.]*$")) + +(jsx_attribute + (property_identifier) @attribute.jsx) + +(jsx_opening_element + ([ + "<" + ">" + ]) @punctuation.bracket.jsx) + +(jsx_closing_element + ([ + "" + ]) @punctuation.bracket.jsx) + +(jsx_self_closing_element + ([ + "<" + "/>" + ]) @punctuation.bracket.jsx) + +(jsx_attribute + "=" @punctuation.delimiter.jsx) + (jsx_text) @text.jsx + (html_character_reference) @string.special diff --git a/crates/languages/src/tsx/imports.scm b/crates/languages/src/tsx/imports.scm index e26b97aeef9cb62395e7030f3173208d79187bd6..0e688d53fb6ed639c55c1fa84917711d19c3108a 100644 --- a/crates/languages/src/tsx/imports.scm +++ b/crates/languages/src/tsx/imports.scm @@ -1,14 +1,16 @@ (import_statement - import_clause: (import_clause - [ - (identifier) @name - (named_imports - (import_specifier - name: (_) @name - alias: (_)? @alias)) - ]) - source: (string (string_fragment) @source)) @import + import_clause: (import_clause + [ + (identifier) @name + (named_imports + (import_specifier + name: (_) @name + alias: (_)? 
@alias)) + ]) + source: (string + (string_fragment) @source)) @import (import_statement - !import_clause - source: (string (string_fragment) @source @wildcard)) @import + !import_clause + source: (string + (string_fragment) @source @wildcard)) @import diff --git a/crates/languages/src/tsx/indents.scm b/crates/languages/src/tsx/indents.scm index 9897f3060eaf37891cf4563cebc93345112422f8..1e72160bca2f5fd04ce6d3bc7b02e9ab029eb018 100644 --- a/crates/languages/src/tsx/indents.scm +++ b/crates/languages/src/tsx/indents.scm @@ -1,20 +1,32 @@ [ - (call_expression) - (assignment_expression) - (member_expression) - (lexical_declaration) - (variable_declaration) - (assignment_expression) - (if_statement) - (for_statement) + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + (if_statement) + (for_statement) ] @indent -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent -(jsx_opening_element ">" @end) @indent +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent + +(jsx_opening_element + ">" @end) @indent (jsx_element (jsx_opening_element) @start diff --git a/crates/languages/src/tsx/injections.scm b/crates/languages/src/tsx/injections.scm index 2cf3ea69ca2fd95402eba6fadb85f3505c5562b7..fda53263f575238051d325cd5820a285f8f24259 100644 --- a/crates/languages/src/tsx/injections.scm +++ b/crates/languages/src/tsx/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) (((comment) @_jsdoc_comment (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content @@ -10,119 +9,137 @@ (#set! injection.language "regex")) (call_expression - function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @injection.content - (#set! 
injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "css") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (member_expression - object: (identifier) @_obj (#eq? @_obj "styled") + object: (identifier) @_obj + (#eq? @_obj "styled") property: (property_identifier)) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (call_expression - function: (identifier) @_name (#eq? @_name "styled")) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "styled")) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression - function: (identifier) @_name (#eq? @_name "html") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "html")) -) + function: (identifier) @_name + (#eq? @_name "html") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "html"))) (call_expression - function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "javascript")) -) + function: (identifier) @_name + (#eq? @_name "js") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "javascript"))) (call_expression - function: (identifier) @_name (#eq? @_name "json") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "json")) -) + function: (identifier) @_name + (#eq? @_name "json") + arguments: (template_string + (string_fragment) @injection.content + (#set! 
injection.language "json"))) (call_expression - function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "sql")) -) + function: (identifier) @_name + (#eq? @_name "sql") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "sql"))) (call_expression - function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "typescript")) -) + function: (identifier) @_name + (#eq? @_name "ts") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "typescript"))) (call_expression - function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "yaml")) -) + function: (identifier) @_name + (#match? @_name "^ya?ml$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "yaml"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "graphql")) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "graphql"))) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql")))) (call_expression - function: (identifier) @_name(#match? @_name "^iso$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! 
injection.language "isograph"))) -) + function: (identifier) @_name + (#match? @_name "^iso$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "isograph")))) ; Parse the contents of strings and tagged template ; literals with leading ECMAScript comments: ; '/* html */' or '/*html*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") - (#set! injection.language "html") -) + (#set! injection.language "html")) ; '/* sql */' or '/*sql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") - (#set! injection.language "sql") -) + (#set! injection.language "sql")) ; '/* gql */' or '/*gql*/' ; '/* graphql */' or '/*graphql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") - (#set! injection.language "graphql") -) + (#set! injection.language "graphql")) ; '/* css */' or '/*css*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? 
@_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") - (#set! injection.language "css") -) + (#set! injection.language "css")) diff --git a/crates/languages/src/tsx/outline.scm b/crates/languages/src/tsx/outline.scm index 54d29007c7b7eb57c0bcaefc2c1e0ab75e4d9a6c..37991965256a0def9b0458958ac4e50c6f337af6 100644 --- a/crates/languages/src/tsx/outline.scm +++ b/crates/languages/src/tsx/outline.scm @@ -1,230 +1,275 @@ (internal_module - "namespace" @context - name: (_) @name) @item + "namespace" @context + name: (_) @name) @item (enum_declaration - "enum" @context - name: (_) @name) @item + "enum" @context + name: (_) @name) @item (type_alias_declaration - "type" @context - name: (_) @name) @item + "type" @context + name: (_) @name) @item (function_declaration - "async"? @context - "function" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? @context + "function" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (generator_function_declaration - "async"? @context - "function" @context - "*" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? 
@context + "function" @context + "*" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (interface_declaration - "interface" @context - name: (_) @name) @item + "interface" @context + name: (_) @name) @item (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Exported array destructuring (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Exported object destructuring (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Top-level array 
destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Top-level object destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (class_declaration - "class" @context - name: (_) @name) @item + "class" @context + name: (_) @name) @item (abstract_class_declaration - "abstract" @context - "class" @context - name: (_) @name) @item + "abstract" @context + "class" @context + name: (_) @name) @item ; Method definitions in classes (not in object literals) (class_body - (method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item) + (method_definition + [ + "get" + "set" + "async" + "*" + "readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" 
@context)) @item) ; Object literal methods (variable_declarator - value: (object - (method_definition - [ - "get" - "set" - "async" - "*" - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item)) + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) (public_field_definition - [ - "declare" - "readonly" - "abstract" - "static" - (accessibility_modifier) - ]* @context - name: (_) @name) @item + [ + "declare" + "readonly" + "abstract" + "static" + (accessibility_modifier) + ]* @context + name: (_) @name) @item ; Add support for (node:test, bun:test and Jest) runnable -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] @context - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . 
[ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Object properties (pair - key: [ - (property_identifier) @name - (string (string_fragment) @name) - (number) @name - (computed_property_name) @name - ]) @item - + key: [ + (property_identifier) @name + (string + (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item ; Nested variables in function bodies (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Nested array destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Nested object destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern value: (identifier) @name @item) - (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + 
(lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (comment) @annotation diff --git a/crates/languages/src/tsx/overrides.scm b/crates/languages/src/tsx/overrides.scm index f5a51af33fee340762d6b689e78d2e94e9c84901..759ffe8703ff27f53e5ccadb3eb4687a279f21f8 100644 --- a/crates/languages/src/tsx/overrides.scm +++ b/crates/languages/src/tsx/overrides.scm @@ -2,7 +2,8 @@ (string) @string -(template_string (string_fragment) @string) +(template_string + (string_fragment) @string) (jsx_element) @element @@ -13,6 +14,7 @@ (jsx_expression) ] @default -(_ value: (call_expression - function: (identifier) @function_name_before_type_arguments - type_arguments: (type_arguments))) +(_ + value: (call_expression + function: (identifier) @function_name_before_type_arguments + type_arguments: (type_arguments))) diff --git a/crates/languages/src/tsx/runnables.scm b/crates/languages/src/tsx/runnables.scm index 85702cf99d9968b29f9375bfd8215ecba53f2eb5..db1f69a2c22e5a5dbcf7892f6c02158260c764e9 100644 --- a/crates/languages/src/tsx/runnables.scm +++ b/crates/languages/src/tsx/runnables.scm @@ -1,46 +1,42 @@ ; Add support for (node:test, bun:test and Jest) runnable ; Function expression that has `it`, `test` or `describe` as the function name -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! 
tag js-test) -) +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! 
tag js-test)) diff --git a/crates/languages/src/tsx/textobjects.scm b/crates/languages/src/tsx/textobjects.scm index 628a921f3ac9ea04ff59654d72caf73cebbc9071..7a3a4768d94f495f9654d7ba1c182d3f7a47dcb4 100644 --- a/crates/languages/src/tsx/textobjects.scm +++ b/crates/languages/src/tsx/textobjects.scm @@ -1,113 +1,129 @@ (comment)+ @comment.around (function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (method_definition - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (function_expression - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around ((arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) ; Arrow function in variable declaration - capture the full declaration ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) ]) @function.around ; Arrow function in variable declaration (expression body fallback) ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) ]) @function.around ; Catch-all for arrow functions in other contexts (callbacks, etc.) ((arrow_function - body: (_) @function.inside) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (_) @function.inside) @function.around + (#not-has-parent? @function.around variable_declarator)) + (function_signature) @function.around (generator_function - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (generator_function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (class_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? 
+ ]* @class.inside + "}")) @class.around (class - body: (_ - "{" - (_)* @class.inside - "}" )) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (interface_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (enum_declaration - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (ambient_declaration - (module + (module body: (_ - "{" - [(_) ";"?]* @class.inside - "}" ))) @class.around + "{" + [ + (_) + ";"? + ]* @class.inside + "}"))) @class.around (internal_module - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (type_alias_declaration) @class.around diff --git a/crates/languages/src/typescript/brackets.scm b/crates/languages/src/typescript/brackets.scm index 635233849142d8951edeca02ca0c79253aa91e80..2f6f3a133fbe47abfcf54473beff0c73c04afaf4 100644 --- a/crates/languages/src/typescript/brackets.scm +++ b/crates/languages/src/typescript/brackets.scm @@ -1,7 +1,23 @@ -("(" @open ")" @close) -("[" @open "]" @close) -("{" @open "}" @close) -("<" @open ">" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) -(("`" @open "`" @close) (#set! rainbow.exclude)) +("(" @open + ")" @close) + +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("<" @open + ">" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! rainbow.exclude)) + +(("`" @open + "`" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/typescript/debugger.scm b/crates/languages/src/typescript/debugger.scm index a99f194a4a4130210b47f8170fca039acc163411..8f384fd8ad9e07fea89972464e64b905086bf580 100644 --- a/crates/languages/src/typescript/debugger.scm +++ b/crates/languages/src/typescript/debugger.scm @@ -1,23 +1,51 @@ -(lexical_declaration (variable_declarator name: (identifier) @debug-variable)) +(lexical_declaration + (variable_declarator + name: (identifier) @debug-variable)) -(for_in_statement left: (identifier) @debug-variable) -(for_statement initializer: (lexical_declaration (variable_declarator name: (identifier) @debug-variable))) +(for_in_statement + left: (identifier) @debug-variable) -(binary_expression left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(binary_expression right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(for_statement + initializer: (lexical_declaration + (variable_declarator + name: (identifier) @debug-variable))) -(unary_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) -(update_expression argument: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + left: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(binary_expression + right: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(unary_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(array (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(update_expression + argument: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(pair value: (identifier) @debug-variable (#not-match? 
@debug-variable "^[A-Z]")) +(return_statement + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) -(member_expression object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]")) +(parenthesized_expression + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(array + (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(pair + value: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) + +(member_expression + object: (identifier) @debug-variable + (#not-match? @debug-variable "^[A-Z]")) (statement_block) @debug-scope + (program) @debug-scope diff --git a/crates/languages/src/typescript/highlights.scm b/crates/languages/src/typescript/highlights.scm index 8ec3ec26cca805c65d68d9df08037102a32494dc..55eb9119e4963957e77aa1791d2a51aadd8d2890 100644 --- a/crates/languages/src/typescript/highlights.scm +++ b/crates/languages/src/typescript/highlights.scm @@ -1,46 +1,19 @@ ; Variables - (identifier) @variable (call_expression function: (member_expression object: (identifier) @type - (#any-of? - @type - "Promise" - "Array" - "Object" - "Map" - "Set" - "WeakMap" - "WeakSet" - "Date" - "Error" - "TypeError" - "RangeError" - "SyntaxError" - "ReferenceError" - "EvalError" - "URIError" - "RegExp" - "Function" - "Number" - "String" - "Boolean" - "Symbol" - "BigInt" - "Proxy" - "ArrayBuffer" - "DataView" - ) - ) -) + (#any-of? 
@type + "Promise" "Array" "Object" "Map" "Set" "WeakMap" "WeakSet" "Date" "Error" "TypeError" + "RangeError" "SyntaxError" "ReferenceError" "EvalError" "URIError" "RegExp" "Function" + "Number" "String" "Boolean" "Symbol" "BigInt" "Proxy" "ArrayBuffer" "DataView"))) ; Special identifiers - (type_annotation) @type (type_identifier) @type + (predefined_type) @type.builtin (type_alias_declaration @@ -65,49 +38,47 @@ (implements_clause (type_identifier) @type) -;; Enables ts-pretty-errors -;; The Lsp returns "snippets" of typescript, which are not valid typescript in totality, -;; but should still be highlighted -;; Highlights object literals by hijacking the statement_block pattern, but only if -;; the statement block follows an object literal pattern -((statement_block - (labeled_statement - ;; highlight the label like a property name - label: (statement_identifier) @property.name - body: [ - ;; match a terminating expression statement - (expression_statement - ;; single identifier - treat as a type name - [(identifier) @type.name - ;; object - treat as a property - type pair - (object - (pair - key: (_) @property.name - value: (_) @type.name)) - ;; subscript_expression - treat as an array declaration - (subscript_expression - object: (_) @type.name - index: (_) - ) - ;; templated string - treat each identifier contained as a type name - (template_string - (template_substitution - (identifier) @type.name)) - ]) - ;; match a nested statement block - (statement_block) @nested - ]))) +; Enables ts-pretty-errors +; The Lsp returns "snippets" of typescript, which are not valid typescript in totality, +; but should still be highlighted +; Highlights object literals by hijacking the statement_block pattern, but only if +; the statement block follows an object literal pattern +(statement_block + (labeled_statement + ; highlight the label like a property name + label: (statement_identifier) @property.name + body: [ + ; match a terminating expression statement + 
(expression_statement + ; single identifier - treat as a type name + [ + (identifier) @type.name + ; object - treat as a property - type pair + (object + (pair + key: (_) @property.name + value: (_) @type.name)) + ; subscript_expression - treat as an array declaration + (subscript_expression + object: (_) @type.name + index: (_)) + ; templated string - treat each identifier contained as a type name + (template_string + (template_substitution + (identifier) @type.name)) + ]) + ; match a nested statement block + (statement_block) @nested + ])) ; Inline type imports: import { type Foo } or import { type Foo as Bar } (import_specifier "type" - name: (identifier) @type -) + name: (identifier) @type) (import_specifier "type" - alias: (identifier) @type -) + alias: (identifier) @type) ; Full type imports: import type { Foo } or import type { Foo as Bar } (import_statement @@ -115,45 +86,41 @@ (import_clause (named_imports (import_specifier - name: (identifier) @type - ) - ) - ) -) + name: (identifier) @type)))) (import_statement "type" (import_clause (named_imports (import_specifier - alias: (identifier) @type - ) - ) - ) -) + alias: (identifier) @type)))) ([ (identifier) (shorthand_property_identifier) (shorthand_property_identifier_pattern) - ] @constant - (#match? @constant "^_*[A-Z_][A-Z\\d_]*$")) +] @constant + (#match? 
@constant "^_*[A-Z_][A-Z\\d_]*$")) ; Properties - (property_identifier) @property + (shorthand_property_identifier) @property + (shorthand_property_identifier_pattern) @property + (private_property_identifier) @property ; Function and method calls - (call_expression function: (identifier) @function) (call_expression function: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method)) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method)) (new_expression constructor: (identifier) @type) @@ -162,38 +129,60 @@ module: (identifier) @type) ; Function and method definitions - (function_expression name: (identifier) @function) + (function_declaration name: (identifier) @function) + (method_definition - name: [(property_identifier) (private_property_identifier)] @function.method) + name: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + (method_definition - name: (property_identifier) @constructor - (#eq? @constructor "constructor")) + name: (property_identifier) @constructor + (#eq? 
@constructor "constructor")) (pair - key: [(property_identifier) (private_property_identifier)] @function.method - value: [(function_expression) (arrow_function)]) + key: [ + (property_identifier) + (private_property_identifier) + ] @function.method + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (member_expression - property: [(property_identifier) (private_property_identifier)] @function.method) - right: [(function_expression) (arrow_function)]) + property: [ + (property_identifier) + (private_property_identifier) + ] @function.method) + right: [ + (function_expression) + (arrow_function) + ]) (variable_declarator name: (identifier) @function - value: [(function_expression) (arrow_function)]) + value: [ + (function_expression) + (arrow_function) + ]) (assignment_expression left: (identifier) @function - right: [(function_expression) (arrow_function)]) + right: [ + (function_expression) + (arrow_function) + ]) (arrow_function) @function ; Parameters - (required_parameter (identifier) @variable.parameter) @@ -227,8 +216,8 @@ name: (identifier) @variable.parameter) ; Literals - (this) @variable.special + (super) @variable.special [ @@ -247,8 +236,7 @@ (undefined) (true) (false) - ] @type.builtin -) + ] @type.builtin) (comment) @comment @@ -263,11 +251,12 @@ (escape_sequence) @string.escape (regex) @string.regex + (regex_flags) @keyword.operator.regex + (number) @number ; Tokens - [ ";" "?." @@ -326,14 +315,14 @@ "..." ] @operator -(regex "/" @string.regex) +(regex + "/" @string.regex) (ternary_expression [ "?" 
":" - ] @operator -) + ] @operator) [ "(" @@ -342,7 +331,7 @@ "]" "{" "}" -] @punctuation.bracket +] @punctuation.bracket (template_substitution "${" @punctuation.special @@ -360,31 +349,32 @@ "<" @punctuation.bracket ">" @punctuation.bracket) -(decorator "@" @punctuation.special) +(decorator + "@" @punctuation.special) (union_type - ("|") @punctuation.special) + "|" @punctuation.special) (intersection_type - ("&") @punctuation.special) + "&" @punctuation.special) (type_annotation - (":") @punctuation.special) + ":" @punctuation.special) (index_signature - (":") @punctuation.special) + ":" @punctuation.special) (type_predicate_annotation - (":") @punctuation.special) + ":" @punctuation.special) (public_field_definition - ("?") @punctuation.special) + "?" @punctuation.special) (property_signature - ("?") @punctuation.special) + "?" @punctuation.special) (method_signature - ("?") @punctuation.special) + "?" @punctuation.special) (optional_parameter ([ @@ -393,7 +383,6 @@ ]) @punctuation.special) ; Keywords - [ "abstract" "as" @@ -465,4 +454,5 @@ "yield" ] @keyword.control -(switch_default "default" @keyword.control) +(switch_default + "default" @keyword.control) diff --git a/crates/languages/src/typescript/imports.scm b/crates/languages/src/typescript/imports.scm index 68ca25b2c15b7e312edbc3eeb9b2f0e493ca2d6f..de8f8db418157511d5756d6b5ede1a02a03bd831 100644 --- a/crates/languages/src/typescript/imports.scm +++ b/crates/languages/src/typescript/imports.scm @@ -1,20 +1,23 @@ (import_statement - import_clause: (import_clause - [ - (identifier) @name - (named_imports - (import_specifier - name: (_) @name - alias: (_)? @alias)) - (namespace_import) @wildcard - ]) - source: (string (string_fragment) @source)) @import + import_clause: (import_clause + [ + (identifier) @name + (named_imports + (import_specifier + name: (_) @name + alias: (_)? 
@alias)) + (namespace_import) @wildcard + ]) + source: (string + (string_fragment) @source)) @import (import_statement - !source - import_clause: (import_require_clause - source: (string (string_fragment) @source))) @wildcard @import + !source + import_clause: (import_require_clause + source: (string + (string_fragment) @source))) @wildcard @import (import_statement - !import_clause - source: (string (string_fragment) @source)) @wildcard @import + !import_clause + source: (string + (string_fragment) @source)) @wildcard @import diff --git a/crates/languages/src/typescript/indents.scm b/crates/languages/src/typescript/indents.scm index b4ac50bf5ac0bf1871523cabc9ee3683a28cd0f3..2715d2567194f00a9566e9b0c385ae8aa6258df0 100644 --- a/crates/languages/src/typescript/indents.scm +++ b/crates/languages/src/typescript/indents.scm @@ -1,17 +1,28 @@ [ - (call_expression) - (assignment_expression) - (member_expression) - (lexical_declaration) - (variable_declaration) - (assignment_expression) - ; below handled by `(_ "{" "}" @end) @indent` - ; (if_statement) - ; (for_statement) - ; (while_statement) + (call_expression) + (assignment_expression) + (member_expression) + (lexical_declaration) + (variable_declaration) + (assignment_expression) + ; below handled by `(_ "{" "}" @end) @indent` + ; (if_statement) + ; (for_statement) + ; (while_statement) ] @indent -(_ "[" "]" @end) @indent -(_ "<" ">" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "<" + ">" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/crates/languages/src/typescript/injections.scm b/crates/languages/src/typescript/injections.scm index 91880407900e7407e46982a54dbeaa3e30277bdd..a8cf9a41b5f90a6b9d02358b1b6073286fbe86ac 100644 --- a/crates/languages/src/typescript/injections.scm +++ b/crates/languages/src/typescript/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! 
injection.language "comment") -) + (#set! injection.language "comment")) (((comment) @_jsdoc_comment (#match? @_jsdoc_comment "(?s)^/[*][*][^*].*[*]/$")) @injection.content @@ -14,156 +13,187 @@ (#set! injection.language "regex")) (call_expression - function: (identifier) @_name (#eq? @_name "css") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "css") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (member_expression - object: (identifier) @_obj (#eq? @_obj "styled") + object: (identifier) @_obj + (#eq? @_obj "styled") property: (property_identifier)) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression function: (call_expression - function: (identifier) @_name (#eq? @_name "styled")) - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "css")) -) + function: (identifier) @_name + (#eq? @_name "styled")) + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "css"))) (call_expression - function: (identifier) @_name (#eq? @_name "html") + function: (identifier) @_name + (#eq? @_name "html") arguments: (template_string) @injection.content - (#set! injection.language "html") -) + (#set! injection.language "html")) (call_expression - function: (identifier) @_name (#eq? @_name "js") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "javascript")) -) + function: (identifier) @_name + (#eq? @_name "js") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "javascript"))) (call_expression - function: (identifier) @_name (#eq? 
@_name "json") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "json")) -) + function: (identifier) @_name + (#eq? @_name "json") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "json"))) (call_expression - function: (identifier) @_name (#eq? @_name "sql") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "sql")) -) + function: (identifier) @_name + (#eq? @_name "sql") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "sql"))) (call_expression - function: (identifier) @_name (#eq? @_name "ts") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "typescript")) -) + function: (identifier) @_name + (#eq? @_name "ts") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "typescript"))) (call_expression - function: (identifier) @_name (#match? @_name "^ya?ml$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "yaml")) -) + function: (identifier) @_name + (#match? @_name "^ya?ml$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "yaml"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (template_string (string_fragment) @injection.content - (#set! injection.language "graphql")) -) + function: (identifier) @_name + (#match? @_name "^g(raph)?ql$") + arguments: (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql"))) (call_expression - function: (identifier) @_name (#match? @_name "^g(raph)?ql$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "graphql"))) -) + function: (identifier) @_name + (#match? 
@_name "^g(raph)?ql$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "graphql")))) (call_expression - function: (identifier) @_name(#match? @_name "^iso$") - arguments: (arguments (template_string (string_fragment) @injection.content - (#set! injection.language "isograph"))) -) - -;; Angular Component template injection + function: (identifier) @_name + (#match? @_name "^iso$") + arguments: (arguments + (template_string + (string_fragment) @injection.content + (#set! injection.language "isograph")))) + +; Angular Component template injection (call_expression function: [ - (identifier) @_decorator (#eq? @_decorator "Component") - (member_expression property: (property_identifier) @_decorator (#eq? @_decorator "Component")) + (identifier) @_decorator + (#eq? @_decorator "Component") + (member_expression + property: (property_identifier) @_decorator + (#eq? @_decorator "Component")) ] - arguments: (arguments (object - (pair - key: (property_identifier) @_prop (#eq? @_prop "template") - value: [ - (string) @injection.content - (template_string) @injection.content - (template_string (string_fragment) @injection.content) - ] - ))) + arguments: (arguments + (object + (pair + key: (property_identifier) @_prop + (#eq? @_prop "template") + value: [ + (string) @injection.content + (template_string) @injection.content + (template_string + (string_fragment) @injection.content) + ]))) (#set! injection.language "angular")) -;; Angular Component styles injection +; Angular Component styles injection (call_expression function: [ - (identifier) @_decorator (#eq? @_decorator "Component") - (member_expression property: (property_identifier) @_decorator (#eq? @_decorator "Component")) + (identifier) @_decorator + (#eq? @_decorator "Component") + (member_expression + property: (property_identifier) @_decorator + (#eq? 
@_decorator "Component")) ] - arguments: (arguments (object - (pair - key: (property_identifier) @_prop (#eq? @_prop "styles") - value: [ - (string) @injection.content - (template_string) @injection.content - (template_string (string_fragment) @injection.content) - (array (string) @injection.content) - (array (template_string) @injection.content) - (array (template_string (string_fragment)) @injection.content) - ] - ))) + arguments: (arguments + (object + (pair + key: (property_identifier) @_prop + (#eq? @_prop "styles") + value: [ + (string) @injection.content + (template_string) @injection.content + (template_string + (string_fragment) @injection.content) + (array + (string) @injection.content) + (array + (template_string) @injection.content) + (array + (template_string + (string_fragment)) @injection.content) + ]))) (#set! injection.language "css")) ; Parse the contents of strings and tagged template ; literals with leading ECMAScript comments: ; '/* html */' or '/*html*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*html\\s*\\*\\/") - (#set! injection.language "html") -) + (#set! injection.language "html")) ; '/* sql */' or '/*sql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*sql\\s*\\*\\/") - (#set! injection.language "sql") -) + (#set! 
injection.language "sql")) ; '/* gql */' or '/*gql*/' ; '/* graphql */' or '/*graphql*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*(gql|graphql)\\s*\\*\\/") - (#set! injection.language "graphql") -) + (#set! injection.language "graphql")) ; '/* css */' or '/*css*/' -( - ((comment) @_ecma_comment [ - (string (string_fragment) @injection.content) - (template_string (string_fragment) @injection.content) +(((comment) @_ecma_comment + [ + (string + (string_fragment) @injection.content) + (template_string + (string_fragment) @injection.content) ]) (#match? @_ecma_comment "^\\/\\*\\s*(css)\\s*\\*\\/") - (#set! injection.language "css") -) + (#set! injection.language "css")) diff --git a/crates/languages/src/typescript/outline.scm b/crates/languages/src/typescript/outline.scm index 54d29007c7b7eb57c0bcaefc2c1e0ab75e4d9a6c..37991965256a0def9b0458958ac4e50c6f337af6 100644 --- a/crates/languages/src/typescript/outline.scm +++ b/crates/languages/src/typescript/outline.scm @@ -1,230 +1,275 @@ (internal_module - "namespace" @context - name: (_) @name) @item + "namespace" @context + name: (_) @name) @item (enum_declaration - "enum" @context - name: (_) @name) @item + "enum" @context + name: (_) @name) @item (type_alias_declaration - "type" @context - name: (_) @name) @item + "type" @context + name: (_) @name) @item (function_declaration - "async"? @context - "function" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? @context + "function" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (generator_function_declaration - "async"? 
@context - "function" @context - "*" @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item + "async"? @context + "function" @context + "*" @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item (interface_declaration - "interface" @context - name: (_) @name) @item + "interface" @context + name: (_) @name) @item (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Exported array destructuring (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Exported object destructuring (export_statement - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) 
@name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Top-level array destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Top-level object destructuring (program - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern - value: (identifier) @name @item) - (pair_pattern - value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (class_declaration - "class" @context - name: (_) @name) @item + "class" @context + name: (_) @name) @item (abstract_class_declaration - "abstract" @context - "class" @context - name: (_) @name) @item + "abstract" @context + "class" @context + name: (_) @name) @item ; Method definitions in classes (not in object literals) (class_body - (method_definition - [ - "get" - "set" - "async" - "*" - "readonly" - "static" - (override_modifier) - (accessibility_modifier) - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item) + (method_definition + [ + "get" + "set" + "async" + "*" + 
"readonly" + "static" + (override_modifier) + (accessibility_modifier) + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item) ; Object literal methods (variable_declarator - value: (object - (method_definition - [ - "get" - "set" - "async" - "*" - ]* @context - name: (_) @name - parameters: (formal_parameters - "(" @context - ")" @context)) @item)) + value: (object + (method_definition + [ + "get" + "set" + "async" + "*" + ]* @context + name: (_) @name + parameters: (formal_parameters + "(" @context + ")" @context)) @item)) (public_field_definition - [ - "declare" - "readonly" - "abstract" - "static" - (accessibility_modifier) - ]* @context - name: (_) @name) @item + [ + "declare" + "readonly" + "abstract" + "static" + (accessibility_modifier) + ]* @context + name: (_) @name) @item ; Add support for (node:test, bun:test and Jest) runnable -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] @context - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] @context + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . 
[ - (string (string_fragment) @name) - (identifier) @name - ] - ) - ) -) @item +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @name) + (identifier) @name + ]))) @item ; Object properties (pair - key: [ - (property_identifier) @name - (string (string_fragment) @name) - (number) @name - (computed_property_name) @name - ]) @item - + key: [ + (property_identifier) @name + (string + (string_fragment) @name) + (number) @name + (computed_property_name) @name + ]) @item ; Nested variables in function bodies (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (identifier) @name) @item)) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (identifier) @name) @item)) ; Nested array destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (array_pattern - [ - (identifier) @name @item - (assignment_pattern left: (identifier) @name @item) - (rest_pattern (identifier) @name @item) - ])))) + (lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (array_pattern + [ + (identifier) @name @item + (assignment_pattern + left: (identifier) @name @item) + (rest_pattern + (identifier) @name @item) + ])))) ; Nested object destructuring in functions (statement_block - (lexical_declaration - ["let" "const"] @context - (variable_declarator - name: (object_pattern - [(shorthand_property_identifier_pattern) @name @item - (pair_pattern value: (identifier) @name @item) - (pair_pattern value: (assignment_pattern left: (identifier) @name @item)) - (rest_pattern (identifier) @name @item)])))) + 
(lexical_declaration + [ + "let" + "const" + ] @context + (variable_declarator + name: (object_pattern + [ + (shorthand_property_identifier_pattern) @name @item + (pair_pattern + value: (identifier) @name @item) + (pair_pattern + value: (assignment_pattern + left: (identifier) @name @item)) + (rest_pattern + (identifier) @name @item) + ])))) (comment) @annotation diff --git a/crates/languages/src/typescript/overrides.scm b/crates/languages/src/typescript/overrides.scm index 8f437a1424af06aa4855aac67511926181977936..f5e99cad68a91695d9d0b19b308e3ce19f75555a 100644 --- a/crates/languages/src/typescript/overrides.scm +++ b/crates/languages/src/typescript/overrides.scm @@ -2,8 +2,10 @@ (string) @string -(template_string (string_fragment) @string) +(template_string + (string_fragment) @string) -(_ value: (call_expression - function: (identifier) @function_name_before_type_arguments - type_arguments: (type_arguments))) +(_ + value: (call_expression + function: (identifier) @function_name_before_type_arguments + type_arguments: (type_arguments))) diff --git a/crates/languages/src/typescript/runnables.scm b/crates/languages/src/typescript/runnables.scm index 6bfc53632910ce8212f739d310e3d560d05cffc1..38fee610e85f2aa2f5f7f7c58caf79b3c6a3d1ed 100644 --- a/crates/languages/src/typescript/runnables.scm +++ b/crates/languages/src/typescript/runnables.scm @@ -1,85 +1,71 @@ ; Add support for (node:test, bun:test, Jest and Deno.test) runnable ; Function expression that has `it`, `test` or `describe` as the function name -( - (call_expression - function: [ - (identifier) @_name - (member_expression - object: [ - (identifier) @_name - (member_expression object: (identifier) @_name) - ] - ) - ] - (#any-of? @_name "it" "test" "describe" "context" "suite") - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! 
tag js-test) -) +((call_expression + function: [ + (identifier) @_name + (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ]) + ] + (#any-of? @_name "it" "test" "describe" "context" "suite") + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) ; Add support for parameterized tests -( - (call_expression - function: (call_expression - function: (member_expression - object: [(identifier) @_name (member_expression object: (identifier) @_name)] - property: (property_identifier) @_property - ) - (#any-of? @_name "it" "test" "describe" "context" "suite") - (#any-of? @_property "each") - ) - arguments: ( - arguments . [ - (string (string_fragment) @run) - (identifier) @run - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (call_expression + function: (member_expression + object: [ + (identifier) @_name + (member_expression + object: (identifier) @_name) + ] + property: (property_identifier) @_property) + (#any-of? @_name "it" "test" "describe" "context" "suite") + (#any-of? @_property "each")) + arguments: (arguments + . + [ + (string + (string_fragment) @run) + (identifier) @run + ])) @_js-test + (#set! tag js-test)) ; Add support for Deno.test with string names -( - (call_expression - function: (member_expression - object: (identifier) @_namespace - property: (property_identifier) @_method - ) - (#eq? @_namespace "Deno") - (#eq? @_method "test") - arguments: ( - arguments . [ - (string (string_fragment) @run @DENO_TEST_NAME) - (identifier) @run @DENO_TEST_NAME - ] - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (member_expression + object: (identifier) @_namespace + property: (property_identifier) @_method) + (#eq? @_namespace "Deno") + (#eq? @_method "test") + arguments: (arguments + . 
+ [ + (string + (string_fragment) @run @DENO_TEST_NAME) + (identifier) @run @DENO_TEST_NAME + ])) @_js-test + (#set! tag js-test)) ; Add support for Deno.test with named function expressions -( - (call_expression - function: (member_expression - object: (identifier) @_namespace - property: (property_identifier) @_method - ) - (#eq? @_namespace "Deno") - (#eq? @_method "test") - arguments: ( - arguments . (function_expression - name: (identifier) @run @DENO_TEST_NAME - ) - ) - ) @_js-test - - (#set! tag js-test) -) +((call_expression + function: (member_expression + object: (identifier) @_namespace + property: (property_identifier) @_method) + (#eq? @_namespace "Deno") + (#eq? @_method "test") + arguments: (arguments + . + (function_expression + name: (identifier) @run @DENO_TEST_NAME))) @_js-test + (#set! tag js-test)) diff --git a/crates/languages/src/typescript/textobjects.scm b/crates/languages/src/typescript/textobjects.scm index 96289f058cd7b605a8f5b4c8966e3c372022d065..384ea482352dfb1f617357bd3af719a64425d876 100644 --- a/crates/languages/src/typescript/textobjects.scm +++ b/crates/languages/src/typescript/textobjects.scm @@ -1,114 +1,130 @@ (comment)+ @comment.around (function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (method_definition - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (function_expression - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around ((arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (statement_block + "{" + (_)* @function.inside + "}")) @function.around + (#not-has-parent? 
@function.around variable_declarator)) ; Arrow function in variable declaration - capture the full declaration ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (statement_block - "{" - (_)* @function.inside - "}")))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (statement_block + "{" + (_)* @function.inside + "}")))) ]) @function.around ; Arrow function in variable declaration - capture body as @function.inside ; (for statement blocks, the more specific pattern above captures just the contents) ([ - (lexical_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) - (variable_declaration - (variable_declarator - value: (arrow_function - body: (_) @function.inside))) + (lexical_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) + (variable_declaration + (variable_declarator + value: (arrow_function + body: (_) @function.inside))) ]) @function.around ; Catch-all for arrow functions in other contexts (callbacks, etc.) ((arrow_function - body: (_) @function.inside) @function.around - (#not-has-parent? @function.around variable_declarator)) + body: (_) @function.inside) @function.around + (#not-has-parent? 
@function.around variable_declarator)) + (function_signature) @function.around (generator_function - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (generator_function_declaration - body: (_ - "{" - (_)* @function.inside - "}")) @function.around + body: (_ + "{" + (_)* @function.inside + "}")) @function.around (class_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (class - body: (_ - "{" - (_)* @class.inside - "}" )) @class.around + body: (_ + "{" + (_)* @class.inside + "}")) @class.around (interface_declaration - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (enum_declaration - body: (_ - "{" - [(_) ","?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ","? + ]* @class.inside + "}")) @class.around (ambient_declaration - (module + (module body: (_ - "{" - [(_) ";"?]* @class.inside - "}" ))) @class.around + "{" + [ + (_) + ";"? + ]* @class.inside + "}"))) @class.around (internal_module - body: (_ - "{" - [(_) ";"?]* @class.inside - "}" )) @class.around + body: (_ + "{" + [ + (_) + ";"? + ]* @class.inside + "}")) @class.around (type_alias_declaration) @class.around diff --git a/crates/languages/src/yaml/brackets.scm b/crates/languages/src/yaml/brackets.scm index 0cfc5072d4eeda19d75ce943481670a3ee8938b0..edeb53a0d313846089e716bedff4256e2b47d94e 100644 --- a/crates/languages/src/yaml/brackets.scm +++ b/crates/languages/src/yaml/brackets.scm @@ -1,4 +1,13 @@ -("[" @open "]" @close) -("{" @open "}" @close) -(("\"" @open "\"" @close) (#set! rainbow.exclude)) -(("'" @open "'" @close) (#set! rainbow.exclude)) +("[" @open + "]" @close) + +("{" @open + "}" @close) + +(("\"" @open + "\"" @close) + (#set! rainbow.exclude)) + +(("'" @open + "'" @close) + (#set! 
rainbow.exclude)) diff --git a/crates/languages/src/yaml/highlights.scm b/crates/languages/src/yaml/highlights.scm index dfecf3f9d421cf1a574ce03dccfeb1201d8086a9..1d9c97c17a7925e5e9d87ed8e3bfba51c9b11d8b 100644 --- a/crates/languages/src/yaml/highlights.scm +++ b/crates/languages/src/yaml/highlights.scm @@ -1,4 +1,5 @@ (boolean_scalar) @boolean + (null_scalar) @constant.builtin [ @@ -25,30 +26,31 @@ key: (flow_node [ - (plain_scalar (string_scalar)) + (plain_scalar + (string_scalar)) (double_quote_scalar) (single_quote_scalar) ] @property) [ - "," - "-" - ":" - ">" - "?" - "|" + "," + "-" + ":" + ">" + "?" + "|" ] @punctuation.delimiter [ - "[" - "]" - "{" - "}" + "[" + "]" + "{" + "}" ] @punctuation.bracket [ - "*" - "&" - "---" - "..." + "*" + "&" + "---" + "..." ] @punctuation.special diff --git a/crates/languages/src/yaml/injections.scm b/crates/languages/src/yaml/injections.scm index c9de25a18f8afb7d8e0c6874401798edede9bce1..2b94b7f0cdd9d18c3c7157d9bd4adfd1b59ea061 100644 --- a/crates/languages/src/yaml/injections.scm +++ b/crates/languages/src/yaml/injections.scm @@ -1,25 +1,26 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) ; GitHub actions: JavaScript for workflow scripting (inline and block) (block_mapping (block_mapping_pair - key: (flow_node) @_uses (#eq? @_uses "uses") - value: (flow_node) @_actions_ghs (#match? @_actions_ghs "^actions/github-script")) + key: (flow_node) @_uses + (#eq? @_uses "uses") + value: (flow_node) @_actions_ghs + (#match? @_actions_ghs "^actions/github-script")) (block_mapping_pair - key: (flow_node) @_with (#eq? @_with "with") + key: (flow_node) @_with + (#eq? @_with "with") value: (block_node (block_mapping (block_mapping_pair - key: (flow_node) @_run (#eq? @_run "script") + key: (flow_node) @_run + (#eq? 
@_run "script") value: [ - (flow_node (plain_scalar (string_scalar) @injection.content)) - (block_node (block_scalar) @injection.content) + (flow_node + (plain_scalar + (string_scalar) @injection.content)) + (block_node + (block_scalar) @injection.content) ] - (#set! injection.language "javascript") - ) - ) - ) - ) -) + (#set! injection.language "javascript")))))) diff --git a/crates/languages/src/yaml/outline.scm b/crates/languages/src/yaml/outline.scm index c5a7f8e5d40388c020ec9dab83d6cee02746b581..a41447bf64cceadd1ae3d59bd2804e85bd5e8c39 100644 --- a/crates/languages/src/yaml/outline.scm +++ b/crates/languages/src/yaml/outline.scm @@ -1,9 +1,7 @@ (block_mapping_pair - key: - (flow_node - (plain_scalar - (string_scalar) @name)) - value: - (flow_node - (plain_scalar - (string_scalar) @context))?) @item + key: (flow_node + (plain_scalar + (string_scalar) @name)) + value: (flow_node + (plain_scalar + (string_scalar) @context))?) @item diff --git a/crates/languages/src/yaml/overrides.scm b/crates/languages/src/yaml/overrides.scm index 9503051a62080eb2fdfca3416ef9e5286464dd17..99c991e7d445137dc335275138a8fd68cea31d17 100644 --- a/crates/languages/src/yaml/overrides.scm +++ b/crates/languages/src/yaml/overrides.scm @@ -1,4 +1,5 @@ (comment) @comment.inclusive + [ (single_quote_scalar) (double_quote_scalar) diff --git a/crates/languages/src/yaml/redactions.scm b/crates/languages/src/yaml/redactions.scm index 85fdbd26ea0fc0b3956652ef48c61a44613337e4..56c7415e70f183afe63950511479e74512ac97f8 100644 --- a/crates/languages/src/yaml/redactions.scm +++ b/crates/languages/src/yaml/redactions.scm @@ -1 +1,2 @@ -(block_mapping_pair value: (flow_node) @redact) +(block_mapping_pair + value: (flow_node) @redact) diff --git a/crates/languages/src/zed-keybind-context/brackets.scm b/crates/languages/src/zed-keybind-context/brackets.scm index d086b2e98df0837208a13f6c6f79db84c204fb99..24c20234b639f2afe7754b1d6dceb5685ac7b8e7 100644 --- 
a/crates/languages/src/zed-keybind-context/brackets.scm +++ b/crates/languages/src/zed-keybind-context/brackets.scm @@ -1 +1,2 @@ -("(" @open ")" @close) +("(" @open + ")" @close) diff --git a/extensions/glsl/languages/glsl/brackets.scm b/extensions/glsl/languages/glsl/brackets.scm index 62e137ef2629f3b7f7aeafbad419a36d19361d19..e83d67f411a71f2602dc774531d904a949c45b9a 100644 --- a/extensions/glsl/languages/glsl/brackets.scm +++ b/extensions/glsl/languages/glsl/brackets.scm @@ -1,3 +1,8 @@ -("[" @open "]" @close) -("{" @open "}" @close) -("(" @open ")" @close) +("[" @open + "]" @close) + +("{" @open + "}" @close) + +("(" @open + ")" @close) diff --git a/extensions/glsl/languages/glsl/highlights.scm b/extensions/glsl/languages/glsl/highlights.scm index 09f94d4fb587963254c9bc31ec25b66a0e1e4323..9e40610ff5494102f8524b287ad2e50ec48d78db 100644 --- a/extensions/glsl/languages/glsl/highlights.scm +++ b/extensions/glsl/languages/glsl/highlights.scm @@ -1,82 +1,136 @@ "break" @keyword + "case" @keyword + "const" @keyword + "continue" @keyword + "default" @keyword + "do" @keyword + "else" @keyword + "enum" @keyword + "extern" @keyword + "for" @keyword + "if" @keyword + "inline" @keyword + "return" @keyword + "sizeof" @keyword + "static" @keyword + "struct" @keyword + "switch" @keyword + "typedef" @keyword + "union" @keyword + "volatile" @keyword + "while" @keyword "#define" @keyword + "#elif" @keyword + "#else" @keyword + "#endif" @keyword + "#if" @keyword + "#ifdef" @keyword + "#ifndef" @keyword + "#include" @keyword + (preproc_directive) @keyword "--" @operator + "-" @operator + "-=" @operator + "->" @operator + "=" @operator + "!=" @operator + "*" @operator + "&" @operator + "&&" @operator + "+" @operator + "++" @operator + "+=" @operator + "<" @operator + "==" @operator + ">" @operator + "||" @operator "." 
@delimiter + ";" @delimiter (string_literal) @string + (system_lib_string) @string (null) @constant + (number_literal) @number + (char_literal) @number (identifier) @variable (field_identifier) @property + (statement_identifier) @label + (type_identifier) @type + (primitive_type) @type + (sized_type_specifier) @type (call_expression function: (identifier) @function) + (call_expression function: (field_expression field: (field_identifier) @function)) + (function_declarator declarator: (identifier) @function) + (preproc_function_def name: (identifier) @function.special) ((identifier) @constant - (#match? @constant "^[A-Z][A-Z\\d_]*$")) + (#match? @constant "^[A-Z][A-Z\\d_]*$")) (comment) @comment @@ -111,7 +165,5 @@ (extension_storage_class) @storageclass -( - (identifier) @variable.builtin - (#match? @variable.builtin "^gl_") -) +((identifier) @variable.builtin + (#match? @variable.builtin "^gl_")) diff --git a/extensions/html/languages/html/highlights.scm b/extensions/html/languages/html/highlights.scm index bb3b43e813929de705605e3ecc3e0b1052c48297..21bf193cf346313024ba8df6e7457c785e21476e 100644 --- a/extensions/html/languages/html/highlights.scm +++ b/extensions/html/languages/html/highlights.scm @@ -1,12 +1,17 @@ (tag_name) @tag + (doctype) @tag.doctype + (attribute_name) @attribute + [ "\"" "'" (attribute_value) ] @string + (comment) @comment + (entity) @string.special "=" @punctuation.delimiter.html diff --git a/extensions/html/languages/html/indents.scm b/extensions/html/languages/html/indents.scm index 436663dba3e1993c84e151f09c581844fdcb977a..6e5bf97d4c3edeb251cdcffdaf6c9f9659d39849 100644 --- a/extensions/html/languages/html/indents.scm +++ b/extensions/html/languages/html/indents.scm @@ -1,5 +1,8 @@ -(start_tag ">" @end) @indent -(self_closing_tag "/>" @end) @indent +(start_tag + ">" @end) @indent + +(self_closing_tag + "/>" @end) @indent (element (start_tag) @start diff --git a/extensions/html/languages/html/injections.scm 
b/extensions/html/languages/html/injections.scm index 525b3efe29dca541afc8829dd41ff217f48439c3..e9c2c98155768fdee9a4fcefe672bebf7d4ce8f4 100644 --- a/extensions/html/languages/html/injections.scm +++ b/extensions/html/languages/html/injections.scm @@ -1,6 +1,5 @@ ((comment) @injection.content - (#set! injection.language "comment") -) + (#set! injection.language "comment")) (script_element (raw_text) @injection.content @@ -11,11 +10,15 @@ (#set! injection.language "css")) (attribute - (attribute_name) @_attribute_name (#match? @_attribute_name "^style$") - (quoted_attribute_value (attribute_value) @injection.content) - (#set! injection.language "css")) + (attribute_name) @_attribute_name + (#match? @_attribute_name "^style$") + (quoted_attribute_value + (attribute_value) @injection.content) + (#set! injection.language "css")) (attribute - (attribute_name) @_attribute_name (#match? @_attribute_name "^on[a-z]+$") - (quoted_attribute_value (attribute_value) @injection.content) - (#set! injection.language "javascript")) + (attribute_name) @_attribute_name + (#match? @_attribute_name "^on[a-z]+$") + (quoted_attribute_value + (attribute_value) @injection.content) + (#set! 
injection.language "javascript")) diff --git a/extensions/html/languages/html/overrides.scm b/extensions/html/languages/html/overrides.scm index 434f610e70242be8589a9f58cc7fd4704d5d9296..3e9e499e5c95b960e7ec9fe4e46bb078b8043092 100644 --- a/extensions/html/languages/html/overrides.scm +++ b/extensions/html/languages/html/overrides.scm @@ -1,4 +1,5 @@ (comment) @comment + (quoted_attribute_value) @string [ diff --git a/extensions/proto/languages/proto/highlights.scm b/extensions/proto/languages/proto/highlights.scm index 923e00bb1dfca30afcf41a6ab681846d8f20b900..f17c48127380a4c314f4d5b8498b16d4c9d85be6 100644 --- a/extensions/proto/languages/proto/highlights.scm +++ b/extensions/proto/languages/proto/highlights.scm @@ -52,11 +52,11 @@ "}" "<" ">" -] @punctuation.bracket +] @punctuation.bracket [ - ";" - "," + ";" + "," ] @punctuation.delimiter "=" @operator diff --git a/extensions/proto/languages/proto/indents.scm b/extensions/proto/languages/proto/indents.scm index acb44a5e1e617cc0d735228af022129c0b39d561..c096b82d2b2d6856bcb6c39bf44212507b605e38 100644 --- a/extensions/proto/languages/proto/indents.scm +++ b/extensions/proto/languages/proto/indents.scm @@ -1,3 +1,11 @@ -(_ "{" "}" @end) @indent -(_ "[" "]" @end) @indent -(_ "(" ")" @end) @indent +(_ + "{" + "}" @end) @indent + +(_ + "[" + "]" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/extensions/proto/languages/proto/outline.scm b/extensions/proto/languages/proto/outline.scm index f90b1bae33effade920bf8f2c76d7f2d187f1d8e..f0aa6eff9801cdbfe8f61395901cfb96806e10a7 100644 --- a/extensions/proto/languages/proto/outline.scm +++ b/extensions/proto/languages/proto/outline.scm @@ -1,19 +1,19 @@ (message - "message" @context - (message_name - (identifier) @name)) @item + "message" @context + (message_name + (identifier) @name)) @item (service - "service" @context - (service_name - (identifier) @name)) @item + "service" @context + (service_name + (identifier) @name)) @item (rpc - "rpc" @context - (rpc_name - 
(identifier) @name)) @item + "rpc" @context + (rpc_name + (identifier) @name)) @item (enum - "enum" @context - (enum_name - (identifier) @name)) @item + "enum" @context + (enum_name + (identifier) @name)) @item diff --git a/extensions/proto/languages/proto/textobjects.scm b/extensions/proto/languages/proto/textobjects.scm index 90ea84282da39df8a2023108c367c3ef76a0ef9a..7e859c0d65bd5d119b616d626f3d88ee6d1fc6ee 100644 --- a/extensions/proto/languages/proto/textobjects.scm +++ b/extensions/proto/languages/proto/textobjects.scm @@ -1,17 +1,21 @@ -(message (message_body +(message + (message_body "{" (_)* @class.inside "}")) @class.around -(enum (enum_body + +(enum + (enum_body "{" (_)* @class.inside "}")) @class.around + (service - "service" - (_) - "{" - (_)* @class.inside - "}") @class.around + "service" + (_) + "{" + (_)* @class.inside + "}") @class.around (rpc) @function.around diff --git a/extensions/test-extension/languages/gleam/highlights.scm b/extensions/test-extension/languages/gleam/highlights.scm index 4b85b88d0151a1bfe9018f0c526497261d6e1801..50de3a6acbe6a8b65340d288334aa7185afc8609 100644 --- a/extensions/test-extension/languages/gleam/highlights.scm +++ b/extensions/test-extension/languages/gleam/highlights.scm @@ -1,6 +1,8 @@ ; Comments (module_comment) @comment + (statement_comment) @comment + (comment) @comment ; Constants @@ -9,43 +11,61 @@ ; Variables (identifier) @variable + (discard) @comment.unused ; Modules (module) @module -(import alias: (identifier) @module) + +(import + alias: (identifier) @module) + (remote_type_identifier module: (identifier) @module) + (remote_constructor_name module: (identifier) @module) + ((field_access record: (identifier) @module field: (label) @function) - (#is-not? local)) + (#is-not? 
local)) ; Functions -(unqualified_import (identifier) @function) -(unqualified_import "type" (type_identifier) @type) -(unqualified_import (type_identifier) @constructor) +(unqualified_import + (identifier) @function) + +(unqualified_import + "type" + (type_identifier) @type) + +(unqualified_import + (type_identifier) @constructor) + (function name: (identifier) @function) + (external_function name: (identifier) @function) + (function_parameter name: (identifier) @variable.parameter) + ((function_call - function: (identifier) @function) - (#is-not? local)) + function: (identifier) @function) + (#is-not? local)) + ((binary_expression - operator: "|>" - right: (identifier) @function) - (#is-not? local)) + operator: "|>" + right: (identifier) @function) + (#is-not? local)) ; "Properties" ; Assumed to be intended to refer to a name for a field; something that comes ; before ":" or after "." ; e.g. record field names, tuple indices, names for named arguments, etc (label) @property + (tuple_access index: (integer) @property) @@ -54,10 +74,12 @@ "@" @attribute name: (identifier) @attribute) -(attribute_value (identifier) @constant) +(attribute_value + (identifier) @constant) ; Type names (remote_type_identifier) @type + (type_identifier) @type ; Data constructors @@ -65,19 +87,24 @@ ; Literals (string) @string + ((escape_sequence) @warning - ; Deprecated in v0.33.0-rc2: - (#eq? @warning "\\e")) + ; Deprecated in v0.33.0-rc2: + (#eq? @warning "\\e")) + (escape_sequence) @string.escape + (bit_string_segment_option) @function.builtin + (integer) @number + (float) @number ; Reserved identifiers ; TODO: when tree-sitter supports `#any-of?` in the Rust bindings, ; refactor this to use `#any-of?` rather than `#match?` ((identifier) @warning - (#match? @warning "^(auto|delegate|derive|else|implement|macro|test|echo)$")) + (#match? 
@warning "^(auto|delegate|derive|else|implement|macro|test|echo)$")) ; Keywords [ @@ -102,8 +129,12 @@ ; Operators (binary_expression operator: _ @operator) -(boolean_negation "!" @operator) -(integer_negation "-" @operator) + +(boolean_negation + "!" @operator) + +(integer_negation + "-" @operator) ; Punctuation [ @@ -116,10 +147,11 @@ "<<" ">>" ] @punctuation.bracket + [ "." "," - ;; Controversial -- maybe some are operators? + ; Controversial -- maybe some are operators? ":" "#" "=" diff --git a/extensions/test-extension/languages/gleam/indents.scm b/extensions/test-extension/languages/gleam/indents.scm index 112b414aa45f277138d0c681851129a608ee96e0..92f1a04d86d34d60763cceb872c5ac1004ba4601 100644 --- a/extensions/test-extension/languages/gleam/indents.scm +++ b/extensions/test-extension/languages/gleam/indents.scm @@ -1,3 +1,11 @@ -(_ "[" "]" @end) @indent -(_ "{" "}" @end) @indent -(_ "(" ")" @end) @indent +(_ + "[" + "]" @end) @indent + +(_ + "{" + "}" @end) @indent + +(_ + "(" + ")" @end) @indent diff --git a/extensions/test-extension/languages/gleam/outline.scm b/extensions/test-extension/languages/gleam/outline.scm index 5df7a6af800e8e3c9f0b00834576f2e059bd12b0..2d1a7d800fb9c662f41a0a865e492716b876f2fd 100644 --- a/extensions/test-extension/languages/gleam/outline.scm +++ b/extensions/test-extension/languages/gleam/outline.scm @@ -1,31 +1,31 @@ (external_type - (visibility_modifier)? @context - "type" @context - (type_name) @name) @item + (visibility_modifier)? @context + "type" @context + (type_name) @name) @item (type_definition - (visibility_modifier)? @context - (opacity_modifier)? @context - "type" @context - (type_name) @name) @item + (visibility_modifier)? @context + (opacity_modifier)? @context + "type" @context + (type_name) @name) @item (data_constructor - (constructor_name) @name) @item + (constructor_name) @name) @item (data_constructor_argument - (label) @name) @item + (label) @name) @item (type_alias - (visibility_modifier)? 
@context - "type" @context - (type_name) @name) @item + (visibility_modifier)? @context + "type" @context + (type_name) @name) @item (function - (visibility_modifier)? @context - "fn" @context - name: (_) @name) @item + (visibility_modifier)? @context + "fn" @context + name: (_) @name) @item (constant - (visibility_modifier)? @context - "const" @context - name: (_) @name) @item + (visibility_modifier)? @context + "const" @context + name: (_) @name) @item From 88df73c8b5babc36e6c580a11343aaaff691a09b Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 27 Feb 2026 14:01:39 +0100 Subject: [PATCH 156/548] Add PR 50138 to `.git-blame-ignore-revs` (#50293) This PR adds https://github.com/zed-industries/zed/pull/50138 to the `.git-blame-ignore-revs` file. Release Notes: - N/A --- .git-blame-ignore-revs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index fbcc76a8654f7ed2241fb05c305eb466e3177c20..041826ad8a676e154edac9c306cee4e5816e6f62 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -34,3 +34,7 @@ ffdda588b41f7d9d270ffe76cab116f828ad545e # 2024-07-24 docs: Format docs # https://github.com/zed-industries/zed/pull/15352 3a44a59f8ec114ac1ba22f7da1652717ef7e4e5c + +# 2026-02-27 Format Tree-sitter query files +# https://github.com/zed-industries/zed/pull/50138 +5ed538f49c54ca464bb9d1e59446060a3a925668 From d9ee18a12a20279b4b9ad9737d73bcb717ae456a Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 27 Feb 2026 10:08:31 -0300 Subject: [PATCH 157/548] docs: Add footer with links to pages (#50296) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR includes a little footer with handy links in all docs pages: Screenshot 2026-02-27 at 10  03@2x Release Notes: - N/A --- docs/theme/css/chrome.css | 38 +++++++++++++++++++++++++++++++++++- docs/theme/css/variables.css | 2 ++ docs/theme/index.hbs | 37 
+++++++++++++++++++++++++++++++++++ 3 files changed, 76 insertions(+), 1 deletion(-) diff --git a/docs/theme/css/chrome.css b/docs/theme/css/chrome.css index 637da6063dee3c20170cb334e7bee7843fea6721..3f4fa40bc41a9c034c50c94c10fd8d0222d6b720 100644 --- a/docs/theme/css/chrome.css +++ b/docs/theme/css/chrome.css @@ -70,7 +70,9 @@ a > .hljs { } .logo-nav { - display: block; + display: flex; + align-items: center; + justify-content: center; filter: var(--logo-brightness); } @@ -140,6 +142,40 @@ a > .hljs { text-decoration-color: transparent !important; } +.footer { + display: flex; + align-items: center; + justify-content: center; + flex-wrap: wrap; + gap: 0.5rem; + padding: 24px 0; + border-top: 1px dashed var(--border-footer); +} + +.footer-link { + font-size: 1.25rem; + color: var(--links); + text-decoration: underline; + text-decoration-color: var(--link-line-decoration); + text-wrap-mode: nowrap; + + :hover { + text-decoration-color: var(--link-line-decoration-hover); + } +} + +.footer-separator { + color: var(--border-light); + font-size: 0.8em; +} + +.footer-logo { + height: 16px; + border-radius: 0 !important; + border: none !important; + background: transparent !important; +} + .mobile-nav-chapters { font-size: 2.5em; text-align: center; diff --git a/docs/theme/css/variables.css b/docs/theme/css/variables.css index adbb5da74009b96d3d285c4e37a79ee758eb57c7..46ea739daf8643db5ad57a239091e557df2a3d0c 100644 --- a/docs/theme/css/variables.css +++ b/docs/theme/css/variables.css @@ -27,6 +27,7 @@ --border: hsl(220, 13%, 80%); --border-light: hsl(220, 13%, 90%); --border-hover: hsl(220, 13%, 70%); + --border-footer: hsl(220, 13%, 91%); --media-bg: hsl(50, 25%, 92%); @@ -124,6 +125,7 @@ --border: hsl(220, 13%, 20%); --border-light: hsl(220, 13%, 15%); --border-hover: hsl(220, 13%, 40%); + --border-footer: hsl(220, 13%, 12%); --media-bg: hsl(220, 13%, 8%); diff --git a/docs/theme/index.hbs b/docs/theme/index.hbs index 
98f64d41c3eb86dfb335ecf0964f434c50fad0bb..8e6d185a57874a84bd373115e2f4b988a6c0b864 100644 --- a/docs/theme/index.hbs +++ b/docs/theme/index.hbs @@ -307,6 +307,43 @@ {{/next}}
+
From ab7cf50857ec173578affa7b6beb35fa0350635a Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Fri, 27 Feb 2026 14:24:26 +0100 Subject: [PATCH 158/548] Revert "sidebar: Zoom temporarily hides sidebar (#50088)" (#50297) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit aa3a12b566, because we no longer want a zoomed item to block the sidebar. This is because the sidebar is used for navigation and it interrupts a user's flow if they're scrolling through their active workspaces, and the sidebar just vanishes. ### Before image ### After Screenshot 2026-02-27 at 2 10
24 PM Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/workspace/src/multi_workspace.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index d58101f8b5c266fafa2120d8fe58634dc2414762..cd77f4fe30461b5f726c3bcd2f5f78b561e4d415 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -672,7 +672,6 @@ impl MultiWorkspace { impl Render for MultiWorkspace { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let multi_workspace_enabled = self.multi_workspace_enabled(cx); - let is_zoomed = self.workspace().read(cx).zoomed_item().is_some(); let sidebar: Option = if multi_workspace_enabled && self.sidebar_open { self.sidebar.as_ref().map(|sidebar_handle| { @@ -784,14 +783,13 @@ impl Render for MultiWorkspace { .flex_1() .size_full() .overflow_hidden() - .when(is_zoomed, |this| this.absolute().inset_0()) .child(self.workspace().clone()), ) .child(self.workspace().read(cx).modal_layer.clone()), window, cx, Tiling { - left: multi_workspace_enabled && self.sidebar_open && !is_zoomed, + left: multi_workspace_enabled && self.sidebar_open, ..Tiling::default() }, ) From d15263e45abd8da7692ecfc8ed12c57746ee2878 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Fri, 27 Feb 2026 14:31:12 +0100 Subject: [PATCH 159/548] gpui: Gate Linux screen capture APIs behind feature flag (#50300) We were missing the cfg statements in the `LinuxClient` trait definition Release Notes: - N/A --- crates/gpui_linux/src/linux/platform.rs | 4 ++-- 
1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/gpui_linux/src/linux/platform.rs b/crates/gpui_linux/src/linux/platform.rs index f044b086a580ea70ef2b959ed5e8a0931f4ce4e9..ff79aa64b2f7cd61c3ab6a8b54e2e11b72614d0f 100644 --- a/crates/gpui_linux/src/linux/platform.rs +++ b/crates/gpui_linux/src/linux/platform.rs @@ -55,12 +55,12 @@ pub(crate) trait LinuxClient { fn display(&self, id: DisplayId) -> Option>; fn primary_display(&self) -> Option>; - #[allow(dead_code)] + #[cfg(feature = "screen-capture")] fn is_screen_capture_supported(&self) -> bool { false } - #[allow(dead_code)] + #[cfg(feature = "screen-capture")] fn screen_capture_sources( &self, ) -> oneshot::Receiver>>> { From 9c7686942ea84cb0d5afb69856f457d1090f2fff Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Fri, 27 Feb 2026 15:02:16 +0100 Subject: [PATCH 160/548] ci: Add check for formatting of query files (#50140) Follow-up to https://github.com/zed-industries/zed/pull/50138 I deliberately decided against adding this in a separete job because ts_query_ls is very fast when it comes to both formatting these as well as checking for proper formatting. Will see here how long it takes to install and whether we might need to adjust to account for the installation time. 
Release Notes: - N/A --- .github/workflows/run_tests.yml | 15 +++++++++ .../xtask/src/tasks/workflows/run_tests.rs | 32 ++++++++++++++++++- 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 97e918aab37f3dc375eb259f416f7998b4b196fd..cdf01b1aa91dd722de2533887d44182a9c55fcb0 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -139,6 +139,21 @@ jobs: uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06 with: config: ./typos.toml + - name: run_tests::check_style::fetch_ts_query_ls + uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c + with: + repo: ribru17/ts_query_ls + version: tags/v3.15.1 + file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + - name: run_tests::check_style::run_ts_query_ls + run: |- + tar -xf ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + ./ts_query_ls format --check . || { + echo "Found unformatted queries, please format them with ts_query_ls." 
+ echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + } timeout-minutes: 60 clippy_windows: needs: diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 8b633edab6d81ad71c31e25c5171af076402fa9d..f40e02335c5bbccd396ebed916a9a4d3a6a22f29 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -3,6 +3,7 @@ use gh_workflow::{ Workflow, }; use indexmap::IndexMap; +use indoc::formatdoc; use crate::tasks::workflows::{ steps::{CommonJobConditions, repository_owner_guard_expression}, @@ -267,6 +268,9 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob { named::job(job) } +const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz"; +const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1"; + fn check_style() -> NamedJob { fn check_for_typos() -> Step { named::uses( @@ -276,6 +280,30 @@ fn check_style() -> NamedJob { ) // v1.40.0 .with(("config", "./typos.toml")) } + + fn fetch_ts_query_ls() -> Step { + named::uses( + "dsaltares", + "fetch-gh-release-asset", + "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c", + ) // v1.1.1 + .add_with(("repo", "ribru17/ts_query_ls")) + .add_with(("version", CI_TS_QUERY_RELEASE)) + .add_with(("file", TS_QUERY_LS_FILE)) + } + + fn run_ts_query_ls() -> Step { + named::bash(formatdoc!( + r#"tar -xf {TS_QUERY_LS_FILE} + ./ts_query_ls format --check . || {{ + echo "Found unformatted queries, please format them with ts_query_ls." 
+ echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + }}"# + )) + } + named::job( release_job(&[]) .runs_on(runners::LINUX_MEDIUM) @@ -286,7 +314,9 @@ fn check_style() -> NamedJob { .add_step(steps::cargo_fmt()) .add_step(steps::script("./script/check-todos")) .add_step(steps::script("./script/check-keymaps")) - .add_step(check_for_typos()), + .add_step(check_for_typos()) + .add_step(fetch_ts_query_ls()) + .add_step(run_ts_query_ls()), ) } From 6796539d20bfd34e30639e67d51ff7cc9f447ab7 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Fri, 27 Feb 2026 15:25:57 +0100 Subject: [PATCH 161/548] agent: Allow jumping from diff to actual file (#50292) If you place your cursor inside an agent diff you can now jump to the corresponding file by running `editor: Open excerpts`/`editor: Open excerpts split`. https://github.com/user-attachments/assets/cf3dba4e-68b7-4643-b9a9-844c2df58295 Release Notes: - agent: Allow jumping to a file from a diff inside the agent conversation with option-enter (`editor: Open excerpts`) --------- Co-authored-by: Danilo Leal --- crates/acp_thread/src/diff.rs | 10 + .../src/connection_view/thread_view.rs | 308 ++++++++++++------ crates/agent_ui/src/entry_view_state.rs | 49 ++- 3 files changed, 272 insertions(+), 95 deletions(-) diff --git a/crates/acp_thread/src/diff.rs b/crates/acp_thread/src/diff.rs index 8886b458d623237b74f715d3c1d0def33fbefa7d..08b1b9bdf24d1ff9980164c1af8b3e60bd2f3339 100644 --- a/crates/acp_thread/src/diff.rs +++ b/crates/acp_thread/src/diff.rs @@ -149,6 +149,16 @@ impl Diff { } } + pub fn file_path(&self, cx: &App) -> Option { + match self { + Self::Pending(PendingDiff { new_buffer, .. }) => new_buffer + .read(cx) + .file() + .map(|file| file.full_path(cx).to_string_lossy().into_owned()), + Self::Finalized(FinalizedDiff { path, .. }) => Some(path.clone()), + } + } + pub fn multibuffer(&self) -> &Entity { match self { Self::Pending(PendingDiff { multibuffer, .. 
}) => multibuffer, diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 777a54312e8d4c35a100c6c1f7e5ac446613c4b9..ddeabf46c9ed85ea2a70f1d935f53a764ba66323 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -1,4 +1,5 @@ use cloud_api_types::{SubmitAgentThreadFeedbackBody, SubmitAgentThreadFeedbackCommentsBody}; +use editor::actions::OpenExcerpts; use gpui::{Corner, List}; use language_model::{LanguageModelEffortLevel, Speed}; use settings::update_settings_file; @@ -578,9 +579,70 @@ impl ThreadView { ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::Cancel) => { self.cancel_editing(&Default::default(), window, cx); } + ViewEvent::OpenDiffLocation { + path, + position, + split, + } => { + self.open_diff_location(path, *position, *split, window, cx); + } } } + fn open_diff_location( + &self, + path: &str, + position: Point, + split: bool, + window: &mut Window, + cx: &mut Context, + ) { + let Some(project) = self.project.upgrade() else { + return; + }; + let Some(project_path) = project.read(cx).find_project_path(path, cx) else { + return; + }; + + let open_task = if split { + self.workspace + .update(cx, |workspace, cx| { + workspace.split_path(project_path, window, cx) + }) + .log_err() + } else { + self.workspace + .update(cx, |workspace, cx| { + workspace.open_path(project_path, None, true, window, cx) + }) + .log_err() + }; + + let Some(open_task) = open_task else { + return; + }; + + window + .spawn(cx, async move |cx| { + let item = open_task.await?; + let Some(editor) = item.downcast::() else { + return anyhow::Ok(()); + }; + editor.update_in(cx, |editor, window, cx| { + editor.change_selections( + SelectionEffects::scroll(Autoscroll::center()), + window, + cx, + |selections| { + selections.select_ranges([position..position]); + }, + ); + })?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + // turns pub 
fn start_turn(&mut self, cx: &mut Context) -> usize { @@ -4995,14 +5057,20 @@ impl ThreadView { matches!(tool_call.kind, acp::ToolKind::Edit) || tool_call.diffs().next().is_some(); let is_cancelled_edit = is_edit && matches!(tool_call.status, ToolCallStatus::Canceled); - let has_revealed_diff = tool_call.diffs().next().is_some_and(|diff| { - self.entry_view_state - .read(cx) - .entry(entry_ix) - .and_then(|entry| entry.editor_for_diff(diff)) - .is_some() - && diff.read(cx).has_revealed_range(cx) - }); + let (has_revealed_diff, tool_call_output_focus) = tool_call + .diffs() + .next() + .and_then(|diff| { + let editor = self + .entry_view_state + .read(cx) + .entry(entry_ix) + .and_then(|entry| entry.editor_for_diff(diff))?; + let has_revealed_diff = diff.read(cx).has_revealed_range(cx); + let has_focus = editor.read(cx).is_focused(window); + Some((has_revealed_diff, has_focus)) + }) + .unwrap_or((false, false)); let use_card_layout = needs_confirmation || is_edit || is_terminal_tool; @@ -5211,7 +5279,12 @@ impl ThreadView { .map(|this| { if is_terminal_tool { let label_source = tool_call.label.read(cx).source(); - this.child(self.render_collapsible_command(true, label_source, &tool_call.id, cx)) + this.child(self.render_collapsible_command( + true, + label_source, + &tool_call.id, + cx, + )) } else { this.child( h_flex() @@ -5235,97 +5308,148 @@ impl ThreadView { window, cx, )) - .when(is_collapsible || failed_or_canceled, |this| { - let diff_for_discard = - if has_revealed_diff && is_cancelled_edit && cx.has_flag::() { - tool_call.diffs().next().cloned() - } else { - None - }; - this.child( - h_flex() - .px_1() - .when_some(diff_for_discard.clone(), |this, _| this.pr_0p5()) - .gap_1() - .when(is_collapsible, |this| { - this.child( - Disclosure::new(("expand-output", entry_ix), is_open) - .opened_icon(IconName::ChevronUp) - .closed_icon(IconName::ChevronDown) - .visible_on_hover(&card_header_id) - .on_click(cx.listener({ - let id = tool_call.id.clone(); - move 
|this: &mut Self, _, _, cx: &mut Context| { + .child( + h_flex() + .gap_0p5() + .when(is_collapsible || failed_or_canceled, |this| { + let diff_for_discard = if has_revealed_diff + && is_cancelled_edit + && cx.has_flag::() + { + tool_call.diffs().next().cloned() + } else { + None + }; + + this.child( + h_flex() + .px_1() + .when_some(diff_for_discard.clone(), |this, _| { + this.pr_0p5() + }) + .gap_1() + .when(is_collapsible, |this| { + this.child( + Disclosure::new( + ("expand-output", entry_ix), + is_open, + ) + .opened_icon(IconName::ChevronUp) + .closed_icon(IconName::ChevronDown) + .visible_on_hover(&card_header_id) + .on_click(cx.listener({ + let id = tool_call.id.clone(); + move |this: &mut Self, + _, + _, + cx: &mut Context| { if is_open { - this - .expanded_tool_calls.remove(&id); + this.expanded_tool_calls + .remove(&id); } else { - this.expanded_tool_calls.insert(id.clone()); + this.expanded_tool_calls + .insert(id.clone()); } - cx.notify(); + cx.notify(); + } + })), + ) + }) + .when(failed_or_canceled, |this| { + if is_cancelled_edit && !has_revealed_diff { + this.child( + div() + .id(entry_ix) + .tooltip(Tooltip::text( + "Interrupted Edit", + )) + .child( + Icon::new(IconName::XCircle) + .color(Color::Muted) + .size(IconSize::Small), + ), + ) + } else if is_cancelled_edit { + this + } else { + this.child( + Icon::new(IconName::Close) + .color(Color::Error) + .size(IconSize::Small), + ) } - })), + }) + .when_some(diff_for_discard, |this, diff| { + let tool_call_id = tool_call.id.clone(); + let is_discarded = self + .discarded_partial_edits + .contains(&tool_call_id); + + this.when(!is_discarded, |this| { + this.child( + IconButton::new( + ("discard-partial-edit", entry_ix), + IconName::Undo, + ) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::with_meta( + "Discard Interrupted Edit", + None, + "You can discard this interrupted partial edit and restore the original file content.", + cx, + ) + }) + .on_click(cx.listener({ + let 
tool_call_id = + tool_call_id.clone(); + move |this, _, _window, cx| { + let diff_data = diff.read(cx); + let base_text = diff_data + .base_text() + .clone(); + let buffer = + diff_data.buffer().clone(); + buffer.update( + cx, + |buffer, cx| { + buffer.set_text( + base_text.as_ref(), + cx, + ); + }, + ); + this.discarded_partial_edits + .insert( + tool_call_id.clone(), + ); + cx.notify(); + } + })), + ) + }) + }), ) - }) - .when(failed_or_canceled, |this| { - if is_cancelled_edit && !has_revealed_diff { - this.child( - div() - .id(entry_ix) - .tooltip(Tooltip::text( - "Interrupted Edit", - )) - .child( - Icon::new(IconName::XCircle) - .color(Color::Muted) - .size(IconSize::Small), - ), - ) - } else if is_cancelled_edit { - this - } else { - this.child( - Icon::new(IconName::Close) - .color(Color::Error) - .size(IconSize::Small), + }) + .when(tool_call_output_focus, |this| { + this.child( + Button::new("open-file-button", "Open File") + .label_size(LabelSize::Small) + .style(ButtonStyle::OutlinedGhost) + .key_binding( + KeyBinding::for_action(&OpenExcerpts, cx) + .map(|s| s.size(rems_from_px(12.))), ) - } - }) - .when_some(diff_for_discard, |this, diff| { - let tool_call_id = tool_call.id.clone(); - let is_discarded = self.discarded_partial_edits.contains(&tool_call_id); - this.when(!is_discarded, |this| { - this.child( - IconButton::new( - ("discard-partial-edit", entry_ix), - IconName::Undo, + .on_click(|_, window, cx| { + window.dispatch_action( + Box::new(OpenExcerpts), + cx, ) - .icon_size(IconSize::Small) - .tooltip(move |_, cx| Tooltip::with_meta( - "Discard Interrupted Edit", - None, - "You can discard this interrupted partial edit and restore the original file content.", - cx - )) - .on_click(cx.listener({ - let tool_call_id = tool_call_id.clone(); - move |this, _, _window, cx| { - let diff_data = diff.read(cx); - let base_text = diff_data.base_text().clone(); - let buffer = diff_data.buffer().clone(); - buffer.update(cx, |buffer, cx| { - 
buffer.set_text(base_text.as_ref(), cx); - }); - this.discarded_partial_edits.insert(tool_call_id.clone()); - cx.notify(); - } - })), - ) - }) - }) + }), + ) + }), + ) - ) - }), ) } }) diff --git a/crates/agent_ui/src/entry_view_state.rs b/crates/agent_ui/src/entry_view_state.rs index ea794cd5234c60b0932f1a26813854fdb28dcc95..b06d67f63b997e67ca891ab6238e0bd2ce94a304 100644 --- a/crates/agent_ui/src/entry_view_state.rs +++ b/crates/agent_ui/src/entry_view_state.rs @@ -5,7 +5,7 @@ use acp_thread::{AcpThread, AgentThreadEntry}; use agent::ThreadStore; use agent_client_protocol::{self as acp, ToolCallId}; use collections::HashMap; -use editor::{Editor, EditorMode, MinimapVisibility, SizingBehavior}; +use editor::{Editor, EditorEvent, EditorMode, MinimapVisibility, SizingBehavior}; use gpui::{ AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, FocusHandle, Focusable, ScrollHandle, SharedString, TextStyleRefinement, WeakEntity, Window, @@ -13,6 +13,7 @@ use gpui::{ use language::language_settings::SoftWrap; use project::Project; use prompt_store::PromptStore; +use rope::Point; use settings::Settings as _; use terminal_view::TerminalView; use theme::ThemeSettings; @@ -168,12 +169,48 @@ impl EntryViewState { for diff in diffs { views.entry(diff.entity_id()).or_insert_with(|| { - let element = create_editor_diff(diff.clone(), window, cx).into_any(); + let editor = create_editor_diff(diff.clone(), window, cx); + cx.subscribe(&editor, { + let diff = diff.clone(); + let entry_index = index; + move |_this, _editor, event: &EditorEvent, cx| { + if let EditorEvent::OpenExcerptsRequested { + selections_by_buffer, + split, + } = event + { + let multibuffer = diff.read(cx).multibuffer(); + if let Some((buffer_id, (ranges, _))) = + selections_by_buffer.iter().next() + { + if let Some(buffer) = + multibuffer.read(cx).buffer(*buffer_id) + { + if let Some(range) = ranges.first() { + let point = + buffer.read(cx).offset_to_point(range.start.0); + if let Some(path) = 
diff.read(cx).file_path(cx) { + cx.emit(EntryViewEvent { + entry_index, + view_event: ViewEvent::OpenDiffLocation { + path, + position: point, + split: *split, + }, + }); + } + } + } + } + } + } + }) + .detach(); cx.emit(EntryViewEvent { entry_index: index, view_event: ViewEvent::NewDiff(id.clone()), }); - element + editor.into_any() }); } } @@ -242,6 +279,11 @@ pub enum ViewEvent { NewTerminal(ToolCallId), TerminalMovedToBackground(ToolCallId), MessageEditorEvent(Entity, MessageEditorEvent), + OpenDiffLocation { + path: String, + position: Point, + split: bool, + }, } #[derive(Default, Debug)] @@ -379,6 +421,7 @@ fn create_editor_diff( editor.scroll_manager.set_forbid_vertical_scroll(true); editor.set_show_indent_guides(false, cx); editor.set_read_only(true); + editor.set_delegate_open_excerpts(true); editor.set_show_breakpoints(false, cx); editor.set_show_code_actions(false, cx); editor.set_show_git_diff_gutter(false, cx); From cabf189854083b8e8531e9476d531e0fc68f8e3d Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Fri, 27 Feb 2026 15:31:54 +0100 Subject: [PATCH 162/548] agent: Render subagent labels as they stream in (#50306) Release Notes: - N/A --- crates/agent/src/tools/spawn_agent_tool.rs | 11 +++++-- .../src/connection_view/thread_view.rs | 29 ++++++++++--------- 2 files changed, 24 insertions(+), 16 deletions(-) diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index f46e85ce26d9194047ef62223393db0ac30f0f4b..2c5c40c704464639ca43b7da32ab8ae0239e3b6a 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -83,9 +83,14 @@ impl AgentTool for SpawnAgentTool { input: Result, _cx: &mut App, ) -> SharedString { - input - .map(|i| i.label.into()) - .unwrap_or_else(|_| "Spawning agent".into()) + match input { + Ok(i) => i.label.into(), + Err(value) => value + .get("label") + .and_then(|v| v.as_str()) + .map(|s| SharedString::from(s.to_owned())) + .unwrap_or_else(|| 
"Spawning agent".into()), + } } fn run( diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index ddeabf46c9ed85ea2a70f1d935f53a764ba66323..20d860c5c14fd8c5c50be3b2bc8eefb89d9d7db6 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -6365,22 +6365,25 @@ impl ThreadView { ToolCallStatus::Canceled | ToolCallStatus::Failed | ToolCallStatus::Rejected ); - let has_title = thread + let thread_title = thread .as_ref() - .is_some_and(|t| !t.read(cx).title().is_empty()); + .map(|t| t.read(cx).title()) + .filter(|t| !t.is_empty()); + let tool_call_label = tool_call.label.read(cx).source().to_string(); + let has_tool_call_label = !tool_call_label.is_empty(); + + let has_title = thread_title.is_some() || has_tool_call_label; let has_no_title_or_canceled = !has_title || is_canceled_or_failed; - let title = thread - .as_ref() - .map(|t| t.read(cx).title()) - .unwrap_or_else(|| { - if is_canceled_or_failed { - "Subagent Canceled" - } else { - "Spawning Subagent…" - } - .into() - }); + let title: SharedString = if let Some(thread_title) = thread_title { + thread_title + } else if !tool_call_label.is_empty() { + tool_call_label.into() + } else if is_canceled_or_failed { + "Subagent Canceled".into() + } else { + "Spawning agent…".into() + }; let card_header_id = format!("subagent-header-{}", entry_ix); let diff_stat_id = format!("subagent-diff-{}", entry_ix); From e762bb96a2994abb059f61fac815b6404215492b Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Feb 2026 08:32:29 -0700 Subject: [PATCH 163/548] Remove unwrap() from `lsp::Uri::from_file_path` (#50244) Fixes ZED-3BM Fixes ZED-1RT Release Notes: - Windows: Fixed a panic registering a path with language servers when the UNC path cannot be represented by a Rust URI. 
--------- Co-authored-by: MrSubidubi --- crates/project/src/lsp_store.rs | 12 ++++++++++-- crates/project/src/lsp_store/lsp_ext_command.rs | 6 +++--- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index ad3d4bdb703548f86304ac6c3892f3cabab01caa..676fd06f495ed6a69b246cc6a0df2ca6ca60a6b0 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -11406,6 +11406,15 @@ impl LspStore { let buffer_id = buffer.remote_id(); if local.registered_buffers.contains_key(&buffer_id) { + let abs_path = file.abs_path(cx); + let uri = match lsp::Uri::from_file_path(&abs_path) { + Ok(uri) => uri, + Err(()) => { + log::error!("failed to convert path to URI: {:?}", abs_path); + continue; + } + }; + let versions = local .buffer_snapshots .entry(buffer_id) @@ -11427,14 +11436,13 @@ impl LspStore { let snapshot = versions.last().unwrap(); let version = snapshot.version; let initial_snapshot = &snapshot.snapshot; - let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap(); language_server.register_buffer( uri, adapter.language_id(&language.name()), version, initial_snapshot.text(), ); - buffer_paths_registered.push((buffer_id, file.abs_path(cx))); + buffer_paths_registered.push((buffer_id, abs_path)); local .buffers_opened_in_servers .entry(buffer_id) diff --git a/crates/project/src/lsp_store/lsp_ext_command.rs b/crates/project/src/lsp_store/lsp_ext_command.rs index 270db67576f0a02155997757a01d489d44ef1766..9c284a143613c47aa3a5fcc9af5afac9d6dbbf4d 100644 --- a/crates/project/src/lsp_store/lsp_ext_command.rs +++ b/crates/project/src/lsp_store/lsp_ext_command.rs @@ -211,10 +211,10 @@ impl LspCommand for OpenDocs { _: &Arc, _: &App, ) -> Result { + let uri = lsp::Uri::from_file_path(path) + .map_err(|()| anyhow::anyhow!("{path:?} is not a valid URI"))?; Ok(OpenDocsParams { - text_document: lsp::TextDocumentIdentifier { - uri: lsp::Uri::from_file_path(path).unwrap(), - }, + 
text_document: lsp::TextDocumentIdentifier { uri }, position: point_to_lsp(self.position), }) } From 112b90c4a4b70cd0c623180c56e96e77fbb072cb Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Fri, 27 Feb 2026 17:08:57 +0100 Subject: [PATCH 164/548] agent_servers: Use more stable hasher for binary artifact downloads (#50315) Since there aren't as many guarantees on the default hasher, uses sha256 like our other github downloaders. Release Notes: - N/A --- crates/project/src/agent_server_store.rs | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index b0c10086cac1c39c4570b416e790df85cdc55cf0..f12e4da5cd39847c94c32fd26c826dff886edbf7 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -21,6 +21,7 @@ use rpc::{ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{RegisterSetting, SettingsStore}; +use sha2::{Digest, Sha256}; use task::Shell; use util::{ResultExt as _, debug_panic}; @@ -1075,12 +1076,10 @@ impl ExternalAgentServer for LocalExtensionArchiveAgent { // Use URL as version identifier for caching // Hash the URL to get a stable directory name - use std::collections::hash_map::DefaultHasher; - use std::hash::{Hash, Hasher}; - let mut hasher = DefaultHasher::new(); - archive_url.hash(&mut hasher); - let url_hash = hasher.finish(); - let version_dir = dir.join(format!("v_{:x}", url_hash)); + let mut hasher = Sha256::new(); + hasher.update(archive_url.as_bytes()); + let url_hash = format!("{:x}", hasher.finalize()); + let version_dir = dir.join(format!("v_{}", url_hash)); if !fs.is_dir(&version_dir).await { // Determine SHA256 for verification @@ -1273,12 +1272,10 @@ impl ExternalAgentServer for LocalRegistryArchiveAgent { let archive_url = &target_config.archive; - use std::collections::hash_map::DefaultHasher; - use std::hash::{Hash, Hasher}; - let mut hasher = 
DefaultHasher::new(); - archive_url.hash(&mut hasher); - let url_hash = hasher.finish(); - let version_dir = dir.join(format!("v_{:x}", url_hash)); + let mut hasher = Sha256::new(); + hasher.update(archive_url.as_bytes()); + let url_hash = format!("{:x}", hasher.finalize()); + let version_dir = dir.join(format!("v_{}", url_hash)); if !fs.is_dir(&version_dir).await { let sha256 = if let Some(provided_sha) = &target_config.sha256 { From d39277b6e058df48d47b486ee3f406fd6b09dbdf Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 27 Feb 2026 18:50:51 +0100 Subject: [PATCH 165/548] xtask: Fix cargo xtask web-examples build on stable (#50325) Avert thy eyes rust project folks Release Notes: - N/A *or* Added/Fixed/Improved ... --- Cargo.lock | 81 ++++++++++++------------- Cargo.toml | 2 +- tooling/xtask/src/tasks/web_examples.rs | 4 ++ 3 files changed, 44 insertions(+), 43 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 06def4875c2517965381840faabaab45126fdea2..a5b7662a95341f68e82a6a9e95c23e3fd76f027a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -692,6 +692,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "ar_archive_writer" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b" +dependencies = [ + "object 0.37.3", +] + [[package]] name = "arbitrary" version = "1.4.2" @@ -4961,7 +4970,7 @@ dependencies = [ "libc", "option-ext", "redox_users 0.5.2", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -5727,7 +5736,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -8923,9 +8932,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.81" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305" +checksum = "14dc6f6450b3f6d4ed5b16327f38fed626d375a886159ca555bd7822c0c3a5a6" dependencies = [ "once_cell", "wasm-bindgen", @@ -10485,7 +10494,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "536bfad37a309d62069485248eeaba1e8d9853aaf951caaeaed0585a95346f08" dependencies = [ - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -10895,7 +10904,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -13172,7 +13181,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ "bytes 1.11.1", - "heck 0.4.1", + "heck 0.5.0", "itertools 0.12.1", "log", "multimap 0.10.1", @@ -13265,10 +13274,11 @@ dependencies = [ [[package]] name = "psm" -version = "0.1.27" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e66fcd288453b748497d8fb18bccc83a16b0518e3906d4b8df0a8d42d93dbb1c" +checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8" dependencies = [ + "ar_archive_writer", "cc", ] @@ -14644,7 +14654,7 @@ dependencies = [ "errno 0.3.14", "libc", "linux-raw-sys 0.11.0", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -16227,9 +16237,9 @@ checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "stacker" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1f8b29fb42aafcea4edeeb6b2f2d7ecd0d969c48b4cf0d2e64aafc471dd6e59" +checksum = "08d74a23609d509411d10e2176dc2a4346e3b4aea2e7b1869f19fdedbc71c013" dependencies = [ "cc", "cfg-if", @@ -17096,7 +17106,7 @@ dependencies = [ 
"getrandom 0.3.4", "once_cell", "rustix 1.1.2", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -17996,7 +18006,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fb391ac70462b3097a755618fbf9c8f95ecc1eb379a414f7b46f202ed10db1f" dependencies = [ "cc", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -19022,9 +19032,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d" +checksum = "60722a937f594b7fde9adb894d7c092fc1bb6612897c46368d18e7a20208eff2" dependencies = [ "cfg-if", "once_cell", @@ -19033,27 +19043,14 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.104" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "671c9a5a66f49d8a47345ab942e2cb93c7d1d0339065d4f8139c486121b43b19" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.106", - "wasm-bindgen-shared", -] - [[package]] name = "wasm-bindgen-futures" -version = "0.4.54" +version = "0.4.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c" +checksum = "8a89f4650b770e4521aa6573724e2aed4704372151bd0de9d16a3bbabb87441a" dependencies = [ "cfg-if", + "futures-util", "js-sys", "once_cell", "wasm-bindgen", @@ -19062,9 +19059,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ca60477e4c59f5f2986c50191cd972e3a50d8a95603bc9434501cf156a9a119" +checksum = "0fac8c6395094b6b91c4af293f4c79371c163f9a6f56184d2c9a85f5a95f3950" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ 
-19072,22 +19069,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7" +checksum = "ab3fabce6159dc20728033842636887e4877688ae94382766e00b180abac9d60" dependencies = [ + "bumpalo", "proc-macro2", "quote", "syn 2.0.106", - "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.104" +version = "0.2.113" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bad67dc8b2a1a6e5448428adec4c3e84c43e561d8c9ee8a9e5aabeb193ec41d1" +checksum = "de0e091bdb824da87dc01d967388880d017a0a9bc4f3bdc0d86ee9f9336e3bb5" dependencies = [ "unicode-ident", ] @@ -19702,9 +19699,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.81" +version = "0.3.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9367c417a924a74cae129e6a2ae3b47fabb1f8995595ab474029da749a8be120" +checksum = "705eceb4ce901230f8625bd1d665128056ccbe4b7408faa625eec1ba80f59a97" dependencies = [ "js-sys", "wasm-bindgen", @@ -20070,7 +20067,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.61.2", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index cb388e08a5b0c403a15dea8ebb8d0905cbcea316..d2ac2569a7d0ede9e64aa9daf118340312cbb671 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -768,7 +768,7 @@ wasmtime = { version = "33", default-features = false, features = [ wasmtime-wasi = "33" wax = "0.7" which = "6.0.0" -wasm-bindgen = "0.2.104" +wasm-bindgen = "0.2.113" web-time = "1.1.0" wgpu = "28.0" windows-core = "0.61" diff --git a/tooling/xtask/src/tasks/web_examples.rs b/tooling/xtask/src/tasks/web_examples.rs index 
93179c92ca9a021838d48ae6a976f3c2a434f6a2..5b8e0fdd610e39a8ee020eddfbc9b98d00bdf419 100644 --- a/tooling/xtask/src/tasks/web_examples.rs +++ b/tooling/xtask/src/tasks/web_examples.rs @@ -71,6 +71,8 @@ pub fn run_web_examples(args: WebExamplesArgs) -> Result<()> { "gpui", "--keep-going", ]); + // 🙈 + cmd.env("RUSTC_BOOTSTRAP", "1"); for name in &examples { cmd.args(["--example", name]); } @@ -109,6 +111,8 @@ pub fn run_web_examples(args: WebExamplesArgs) -> Result<()> { "--out-name", name, ]) + // 🙈 + .env("RUSTC_BOOTSTRAP", "1") .status() .context("failed to run wasm-bindgen")?; if !status.success() { From 654857d8618324fda94bb68bd0409cf0c1a9f360 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Fri, 27 Feb 2026 20:22:03 +0100 Subject: [PATCH 166/548] Use updated version of our fork of `rust-sdks` (#50205) Use updated version of our fork of `rust-sdks` with two minor tweaks that I also submitted for upstreaming. Release Notes: - N/A --- .github/workflows/release.yml | 6 + .github/workflows/run_tests.yml | 18 ++ Cargo.lock | 185 ++++++++++++++---- Cargo.toml | 4 + crates/audio/Cargo.toml | 2 +- crates/livekit_client/Cargo.toml | 7 +- crates/livekit_client/src/livekit_client.rs | 6 +- .../src/livekit_client/playback.rs | 11 +- nix/build.nix | 4 + script/linux | 7 +- .../xtask/src/tasks/workflows/run_tests.rs | 67 ++++--- 11 files changed, 231 insertions(+), 86 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4442b068a88800e8437d5c6e459acec954308946..4fe1d4dd7d698b624e9dc2391d371acc4335cdcb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -53,6 +53,9 @@ jobs: run_tests_linux: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -177,6 +180,9 @@ jobs: clippy_linux: if: 
(github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index cdf01b1aa91dd722de2533887d44182a9c55fcb0..12a0dc2db1b6019e5e1e163f282f80e9bfcd0c66 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -190,6 +190,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -300,6 +303,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -400,6 +406,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -443,6 +452,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-8x16-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -525,6 +537,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_tests == 'true' runs-on: namespace-profile-2x4-ubuntu-2404 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -557,6 +572,9 @@ jobs: - orchestrate if: needs.orchestrate.outputs.run_docs == 'true' runs-on: namespace-profile-8x16-ubuntu-2204 + 
env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 diff --git a/Cargo.lock b/Cargo.lock index a5b7662a95341f68e82a6a9e95c23e3fd76f027a..d37563dc8595c72f71901dd84cdd4fca5a34ee84 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2172,6 +2172,16 @@ dependencies = [ "piper", ] +[[package]] +name = "bmrng" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54df9073108f1558f90ae6c5bf5ab9c917c4185f5527b280c87a993cbead0ac" +dependencies = [ + "futures-core", + "tokio", +] + [[package]] name = "bon" version = "3.8.2" @@ -2754,6 +2764,16 @@ dependencies = [ "target-lexicon 0.12.16", ] +[[package]] +name = "cfg-expr" +version = "0.20.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78cef5b5a1a6827c7322ae2a636368a573006b27cfa76c7ebd53e834daeaab6a" +dependencies = [ + "smallvec", + "target-lexicon 0.13.3", +] + [[package]] name = "cfg-if" version = "1.0.4" @@ -7141,6 +7161,19 @@ version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" +[[package]] +name = "gio-sys" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0071fe88dba8e40086c8ff9bbb62622999f49628344b1d1bf490a48a29d80f22" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps 7.0.7", + "windows-sys 0.61.2", +] + [[package]] name = "git" version = "0.1.0" @@ -7315,6 +7348,50 @@ dependencies = [ "xml-rs", ] +[[package]] +name = "glib" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16de123c2e6c90ce3b573b7330de19be649080ec612033d397d72da265f1bd8b" +dependencies = [ + "bitflags 2.10.0", + "futures-channel", + "futures-core", + "futures-executor", + "futures-task", + "futures-util", + "gio-sys", + "glib-macros", + "glib-sys", + "gobject-sys", + "libc", 
+ "memchr", + "smallvec", +] + +[[package]] +name = "glib-macros" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf59b675301228a696fe01c3073974643365080a76cc3ed5bc2cbc466ad87f17" +dependencies = [ + "heck 0.5.0", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "glib-sys" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d95e1a3a19ae464a7286e14af9a90683c64d70c02532d88d87ce95056af3e6c" +dependencies = [ + "libc", + "system-deps 7.0.7", +] + [[package]] name = "glob" version = "0.3.3" @@ -7390,6 +7467,17 @@ dependencies = [ "workspace", ] +[[package]] +name = "gobject-sys" +version = "0.21.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dca35da0d19a18f4575f3cb99fe1c9e029a2941af5662f326f738a21edaf294" +dependencies = [ + "glib-sys", + "libc", + "system-deps 7.0.7", +] + [[package]] name = "goblin" version = "0.8.2" @@ -9607,10 +9695,11 @@ dependencies = [ [[package]] name = "libwebrtc" -version = "0.3.10" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.3.26" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" dependencies = [ "cxx", + "glib", "jni", "js-sys", "lazy_static", @@ -9704,9 +9793,12 @@ checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" [[package]] name = "livekit" -version = "0.7.8" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.7.32" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" dependencies = [ + "base64 
0.22.1", + "bmrng", + "bytes 1.11.1", "chrono", "futures-util", "lazy_static", @@ -9727,11 +9819,12 @@ dependencies = [ [[package]] name = "livekit-api" -version = "0.4.2" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.4.14" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" dependencies = [ + "base64 0.21.7", "futures-util", - "http 0.2.12", + "http 1.3.1", "livekit-protocol", "livekit-runtime", "log", @@ -9739,20 +9832,22 @@ dependencies = [ "pbjson-types", "prost 0.12.6", "rand 0.9.2", - "reqwest 0.11.27", + "reqwest 0.12.24", + "rustls-native-certs 0.6.3", "scopeguard", "serde", "sha2", "thiserror 1.0.69", "tokio", - "tokio-tungstenite 0.26.2", + "tokio-rustls 0.26.2", + "tokio-tungstenite 0.28.0", "url", ] [[package]] name = "livekit-protocol" -version = "0.3.9" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.7.1" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" dependencies = [ "futures-util", "livekit-runtime", @@ -9760,7 +9855,6 @@ dependencies = [ "pbjson", "pbjson-types", "prost 0.12.6", - "prost-types 0.12.6", "serde", "thiserror 1.0.69", "tokio", @@ -9769,7 +9863,7 @@ dependencies = [ [[package]] name = "livekit-runtime" version = "0.4.0" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" dependencies = [ "tokio", "tokio-stream", @@ -9825,7 +9919,6 @@ 
dependencies = [ "sha2", "simplelog", "smallvec", - "tokio-tungstenite 0.26.2", "ui", "util", "zed-scap", @@ -10586,12 +10679,6 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" -[[package]] -name = "multimap" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" - [[package]] name = "naga" version = "28.0.0" @@ -13165,7 +13252,7 @@ dependencies = [ "itertools 0.10.5", "lazy_static", "log", - "multimap 0.8.3", + "multimap", "petgraph", "prost 0.9.0", "prost-types 0.9.0", @@ -13184,7 +13271,7 @@ dependencies = [ "heck 0.5.0", "itertools 0.12.1", "log", - "multimap 0.10.1", + "multimap", "once_cell", "petgraph", "prettyplease", @@ -13670,7 +13757,7 @@ dependencies = [ "rand 0.8.5", "rand_chacha 0.3.1", "simd_helpers", - "system-deps", + "system-deps 6.2.2", "thiserror 1.0.69", "v_frame", "wasm-bindgen", @@ -14181,7 +14268,6 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.32", - "hyper-rustls 0.24.2", "hyper-tls", "ipnet", "js-sys", @@ -14191,8 +14277,6 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.12", - "rustls-native-certs 0.6.3", "rustls-pemfile 1.0.4", "serde", "serde_json", @@ -14201,7 +14285,6 @@ dependencies = [ "system-configuration 0.5.1", "tokio", "tokio-native-tls", - "tokio-rustls 0.24.1", "tower-service", "url", "wasm-bindgen", @@ -14225,16 +14308,22 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.7.0", + "hyper-rustls 0.27.7", "hyper-util", "js-sys", "log", "percent-encoding", "pin-project-lite", + "quinn", + "rustls 0.23.33", + "rustls-native-certs 0.8.2", + "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper 1.0.2", "tokio", + "tokio-rustls 0.26.2", "tower 0.5.2", "tower-http 0.6.6", "tower-service", @@ -16910,13 +16999,26 @@ 
version = "6.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3e535eb8dded36d55ec13eddacd30dec501792ff23a0b1682c38601b8cf2349" dependencies = [ - "cfg-expr", + "cfg-expr 0.15.8", "heck 0.5.0", "pkg-config", "toml 0.8.23", "version-compare", ] +[[package]] +name = "system-deps" +version = "7.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c8f33736f986f16d69b6cb8b03f55ddcad5c41acc4ccc39dd88e84aa805e7f" +dependencies = [ + "cfg-expr 0.20.6", + "heck 0.5.0", + "pkg-config", + "toml 0.9.8", + "version-compare", +] + [[package]] name = "system-interface" version = "0.27.3" @@ -17668,17 +17770,18 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.26.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" dependencies = [ "futures-util", "log", "rustls 0.23.33", + "rustls-native-certs 0.8.2", "rustls-pki-types", "tokio", "tokio-rustls 0.26.2", - "tungstenite 0.26.2", + "tungstenite 0.28.0", ] [[package]] @@ -18334,9 +18437,9 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.26.2" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" dependencies = [ "bytes 1.11.1", "data-encoding", @@ -18353,9 +18456,9 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.27.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadc29d668c91fcc564941132e17b28a7ceb2f3ebf0b9dae3e03fd7a6748eb0d" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" dependencies = [ "bytes 1.11.1", "data-encoding", @@ -19776,25 +19879,27 @@ 
dependencies = [ [[package]] name = "webrtc-sys" -version = "0.3.7" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.3.23" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" dependencies = [ "cc", "cxx", "cxx-build", "glob", "log", + "pkg-config", "webrtc-sys-build", ] [[package]] name = "webrtc-sys-build" -version = "0.3.6" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=5f04705ac3f356350ae31534ffbc476abc9ea83d#5f04705ac3f356350ae31534ffbc476abc9ea83d" +version = "0.3.13" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" dependencies = [ + "anyhow", "fs2", "regex", - "reqwest 0.11.27", + "reqwest 0.12.24", "scratch", "semver", "zip 0.6.6", diff --git a/Cargo.toml b/Cargo.toml index d2ac2569a7d0ede9e64aa9daf118340312cbb671..3b63808046e98c5e314cb5ab0bb731c32fce716b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -581,6 +581,8 @@ jupyter-websocket-client = "1.0.0" libc = "0.2" libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } linkify = "0.10.0" +libwebrtc = "0.3.26" +livekit = { version = "0.7.32", features = ["tokio", "rustls-tls-native-roots"] } log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] } lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "a4f410987660bf560d1e617cb78117c6b6b9f599" } mach2 = "0.5" @@ -835,6 +837,8 @@ notify = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24c notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24cad542c28e04ced02e20325a4ec28a31d" } windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" } calloop = { git = 
"https://github.com/zed-industries/calloop" } +livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "9c38d9a0a91951967f8fa84ed86e193626436774" } +libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "9c38d9a0a91951967f8fa84ed86e193626436774" } [profile.dev] split-debuginfo = "unpacked" diff --git a/crates/audio/Cargo.toml b/crates/audio/Cargo.toml index 3139eb56c7e30555c48fe0be329c55d472b3f8eb..f3898265e500dd40602c9877b5e4c0980932a81a 100644 --- a/crates/audio/Cargo.toml +++ b/crates/audio/Cargo.toml @@ -30,4 +30,4 @@ thiserror.workspace = true util.workspace = true [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies] -libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" } +libwebrtc.workspace = true diff --git a/crates/livekit_client/Cargo.toml b/crates/livekit_client/Cargo.toml index e4c530bbcb3864cf2557f15ef02ddbe7e81852c7..66511da9daa943628e71000a2009b2026eeace6c 100644 --- a/crates/livekit_client/Cargo.toml +++ b/crates/livekit_client/Cargo.toml @@ -40,15 +40,12 @@ serde.workspace = true serde_urlencoded.workspace = true settings.workspace = true smallvec.workspace = true -tokio-tungstenite.workspace = true ui.workspace = true util.workspace = true [target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies] -libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" } -livekit = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks", features = [ - "__rustls-tls" -] } +libwebrtc.workspace = true +livekit.workspace = true [target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))'.dependencies] scap.workspace = true diff --git a/crates/livekit_client/src/livekit_client.rs 
b/crates/livekit_client/src/livekit_client.rs index 6fc1d3415a493e7e1989472616015916a82cf818..1db9a12ef2b7f3b4f3de1cba6c61a30db12a5bd9 100644 --- a/crates/livekit_client/src/livekit_client.rs +++ b/crates/livekit_client/src/livekit_client.rs @@ -1,5 +1,3 @@ -use std::sync::Arc; - use anyhow::{Context as _, Result, anyhow}; use audio::AudioSettings; use collections::HashMap; @@ -54,10 +52,8 @@ impl Room { token: String, cx: &mut AsyncApp, ) -> Result<(Self, mpsc::UnboundedReceiver)> { - let connector = - tokio_tungstenite::Connector::Rustls(Arc::new(http_client_tls::tls_config())); let mut config = livekit::RoomOptions::default(); - config.connector = Some(connector); + config.tls_config = livekit::TlsConfig(Some(http_client_tls::tls_config())); let (room, mut events) = Tokio::spawn(cx, async move { livekit::Room::connect(&url, &token, config).await }) diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index 6e39c2abfb4162ceaa43373f4170a41ffdb36351..df62479f022be5295a3de44f40fabf48aed515f2 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -466,10 +466,13 @@ pub(crate) async fn capture_local_video_track( ) -> Result<(crate::LocalVideoTrack, Box)> { let metadata = capture_source.metadata()?; let track_source = gpui_tokio::Tokio::spawn(cx, async move { - NativeVideoSource::new(VideoResolution { - width: metadata.resolution.width.0 as u32, - height: metadata.resolution.height.0 as u32, - }) + NativeVideoSource::new( + VideoResolution { + width: metadata.resolution.width.0 as u32, + height: metadata.resolution.height.0 as u32, + }, + true, + ) }) .await?; diff --git a/nix/build.nix b/nix/build.nix index 28031337da6877cebda056e9cf2eab0f8f0d3ff7..8953fbc19fb1e6bb165a2585e3a76ffeb0bdea04 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -24,8 +24,10 @@ fontconfig, freetype, git, + glib, libgit2, libglvnd, + libva, libxkbcommon, 
livekit-libwebrtc, nodejs_22, @@ -161,6 +163,8 @@ let ] ++ lib.optionals stdenv'.hostPlatform.isLinux [ alsa-lib + glib + libva libxkbcommon wayland gpu-lib diff --git a/script/linux b/script/linux index c5c4ea9ab3856545bcff63bc6bdaed5f06b8e07c..1d1f78dc1d6d29ead26cca27e48b2559b9f81215 100755 --- a/script/linux +++ b/script/linux @@ -27,13 +27,14 @@ if [[ -n $apt ]]; then g++ libasound2-dev libfontconfig-dev + libgit2-dev + libssl-dev + libva-dev + libvulkan1 libwayland-dev libx11-xcb-dev libxkbcommon-x11-dev - libssl-dev libzstd-dev - libvulkan1 - libgit2-dev make cmake clang diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index f40e02335c5bbccd396ebed916a9a4d3a6a22f29..378af0eba5dcea9c98f23d57ced5951073fb9cc2 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -1,5 +1,5 @@ use gh_workflow::{ - Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use, + Concurrency, Container, Env, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use, Workflow, }; use indexmap::IndexMap; @@ -15,6 +15,11 @@ use super::{ steps::{self, FluentBuilder, NamedJob, named, release_job}, }; +fn use_clang(job: Job) -> Job { + job.add_env(Env::new("CC", "clang")) + .add_env(Env::new("CXX", "clang++")) +} + pub(crate) fn run_tests() -> Workflow { // Specify anything which should potentially skip full test suite in this regex: // - docs/ @@ -354,7 +359,7 @@ fn check_dependencies() -> NamedJob { .with(("license-check", false)) } - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_SMALL) .add_step(steps::checkout_repo()) @@ -363,7 +368,7 @@ fn check_dependencies() -> NamedJob { .add_step(run_cargo_machete()) .add_step(check_cargo_lock()) .add_step(check_vulnerable_dependencies()), - ) + )) } fn check_wasm() -> NamedJob { @@ -399,7 +404,7 @@ fn check_wasm() -> NamedJob { } fn check_workspace_binaries() -> 
NamedJob { - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_LARGE) .add_step(steps::checkout_repo()) @@ -411,7 +416,7 @@ fn check_workspace_binaries() -> NamedJob { .add_step(steps::script("cargo build --workspace --bins --examples")) .add_step(steps::show_sccache_stats(Platform::Linux)) .add_step(steps::cleanup_cargo_config(Platform::Linux)), - ) + )) } pub(crate) fn clippy(platform: Platform) -> NamedJob { @@ -420,23 +425,27 @@ pub(crate) fn clippy(platform: Platform) -> NamedJob { Platform::Linux => runners::LINUX_DEFAULT, Platform::Mac => runners::MAC_DEFAULT, }; + let mut job = release_job(&[]) + .runs_on(runner) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(platform)) + .when( + platform == Platform::Linux || platform == Platform::Mac, + |this| this.add_step(steps::cache_rust_dependencies_namespace()), + ) + .when( + platform == Platform::Linux, + steps::install_linux_dependencies, + ) + .add_step(steps::setup_sccache(platform)) + .add_step(steps::clippy(platform)) + .add_step(steps::show_sccache_stats(platform)); + if platform == Platform::Linux { + job = use_clang(job); + } NamedJob { name: format!("clippy_{platform}"), - job: release_job(&[]) - .runs_on(runner) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_cargo_config(platform)) - .when( - platform == Platform::Linux || platform == Platform::Mac, - |this| this.add_step(steps::cache_rust_dependencies_namespace()), - ) - .when( - platform == Platform::Linux, - steps::install_linux_dependencies, - ) - .add_step(steps::setup_sccache(platform)) - .add_step(steps::clippy(platform)) - .add_step(steps::show_sccache_stats(platform)), + job, } } @@ -474,10 +483,12 @@ fn run_platform_tests_impl(platform: Platform, filter_packages: bool) -> NamedJo }) .add_step(steps::checkout_repo()) .add_step(steps::setup_cargo_config(platform)) - .when( - platform == Platform::Linux || platform == Platform::Mac, - |this| 
this.add_step(steps::cache_rust_dependencies_namespace()), - ) + .when(platform == Platform::Mac, |this| { + this.add_step(steps::cache_rust_dependencies_namespace()) + }) + .when(platform == Platform::Linux, |this| { + use_clang(this.add_step(steps::cache_rust_dependencies_namespace())) + }) .when( platform == Platform::Linux, steps::install_linux_dependencies, @@ -549,7 +560,7 @@ fn doctests() -> NamedJob { .id("run_doctests") } - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_DEFAULT) .add_step(steps::checkout_repo()) @@ -560,7 +571,7 @@ fn doctests() -> NamedJob { .add_step(run_doctests()) .add_step(steps::show_sccache_stats(Platform::Linux)) .add_step(steps::cleanup_cargo_config(Platform::Linux)), - ) + )) } fn check_licenses() -> NamedJob { @@ -602,7 +613,7 @@ fn check_docs() -> NamedJob { "#}) } - named::job( + named::job(use_clang( release_job(&[]) .runs_on(runners::LINUX_LARGE) .add_step(steps::checkout_repo()) @@ -619,7 +630,7 @@ fn check_docs() -> NamedJob { .add_step( lychee_link_check("target/deploy/docs"), // check links in generated html ), - ) + )) } pub(crate) fn check_scripts() -> NamedJob { From 51dc09c861fe0b4f522d765a722dbd84bd4b6162 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Feb 2026 12:30:29 -0700 Subject: [PATCH 167/548] Fail better when workspace restoration all fails (#50330) Release Notes: - Fixed a bug that caused Zed to appear not to open if your workspaces failed to restore --- crates/zed/src/main.rs | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index f429c32df79b6a1a62a82832e69d412800544e8a..cfa339afc08faeac8b050ef3d3abbe627b19dadf 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -1398,7 +1398,7 @@ pub(crate) async fn restore_or_create_workspace( .update(cx, |multi_workspace, _, cx| { multi_workspace.workspace().update(cx, |workspace, cx| { workspace.show_toast( - 
Toast::new(NotificationId::unique::<()>(), message), + Toast::new(NotificationId::unique::<()>(), message.clone()), cx, ) }); @@ -1410,11 +1410,23 @@ pub(crate) async fn restore_or_create_workspace( }); // If we couldn't show a toast (no windows opened successfully), - // we've already logged the errors above, so the user can check logs + // open a fallback empty workspace and show the error there if !toast_shown { - log::error!( - "Failed to show notification for window restoration errors, because no workspace windows were available." - ); + log::error!("All workspace restorations failed. Opening fallback empty workspace."); + cx.update(|cx| { + workspace::open_new( + Default::default(), + app_state.clone(), + cx, + |workspace, _window, cx| { + workspace.show_toast( + Toast::new(NotificationId::unique::<()>(), message), + cx, + ); + }, + ) + }) + .await?; } } } else if matches!(KEY_VALUE_STORE.read_kvp(FIRST_OPEN), Ok(None)) { From 9ca4c608374ba764cd26fa1992bb6ead6d1887fc Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Feb 2026 13:24:37 -0700 Subject: [PATCH 168/548] Fix panic in vim increment (#50311) Fixes ZED-59V Release Notes: - vim: Fixed panic when incrementing a number preceded by a multibyte character --- crates/vim/src/normal/increment.rs | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 9b6707fdb92520e95e874a5be143024beb21b873..9df8721301a82ed26618f7181ba80c43cbc702df 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -203,20 +203,25 @@ fn find_target( let start_offset = start.to_offset(snapshot); let end_offset = end.to_offset(snapshot); - let mut offset = start_offset; let mut first_char_is_num = snapshot - .chars_at(offset) + .chars_at(start_offset) .next() .map_or(false, |ch| ch.is_ascii_hexdigit()); let mut pre_char = String::new(); - let next_offset = offset + let next_offset = 
start_offset + snapshot .chars_at(start_offset) .next() .map_or(0, |ch| ch.len_utf8()); - // Backward scan to find the start of the number, but stop at start_offset + // Backward scan to find the start of the number, but stop at start_offset. + // We track `offset` as the start position of the current character. Initialize + // to `next_offset` and decrement at the start of each iteration so that `offset` + // always lands on a valid character boundary (not in the middle of a multibyte char). + let mut offset = next_offset; for ch in snapshot.reversed_chars_at(next_offset) { + offset -= ch.len_utf8(); + // Search boundaries if offset.0 == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) { break; @@ -238,7 +243,6 @@ fn find_target( } pre_char.insert(0, ch); - offset -= ch.len_utf8(); } // The backward scan breaks on whitespace, including newlines. Without this @@ -895,4 +899,15 @@ mod test { .await .assert_eq("# Title\n2. item\nˇ2. item\n3. item"); } + + #[gpui::test] + async fn test_increment_with_multibyte_characters(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + // Test cursor after a multibyte character - this would panic before the fix + // because the backward scan would land in the middle of the Korean character + cx.set_state("지ˇ1", Mode::Normal); + cx.simulate_keystrokes("ctrl-a"); + cx.assert_state("지ˇ2", Mode::Normal); + } } From 30ceb9176cb751b0f4f60516a3248f4ded99b082 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Feb 2026 13:24:53 -0700 Subject: [PATCH 169/548] Reduce wgpu memory usage during resize (#50030) Closes #49435 Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release 
Notes: - Linux: Reduced GPU memory usage during resize --- crates/gpui_wgpu/src/wgpu_renderer.rs | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 489f354c691c280a5331e5a7765c9d626064eb9c..cc416ead908830262cdae0144b4912ccf5dbc4ad 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -5,6 +5,7 @@ use gpui::{ PolychromeSprite, PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, SubpixelSprite, Underline, get_gamma_correction_ratios, }; +use log::warn; #[cfg(not(target_family = "wasm"))] use raw_window_handle::{HasDisplayHandle, HasWindowHandle}; use std::num::NonZeroU64; @@ -825,6 +826,20 @@ impl WgpuRenderer { let height = size.height.0 as u32; if width != self.surface_config.width || height != self.surface_config.height { + // Wait for any in-flight GPU work to complete before destroying textures + if let Err(e) = self.device.poll(wgpu::PollType::Wait { + submission_index: None, + timeout: None, + }) { + warn!("Failed to poll device during resize: {e:?}"); + } + + // Destroy old textures before allocating new ones to avoid GPU memory spikes + self.path_intermediate_texture.destroy(); + if let Some(ref texture) = self.path_msaa_texture { + texture.destroy(); + } + self.surface_config.width = width.max(1); self.surface_config.height = height.max(1); self.surface.configure(&self.device, &self.surface_config); From 19190e861338ffe8b3bae711ed443653bfcffad4 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Feb 2026 13:41:22 -0700 Subject: [PATCH 170/548] Add glib-2 to script/linux (#50335) required after livekit bump Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI 
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- script/linux | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/script/linux b/script/linux index 1d1f78dc1d6d29ead26cca27e48b2559b9f81215..3f098ec21e3a84734d5f25c7b63b12d8588b8264 100755 --- a/script/linux +++ b/script/linux @@ -28,6 +28,7 @@ if [[ -n $apt ]]; then libasound2-dev libfontconfig-dev libgit2-dev + libglib2.0-dev libssl-dev libva-dev libvulkan1 @@ -79,6 +80,7 @@ if [[ -n $dnf ]] || [[ -n $yum ]]; then cmake alsa-lib-devel fontconfig-devel + glib2-devel wayland-devel libxcb-devel libxkbcommon-x11-devel @@ -143,6 +145,7 @@ if [[ -n $zyp ]]; then fontconfig-devel gcc gcc-c++ + glib2-devel git gzip jq @@ -176,6 +179,7 @@ if [[ -n $pacman ]]; then cmake alsa-lib fontconfig + glib2 wayland libgit2 libxcb @@ -206,6 +210,7 @@ if [[ -n $xbps ]]; then gcc alsa-lib-devel fontconfig-devel + glib-devel libxcb-devel libxkbcommon-devel libzstd-devel @@ -227,6 +232,7 @@ if [[ -n $emerge ]]; then deps=( app-arch/zstd app-misc/jq + dev-libs/glib dev-libs/openssl dev-libs/wayland dev-util/cmake From 47c5a31ac5575074cee305859478962cbc70dc9b Mon Sep 17 00:00:00 2001 From: John Tur Date: Fri, 27 Feb 2026 15:46:18 -0500 Subject: [PATCH 171/548] Dynamically detect surface size limits on WGPU (#50340) Fallout from https://github.com/zed-industries/zed/pull/50270 Release Notes: - N/A Co-authored-by: Conrad Irwin --- crates/gpui_wgpu/src/wgpu_context.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index 38a27d0623c821144a2b0ba4ed5cadaaceb03812..84b7166f6e6b97a9dc7f16c76069872bae473161 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ b/crates/gpui_wgpu/src/wgpu_context.rs @@ -120,7 +120,9 @@ impl WgpuContext { .request_device(&wgpu::DeviceDescriptor { label: Some("gpui_device"), required_features, - required_limits: wgpu::Limits::downlevel_defaults(), + 
required_limits: wgpu::Limits::downlevel_defaults() + .using_resolution(adapter.limits()) + .using_alignment(adapter.limits()), memory_hints: wgpu::MemoryHints::MemoryUsage, trace: wgpu::Trace::Off, experimental_features: wgpu::ExperimentalFeatures::disabled(), From 69d6bfd789be8df0c69bfd751a230f9bb8165218 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Feb 2026 13:53:32 -0700 Subject: [PATCH 172/548] Avoid panicking in wgpu rendering during resize (#50169) Fixes Zed-5AW Fixes Zed-5AP Claude believes this is the right fix, but would love someone who knows more about graphics than me to take a look: @reflectronic / @zortax? The panic is: ``` wgpu error: Validation Error Caused by: In Texture::create_view Texture with 'path_intermediate' label is invalid gpui::platform::wgpu::wgpu_renderer::WgpuRenderer::create_path_intermediate (wgpu_renderer.rs:742) gpui::platform::wgpu::wgpu_renderer::WgpuRenderer::update_drawable_size (wgpu_renderer.rs:784) gpui::platform::linux::x11::window::X11WindowStatePtr::set_bounds (window.rs:1169) gpui::platform::linux::x11::client::X11Client::handle_event (client.rs:902) ``` or: ``` wgpu error: Validation Error Caused by: In Texture::create_view Texture with 'path_intermediate' label is invalid gpui::platform::wgpu::wgpu_renderer::WgpuRenderer::create_path_intermediate (wgpu_renderer.rs:742) gpui::platform::wgpu::wgpu_renderer::WgpuRenderer::new (wgpu_renderer.rs:274) gpui::platform::linux::x11::window::X11WindowState::new::{{closure}} (window.rs:698) gpui::platform::linux::x11::window::X11WindowState::new (window.rs:488) gpui::platform::linux::x11::window::X11Window::new (window.rs:814) gpui::platform::linux::x11::client::X11Client::open_window (client.rs:1514) gpui::platform::linux::platform::::open_window (platform.rs:289) gpui::window::Window::new (window.rs:1119) gpui::app::App::open_window::{{closure}} (app.rs:1025) gpui::app::App::update (app.rs:835) gpui::app::App::open_window (app.rs:1022) ``` I haven't seen a Wayland 
equivalent (not sure if that's because it doesn't happen on Wayland or because I haven't seen it yet) Release Notes: - Linux: Fixed a panic in the new WGPU renderer during resize --- crates/gpui_wgpu/src/wgpu_renderer.rs | 105 +++++++++++++++----- 1 file changed, 60 insertions(+), 45 deletions(-) diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index cc416ead908830262cdae0144b4912ccf5dbc4ad..23de2f55e0707b2f706ecb6ae977e4b08850d894 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -109,8 +109,8 @@ pub struct WgpuRenderer { instance_buffer_capacity: u64, max_buffer_size: u64, storage_buffer_alignment: u64, - path_intermediate_texture: wgpu::Texture, - path_intermediate_view: wgpu::TextureView, + path_intermediate_texture: Option, + path_intermediate_view: Option, path_msaa_texture: Option, path_msaa_view: Option, rendering_params: RenderingParameters, @@ -297,23 +297,6 @@ impl WgpuRenderer { mapped_at_creation: false, }); - let (path_intermediate_texture, path_intermediate_view) = Self::create_path_intermediate( - &device, - surface_format, - config.size.width.0 as u32, - config.size.height.0 as u32, - ); - - let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( - &device, - surface_format, - config.size.width.0 as u32, - config.size.height.0 as u32, - rendering_params.path_sample_count, - ) - .map(|(t, v)| (Some(t), Some(v))) - .unwrap_or((None, None)); - let globals_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { label: Some("globals_bind_group"), layout: &bind_group_layouts.globals, @@ -380,10 +363,12 @@ impl WgpuRenderer { instance_buffer_capacity: initial_instance_buffer_capacity, max_buffer_size, storage_buffer_alignment, - path_intermediate_texture, - path_intermediate_view, - path_msaa_texture, - path_msaa_view, + // Defer intermediate texture creation to first draw call via ensure_intermediate_textures().
+ // This avoids panics when the device/surface is in an invalid state during initialization. + path_intermediate_texture: None, + path_intermediate_view: None, + path_msaa_texture: None, + path_msaa_view: None, rendering_params, dual_source_blending, adapter_info, @@ -835,7 +820,9 @@ impl WgpuRenderer { } // Destroy old textures before allocating new ones to avoid GPU memory spikes - self.path_intermediate_texture.destroy(); + if let Some(ref texture) = self.path_intermediate_texture { + texture.destroy(); + } if let Some(ref texture) = self.path_msaa_texture { texture.destroy(); } @@ -844,28 +831,44 @@ impl WgpuRenderer { self.surface_config.height = height.max(1); self.surface.configure(&self.device, &self.surface_config); - let (path_intermediate_texture, path_intermediate_view) = - Self::create_path_intermediate( - &self.device, - self.surface_config.format, - self.surface_config.width, - self.surface_config.height, - ); - self.path_intermediate_texture = path_intermediate_texture; - self.path_intermediate_view = path_intermediate_view; - - let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( + // Invalidate intermediate textures - they will be lazily recreated + // in draw() after we confirm the surface is healthy. This avoids + // panics when the device/surface is in an invalid state during resize. 
+ self.path_intermediate_texture = None; + self.path_intermediate_view = None; + self.path_msaa_texture = None; + self.path_msaa_view = None; + } + } + + fn ensure_intermediate_textures(&mut self) { + if self.path_intermediate_texture.is_some() { + return; + } + + let (path_intermediate_texture, path_intermediate_view) = { + let (t, v) = Self::create_path_intermediate( &self.device, self.surface_config.format, self.surface_config.width, self.surface_config.height, - self.rendering_params.path_sample_count, - ) - .map(|(t, v)| (Some(t), Some(v))) - .unwrap_or((None, None)); - self.path_msaa_texture = path_msaa_texture; - self.path_msaa_view = path_msaa_view; - } + ); + (Some(t), Some(v)) + }; + self.path_intermediate_texture = path_intermediate_texture; + self.path_intermediate_view = path_intermediate_view; + + let (path_msaa_texture, path_msaa_view) = Self::create_msaa_if_needed( + &self.device, + self.surface_config.format, + self.surface_config.width, + self.surface_config.height, + self.rendering_params.path_sample_count, + ) + .map(|(t, v)| (Some(t), Some(v))) + .unwrap_or((None, None)); + self.path_msaa_texture = path_msaa_texture; + self.path_msaa_view = path_msaa_view; } pub fn update_transparency(&mut self, transparent: bool) { @@ -928,6 +931,10 @@ impl WgpuRenderer { return; } }; + + // Now that we know the surface is healthy, ensure intermediate textures exist + self.ensure_intermediate_textures(); + let frame_view = frame .texture .create_view(&wgpu::TextureViewDescriptor::default()); @@ -1317,11 +1324,15 @@ impl WgpuRenderer { vec![PathSprite { bounds }] }; + let Some(path_intermediate_view) = self.path_intermediate_view.as_ref() else { + return true; + }; + let sprite_data = unsafe { Self::instance_bytes(&sprites) }; self.draw_instances_with_texture( sprite_data, sprites.len() as u32, - &self.path_intermediate_view, + path_intermediate_view, &self.pipelines.paths, instance_offset, pass, @@ -1365,10 +1376,14 @@ impl WgpuRenderer { }], }); + let 
Some(path_intermediate_view) = self.path_intermediate_view.as_ref() else { + return true; + }; + let (target_view, resolve_target) = if let Some(ref msaa_view) = self.path_msaa_view { - (msaa_view, Some(&self.path_intermediate_view)) + (msaa_view, Some(path_intermediate_view)) } else { - (&self.path_intermediate_view, None) + (path_intermediate_view, None) }; { From 2757aa41402c24a5b815b87fea7f7ba8c1db5184 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Fri, 27 Feb 2026 13:59:28 -0700 Subject: [PATCH 173/548] Clamp window size on wgpu (#50329) Fixes ZED-59P Release Notes: - Linux: Fix panic when requested window size was larger than supported by your GPU --- crates/gpui_linux/src/linux/wayland/window.rs | 6 +++ crates/gpui_linux/src/linux/x11/window.rs | 34 ++++++++------- crates/gpui_wgpu/src/wgpu_renderer.rs | 42 ++++++++++++++++--- 3 files changed, 62 insertions(+), 20 deletions(-) diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index 4a4c4060bdc31b95bd4b90d930afdc54727a9667..dd8e0b27c32ca9d15152028e686b065165a9e0c1 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -345,6 +345,12 @@ impl WaylandWindowState { if let Some(title) = options.titlebar.and_then(|titlebar| titlebar.title) { xdg_state.toplevel.set_title(title.to_string()); } + // Set max window size based on the GPU's maximum texture dimension. + // This prevents the window from being resized larger than what the GPU can render. 
+ let max_texture_size = renderer.max_texture_size() as i32; + xdg_state + .toplevel + .set_max_size(max_texture_size, max_texture_size); } Ok(Self { diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index 0ddd6e7adff84908e6a1c06d661347d39bdc5c9e..55da1d89947eb9a39937b9e70b05ab71aceb6525 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -497,21 +497,6 @@ impl X11WindowState { ), )?; - if let Some(size) = params.window_min_size { - let mut size_hints = WmSizeHints::new(); - let min_size = (f32::from(size.width) as i32, f32::from(size.height) as i32); - size_hints.min_size = Some(min_size); - check_reply( - || { - format!( - "X11 change of WM_SIZE_HINTS failed. min_size: {:?}", - min_size - ) - }, - size_hints.set_normal_hints(xcb, x_window), - )?; - } - let reply = get_reply(|| "X11 GetGeometry failed.", xcb.get_geometry(x_window))?; if reply.x == 0 && reply.y == 0 { bounds.origin.x.0 += 2; @@ -697,6 +682,25 @@ impl X11WindowState { WgpuRenderer::new(gpu_context, &raw_window, config)? }; + // Set max window size hints based on the GPU's maximum texture dimension. + // This prevents the window from being resized larger than what the GPU can render. + let max_texture_size = renderer.max_texture_size(); + let mut size_hints = WmSizeHints::new(); + if let Some(size) = params.window_min_size { + size_hints.min_size = + Some((f32::from(size.width) as i32, f32::from(size.height) as i32)); + } + size_hints.max_size = Some((max_texture_size as i32, max_texture_size as i32)); + check_reply( + || { + format!( + "X11 change of WM_SIZE_HINTS failed. 
max_size: {:?}", + max_texture_size + ) + }, + size_hints.set_normal_hints(xcb, x_window), + )?; + let display = Rc::new(X11Display::new(xcb, scale_factor, x_screen_index)?); Ok(Self { diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 23de2f55e0707b2f706ecb6ae977e4b08850d894..6e4169e34b4706dbdcdfc88238c170ec484180be 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -118,6 +118,7 @@ pub struct WgpuRenderer { adapter_info: wgpu::AdapterInfo, transparent_alpha_mode: wgpu::CompositeAlphaMode, opaque_alpha_mode: wgpu::CompositeAlphaMode, + max_texture_size: u32, } impl WgpuRenderer { @@ -239,11 +240,27 @@ impl WgpuRenderer { opaque_alpha_mode }; + let device = Arc::clone(&context.device); + let max_texture_size = device.limits().max_texture_dimension_2d; + + let requested_width = config.size.width.0 as u32; + let requested_height = config.size.height.0 as u32; + let clamped_width = requested_width.min(max_texture_size); + let clamped_height = requested_height.min(max_texture_size); + + if clamped_width != requested_width || clamped_height != requested_height { + warn!( + "Requested surface size ({}, {}) exceeds maximum texture dimension {}. \ + Clamping to ({}, {}). 
Window content may not fill the entire window.", + requested_width, requested_height, max_texture_size, clamped_width, clamped_height + ); + } + let surface_config = wgpu::SurfaceConfiguration { usage: wgpu::TextureUsages::RENDER_ATTACHMENT, format: surface_format, - width: config.size.width.0 as u32, - height: config.size.height.0 as u32, + width: clamped_width.max(1), + height: clamped_height.max(1), present_mode: wgpu::PresentMode::Fifo, desired_maximum_frame_latency: 2, alpha_mode, @@ -251,7 +268,6 @@ impl WgpuRenderer { }; surface.configure(&context.device, &surface_config); - let device = Arc::clone(&context.device); let queue = Arc::clone(&context.queue); let dual_source_blending = context.supports_dual_source_blending(); @@ -374,6 +390,7 @@ impl WgpuRenderer { adapter_info, transparent_alpha_mode, opaque_alpha_mode, + max_texture_size, }) } @@ -811,6 +828,17 @@ impl WgpuRenderer { let height = size.height.0 as u32; if width != self.surface_config.width || height != self.surface_config.height { + let clamped_width = width.min(self.max_texture_size); + let clamped_height = height.min(self.max_texture_size); + + if clamped_width != width || clamped_height != height { + warn!( + "Requested surface size ({}, {}) exceeds maximum texture dimension {}. \ + Clamping to ({}, {}). 
Window content may not fill the entire window.", + width, height, self.max_texture_size, clamped_width, clamped_height + ); + } + // Wait for any in-flight GPU work to complete before destroying textures if let Err(e) = self.device.poll(wgpu::PollType::Wait { submission_index: None, @@ -827,8 +855,8 @@ impl WgpuRenderer { texture.destroy(); } - self.surface_config.width = width.max(1); - self.surface_config.height = height.max(1); + self.surface_config.width = clamped_width.max(1); + self.surface_config.height = clamped_height.max(1); self.surface.configure(&self.device, &self.surface_config); // Invalidate intermediate textures - they will be lazily recreated @@ -917,6 +945,10 @@ impl WgpuRenderer { } } + pub fn max_texture_size(&self) -> u32 { + self.max_texture_size + } + pub fn draw(&mut self, scene: &Scene) { self.atlas.before_frame(); From 5856bd4ebcd7e6bb1cf82cf54253284622d036d6 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Fri, 27 Feb 2026 22:43:31 +0100 Subject: [PATCH 174/548] nix: Vendor up-to-date libwebrtc until patched upstream in NixOS (#50352) Follow-up to #50205 Release Notes: - N/A Co-authored-by: Neel Chotai Co-authored-by: Josh Robson Chase --- nix/build.nix | 30 +- .../0001-shared-libraries.patch | 13 + nix/livekit-libwebrtc/README.md | 7 + nix/livekit-libwebrtc/chromium-129-rust.patch | 21 + nix/livekit-libwebrtc/mkSystemLibraries.nix | 64 +++ nix/livekit-libwebrtc/package.nix | 339 ++++++++++++++++ nix/livekit-libwebrtc/sources.json | 372 ++++++++++++++++++ nix/livekit-libwebrtc/update.sh | 33 ++ 8 files changed, 877 insertions(+), 2 deletions(-) create mode 100644 nix/livekit-libwebrtc/0001-shared-libraries.patch create mode 100644 nix/livekit-libwebrtc/README.md create mode 100644 nix/livekit-libwebrtc/chromium-129-rust.patch create mode 100644 nix/livekit-libwebrtc/mkSystemLibraries.nix create mode 100644 nix/livekit-libwebrtc/package.nix create mode 100644 nix/livekit-libwebrtc/sources.json create mode 100644 
nix/livekit-libwebrtc/update.sh diff --git a/nix/build.nix b/nix/build.nix index 8953fbc19fb1e6bb165a2585e3a76ffeb0bdea04..3ae9ca95506baf05b8a433d1232190773b41321e 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -1,4 +1,6 @@ { + pkgs, + system, lib, stdenv, @@ -25,11 +27,17 @@ freetype, git, glib, + libdrm, + libgbm, libgit2, libglvnd, libva, + libxcomposite, + libxdamage, + libxext, + libxfixes, libxkbcommon, - livekit-libwebrtc, + libxrandr, nodejs_22, openssl, perl, @@ -170,6 +178,14 @@ let gpu-lib xorg.libX11 xorg.libxcb + libdrm + libgbm + libva + libxcomposite + libxdamage + libxext + libxfixes + libxrandr ] ++ lib.optionals stdenv'.hostPlatform.isDarwin [ apple-sdk_15 @@ -204,7 +220,7 @@ let }; ZED_UPDATE_EXPLANATION = "Zed has been installed using Nix. Auto-updates have thus been disabled."; RELEASE_VERSION = version; - LK_CUSTOM_WEBRTC = livekit-libwebrtc; + LK_CUSTOM_WEBRTC = pkgs.callPackage ./livekit-libwebrtc/package.nix { }; PROTOC = "${protobuf}/bin/protoc"; CARGO_PROFILE = profile; @@ -248,6 +264,16 @@ let postPatch = '' substituteInPlace webrtc-sys/build.rs --replace-fail \ "cargo:rustc-link-lib=static=webrtc" "cargo:rustc-link-lib=dylib=webrtc" + + substituteInPlace webrtc-sys/build.rs --replace-fail \ + 'add_gio_headers(&mut builder);' \ + 'for lib_name in ["glib-2.0", "gio-2.0"] { + if let Ok(lib) = pkg_config::Config::new().cargo_metadata(false).probe(lib_name) { + for path in lib.include_paths { + builder.include(&path); + } + } + }' '' + lib.optionalString withGLES '' cat ${glesConfig} >> .cargo/config/config.toml diff --git a/nix/livekit-libwebrtc/0001-shared-libraries.patch b/nix/livekit-libwebrtc/0001-shared-libraries.patch new file mode 100644 index 0000000000000000000000000000000000000000..e0b8709a4d1607f2ab416d725079d71f0fe40105 --- /dev/null +++ b/nix/livekit-libwebrtc/0001-shared-libraries.patch @@ -0,0 +1,13 @@ +--- a/BUILD.gn 2026-01-10 19:22:47.201811909 -0500 ++++ b/BUILD.gn 2026-01-10 19:24:36.440918317 -0500 +@@ -143,8 +143,8 
@@ + # target_defaults and direct_dependent_settings. + config("common_inherited_config") { + defines = [ "PROTOBUF_ENABLE_DEBUG_LOGGING_MAY_LEAK_PII=0" ] +- cflags = [] +- ldflags = [] ++ cflags = [ "-fvisibility=default" ] ++ ldflags = [ "-lavutil", "-lavformat", "-lavcodec" ] + + if (rtc_objc_prefix != "") { + defines += [ "RTC_OBJC_TYPE_PREFIX=${rtc_objc_prefix}" ] diff --git a/nix/livekit-libwebrtc/README.md b/nix/livekit-libwebrtc/README.md new file mode 100644 index 0000000000000000000000000000000000000000..87d4fc5599fa0a3b50f853ad53f19e90c5c2121e --- /dev/null +++ b/nix/livekit-libwebrtc/README.md @@ -0,0 +1,7 @@ +# Vendored livekit-libwebrtc build + +The contents of this directory is vendored from [this nixpkgs +PR](https://github.com/NixOS/nixpkgs/pull/478907). + +It should be removed as soon as said PR is merged and the new version of libwebrtc hits +nixpkgs-unstable. diff --git a/nix/livekit-libwebrtc/chromium-129-rust.patch b/nix/livekit-libwebrtc/chromium-129-rust.patch new file mode 100644 index 0000000000000000000000000000000000000000..1fe0c7f87324d8a046ae5226ccfbb06aa42d30b1 --- /dev/null +++ b/nix/livekit-libwebrtc/chromium-129-rust.patch @@ -0,0 +1,21 @@ +diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn +index 45086d6838cac..81132ad8ecb31 100644 +--- a/build/config/compiler/BUILD.gn ++++ b/build/config/compiler/BUILD.gn +@@ -1727,16 +1727,6 @@ config("runtime_library") { + configs += [ "//build/config/c++:runtime_library" ] + } + +- # Rust and C++ both provide intrinsics for LLVM to call for math operations. We +- # want to use the C++ intrinsics, not the ones in the Rust compiler_builtins +- # library. The Rust symbols are marked as weak, so that they can be replaced by +- # the C++ symbols. This config ensures the C++ symbols exist and are strong in +- # order to cause that replacement to occur by explicitly linking in clang's +- # compiler-rt library. 
+- if (is_clang && !is_nacl && !is_cronet_build) { +- configs += [ "//build/config/clang:compiler_builtins" ] +- } +- + # TODO(crbug.com/40570904): Come up with a better name for is POSIX + Fuchsia + # configuration. + if (is_posix || is_fuchsia) { diff --git a/nix/livekit-libwebrtc/mkSystemLibraries.nix b/nix/livekit-libwebrtc/mkSystemLibraries.nix new file mode 100644 index 0000000000000000000000000000000000000000..4293798faf9031ddc80f6c2a9e70a34b6fd56d62 --- /dev/null +++ b/nix/livekit-libwebrtc/mkSystemLibraries.nix @@ -0,0 +1,64 @@ +{ + brotli, + fontconfig, + freetype, + harfbuzz, + icu, + jsoncpp, + libpng, + libwebp, + libxml2, + libxslt, + minizip, + ffmpeg_6, +}: +{ + "brotli" = { + package = brotli; + path = "third_party/brotli/BUILD.gn"; + }; + "fontconfig" = { + package = fontconfig; + path = "third_party/fontconfig/BUILD.gn"; + }; + "freetype" = { + package = freetype; + path = "build/config/freetype/freetype.gni"; + }; + "harfbuzz-ng" = { + package = harfbuzz; + path = "third_party/harfbuzz-ng/harfbuzz.gni"; + }; + "jsoncpp" = { + package = jsoncpp; + path = "third_party/jsoncpp/BUILD.gn"; + }; + "icu" = { + package = icu; + path = "third_party/icu/BUILD.gn"; + }; + "libpng" = { + package = libpng; + path = "third_party/libpng/BUILD.gn"; + }; + "libwebp" = { + package = libwebp; + path = "third_party/libwebp/BUILD.gn"; + }; + "libxml" = { + package = libxml2; + path = "third_party/libxml/BUILD.gn"; + }; + "libxslt" = { + package = libxslt; + path = "third_party/libxslt/BUILD.gn"; + }; + "zlib" = { + package = minizip; + path = "third_party/zlib/BUILD.gn"; + }; + "ffmpeg" = { + package = ffmpeg_6; + path = "third_party/ffmpeg/BUILD.gn"; + }; +} diff --git a/nix/livekit-libwebrtc/package.nix b/nix/livekit-libwebrtc/package.nix new file mode 100644 index 0000000000000000000000000000000000000000..80ed3e18c58e9f3d1a4c5695b9fa7772a9bf51de --- /dev/null +++ b/nix/livekit-libwebrtc/package.nix @@ -0,0 +1,339 @@ +{ + stdenv, + clang, + gclient2nix, + lib, + 
gn, + fetchurl, + fetchpatch, + xcbuild, + python3, + ninja, + git, + cpio, + pkg-config, + glib, + alsa-lib, + pulseaudio, + nasm, + brotli, + fontconfig, + freetype, + harfbuzz, + icu, + jsoncpp, + libpng, + libwebp, + libxml2, + libxslt, + minizip, + ffmpeg_6, + libepoxy, + libgbm, + libGL, + libxcomposite, + libxdamage, + libxext, + libxfixes, + libxrandr, + libxtst, + pipewire, + xorg, +}: +let + platformMap = { + "x86_64" = "x64"; + "i686" = "x86"; + "arm" = "arm"; + "aarch64" = "arm64"; + }; + cpuName = stdenv.hostPlatform.parsed.cpu.name; + gnArch = platformMap."${cpuName}" or (throw "unsupported arch ${cpuName}"); + gnOs = + if stdenv.hostPlatform.isLinux then + "linux" + else if stdenv.hostPlatform.isDarwin then + "mac" + else + throw "unknown platform ${stdenv.hostPlatform.config}"; + boringSslSymbols = fetchurl { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/refs/tags/webrtc-dac8015-6/webrtc-sys/libwebrtc/boringssl_prefix_symbols.txt"; + hash = "sha256-dAweArv8zjsFPENEKi9mNBQkt4y+hh3rCqG6QZjRC20="; + }; + gnSystemLibraries = import ./mkSystemLibraries.nix { + inherit + brotli + fontconfig + freetype + harfbuzz + icu + jsoncpp + libpng + libwebp + libxml2 + libxslt + minizip + ffmpeg_6 + ; + }; +in +stdenv.mkDerivation { + pname = "livekit-libwebrtc"; + version = "137-unstable-2025-11-24"; + + gclientDeps = gclient2nix.importGclientDeps ./sources.json; + sourceRoot = "src"; + + patches = [ + # Adds missing dependencies to generated LICENSE + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/add_licenses.patch"; + hash = "sha256-9A4KyRW1K3eoQxsTbPX0vOnj66TCs2Fxjpsu5wO8mGI="; + }) + # Fixes the certificate chain, required for Let's Encrypt certs + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/ssl_verify_callback_with_native_handle.patch"; + hash = 
"sha256-RBvRcJzoKItpEbqpe07YZe1D1ZVGS12EnDSISldGy+0="; + }) + # Adds dependencies and features required by livekit + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/add_deps.patch"; + hash = "sha256-DwRtGdU5sppmiFsVuyhJoVCQrRl5JFmZJfxgUPhYXBg="; + }) + # Fix gcc-related errors + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/force_gcc.patch"; + hash = "sha256-1d73Pi1HkbunjYvp1NskUNE4xXbCmnh++rC6NrCJHbY="; + stripLen = 1; + extraPrefix = "build/"; + }) + # fix a gcc-related dav1d compile option + (fetchpatch { + url = "https://raw.githubusercontent.com/livekit/rust-sdks/a4343fe9d88fcc96f8e88959c90d509abbd0307b/webrtc-sys/libwebrtc/patches/david_disable_gun_source_macro.patch"; + hash = "sha256-RCZpeeSQHaxkL3dY2oFFXDjYeU0KHw7idQFONGge8+0="; + stripLen = 1; + extraPrefix = "third_party/"; + }) + # Required for dynamically linking to ffmpeg libraries and exposing symbols + ./0001-shared-libraries.patch + # Borrow a patch from chromium to prevent a build failure due to missing libclang libraries + ./chromium-129-rust.patch + ]; + + postPatch = '' + substituteInPlace .gn \ + --replace-fail "vpython3" "python3" + + substituteInPlace tools/generate_shim_headers/generate_shim_headers.py \ + --replace-fail "OFFICIAL_BUILD" "GOOGLE_CHROME_BUILD" + + substituteInPlace BUILD.gn \ + --replace-fail "rtc_static_library" "rtc_shared_library" \ + --replace-fail "complete_static_lib = true" "" + + substituteInPlace webrtc.gni \ + --replace-fail "!build_with_chromium && is_component_build" "false" + + substituteInPlace rtc_tools/BUILD.gn \ + --replace-fail "\":frame_analyzer\"," "" + + for lib in ${toString (builtins.attrNames gnSystemLibraries)}; do + if [ -d "third_party/$lib" ]; then + find "third_party/$lib" -type f \ + \! -path "third_party/$lib/chromium/*" \ + \! 
-path "third_party/$lib/google/*" \ + \! -path "third_party/harfbuzz-ng/utils/hb_scoped.h" \ + \! -regex '.*\.\(gn\|gni\|isolate\)' \ + \! -name 'LICENSE*' \ + \! -name 'COPYING*' \ + -delete + fi + done + + # Trick the update_rust.py script into thinking we have *this specific* rust available. + # It isn't actually needed for the libwebrtc build, but GN will fail if it isn't there. + mkdir -p third_party/rust-toolchain + (python3 tools/rust/update_rust.py --print-package-version || true) \ + | head -n 1 \ + | sed 's/.* expected Rust version is \([^ ]*\) .*/rustc 1.0 1234 (\1 chromium)/' \ + > third_party/rust-toolchain/VERSION + '' + + lib.optionalString stdenv.hostPlatform.isLinux '' + mkdir -p buildtools/linux64 + ln -sf ${lib.getExe gn} buildtools/linux64/gn + substituteInPlace build/toolchain/linux/BUILD.gn \ + --replace 'toolprefix = "aarch64-linux-gnu-"' 'toolprefix = ""' + '' + + lib.optionalString stdenv.hostPlatform.isDarwin '' + mkdir -p buildtools/mac + ln -sf ${lib.getExe gn} buildtools/mac/gn + chmod +x build/toolchain/apple/linker_driver.py + patchShebangs build/toolchain/apple/linker_driver.py + substituteInPlace build/toolchain/apple/toolchain.gni --replace-fail "/bin/cp -Rc" "cp -a" + ''; + + outputs = [ + "dev" + "out" + ]; + + nativeBuildInputs = + (builtins.concatLists ( + lib.mapAttrsToList ( + _: library: if (library.package ? 
dev) then [ library.package.dev ] else [ ] + ) gnSystemLibraries + )) + ++ [ + gclient2nix.gclientUnpackHook + gn + (python3.withPackages (ps: [ ps.setuptools ])) + ninja + git + cpio + pkg-config + ] + ++ lib.optionals stdenv.hostPlatform.isDarwin [ xcbuild ]; + + buildInputs = [ + nasm + ] + ++ (lib.mapAttrsToList (_: library: library.package) gnSystemLibraries) + ++ (lib.optionals stdenv.hostPlatform.isLinux [ + glib + alsa-lib + pulseaudio + libepoxy + libgbm + libGL + libxcomposite + libxdamage + libxext + libxfixes + libxrandr + libxtst + pipewire + xorg.libX11 + xorg.libXi + ]); + + preConfigure = '' + echo "generate_location_tags = true" >> build/config/gclient_args.gni + echo "0" > build/util/LASTCHANGE.committime + + python build/linux/unbundle/replace_gn_files.py \ + --system-libraries ${toString (builtins.attrNames gnSystemLibraries)} + ''; + + gnFlags = [ + "is_debug=false" + "rtc_include_tests=false" + ''target_os="${gnOs}"'' + ''target_cpu="${gnArch}"'' + "treat_warnings_as_errors=false" + "rtc_enable_protobuf=false" + "rtc_include_tests=false" + "rtc_build_examples=false" + "rtc_build_tools=false" + "rtc_libvpx_build_vp9=true" + "enable_libaom=true" + "use_dummy_lastchange=true" + "is_component_build=true" + "enable_stripping=true" + "rtc_use_h264=true" + "rtc_use_h265=true" + "use_custom_libcxx=false" + "use_rtti=true" + ] + ++ (lib.optionals stdenv.hostPlatform.isLinux [ + "rtc_use_pipewire=true" + "symbol_level=0" + "enable_iterator_debugging=false" + "rtc_use_x11=true" + "use_sysroot=false" + "use_custom_libcxx_for_host=false" + "use_libcxx_modules=false" + "use_llvm_libatomic=false" + "is_clang=false" + ]) + ++ (lib.optionals stdenv.hostPlatform.isDarwin [ + ''mac_deployment_target="${stdenv.hostPlatform.darwinMinVersion}"'' + "rtc_enable_symbol_export=true" + "rtc_enable_objc_symbol_export=true" + "rtc_include_dav1d_in_internal_decoder_factory=true" + "clang_use_chrome_plugins=false" + "use_lld=false" + ''clang_base_path="${clang}"'' + ]); + + 
ninjaFlags = [ + ":default" + ] + ++ lib.optionals stdenv.hostPlatform.isDarwin [ + "api/audio_codecs:builtin_audio_decoder_factory" + "api/task_queue:default_task_queue_factory" + "sdk:native_api" + "sdk:default_codec_factory_objc" + "pc:peer_connection" + "sdk:videocapture_objc" + "sdk:mac_framework_objc" + "desktop_capture_objc" + ]; + + postBuild = + lib.optionalString stdenv.hostPlatform.isLinux '' + objcopy --redefine-syms="${boringSslSymbols}" "libwebrtc.so" + '' + + '' + # Generate licenses + python3 "../../tools_webrtc/libs/generate_licenses.py" \ + --target ${if stdenv.hostPlatform.isDarwin then ":webrtc" else ":default"} $PWD $PWD + ''; + + installPhase = '' + runHook preInstall + + mkdir -p $out/lib + mkdir -p $dev/include + + install -m0644 obj/webrtc.ninja obj/modules/desktop_capture/desktop_capture.ninja args.gn LICENSE.md $dev + + pushd ../.. + find . -name "*.h" -print | cpio -pd $dev/include + find . -name "*.inc" -print | cpio -pd $dev/include + popd + '' + + lib.optionalString stdenv.hostPlatform.isLinux '' + install -m0644 libwebrtc.so libthird_party_boringssl.so $out/lib + '' + + lib.optionalString stdenv.hostPlatform.isDarwin '' + install -m0644 WebRTC.framework/Versions/A/WebRTC $out/lib/libwebrtc.dylib + install -m0644 libthird_party_boringssl.dylib $out/lib + '' + + '' + ln -s $out/lib $dev/lib + + runHook postInstall + ''; + + postFixup = lib.optionalString stdenv.hostPlatform.isDarwin '' + boringssl="$out/lib/libthird_party_boringssl.dylib" + webrtc="$out/lib/libwebrtc.dylib" + + install_name_tool -id "$boringssl" "$boringssl" + install_name_tool -id "$webrtc" "$webrtc" + install_name_tool -change @rpath/libthird_party_boringssl.dylib "$boringssl" "$webrtc" + ''; + + passthru.updateScript = ./update.sh; + + meta = { + description = "WebRTC library used by livekit"; + homepage = "https://github.com/livekit/rust-sdks/"; + license = lib.licenses.bsd3; + maintainers = with lib.maintainers; [ + WeetHet + niklaskorz + ]; + platforms = 
lib.platforms.linux ++ lib.platforms.darwin; + }; +} diff --git a/nix/livekit-libwebrtc/sources.json b/nix/livekit-libwebrtc/sources.json new file mode 100644 index 0000000000000000000000000000000000000000..2db785a840f1db0e86a255c5d8c540f5c566ac59 --- /dev/null +++ b/nix/livekit-libwebrtc/sources.json @@ -0,0 +1,372 @@ +{ + "src": { + "args": { + "hash": "sha256-+PgmOZD2Fi+SC66nguixhSwDsoXi4Sz693qOZZrLXm8=", + "owner": "webrtc-sdk", + "repo": "webrtc", + "rev": "624fa1dce239af785fc5fa9ca3b21b9250d3f835" + }, + "fetcher": "fetchFromGitHub" + }, + "src/base": { + "args": { + "hash": "sha256-MTG+pjMPY6/dqeEUy+xJVxPuICETtV98S+h/lFwGItg=", + "rev": "86c814633cf284bc8057a539bc722e2a672afe2f", + "url": "https://chromium.googlesource.com/chromium/src/base" + }, + "fetcher": "fetchFromGitiles" + }, + "src/build": { + "args": { + "hash": "sha256-qFZ12YFX4qxFEHU+VWOG+HDYYPXodgGz+iJ7WEc7cD8=", + "owner": "webrtc-sdk", + "repo": "build", + "rev": "01021e6c12636951a6b4e5342e16b2101b352367" + }, + "fetcher": "fetchFromGitHub" + }, + "src/buildtools": { + "args": { + "hash": "sha256-YWtmMKL1ydueNJ4XM/Pq+8OpqIFe5A6/vYyfZTv7/EI=", + "rev": "0f32cb9025766951122d4ed19aba87a94ded3f43", + "url": "https://chromium.googlesource.com/chromium/src/buildtools" + }, + "fetcher": "fetchFromGitiles" + }, + "src/testing": { + "args": { + "hash": "sha256-s65cABkyMo+FkAmilS67qM3VnrT7iYZg9scycrXzxyE=", + "rev": "a89c37d36bf80c05963727e28b9916835ae88d3a", + "url": "https://chromium.googlesource.com/chromium/src/testing" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party": { + "args": { + "hash": "sha256-q+xVOFlpC0vnLMSF9Z6ZRL7mb/cu8jBpsWjDNFFgiKM=", + "rev": "8062e0e102496ff14a8c58b586f014527424953d", + "url": "https://chromium.googlesource.com/chromium/src/third_party" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/boringssl/src": { + "args": { + "hash": "sha256-5Efqc8pLs4ZskXQGpFdTb5cw//v3+DR285m/DsrWSWA=", + "rev": "34492c89a8e381e0e856a686cc71b1eb5bd728db", + "url": 
"https://boringssl.googlesource.com/boringssl.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/breakpad/breakpad": { + "args": { + "hash": "sha256-0ynZuxIqBIpNkfD3Y9XdPFQr7HeQcsUO3lhnqvH+k8c=", + "rev": "232a723f5096ab02d53d87931efa485fa77d3b03", + "url": "https://chromium.googlesource.com/breakpad/breakpad.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/catapult": { + "args": { + "hash": "sha256-FIJZE1Qu1MLZA4qxB68k1NjhgSbFTjf57YF85JicVZw=", + "rev": "000f47cfa393d7f9557025a252862e2a61a60d44", + "url": "https://chromium.googlesource.com/catapult.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/ced/src": { + "args": { + "hash": "sha256-ySG74Rj2i2c/PltEgHVEDq+N8yd9gZmxNktc56zIUiY=", + "rev": "ba412eaaacd3186085babcd901679a48863c7dd5", + "url": "https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/clang-format/script": { + "args": { + "hash": "sha256-d9uweklBffiuCWEb03ti1eFLnMac2qRtvggzXY1n/RU=", + "rev": "37f6e68a107df43b7d7e044fd36a13cbae3413f2", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/clang/tools/clang-format.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/colorama/src": { + "args": { + "hash": "sha256-6ZTdPYSHdQOLYMSnE+Tp7PgsVTs3U2awGu9Qb4Rg/tk=", + "rev": "3de9f013df4b470069d03d250224062e8cf15c49", + "url": "https://chromium.googlesource.com/external/colorama.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/compiler-rt/src": { + "args": { + "hash": "sha256-yo7BFGgwJNScsXwnCAu8gFBdZVS8/HJplzUk2e73mVg=", + "rev": "57213f125d03209892fed26189feb3b736e96735", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/crc32c/src": { + "args": { + "hash": "sha256-KBraGaO5LmmPP+p8RuDogGldbTWdNDK+WzF4Q09keuE=", + "rev": 
"d3d60ac6e0f16780bcfcc825385e1d338801a558", + "url": "https://chromium.googlesource.com/external/github.com/google/crc32c.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/dav1d/libdav1d": { + "args": { + "hash": "sha256-+DY4p41VuAlx7NvOfXjWzgEhvtpebjkjbFwSYOzSjv4=", + "rev": "8d956180934f16244bdb58b39175824775125e55", + "url": "https://chromium.googlesource.com/external/github.com/videolan/dav1d.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/depot_tools": { + "args": { + "hash": "sha256-DWQyYtpAAGiryeGJzIWlUwY5yn4cNwXY957vlPDUNak=", + "rev": "fa8fc854e1766b86f10c9a15902cf3cc23adaac2", + "url": "https://chromium.googlesource.com/chromium/tools/depot_tools.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/ffmpeg": { + "args": { + "hash": "sha256-hNzQZQxaa2Wtl7GWWF852cFmmXy4pc15Pp0d59TTfnI=", + "rev": "01f23648c6b84de6c0f717fa4e1816f53b9ee72e", + "url": "https://chromium.googlesource.com/chromium/third_party/ffmpeg.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/flatbuffers/src": { + "args": { + "hash": "sha256-tbc45o0MbMvK5XqRUJt5Eg8BU6+TJqlmwFgQhHq6wRM=", + "rev": "8db59321d9f02cdffa30126654059c7d02f70c32", + "url": "https://chromium.googlesource.com/external/github.com/google/flatbuffers.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/fontconfig/src": { + "args": { + "hash": "sha256-W5WIgC6A52kY4fNkbsDEa0o+dfd97Rl5NKfgnIRpI00=", + "rev": "14d466b30a8ab4a9d789977ed94f2c30e7209267", + "url": "https://chromium.googlesource.com/external/fontconfig.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/freetype/src": { + "args": { + "hash": "sha256-Vlin6Z+QisUyj6R+TclVOm8x6673YhUIWob9Ih6gzC8=", + "rev": "1da283b8ae6d6b94f34a5c4b8c1227adc9dbb1d8", + "url": "https://chromium.googlesource.com/chromium/src/third_party/freetype2.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/fuzztest/src": { + "args": { + "hash": 
"sha256-L2QG0pUmGjGdtdlivxYfxSqO9YaVHpIT6lvJwBMTxMw=", + "rev": "b10387fdbbca18192f85eaa5323a59f44bf9c468", + "url": "https://chromium.googlesource.com/external/github.com/google/fuzztest.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/google_benchmark/src": { + "args": { + "hash": "sha256-cH8s1gP6kCcojAAfTt5iQCVqiAaSooNk4BdaILujM3w=", + "rev": "761305ec3b33abf30e08d50eb829e19a802581cc", + "url": "https://chromium.googlesource.com/external/github.com/google/benchmark.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/googletest/src": { + "args": { + "hash": "sha256-QT9PQ9bF+eCPfRLkcHpH4jc0UZfGPc98fHf8QDV5bZg=", + "rev": "cd430b47a54841ec45d64d2377d7cabaf0eba610", + "url": "https://chromium.googlesource.com/external/github.com/google/googletest.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/grpc/src": { + "args": { + "hash": "sha256-xivmP36VCSbiMAV3PDUjzCrF+AJzFXJdMe5e2q9yW/k=", + "rev": "957c9f95224b1e1318c0ecb98d0e7584ea5ccff2", + "url": "https://chromium.googlesource.com/external/github.com/grpc/grpc.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/gtest-parallel": { + "args": { + "hash": "sha256-VUuk5tBTh+aU2dxVWUF1FePWlKUJaWSiGSXk/J5zgHw=", + "rev": "96f4f904922f9bf66689e749c40f314845baaac8", + "url": "https://chromium.googlesource.com/external/github.com/google/gtest-parallel" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/harfbuzz-ng/src": { + "args": { + "hash": "sha256-lNnCtgIegUy4DLhYaGZXcEaFw83KWAHoKpz69AEsWp4=", + "rev": "9f83bbbe64654b45ba5bb06927ff36c2e7588495", + "url": "https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/icu": { + "args": { + "hash": "sha256-eGI/6wk6IOUPvX7pRTm4VJk1CqkkxalTu84L36i/D6k=", + "rev": "4c8cc4b365a505ce35be1e0bd488476c5f79805d", + "url": "https://chromium.googlesource.com/chromium/deps/icu.git" + }, + "fetcher": "fetchFromGitiles" + }, + 
"src/third_party/instrumented_libs": { + "args": { + "hash": "sha256-8kokdsnn5jD9KgM/6g0NuITBbKkGXWEM4BMr1nCrfdU=", + "rev": "69015643b3f68dbd438c010439c59adc52cac808", + "url": "https://chromium.googlesource.com/chromium/third_party/instrumented_libraries.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/jsoncpp/source": { + "args": { + "hash": "sha256-bSLNcoYBz3QCt5VuTR056V9mU2PmBuYBa0W6hFg2m8Q=", + "rev": "42e892d96e47b1f6e29844cc705e148ec4856448", + "url": "https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libFuzzer/src": { + "args": { + "hash": "sha256-Lb+HczYax0T7qvC0/Nwhc5l2szQTUYDouWRMD/Qz7sA=", + "rev": "e31b99917861f891308269c36a32363b120126bb", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/compiler-rt/lib/fuzzer.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libaom/source/libaom": { + "args": { + "hash": "sha256-ngVZ+xK0b+jKUmawteQ7VFAQzoebX4jqZ3hP9pW+Q0Q=", + "rev": "a23a4799ec2d7dd6e436c7b64a34553773014ed7", + "url": "https://aomedia.googlesource.com/aom.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libc++/src": { + "args": { + "hash": "sha256-lqeuVUgeAKm1pxo+w1vyUbBkBXBzLCQ+Lfu44neKLPo=", + "rev": "917609c669e43edc850eeb192a342434a54e1dfd", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libc++abi/src": { + "args": { + "hash": "sha256-X9cAbyd8ZPSwqOGhPYwIZ6b9E3tVwAuAYZKMgbZQxgk=", + "rev": "f2a7f2987f9dcdf8b04c2d8cd4dcb186641a7c3e", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libjpeg_turbo": { + "args": { + "hash": "sha256-Ig+tmprZDvlf/M72/DTar2pbxat9ZElgSqdXdoM0lPs=", + "rev": "e14cbfaa85529d47f9f55b0f104a579c1061f9ad", + "url": 
"https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libsrtp": { + "args": { + "hash": "sha256-bkG1+ss+1a2rCHGwZjhvf5UaNVbPPZJt9HZSIPBKGwM=", + "rev": "a52756acb1c5e133089c798736dd171567df11f5", + "url": "https://chromium.googlesource.com/chromium/deps/libsrtp.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libunwind/src": { + "args": { + "hash": "sha256-XdFKn+cGOxA0fHkVMG9UAhCmpML44ocoyHB7XnumX7o=", + "rev": "81e2cb40a70de2b6978e6d8658891ded9a77f7e3", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libvpx/source/libvpx": { + "args": { + "hash": "sha256-NIGpzP6elcPScHJlZmnPHJdmXsuHcbuELT0C4Ha5PcA=", + "rev": "ff1d193f4b9dfa9b2ced51efbb6ec7a69e58e88c", + "url": "https://chromium.googlesource.com/webm/libvpx.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/libyuv": { + "args": { + "hash": "sha256-b/EYCWBQvsNoGhea31DPBKpG8eouf0OBi5TgdHDHs9A=", + "rev": "1e40e34573c3861480d107cd4a4ce290df79951f", + "url": "https://chromium.googlesource.com/libyuv/libyuv.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/llvm-libc/src": { + "args": { + "hash": "sha256-yNNx3gOGafMNvZ+aebDKHVj6QM8g0zt0d69PWlWLkyk=", + "rev": "912274164f0877ca917c06e8484ad3be1784833a", + "url": "https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libc.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/lss": { + "args": { + "hash": "sha256-rhp4EcZYdgSfu9cqn+zxxGx6v2IW8uX8V+iA0UfZhFY=", + "rev": "ed31caa60f20a4f6569883b2d752ef7522de51e0", + "url": "https://chromium.googlesource.com/linux-syscall-support.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/nasm": { + "args": { + "hash": "sha256-neYrS4kQ76ihUh22Q3uPR67Ld8+yerA922YSZU1KxJs=", + "rev": "9f916e90e6fc34ec302573f6ce147e43e33d68ca", + "url": 
"https://chromium.googlesource.com/chromium/deps/nasm.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/openh264/src": { + "args": { + "hash": "sha256-tf0lnxATCkoq+xRti6gK6J47HwioAYWnpEsLGSA5Xdg=", + "rev": "652bdb7719f30b52b08e506645a7322ff1b2cc6f", + "url": "https://chromium.googlesource.com/external/github.com/cisco/openh264" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/perfetto": { + "args": { + "hash": "sha256-I0qiAh3VliVop+3S2/tP6VwCAJOk0Vu7xy8vHJZ1w2A=", + "rev": "a54dd38d60593129ae56d400f1a72860670abea4", + "url": "https://chromium.googlesource.com/external/github.com/google/perfetto.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/protobuf-javascript/src": { + "args": { + "hash": "sha256-zq86SrDASl6aYPFPijRZp03hJqXUFz2Al/KkiNq7i0M=", + "rev": "eb785a9363664a402b6336dfe96aad27fb33ffa8", + "url": "https://chromium.googlesource.com/external/github.com/protocolbuffers/protobuf-javascript" + }, + "fetcher": "fetchFromGitiles" + }, + "src/third_party/re2/src": { + "args": { + "hash": "sha256-f/k2rloV2Nwb0KuJGUX4SijFxAx69EXcsXOG4vo+Kis=", + "rev": "c84a140c93352cdabbfb547c531be34515b12228", + "url": "https://chromium.googlesource.com/external/github.com/google/re2.git" + }, + "fetcher": "fetchFromGitiles" + }, + "src/tools": { + "args": { + "hash": "sha256-kZFZl8SC9nZIIOVtNl/5H4huw6BCBsBkJVJ4gaUmly4=", + "rev": "ffcbc837bbb14d80d09147c2af5302ff6bd4bd69", + "url": "https://chromium.googlesource.com/chromium/src/tools" + }, + "fetcher": "fetchFromGitiles" + } +} diff --git a/nix/livekit-libwebrtc/update.sh b/nix/livekit-libwebrtc/update.sh new file mode 100644 index 0000000000000000000000000000000000000000..b28c405b300280b25ab7aa3b85936d0f3ae75878 --- /dev/null +++ b/nix/livekit-libwebrtc/update.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env nix-shell +#!nix-shell -i bash -p gitMinimal curl gojq gclient2nix + +set -eou pipefail +package="livekit-libwebrtc" +pkg_dir="$(dirname "$0")" +nixpkgs="$(git rev-parse --show-toplevel)" + 
+gh-curl () { + curl --silent ${GITHUB_TOKEN:+-u ":$GITHUB_TOKEN"} "$1" +} + +# Get the current version part before the "-unstable-" for the branch name. +# To manually update to a new major version, you can also invoke the script +# with the new major version, e.g., UPDATE_MAJOR_VERSION=137. +old_version="${UPDATE_NIX_OLD_VERSION:-$(nix-instantiate --eval -E "(import \"$nixpkgs\" { }).$package.version" | tr -d '"')}" +major_version="${UPDATE_MAJOR_VERSION:-${old_version%%-unstable-*}}" +branch="m${major_version}_release" + +# Fetch the current HEAD commit of the release branch +head="$(gh-curl "https://api.github.com/repos/webrtc-sdk/webrtc/git/refs/heads/$branch" | gojq '.object.sha' --raw-output)" +if gojq -e ".src.args.rev == \"$head\"" "$pkg_dir/sources.json"; then + echo "$package is already up-to-date: $head" + exit 0 +fi + +# Get the commit's date for the version field +date="$(gh-curl "https://api.github.com/repos/webrtc-sdk/webrtc/git/commits/$head" | gojq '.committer.date| split("T") | .[0]' --raw-output)" + +echo "Updating sources.json to $head" +gclient2nix generate --root src "https://github.com/webrtc-sdk/webrtc@$head" > "$pkg_dir/sources.json" + +sed -i "s|$old_version|$major_version-unstable-$date|g" "$pkg_dir/package.nix" From 73ceaec7f5075c3934116d3d83d2e615d664768a Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Fri, 27 Feb 2026 22:48:14 +0100 Subject: [PATCH 175/548] ci: Use clang/++ in Build docs step to build libwebrtc (#50353) Release Notes: - N/A --- .github/workflows/deploy_cloudflare.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index 2650cce1406b16e691565077b95d07730845664b..cb0dfc2187a06cf62255b049b7e5fe74b10c505a 100644 --- a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -23,6 +23,8 @@ jobs: - name: Build docs uses: ./.github/actions/build_docs env: + CC: clang + CXX: clang++ DOCS_AMPLITUDE_API_KEY: 
${{ secrets.DOCS_AMPLITUDE_API_KEY }} - name: Deploy Docs From 324b1d5263b128ff302351f0f8386d6e61e0ffd8 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Sat, 28 Feb 2026 00:11:06 +0100 Subject: [PATCH 176/548] acp: Fix session loading paths for wsl (#50307) Release Notes: - N/A --- crates/agent_ui/src/connection_view.rs | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index f5efa8aa2834829630bd60dd3ef012a92a33cb17..9b3c3cd4270722ca309de3f18c0a61894029c3df 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -570,13 +570,21 @@ impl ConnectionView { resume .cwd .as_ref() - .and_then(|cwd| util::paths::normalize_lexically(cwd).ok()) .filter(|cwd| { - worktree_roots - .iter() - .any(|root| cwd.starts_with(root.as_ref())) + // Validate with the normalized path (rejects `..` traversals), + // but return the original cwd to preserve its path separators. + // On Windows, `normalize_lexically` rebuilds the path with + // backslashes via `PathBuf::push`, which would corrupt + // forward-slash Linux paths used by WSL agents. 
+ util::paths::normalize_lexically(cwd) + .ok() + .is_some_and(|normalized| { + worktree_roots + .iter() + .any(|root| normalized.starts_with(root.as_ref())) + }) }) - .map(|path| path.into()) + .map(|path| Arc::from(path.as_path())) }) .or_else(|| worktree_roots.first().cloned()) .unwrap_or_else(|| paths::home_dir().as_path().into()); From 1123140e40f47ad7b12815c16de0d49e42e36617 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Fri, 27 Feb 2026 20:35:59 -0300 Subject: [PATCH 177/548] agent_ui: Add round of improvements to subagent UI (#50357) --- crates/agent/src/agent.rs | 2 +- crates/agent/src/tools/spawn_agent_tool.rs | 10 +- .../src/connection_view/thread_view.rs | 242 +++++++++++------- 3 files changed, 153 insertions(+), 101 deletions(-) diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 85b943da4bb65b038100b2b842d81bc34662325d..8de0aaee0c05c07e0b3c86a1b7570a1a61dc5332 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -1822,7 +1822,7 @@ impl SubagentHandle for NativeSubagentHandle { .map(|m| m.to_markdown()) .context("No response from subagent") }), - SubagentPromptResult::Cancelled => Err(anyhow!("User cancelled")), + SubagentPromptResult::Cancelled => Err(anyhow!("User canceled")), SubagentPromptResult::Error(message) => Err(anyhow!("{message}")), SubagentPromptResult::ContextWindowWarning => { thread.update(cx, |thread, cx| thread.cancel(cx)).await; diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index 2c5c40c704464639ca43b7da32ab8ae0239e3b6a..7713da050996f6fb4c07d56f51a218dfb88d5db5 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -142,9 +142,13 @@ impl AgentTool for SpawnAgentTool { } Err(e) => { let error = e.to_string(); - event_stream.update_fields( - acp::ToolCallUpdateFields::new().content(vec![error.clone().into()]), - ); + // workaround for now 
because the agent loop will always mark this as ToolCallStatus::Failed + let canceled = error == "User canceled"; + event_stream.update_fields(acp::ToolCallUpdateFields::new().content(vec![ + acp::ToolCallContent::Content(acp::Content::new(error.clone()).meta( + acp::Meta::from_iter([("cancelled".into(), canceled.into())]), + )), + ])); Err(SpawnAgentToolOutput::Error { session_id: Some(subagent_session_id), error, diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 20d860c5c14fd8c5c50be3b2bc8eefb89d9d7db6..9f38ba9ba778b6c23f7a1ee4adecea501c98bfdb 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -1,3 +1,4 @@ +use acp_thread::ContentBlock; use cloud_api_types::{SubmitAgentThreadFeedbackBody, SubmitAgentThreadFeedbackCommentsBody}; use editor::actions::OpenExcerpts; use gpui::{Corner, List}; @@ -3840,6 +3841,7 @@ impl ThreadView { entry_ix, tool_call, &self.focus_handle(cx), + false, window, cx, ) @@ -4689,6 +4691,7 @@ impl ThreadView { terminal: &Entity, tool_call: &ToolCall, focus_handle: &FocusHandle, + is_subagent: bool, window: &Window, cx: &Context, ) -> AnyElement { @@ -4907,12 +4910,14 @@ impl ThreadView { .and_then(|entry| entry.terminal(terminal)); v_flex() - .my_1p5() - .mx_5() - .border_1() - .when(tool_failed || command_failed, |card| card.border_dashed()) - .border_color(border_color) - .rounded_md() + .when(!is_subagent, |this| { + this.my_1p5() + .mx_5() + .border_1() + .when(tool_failed || command_failed, |card| card.border_dashed()) + .border_color(border_color) + .rounded_md() + }) .overflow_hidden() .child( v_flex() @@ -4989,6 +4994,7 @@ impl ThreadView { entry_ix: usize, tool_call: &ToolCall, focus_handle: &FocusHandle, + is_subagent: bool, window: &Window, cx: &Context, ) -> Div { @@ -5013,6 +5019,7 @@ impl ThreadView { terminal, tool_call, focus_handle, + is_subagent, window, cx, ) @@ -5023,6 +5030,7 
@@ impl ThreadView { entry_ix, tool_call, focus_handle, + is_subagent, window, cx, )) @@ -5036,6 +5044,7 @@ impl ThreadView { entry_ix: usize, tool_call: &ToolCall, focus_handle: &FocusHandle, + is_subagent: bool, window: &Window, cx: &Context, ) -> Div { @@ -5256,7 +5265,9 @@ impl ThreadView { v_flex() .map(|this| { - if use_card_layout { + if is_subagent { + this + } else if use_card_layout { this.my_1p5() .rounded_md() .border_1() @@ -5268,14 +5279,16 @@ impl ThreadView { this.my_1() } }) - .map(|this| { - if has_location && !use_card_layout { - this.ml_4() - } else { - this.ml_5() - } + .when(!is_subagent, |this| { + this.map(|this| { + if has_location && !use_card_layout { + this.ml_4() + } else { + this.ml_5() + } + }) + .mr_5() }) - .mr_5() .map(|this| { if is_terminal_tool { let label_source = tool_call.label.read(cx).source(); @@ -6069,6 +6082,7 @@ impl ThreadView { terminal, tool_call, focus_handle, + false, window, cx, ), @@ -6352,6 +6366,15 @@ impl ThreadView { .map(|log| log.read(cx).changed_buffers(cx)) .unwrap_or_default(); + let is_pending_tool_call = thread + .as_ref() + .and_then(|thread| { + self.conversation + .read(cx) + .pending_tool_call(thread.read(cx).session_id(), cx) + }) + .is_some(); + let is_expanded = self.expanded_tool_calls.contains(&tool_call.id); let files_changed = changed_buffers.len(); let diff_stats = DiffStats::all_files(&changed_buffers, cx); @@ -6360,11 +6383,20 @@ impl ThreadView { tool_call.status, ToolCallStatus::Pending | ToolCallStatus::InProgress ); - let is_canceled_or_failed = matches!( + + let is_failed = matches!( tool_call.status, - ToolCallStatus::Canceled | ToolCallStatus::Failed | ToolCallStatus::Rejected + ToolCallStatus::Failed | ToolCallStatus::Rejected ); + let is_cancelled = matches!(tool_call.status, ToolCallStatus::Canceled) + || tool_call.content.iter().any(|c| match c { + ToolCallContent::ContentBlock(ContentBlock::Markdown { markdown }) => { + markdown.read(cx).source() == "User canceled" + } + _ => 
false, + }); + let thread_title = thread .as_ref() .map(|t| t.read(cx).title()) @@ -6373,29 +6405,49 @@ impl ThreadView { let has_tool_call_label = !tool_call_label.is_empty(); let has_title = thread_title.is_some() || has_tool_call_label; - let has_no_title_or_canceled = !has_title || is_canceled_or_failed; + let has_no_title_or_canceled = !has_title || is_failed || is_cancelled; let title: SharedString = if let Some(thread_title) = thread_title { thread_title } else if !tool_call_label.is_empty() { tool_call_label.into() - } else if is_canceled_or_failed { + } else if is_cancelled { "Subagent Canceled".into() + } else if is_failed { + "Subagent Failed".into() } else { - "Spawning agent…".into() + "Spawning Agent…".into() }; let card_header_id = format!("subagent-header-{}", entry_ix); + let status_icon = format!("status-icon-{}", entry_ix); let diff_stat_id = format!("subagent-diff-{}", entry_ix); let icon = h_flex().w_4().justify_center().child(if is_running { SpinnerLabel::new() .size(LabelSize::Small) .into_any_element() - } else if is_canceled_or_failed { - Icon::new(IconName::Close) - .size(IconSize::Small) - .color(Color::Error) + } else if is_cancelled { + div() + .id(status_icon) + .child( + Icon::new(IconName::Circle) + .size(IconSize::Small) + .color(Color::Custom( + cx.theme().colors().icon_disabled.opacity(0.5), + )), + ) + .tooltip(Tooltip::text("Subagent Cancelled")) + .into_any_element() + } else if is_failed { + div() + .id(status_icon) + .child( + Icon::new(IconName::Close) + .size(IconSize::Small) + .color(Color::Error), + ) + .tooltip(Tooltip::text("Subagent Failed")) .into_any_element() } else { Icon::new(IconName::Check) @@ -6414,6 +6466,8 @@ impl ThreadView { "Click to Preview" }; + let error_message = self.subagent_error_message(&tool_call.status, tool_call, cx); + v_flex() .w_full() .rounded_md() @@ -6474,7 +6528,7 @@ impl ThreadView { ) }), ) - .when(!has_no_title_or_canceled, |this| { + .when(!has_no_title_or_canceled && 
!is_pending_tool_call, |this| { this.tooltip(move |_, cx| { Tooltip::with_meta( title.to_string(), @@ -6484,7 +6538,7 @@ impl ThreadView { ) }) }) - .when(has_expandable_content, |this| { + .when(has_expandable_content && !is_pending_tool_call, |this| { this.cursor_pointer() .hover(|s| s.bg(cx.theme().colors().element_hover)) .child( @@ -6546,14 +6600,16 @@ impl ThreadView { if let Some((entry_ix, tool_call)) = thread.read(cx).tool_call(&subagent_tool_call_id) { - this.child(thread_view.read(cx).render_any_tool_call( - active_session_id, - entry_ix, - tool_call, - focus_handle, - window, - cx, - )) + this.child(Divider::horizontal().color(DividerColor::Border)) + .child(thread_view.read(cx).render_any_tool_call( + active_session_id, + entry_ix, + tool_call, + focus_handle, + true, + window, + cx, + )) } else { this } @@ -6567,6 +6623,14 @@ impl ThreadView { window, cx, )) + .when_some(error_message, |this, message| { + this.child( + Callout::new() + .severity(Severity::Error) + .icon(IconName::XCircle) + .title(message), + ) + }) .child( h_flex() .id(entry_ix) @@ -6574,8 +6638,8 @@ impl ThreadView { .w_full() .justify_center() .border_t_1() - .when(is_canceled_or_failed, |this| this.border_dashed()) - .border_color(cx.theme().colors().border_variant) + .when(is_failed, |this| this.border_dashed()) + .border_color(self.tool_card_border_color(cx)) .hover(|s| s.bg(cx.theme().colors().element_hover)) .child( Icon::new(IconName::Maximize) @@ -6611,6 +6675,30 @@ impl ThreadView { ) -> impl IntoElement { const MAX_PREVIEW_ENTRIES: usize = 8; + let parent_thread = self.thread.read(cx); + let mut started_subagent_count = 0usize; + let mut turn_has_our_call = false; + for entry in parent_thread.entries().iter() { + match entry { + AgentThreadEntry::UserMessage(_) => { + if turn_has_our_call { + break; + } + started_subagent_count = 0; + turn_has_our_call = false; + } + AgentThreadEntry::ToolCall(tc) + if tc.is_subagent() && !matches!(tc.status, ToolCallStatus::Pending) => + 
{ + started_subagent_count += 1; + if tc.id == tool_call.id { + turn_has_our_call = true; + } + } + _ => {} + } + } + let subagent_view = thread_view.read(cx); let session_id = subagent_view.thread.read(cx).session_id().clone(); @@ -6635,7 +6723,11 @@ impl ThreadView { let entries = subagent_view.thread.read(cx).entries(); let total_entries = entries.len(); - let start_ix = total_entries.saturating_sub(MAX_PREVIEW_ENTRIES); + let start_ix = if started_subagent_count > 1 { + total_entries.saturating_sub(MAX_PREVIEW_ENTRIES) + } else { + 0 + }; let scroll_handle = self .subagent_scroll_handles @@ -6656,35 +6748,7 @@ impl ThreadView { }) .collect(); - let error_message = - self.subagent_error_message(subagent_view, &tool_call.status, tool_call, cx); - - let parent_thread = self.thread.read(cx); - let mut started_subagent_count = 0usize; - let mut turn_has_our_call = false; - for entry in parent_thread.entries().iter() { - match entry { - AgentThreadEntry::UserMessage(_) => { - if turn_has_our_call { - break; - } - started_subagent_count = 0; - turn_has_our_call = false; - } - AgentThreadEntry::ToolCall(tc) - if tc.is_subagent() && !matches!(tc.status, ToolCallStatus::Pending) => - { - started_subagent_count += 1; - if tc.id == tool_call.id { - turn_has_our_call = true; - } - } - _ => {} - } - } - v_flex() - .relative() .w_full() .border_t_1() .when(is_canceled_or_failed, |this| this.border_dashed()) @@ -6692,22 +6756,12 @@ impl ThreadView { .overflow_hidden() .child( div() - .id(format!("subagent-entries-{}", session_id)) - .flex_1() - .min_h_0() .pb_1() - .overflow_hidden() + .min_h_0() + .id(format!("subagent-entries-{}", session_id)) .track_scroll(&scroll_handle) .children(rendered_entries), ) - .when_some(error_message, |this, message| { - this.child( - Callout::new() - .severity(Severity::Error) - .icon(IconName::XCircle) - .title(message), - ) - }) .when(started_subagent_count > 1, |this| { this.h_56().child(overlay) }) @@ -6716,37 +6770,31 @@ impl ThreadView { 
fn subagent_error_message( &self, - subagent_view: &ThreadView, status: &ToolCallStatus, tool_call: &ToolCall, cx: &App, ) -> Option { - if matches!(status, ToolCallStatus::Canceled | ToolCallStatus::Rejected) { - return None; - } - - subagent_view - .thread_error - .as_ref() - .and_then(|e| match e { - ThreadError::Refusal => Some("The agent refused to respond to this prompt.".into()), - ThreadError::Other { message, .. } => Some(message.clone()), - ThreadError::PaymentRequired | ThreadError::AuthenticationRequired(_) => None, - }) - .or_else(|| { - tool_call.content.iter().find_map(|content| { - if let ToolCallContent::ContentBlock(block) = content { - if let acp_thread::ContentBlock::Markdown { markdown } = block { - let source = markdown.read(cx).source().to_string(); - if !source.is_empty() { + if matches!(status, ToolCallStatus::Failed) { + tool_call.content.iter().find_map(|content| { + if let ToolCallContent::ContentBlock(block) = content { + if let acp_thread::ContentBlock::Markdown { markdown } = block { + let source = markdown.read(cx).source().to_string(); + if !source.is_empty() { + if source == "User canceled" { + return None; + } else { return Some(SharedString::from(source)); } } } - None - }) + } + None }) + } else { + None + } } + fn render_rules_item(&self, cx: &Context) -> Option { let project_context = self .as_native_thread(cx)? 
From fb01b1953a32f7ce64aaca8b77f7ad5b04dda2b8 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Fri, 27 Feb 2026 18:09:45 -0600 Subject: [PATCH 178/548] ep: Replace snapshot events with settled event (#50356) Release Notes: - N/A --------- Co-authored-by: Max Co-authored-by: Max Brunsfeld --- crates/edit_prediction/Cargo.toml | 1 + crates/edit_prediction/src/edit_prediction.rs | 145 ++++++++++++++- .../src/edit_prediction_tests.rs | 176 +++++++++++++++++- crates/edit_prediction/src/zeta.rs | 49 +---- 4 files changed, 329 insertions(+), 42 deletions(-) diff --git a/crates/edit_prediction/Cargo.toml b/crates/edit_prediction/Cargo.toml index ace898fb6004668fbde916ab4b0447d8e5b8a553..9f867584b57c8aed86f7003cca3a2b034c184476 100644 --- a/crates/edit_prediction/Cargo.toml +++ b/crates/edit_prediction/Cargo.toml @@ -21,6 +21,7 @@ arrayvec.workspace = true brotli.workspace = true buffer_diff.workspace = true client.workspace = true +clock.workspace = true cloud_api_types.workspace = true cloud_llm_client.workspace = true collections.workspace = true diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 836b4a477f62e2da6674568d0a7a1ccfc2b603cf..02ffcbe065e8b0334ab7c200c0e43b817cdad416 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -40,7 +40,7 @@ use settings::{ }; use std::collections::{VecDeque, hash_map}; use std::env; -use text::Edit; +use text::{AnchorRangeExt, Edit}; use workspace::Workspace; use zeta_prompt::{ZetaFormat, ZetaPromptInput}; @@ -103,6 +103,9 @@ const CHANGE_GROUPING_LINE_SPAN: u32 = 8; const LAST_CHANGE_GROUPING_TIME: Duration = Duration::from_secs(1); const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_choice"; const REJECT_REQUEST_DEBOUNCE: Duration = Duration::from_secs(15); +const EDIT_PREDICTION_SETTLED_EVENT: &str = "Edit Prediction Settled"; +const EDIT_PREDICTION_SETTLED_TTL: Duration = 
Duration::from_secs(60 * 5); +const EDIT_PREDICTION_SETTLED_QUIESCENCE: Duration = Duration::from_secs(10); pub struct Zeta2FeatureFlag; pub struct EditPredictionJumpsFeatureFlag; @@ -142,8 +145,11 @@ pub struct EditPredictionStore { pub mercury: Mercury, data_collection_choice: DataCollectionChoice, reject_predictions_tx: mpsc::UnboundedSender, + settled_predictions_tx: mpsc::UnboundedSender, shown_predictions: VecDeque, rated_predictions: HashSet, + #[cfg(test)] + settled_event_callback: Option>, } #[derive(Copy, Clone, PartialEq, Eq)] @@ -482,9 +488,18 @@ impl std::ops::Deref for BufferEditPrediction<'_> { } } +#[derive(Clone)] +struct PendingSettledPrediction { + request_id: EditPredictionId, + editable_anchor_range: Range, + enqueued_at: Instant, + last_edit_at: Instant, +} + struct RegisteredBuffer { file: Option>, snapshot: TextBufferSnapshot, + pending_predictions: Vec, last_position: Option, _subscriptions: [gpui::Subscription; 2], } @@ -676,6 +691,12 @@ impl EditPredictionStore { }) .detach(); + let (settled_predictions_tx, settled_predictions_rx) = mpsc::unbounded(); + cx.spawn(async move |this, cx| { + Self::run_settled_predictions_worker(this, settled_predictions_rx, cx).await; + }) + .detach(); + let this = Self { projects: HashMap::default(), client, @@ -701,8 +722,11 @@ impl EditPredictionStore { data_collection_choice, reject_predictions_tx: reject_tx, + settled_predictions_tx, rated_predictions: Default::default(), shown_predictions: Default::default(), + #[cfg(test)] + settled_event_callback: None, }; this @@ -1091,6 +1115,7 @@ impl EditPredictionStore { snapshot, file, last_position: None, + pending_predictions: Vec::new(), _subscriptions: [ cx.subscribe(buffer, { let project = project.downgrade(); @@ -1139,6 +1164,7 @@ impl EditPredictionStore { let mut total_inserted = 0usize; let mut edit_range: Option> = None; let mut last_offset: Option = None; + let now = cx.background_executor().now(); for (edit, anchor_range) in 
new_snapshot.anchored_edits_since::(&old_snapshot.version) @@ -1157,6 +1183,12 @@ impl EditPredictionStore { return; }; + for pending_prediction in &mut registered_buffer.pending_predictions { + if edit_range.overlaps(&pending_prediction.editable_anchor_range, &new_snapshot) { + pending_prediction.last_edit_at = now; + } + } + let action_type = match (total_deleted, total_inserted, num_edits) { (0, ins, n) if ins == n => UserActionType::InsertChar, (0, _, _) => UserActionType::InsertSelection, @@ -1183,7 +1215,6 @@ impl EditPredictionStore { let events = &mut project_state.events; - let now = cx.background_executor().now(); if let Some(last_event) = project_state.last_event.as_mut() { let is_next_snapshot_of_same_buffer = old_snapshot.remote_id() == last_event.new_snapshot.remote_id() @@ -1386,6 +1417,116 @@ impl EditPredictionStore { } } + async fn run_settled_predictions_worker( + this: WeakEntity, + mut rx: UnboundedReceiver, + cx: &mut AsyncApp, + ) { + let mut next_wake_time: Option = None; + loop { + let now = cx.background_executor().now(); + if let Some(wake_time) = next_wake_time.take() { + cx.background_executor() + .timer(wake_time.duration_since(now)) + .await; + } else { + let Some(new_enqueue_time) = rx.next().await else { + break; + }; + next_wake_time = Some(new_enqueue_time + EDIT_PREDICTION_SETTLED_QUIESCENCE); + while rx.next().now_or_never().flatten().is_some() {} + continue; + } + + let Some(this) = this.upgrade() else { + break; + }; + + let now = cx.background_executor().now(); + + let mut oldest_edited_at = None; + + this.update(cx, |this, _| { + for (_, project_state) in this.projects.iter_mut() { + for (_, registered_buffer) in project_state.registered_buffers.iter_mut() { + registered_buffer + .pending_predictions + .retain_mut(|pending_prediction| { + let age = + now.saturating_duration_since(pending_prediction.enqueued_at); + if age >= EDIT_PREDICTION_SETTLED_TTL { + return false; + } + + let quiet_for = + 
now.saturating_duration_since(pending_prediction.last_edit_at); + if quiet_for >= EDIT_PREDICTION_SETTLED_QUIESCENCE { + let settled_editable_region = registered_buffer + .snapshot + .text_for_range( + pending_prediction.editable_anchor_range.clone(), + ) + .collect::(); + + #[cfg(test)] + if let Some(callback) = &this.settled_event_callback { + callback( + pending_prediction.request_id.clone(), + settled_editable_region.clone(), + ); + } + + telemetry::event!( + EDIT_PREDICTION_SETTLED_EVENT, + request_id = pending_prediction.request_id.0.clone(), + settled_editable_region, + ); + + return false; + } + + if oldest_edited_at + .is_none_or(|t| pending_prediction.last_edit_at < t) + { + oldest_edited_at = Some(pending_prediction.last_edit_at); + } + + true + }); + } + } + }); + + next_wake_time = oldest_edited_at.map(|t| t + EDIT_PREDICTION_SETTLED_QUIESCENCE); + } + } + + pub(crate) fn enqueue_settled_prediction( + &mut self, + request_id: EditPredictionId, + project: &Entity, + edited_buffer: &Entity, + edited_buffer_snapshot: &BufferSnapshot, + editable_offset_range: Range, + cx: &mut Context, + ) { + let project_state = self.get_or_init_project(project, cx); + if let Some(buffer) = project_state + .registered_buffers + .get_mut(&edited_buffer.entity_id()) + { + let now = cx.background_executor().now(); + buffer.pending_predictions.push(PendingSettledPrediction { + request_id, + editable_anchor_range: edited_buffer_snapshot + .anchor_range_around(editable_offset_range), + enqueued_at: now, + last_edit_at: now, + }); + self.settled_predictions_tx.unbounded_send(now).ok(); + } + } + fn reject_current_prediction( &mut self, reason: EditPredictionRejectReason, diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index abe522494fc8962a995313ffb1a57b8672c22ca4..beeb855c7b84bae53ea2f8f8bd6a117403e77db1 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ 
b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -29,7 +29,10 @@ use util::path; use uuid::Uuid; use zeta_prompt::ZetaPromptInput; -use crate::{BufferEditPrediction, EditPredictionId, EditPredictionStore, REJECT_REQUEST_DEBOUNCE}; +use crate::{ + BufferEditPrediction, EDIT_PREDICTION_SETTLED_QUIESCENCE, EditPredictionId, + EditPredictionStore, REJECT_REQUEST_DEBOUNCE, +}; #[gpui::test] async fn test_current_state(cx: &mut TestAppContext) { @@ -2574,6 +2577,177 @@ async fn test_diagnostic_jump_excludes_collaborator_regions(cx: &mut TestAppCont ); } +#[gpui::test] +async fn test_edit_prediction_settled(cx: &mut TestAppContext) { + let (ep_store, _requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + + // Buffer with two clearly separated regions: + // Region A = lines 0-9 (offsets 0..50) + // Region B = lines 20-29 (offsets 105..155) + // A big gap in between so edits in one region never overlap the other. + let mut content = String::new(); + for i in 0..30 { + content.push_str(&format!("line {i:02}\n")); + } + + fs.insert_tree( + "/root", + json!({ + "foo.md": content.clone() + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.md"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + let settled_events: Arc>> = + Arc::new(Mutex::new(Vec::new())); + + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(&buffer, &project, cx); + + let settled_events = settled_events.clone(); + ep_store.settled_event_callback = Some(Box::new(move |id, text| { + settled_events.lock().push((id, text)); + })); + }); + + // --- Phase 1: edit in region A and enqueue prediction A --- + + buffer.update(cx, |buffer, cx| { + // Edit at the start of line 0. 
+ buffer.edit(vec![(0..0, "ADDED ")], None, cx); + }); + cx.run_until_parked(); + + let snapshot_a = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + + // Region A: first 10 lines of the buffer. + let editable_region_a = 0..snapshot_a.point_to_offset(Point::new(10, 0)); + ep_store.update(cx, |ep_store, cx| { + ep_store.enqueue_settled_prediction( + EditPredictionId("prediction-a".into()), + &project, + &buffer, + &snapshot_a, + editable_region_a, + cx, + ); + }); + + // --- Phase 2: repeatedly edit in region A to keep it unsettled --- + + // Let the worker process the channel message before we start advancing. + cx.run_until_parked(); + + let mut region_a_edit_offset = 5; + for _ in 0..3 { + // Edit inside region A (not at the boundary) so `last_edit_at` is + // updated before the worker's next wake. + buffer.update(cx, |buffer, cx| { + buffer.edit( + vec![(region_a_edit_offset..region_a_edit_offset, "x")], + None, + cx, + ); + }); + region_a_edit_offset += 1; + cx.run_until_parked(); + + cx.executor() + .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 2); + cx.run_until_parked(); + assert!( + settled_events.lock().is_empty(), + "no settled events should fire while region A is still being edited" + ); + } + + // Still nothing settled. + assert!(settled_events.lock().is_empty()); + + // --- Phase 3: edit in distinct region B, enqueue prediction B --- + // Advance a small amount so B's quiescence window starts later than A's, + // but not so much that A settles (A's last edit was at the start of + // iteration 3, and it needs a full Q to settle). 
+ cx.executor() + .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4); + cx.run_until_parked(); + assert!(settled_events.lock().is_empty()); + + let snapshot_b = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let line_20_offset = snapshot_b.point_to_offset(Point::new(20, 0)); + + buffer.update(cx, |buffer, cx| { + buffer.edit(vec![(line_20_offset..line_20_offset, "NEW ")], None, cx); + }); + cx.run_until_parked(); + + let snapshot_b2 = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); + let editable_region_b = line_20_offset..snapshot_b2.point_to_offset(Point::new(25, 0)); + ep_store.update(cx, |ep_store, cx| { + ep_store.enqueue_settled_prediction( + EditPredictionId("prediction-b".into()), + &project, + &buffer, + &snapshot_b2, + editable_region_b, + cx, + ); + }); + + cx.run_until_parked(); + assert!( + settled_events.lock().is_empty(), + "neither prediction should have settled yet" + ); + + // --- Phase 4: let enough time pass for region A to settle --- + // A's last edit was at T_a (during the last loop iteration). The worker is + // sleeping until T_a + Q. We advance just enough to reach that wake time + // (Q/4 since we already advanced Q/4 in phase 3 on top of the loop's + // 3*Q/2). At that point A has been quiet for Q and settles, but B was + // enqueued only Q/4 ago and stays pending. + cx.executor() + .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE / 4); + cx.run_until_parked(); + + { + let events = settled_events.lock().clone(); + assert_eq!( + events.len(), + 1, + "only prediction A should have settled, got: {events:?}" + ); + assert_eq!(events[0].0, EditPredictionId("prediction-a".into())); + } + + // --- Phase 5: let more time pass for region B to settle --- + // B's last edit was Q/4 before A settled. The worker rescheduled to + // B's last_edit_at + Q, which is 3Q/4 from now. 
+ cx.executor() + .advance_clock(EDIT_PREDICTION_SETTLED_QUIESCENCE * 3 / 4); + cx.run_until_parked(); + + { + let events = settled_events.lock().clone(); + assert_eq!( + events.len(), + 2, + "both predictions should have settled, got: {events:?}" + ); + assert_eq!(events[1].0, EditPredictionId("prediction-b".into())); + } +} + #[ctor::ctor] fn init_logger() { zlog::init_test(); diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 25f9900dcba4a8f29f7e1268560bcbb40ded9778..9c6e9e30d94c5e1988d54da7966a58fd8e69e233 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -309,7 +309,6 @@ pub fn request_prediction_with_zeta( edits, cursor_position, received_response_at, - full_context_offset_range, editable_range_in_buffer, )), model_version, @@ -333,7 +332,6 @@ pub fn request_prediction_with_zeta( edits, cursor_position, received_response_at, - full_context_offset_range, editable_range_in_buffer, )) = prediction else { @@ -344,44 +342,17 @@ pub fn request_prediction_with_zeta( }; if can_collect_data { - cx.spawn({ - let weak_buffer = edited_buffer.downgrade(); - let context_anchor_range = - edited_buffer_snapshot.anchor_range_around(full_context_offset_range); - let editable_anchor_range = - edited_buffer_snapshot.anchor_range_around(editable_range_in_buffer); - let request_id = id.0.clone(); - async move |cx| { - cx.background_executor() - .timer(std::time::Duration::from_secs(30)) - .await; - - let Some(buffer) = weak_buffer.upgrade() else { - return; - }; - let (new_cursor_region, editable_range_in_excerpt) = - buffer.read_with(cx, |buffer, _| { - let context_start = - buffer.offset_for_anchor(&context_anchor_range.start); - let editable_range_in_excerpt = (buffer - .offset_for_anchor(&editable_anchor_range.start) - - context_start) - ..(buffer.offset_for_anchor(&editable_anchor_range.end) - - context_start); - let text = buffer - .text_for_range(context_anchor_range) - .collect::(); - (text, 
editable_range_in_excerpt) - }); - telemetry::event!( - "Edit Prediction Snapshot", - request_id, - new_cursor_region, - editable_range_in_excerpt, - ); - } + this.update(cx, |this, cx| { + this.enqueue_settled_prediction( + id.clone(), + &project, + &edited_buffer, + &edited_buffer_snapshot, + editable_range_in_buffer, + cx, + ); }) - .detach(); + .ok(); } Ok(Some( From b06522e978b5ed24bcc2cf07a6de794179d69176 Mon Sep 17 00:00:00 2001 From: John Tur Date: Fri, 27 Feb 2026 21:54:15 -0500 Subject: [PATCH 179/548] Use `dispatch2` crate (#50171) Release Notes: - N/A --- Cargo.lock | 8 +++-- crates/gpui_macos/Cargo.toml | 4 +-- crates/gpui_macos/build.rs | 35 ------------------ crates/gpui_macos/src/dispatch.h | 2 -- crates/gpui_macos/src/dispatcher.rs | 51 +++++++-------------------- crates/gpui_macos/src/display_link.rs | 50 ++++++++++---------------- crates/gpui_macos/src/platform.rs | 17 +++------ crates/gpui_macos/src/window.rs | 40 ++++++++++----------- 8 files changed, 62 insertions(+), 145 deletions(-) delete mode 100644 crates/gpui_macos/src/dispatch.h diff --git a/Cargo.lock b/Cargo.lock index d37563dc8595c72f71901dd84cdd4fca5a34ee84..6ae4f57301f2882e7f5e66c5960078393d2ac2de 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5001,11 +5001,13 @@ checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" [[package]] name = "dispatch2" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +checksum = "1e0e367e4e7da84520dedcac1901e4da967309406d1e51017ae1abfb97adbd38" dependencies = [ "bitflags 2.10.0", + "block2", + "libc", "objc2", ] @@ -7677,7 +7679,6 @@ version = "0.1.0" dependencies = [ "anyhow", "async-task", - "bindgen 0.71.1", "block", "cbindgen", "cocoa 0.26.0", @@ -7689,6 +7690,7 @@ dependencies = [ "core-video", "ctor", "derive_more 0.99.20", + "dispatch2", "etagere", "foreign-types 0.5.0", "futures 0.3.31", diff 
--git a/crates/gpui_macos/Cargo.toml b/crates/gpui_macos/Cargo.toml index 4aedb1f4f1bed02e22f0dc6a881d60cc39ddd3a1..06e5d0e7321af523a249f19ec0d5ac50e2da5d3f 100644 --- a/crates/gpui_macos/Cargo.toml +++ b/crates/gpui_macos/Cargo.toml @@ -34,6 +34,7 @@ core-text = "21" core-video.workspace = true ctor.workspace = true derive_more.workspace = true +dispatch2 = "0.3.1" etagere = "0.2" # WARNING: If you change this, you must also publish a new version of zed-font-kit to crates.io font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "110523127440aefb11ce0cf280ae7c5071337ec5", package = "zed-font-kit", version = "0.14.1-zed", optional = true } @@ -57,6 +58,5 @@ util.workspace = true uuid.workspace = true [target.'cfg(target_os = "macos")'.build-dependencies] -bindgen = "0.71" cbindgen = { version = "0.28.0", default-features = false } -gpui.workspace = true \ No newline at end of file +gpui.workspace = true diff --git a/crates/gpui_macos/build.rs b/crates/gpui_macos/build.rs index 32dfc571d257495c9c0a8cae54bc9fb567b51489..d5c1893f4ce18190a546aed1a708685cf66dc0e9 100644 --- a/crates/gpui_macos/build.rs +++ b/crates/gpui_macos/build.rs @@ -15,8 +15,6 @@ mod macos_build { use cbindgen::Config; pub fn run() { - generate_dispatch_bindings(); - let header_path = generate_shader_bindings(); #[cfg(feature = "runtime_shaders")] @@ -25,39 +23,6 @@ mod macos_build { compile_metal_shaders(&header_path); } - fn generate_dispatch_bindings() { - println!("cargo:rustc-link-lib=framework=System"); - - let bindings = bindgen::Builder::default() - .header("src/dispatch.h") - .allowlist_var("_dispatch_main_q") - .allowlist_var("_dispatch_source_type_data_add") - .allowlist_var("DISPATCH_QUEUE_PRIORITY_HIGH") - .allowlist_var("DISPATCH_QUEUE_PRIORITY_DEFAULT") - .allowlist_var("DISPATCH_QUEUE_PRIORITY_LOW") - .allowlist_var("DISPATCH_TIME_NOW") - .allowlist_function("dispatch_get_global_queue") - .allowlist_function("dispatch_async_f") - 
.allowlist_function("dispatch_after_f") - .allowlist_function("dispatch_time") - .allowlist_function("dispatch_source_merge_data") - .allowlist_function("dispatch_source_create") - .allowlist_function("dispatch_source_set_event_handler_f") - .allowlist_function("dispatch_resume") - .allowlist_function("dispatch_suspend") - .allowlist_function("dispatch_source_cancel") - .allowlist_function("dispatch_set_context") - .parse_callbacks(Box::new(bindgen::CargoCallbacks::new())) - .layout_tests(false) - .generate() - .expect("unable to generate bindings"); - - let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); - bindings - .write_to_file(out_path.join("dispatch_sys.rs")) - .expect("couldn't write dispatch bindings"); - } - fn generate_shader_bindings() -> PathBuf { let output_path = PathBuf::from(env::var("OUT_DIR").unwrap()).join("scene.h"); diff --git a/crates/gpui_macos/src/dispatch.h b/crates/gpui_macos/src/dispatch.h deleted file mode 100644 index 54f3818738042b00938ad566ec0269fc0d80241d..0000000000000000000000000000000000000000 --- a/crates/gpui_macos/src/dispatch.h +++ /dev/null @@ -1,2 +0,0 @@ -#include -#include diff --git a/crates/gpui_macos/src/dispatcher.rs b/crates/gpui_macos/src/dispatcher.rs index 755016e44be84f585631fbf311ef499adfc69367..07638639e4bf5d3f002c1babfc213bc330e63dce 100644 --- a/crates/gpui_macos/src/dispatcher.rs +++ b/crates/gpui_macos/src/dispatcher.rs @@ -1,7 +1,4 @@ -#![allow(non_upper_case_globals)] -#![allow(non_camel_case_types)] -#![allow(non_snake_case)] - +use dispatch2::{DispatchQueue, DispatchQueueGlobalPriority, DispatchTime, GlobalQueueIdentifier}; use gpui::{ GLOBAL_THREAD_TIMINGS, PlatformDispatcher, Priority, RunnableMeta, RunnableVariant, THREAD_TIMINGS, TaskTiming, ThreadTaskTimings, @@ -26,21 +23,10 @@ use objc::{ }; use std::{ ffi::c_void, - ptr::{NonNull, addr_of}, + ptr::NonNull, time::{Duration, Instant}, }; -/// All items in the generated file are marked as pub, so we're gonna wrap it in a separate mod to 
prevent -/// these pub items from leaking into public API. -pub(crate) mod dispatch_sys { - include!(concat!(env!("OUT_DIR"), "/dispatch_sys.rs")); -} - -use dispatch_sys::*; -pub(crate) fn dispatch_get_main_queue() -> dispatch_queue_t { - addr_of!(_dispatch_main_q) as *const _ as dispatch_queue_t -} - pub(crate) struct MacDispatcher; impl MacDispatcher { @@ -89,43 +75,32 @@ impl PlatformDispatcher for MacDispatcher { Priority::RealtimeAudio => { panic!("RealtimeAudio priority should use spawn_realtime, not dispatch") } - Priority::High => DISPATCH_QUEUE_PRIORITY_HIGH as isize, - Priority::Medium => DISPATCH_QUEUE_PRIORITY_DEFAULT as isize, - Priority::Low => DISPATCH_QUEUE_PRIORITY_LOW as isize, + Priority::High => DispatchQueueGlobalPriority::High, + Priority::Medium => DispatchQueueGlobalPriority::Default, + Priority::Low => DispatchQueueGlobalPriority::Low, }; unsafe { - dispatch_async_f( - dispatch_get_global_queue(queue_priority, 0), - context, - Some(trampoline as unsafe extern "C" fn(*mut c_void)), - ); + DispatchQueue::global_queue(GlobalQueueIdentifier::Priority(queue_priority)) + .exec_async_f(context, trampoline); } } fn dispatch_on_main_thread(&self, runnable: RunnableVariant, _priority: Priority) { let context = runnable.into_raw().as_ptr() as *mut c_void; unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - context, - Some(trampoline as unsafe extern "C" fn(*mut c_void)), - ); + DispatchQueue::main().exec_async_f(context, trampoline); } } fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) { let context = runnable.into_raw().as_ptr() as *mut c_void; + let queue = DispatchQueue::global_queue(GlobalQueueIdentifier::Priority( + DispatchQueueGlobalPriority::High, + )); + let when = DispatchTime::NOW.time(duration.as_nanos() as i64); unsafe { - let queue = - dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH.try_into().unwrap(), 0); - let when = dispatch_time(DISPATCH_TIME_NOW as u64, duration.as_nanos() as i64); - 
dispatch_after_f( - when, - queue, - context, - Some(trampoline as unsafe extern "C" fn(*mut c_void)), - ); + DispatchQueue::exec_after_f(when, &queue, context, trampoline); } } diff --git a/crates/gpui_macos/src/display_link.rs b/crates/gpui_macos/src/display_link.rs index b086cc1b12182db661e5fa1cb82b671c7fd5b8bc..bd1c21ca5c063b2ed9fa79d939f205698023f42b 100644 --- a/crates/gpui_macos/src/display_link.rs +++ b/crates/gpui_macos/src/display_link.rs @@ -1,26 +1,21 @@ -use crate::{ - dispatch_get_main_queue, - dispatcher::dispatch_sys::{ - _dispatch_source_type_data_add, dispatch_resume, dispatch_set_context, - dispatch_source_cancel, dispatch_source_create, dispatch_source_merge_data, - dispatch_source_set_event_handler_f, dispatch_source_t, dispatch_suspend, - }, -}; use anyhow::Result; use core_graphics::display::CGDirectDisplayID; +use dispatch2::{ + _dispatch_source_type_data_add, DispatchObject, DispatchQueue, DispatchRetained, DispatchSource, +}; use std::ffi::c_void; use util::ResultExt; pub struct DisplayLink { display_link: Option, - frame_requests: dispatch_source_t, + frame_requests: DispatchRetained, } impl DisplayLink { pub fn new( display_id: CGDirectDisplayID, data: *mut c_void, - callback: unsafe extern "C" fn(*mut c_void), + callback: extern "C" fn(*mut c_void), ) -> Result { unsafe extern "C" fn display_link_callback( _display_link_out: *mut sys::CVDisplayLink, @@ -31,31 +26,26 @@ impl DisplayLink { frame_requests: *mut c_void, ) -> i32 { unsafe { - let frame_requests = frame_requests as dispatch_source_t; - dispatch_source_merge_data(frame_requests, 1); + let frame_requests = &*(frame_requests as *const DispatchSource); + frame_requests.merge_data(1); 0 } } unsafe { - let frame_requests = dispatch_source_create( - &_dispatch_source_type_data_add, + let frame_requests = DispatchSource::new( + &raw const _dispatch_source_type_data_add as *mut _, 0, 0, - dispatch_get_main_queue(), - ); - dispatch_set_context( - crate::dispatch_sys::dispatch_object_t 
{ - _ds: frame_requests, - }, - data, + Some(DispatchQueue::main()), ); - dispatch_source_set_event_handler_f(frame_requests, Some(callback)); + frame_requests.set_context(data); + frame_requests.set_event_handler_f(callback); let display_link = sys::DisplayLink::new( display_id, display_link_callback, - frame_requests as *mut c_void, + &*frame_requests as *const DispatchSource as *mut c_void, )?; Ok(Self { @@ -67,9 +57,7 @@ impl DisplayLink { pub fn start(&mut self) -> Result<()> { unsafe { - dispatch_resume(crate::dispatch_sys::dispatch_object_t { - _ds: self.frame_requests, - }); + self.frame_requests.resume(); self.display_link.as_mut().unwrap().start()?; } Ok(()) @@ -77,9 +65,7 @@ impl DisplayLink { pub fn stop(&mut self) -> Result<()> { unsafe { - dispatch_suspend(crate::dispatch_sys::dispatch_object_t { - _ds: self.frame_requests, - }); + self.frame_requests.suspend(); self.display_link.as_mut().unwrap().stop()?; } Ok(()) @@ -97,9 +83,9 @@ impl Drop for DisplayLink { // // We might also want to upgrade to CADisplayLink, but that requires dropping old macOS support. std::mem::forget(self.display_link.take()); - unsafe { - dispatch_source_cancel(self.frame_requests); - } + self.frame_requests.cancel(); + // A suspended DispatchSource cannot be destroyed. 
+ self.frame_requests.resume(); } } diff --git a/crates/gpui_macos/src/platform.rs b/crates/gpui_macos/src/platform.rs index c982f6da191f6b657e51238d8b6ac3d11f724149..d9c22cbea0354caff9bd5dd80d7ea98fa7e891de 100644 --- a/crates/gpui_macos/src/platform.rs +++ b/crates/gpui_macos/src/platform.rs @@ -24,6 +24,7 @@ use core_foundation::{ string::{CFString, CFStringRef}, }; use ctor::ctor; +use dispatch2::DispatchQueue; use futures::channel::oneshot; use gpui::{ Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, ForegroundExecutor, @@ -493,13 +494,11 @@ impl Platform for MacPlatform { // this, we make quitting the application asynchronous so that we aren't holding borrows to // the app state on the stack when we actually terminate the app. - use crate::dispatcher::{dispatch_get_main_queue, dispatch_sys::dispatch_async_f}; - unsafe { - dispatch_async_f(dispatch_get_main_queue(), ptr::null_mut(), Some(quit)); + DispatchQueue::main().exec_async_f(ptr::null_mut(), quit); } - unsafe extern "C" fn quit(_: *mut c_void) { + extern "C" fn quit(_: *mut c_void) { unsafe { let app = NSApplication::sharedApplication(nil); let _: () = msg_send![app, terminate: nil]; @@ -1261,19 +1260,13 @@ extern "C" fn on_thermal_state_change(this: &mut Object, _: Sel, _: id) { // Defer to the next run loop iteration to avoid re-entrant borrows of the App RefCell, // as NSNotificationCenter delivers this notification synchronously and it may fire while // the App is already borrowed (same pattern as quit() above). 
- use crate::dispatcher::{dispatch_get_main_queue, dispatch_sys::dispatch_async_f}; - let platform = unsafe { get_mac_platform(this) }; let platform_ptr = platform as *const MacPlatform as *mut c_void; unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - platform_ptr, - Some(on_thermal_state_change), - ); + DispatchQueue::main().exec_async_f(platform_ptr, on_thermal_state_change); } - unsafe extern "C" fn on_thermal_state_change(context: *mut c_void) { + extern "C" fn on_thermal_state_change(context: *mut c_void) { let platform = unsafe { &*(context as *const MacPlatform) }; let mut lock = platform.0.lock(); if let Some(mut callback) = lock.on_thermal_state_change.take() { diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index 87cd5ee21d5e448ee43b604657ddbe89e705035b..456ee31ac3b03780e68267621d66435b1ceab4a9 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -1,7 +1,6 @@ use crate::{ - BoolExt, DisplayLink, MacDisplay, NSRange, NSStringExt, dispatch_get_main_queue, - dispatcher::dispatch_sys::dispatch_async_f, events::platform_input_from_native, ns_string, - renderer, + BoolExt, DisplayLink, MacDisplay, NSRange, NSStringExt, events::platform_input_from_native, + ns_string, renderer, }; #[cfg(any(test, feature = "test-support"))] use anyhow::Result; @@ -22,6 +21,7 @@ use cocoa::{ NSUserDefaults, }, }; +use dispatch2::DispatchQueue; use gpui::{ AnyWindowHandle, BackgroundExecutor, Bounds, Capslock, ExternalPaths, FileDropEvent, ForegroundExecutor, KeyDownEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, @@ -1050,34 +1050,32 @@ impl PlatformWindow for MacWindow { fn merge_all_windows(&self) { let native_window = self.0.lock().native_window; - unsafe extern "C" fn merge_windows_async(context: *mut std::ffi::c_void) { - let native_window = context as id; - let _: () = msg_send![native_window, mergeAllWindows:nil]; + extern "C" fn merge_windows_async(context: *mut std::ffi::c_void) { + 
unsafe { + let native_window = context as id; + let _: () = msg_send![native_window, mergeAllWindows:nil]; + } } unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - native_window as *mut std::ffi::c_void, - Some(merge_windows_async), - ); + DispatchQueue::main() + .exec_async_f(native_window as *mut std::ffi::c_void, merge_windows_async); } } fn move_tab_to_new_window(&self) { let native_window = self.0.lock().native_window; - unsafe extern "C" fn move_tab_async(context: *mut std::ffi::c_void) { - let native_window = context as id; - let _: () = msg_send![native_window, moveTabToNewWindow:nil]; - let _: () = msg_send![native_window, makeKeyAndOrderFront: nil]; + extern "C" fn move_tab_async(context: *mut std::ffi::c_void) { + unsafe { + let native_window = context as id; + let _: () = msg_send![native_window, moveTabToNewWindow:nil]; + let _: () = msg_send![native_window, makeKeyAndOrderFront: nil]; + } } unsafe { - dispatch_async_f( - dispatch_get_main_queue(), - native_window as *mut std::ffi::c_void, - Some(move_tab_async), - ); + DispatchQueue::main() + .exec_async_f(native_window as *mut std::ffi::c_void, move_tab_async); } } @@ -2252,7 +2250,7 @@ extern "C" fn display_layer(this: &Object, _: Sel, _: id) { } } -unsafe extern "C" fn step(view: *mut c_void) { +extern "C" fn step(view: *mut c_void) { let view = view as id; let window_state = unsafe { get_window_state(&*view) }; let mut lock = window_state.lock(); From 5a40e687e57ace3fe29c1d0917b072de73603552 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 28 Feb 2026 10:44:36 +0100 Subject: [PATCH 180/548] editor: Fix a bunch of inlay hint bugs (#50377) Release Notes: - Fixed multiple language servers applying to the same buffer overwriting each others inlay hints - Fixed multiple language servers applying to the same multibuffer discarding each others inlay hints - Fixed a bug that caused some inlay hints to sometimes duplicate --- crates/editor/src/display_map.rs | 2 +- 
crates/editor/src/display_map/inlay_map.rs | 2 +- crates/editor/src/inlays/inlay_hints.rs | 734 +++++++++++++++++++-- crates/project/src/lsp_store.rs | 66 +- crates/rope/src/rope.rs | 105 +++ 5 files changed, 833 insertions(+), 76 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 888a3729ca769551954712dc2e8c3fb197367551..10c17871709e7f6ac237cb3ecb000724b0095c01 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1355,7 +1355,7 @@ impl DisplayMap { widths_changed } - pub(crate) fn current_inlays(&self) -> impl Iterator { + pub(crate) fn current_inlays(&self) -> impl Iterator + Default { self.inlay_map.current_inlays() } diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 9e853c8292c8073f20af58ee4d8f71c8db269cfa..63e315ab250d5ddbc0ffa9d37cb1c42b3803efac 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -745,7 +745,7 @@ impl InlayMap { } #[ztracing::instrument(skip_all)] - pub fn current_inlays(&self) -> impl Iterator { + pub fn current_inlays(&self) -> impl Iterator + Default { self.inlays.iter() } diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 4158ebbf7c5c3594dc4f9f43e8c3a7f1a19c38cb..23c97ced906844c1ac6c8fa5ce6932631284384a 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -27,6 +27,7 @@ use util::debug_panic; use super::{Inlay, InlayId}; use crate::{ Editor, EditorSnapshot, PointForPosition, ToggleInlayHints, ToggleInlineValues, debounce_value, + display_map::{DisplayMap, InlayOffset}, hover_links::{InlayHighlight, TriggerPoint, show_link_definition}, hover_popover::{self, InlayHover}, inlays::InlaySplice, @@ -104,13 +105,34 @@ impl LspInlayHintData { self.added_hints.clear(); } + /// Like `clear`, but only wipes tracking state for the given buffer IDs. 
+ /// Hints belonging to other buffers are left intact so they are neither + /// re-fetched nor duplicated on the next `NewLinesShown`. + pub fn clear_for_buffers( + &mut self, + buffer_ids: &HashSet, + current_hints: impl IntoIterator, + ) { + for buffer_id in buffer_ids { + self.hint_refresh_tasks.remove(buffer_id); + self.hint_chunk_fetching.remove(buffer_id); + } + for hint in current_hints { + if let Some(buffer_id) = hint.position.text_anchor.buffer_id { + if buffer_ids.contains(&buffer_id) { + self.added_hints.remove(&hint.id); + } + } + } + } + /// Checks inlay hint settings for enabled hint kinds and general enabled state. /// Generates corresponding inlay_map splice updates on settings changes. /// Does not update inlay hint cache state on disabling or inlay hint kinds change: only reenabling forces new LSP queries. fn update_settings( &mut self, new_hint_settings: InlayHintSettings, - visible_hints: Vec, + visible_hints: impl IntoIterator, ) -> ControlFlow, Option> { let old_enabled = self.enabled; // If the setting for inlay hints has changed, update `enabled`. 
This condition avoids inlay @@ -140,7 +162,7 @@ impl LspInlayHintData { ControlFlow::Continue( Some(InlaySplice { to_remove: visible_hints - .iter() + .into_iter() .filter_map(|inlay| { let inlay_kind = self.added_hints.get(&inlay.id).copied()?; if !self.allowed_hint_kinds.contains(&inlay_kind) { @@ -159,12 +181,13 @@ impl LspInlayHintData { (true, false) => { self.modifiers_override = false; self.allowed_hint_kinds = new_allowed_hint_kinds; - if visible_hints.is_empty() { + let mut visible_hints = visible_hints.into_iter().peekable(); + if visible_hints.peek().is_none() { ControlFlow::Break(None) } else { self.clear(); ControlFlow::Break(Some(InlaySplice { - to_remove: visible_hints.iter().map(|inlay| inlay.id).collect(), + to_remove: visible_hints.map(|inlay| inlay.id).collect(), to_insert: Vec::new(), })) } @@ -175,7 +198,7 @@ impl LspInlayHintData { ControlFlow::Continue( Some(InlaySplice { to_remove: visible_hints - .iter() + .into_iter() .filter_map(|inlay| { let inlay_kind = self.added_hints.get(&inlay.id).copied()?; if !self.allowed_hint_kinds.contains(&inlay_kind) { @@ -338,12 +361,20 @@ impl Editor { }; let multi_buffer = self.buffer().clone(); + let Some(inlay_hints) = self.inlay_hints.as_mut() else { return; }; if invalidate_cache.should_invalidate() { - inlay_hints.clear(); + if invalidate_hints_for_buffers.is_empty() { + inlay_hints.clear(); + } else if invalidate_cache.should_invalidate() { + inlay_hints.clear_for_buffers( + &invalidate_hints_for_buffers, + Self::visible_inlay_hints(self.display_map.read(cx)), + ); + } } inlay_hints .invalidate_hints_for_buffers @@ -420,16 +451,8 @@ impl Editor { } pub fn clear_inlay_hints(&mut self, cx: &mut Context) { - let to_remove = self - .visible_inlay_hints(cx) - .into_iter() - .map(|inlay| { - let inlay_id = inlay.id; - if let Some(inlay_hints) = &mut self.inlay_hints { - inlay_hints.added_hints.remove(&inlay_id); - } - inlay_id - }) + let to_remove = Self::visible_inlay_hints(self.display_map.read(cx)) + 
.map(|inlay| inlay.id) .collect::>(); self.splice_inlays(&to_remove, Vec::new(), cx); } @@ -439,7 +462,6 @@ impl Editor { reason: &InlayHintRefreshReason, cx: &mut Context<'_, Editor>, ) -> Option { - let visible_inlay_hints = self.visible_inlay_hints(cx); let Some(inlay_hints) = self.inlay_hints.as_mut() else { return None; }; @@ -471,6 +493,8 @@ impl Editor { } } InlayHintRefreshReason::SettingsChange(new_settings) => { + let visible_inlay_hints = + Self::visible_inlay_hints(self.display_map.read(cx)).collect::>(); match inlay_hints.update_settings(*new_settings, visible_inlay_hints) { ControlFlow::Break(Some(InlaySplice { to_remove, @@ -534,13 +558,11 @@ impl Editor { Some(invalidate_cache) } - pub(crate) fn visible_inlay_hints(&self, cx: &Context) -> Vec { - self.display_map - .read(cx) + fn visible_inlay_hints(display_map: &DisplayMap) -> impl Iterator + use<'_> { + display_map .current_inlays() .filter(move |inlay| matches!(inlay.id, InlayId::Hint(_))) .cloned() - .collect() } pub fn update_inlay_link_and_hover_points( @@ -575,9 +597,7 @@ impl Editor { point_for_position.next_valid.to_point(snapshot), Bias::Right, ); - if let Some(hovered_hint) = self - .visible_inlay_hints(cx) - .into_iter() + if let Some(hovered_hint) = Self::visible_inlay_hints(self.display_map.read(cx)) .filter(|hint| snapshot.can_resolve(&hint.position)) .skip_while(|hint| { hint.position @@ -603,15 +623,19 @@ impl Editor { { match cached_hint.resolve_state { ResolveState::Resolved => { - let mut extra_shift_left = 0; - let mut extra_shift_right = 0; - if cached_hint.padding_left { - extra_shift_left += 1; - extra_shift_right += 1; - } - if cached_hint.padding_right { - extra_shift_right += 1; - } + let original_text = cached_hint.text(); + let actual_left_padding = + if cached_hint.padding_left && !original_text.starts_with(" ") { + 1 + } else { + 0 + }; + let actual_right_padding = + if cached_hint.padding_right && !original_text.ends_with(" ") { + 1 + } else { + 0 + }; match 
cached_hint.label { InlayHintLabel::String(_) => { if let Some(tooltip) = cached_hint.tooltip { @@ -633,9 +657,9 @@ impl Editor { range: InlayHighlight { inlay: hovered_hint.id, inlay_position: hovered_hint.position, - range: extra_shift_left + range: actual_left_padding ..hovered_hint.text().len() - + extra_shift_right, + - actual_right_padding, }, }, window, @@ -647,17 +671,17 @@ impl Editor { InlayHintLabel::LabelParts(label_parts) => { let hint_start = snapshot.anchor_to_inlay_offset(hovered_hint.position); + let content_start = + InlayOffset(hint_start.0 + actual_left_padding); if let Some((hovered_hint_part, part_range)) = hover_popover::find_hovered_hint_part( label_parts, - hint_start, + content_start, hovered_offset, ) { - let highlight_start = - (part_range.start - hint_start) + extra_shift_left; - let highlight_end = - (part_range.end - hint_start) + extra_shift_right; + let highlight_start = part_range.start - hint_start; + let highlight_end = part_range.end - hint_start; let highlight = InlayHighlight { inlay: hovered_hint.id, inlay_position: hovered_hint.position, @@ -764,9 +788,7 @@ impl Editor { new_hints: Vec<(Range, anyhow::Result)>, cx: &mut Context, ) { - let visible_inlay_hint_ids = self - .visible_inlay_hints(cx) - .iter() + let visible_inlay_hint_ids = Self::visible_inlay_hints(self.display_map.read(cx)) .filter(|inlay| inlay.position.text_anchor.buffer_id == Some(buffer_id)) .map(|inlay| inlay.id) .collect::>(); @@ -795,6 +817,18 @@ impl Editor { // from the cache. if invalidate_cache.should_invalidate() { hints_to_remove.extend(visible_inlay_hint_ids); + + // When invalidating, this task removes ALL visible hints for the buffer + // but only adds back hints for its own chunk ranges. Chunks fetched by + // other concurrent tasks (e.g., a scroll task that completed before this + // edit task) would have their hints removed but remain marked as "already + // fetched" in hint_chunk_fetching, preventing re-fetch on the next + // NewLinesShown. 
Fix: retain only chunks that this task has results for. + let task_chunk_ranges: HashSet<&Range> = + new_hints.iter().map(|(range, _)| range).collect(); + if let Some((_, fetched_chunks)) = inlay_hints.hint_chunk_fetching.get_mut(&buffer_id) { + fetched_chunks.retain(|chunk| task_chunk_ranges.contains(chunk)); + } } let mut inserted_hint_text = HashMap::default(); @@ -875,8 +909,7 @@ impl Editor { std::mem::take(&mut inlay_hints.invalidate_hints_for_buffers); if !invalidate_hints_for_buffers.is_empty() { hints_to_remove.extend( - self.visible_inlay_hints(cx) - .iter() + Self::visible_inlay_hints(self.display_map.read(cx)) .filter(|inlay| { inlay .position @@ -4155,6 +4188,613 @@ let c = 3;"# ); } + #[gpui::test] + async fn test_edit_then_scroll_race(cx: &mut gpui::TestAppContext) { + // Bug 1: An edit fires with a long debounce, and a scroll brings new lines + // before that debounce elapses. The edit task's apply_fetched_hints removes + // ALL visible hints (including the scroll-added ones) but only adds back + // hints for its own chunks. The scroll chunk remains in hint_chunk_fetching, + // so it is never re-queried, leaving it permanently empty. 
+ init_test(cx, &|settings| { + settings.defaults.inlay_hints = Some(InlayHintSettingsContent { + enabled: Some(true), + edit_debounce_ms: Some(700), + scroll_debounce_ms: Some(50), + show_type_hints: Some(true), + show_parameter_hints: Some(true), + show_other_hints: Some(true), + ..InlayHintSettingsContent::default() + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + let mut file_content = String::from("fn main() {\n"); + for i in 0..150 { + file_content.push_str(&format!(" let v{i} = {i};\n")); + } + file_content.push_str("}\n"); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": file_content, + "other.rs": "// Test file", + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + + let lsp_request_ranges = Arc::new(Mutex::new(Vec::new())); + let mut fake_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new({ + let lsp_request_ranges = lsp_request_ranges.clone(); + move |fake_server| { + let lsp_request_ranges = lsp_request_ranges.clone(); + fake_server.set_request_handler::( + move |params, _| { + let lsp_request_ranges = lsp_request_ranges.clone(); + async move { + lsp_request_ranges.lock().push(params.range); + let start_line = params.range.start.line; + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(start_line + 1, 9), + label: lsp::InlayHintLabel::String(format!( + "chunk_{start_line}" + )), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..FakeLspAdapter::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| { + 
project.open_local_buffer(path!("/a/main.rs"), cx) + }) + .await + .unwrap(); + let editor = + cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx)); + cx.executor().run_until_parked(); + let _fake_server = fake_servers.next().await.unwrap(); + + editor + .update(cx, |editor, window, cx| { + editor.set_visible_line_count(50.0, window, cx); + editor.set_visible_column_count(120.0); + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + assert!( + visible.iter().any(|h| h.starts_with("chunk_0")), + "Should have chunk_0 hints initially, got: {visible:?}" + ); + }) + .unwrap(); + + lsp_request_ranges.lock().clear(); + + // Step 1: Make an edit → triggers BufferEdited with 700ms debounce. + editor + .update(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)]) + }); + editor.handle_input("x", window, cx); + }) + .unwrap(); + // Let the BufferEdited event propagate and the edit task get spawned. + cx.executor().run_until_parked(); + + // Step 2: Scroll down to reveal a new chunk, then trigger NewLinesShown. + // This spawns a scroll task with the shorter 50ms debounce. + editor + .update(cx, |editor, window, cx| { + editor.scroll_screen(&ScrollAmount::Page(1.0), window, cx); + }) + .unwrap(); + // Explicitly trigger NewLinesShown for the new visible range. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + + // Step 3: Advance clock past scroll debounce (50ms) but NOT past edit + // debounce (700ms). The scroll task completes and adds hints for the + // new chunk. 
+ cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + // The scroll task's apply_fetched_hints also processes + // invalidate_hints_for_buffers (set by the earlier BufferEdited), which + // removes the old chunk_0 hint. Only the scroll chunk's hint remains. + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + assert!( + visible.iter().any(|h| h.starts_with("chunk_50")), + "After scroll task completes, the scroll chunk's hints should be \ + present, got: {visible:?}" + ); + }) + .unwrap(); + + // Step 4: Advance clock past the edit debounce (700ms). The edit task + // completes, calling apply_fetched_hints with should_invalidate()=true, + // which removes ALL visible hints (including the scroll chunk's) but only + // adds back hints for its own chunks (chunk_0). + cx.executor().advance_clock(Duration::from_millis(700)); + cx.executor().run_until_parked(); + + // At this point the edit task has: + // - removed chunk_50's hint (via should_invalidate removing all visible) + // - added chunk_0's hint (from its own fetch) + // - (with fix) cleared chunk_50 from hint_chunk_fetching + // Without the fix, chunk_50 is stuck in hint_chunk_fetching and will + // never be re-queried by NewLinesShown. + + // Step 5: Trigger NewLinesShown to give the system a chance to re-fetch + // any chunks whose hints were lost. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + assert!( + visible.iter().any(|h| h.starts_with("chunk_0")), + "chunk_0 hints (from edit task) should be present. 
Got: {visible:?}" + ); + assert!( + visible.iter().any(|h| h.starts_with("chunk_50")), + "chunk_50 hints should have been re-fetched after NewLinesShown. \ + Bug 1: the scroll chunk's hints were removed by the edit task \ + and the chunk was stuck in hint_chunk_fetching, preventing \ + re-fetch. Got: {visible:?}" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_refresh_requested_multi_server(cx: &mut gpui::TestAppContext) { + // Bug 2: When one LSP server sends workspace/inlayHint/refresh, the editor + // wipes all tracking state via clear(), then spawns tasks that call + // LspStore::inlay_hints with for_server=Some(requesting_server). The LspStore + // filters out other servers' cached hints via the for_server guard, so only + // the requesting server's hints are returned. apply_fetched_hints removes ALL + // visible hints (should_invalidate()=true) but only adds back the requesting + // server's hints. Other servers' hints disappear permanently. + init_test(cx, &|settings| { + settings.defaults.inlay_hints = Some(InlayHintSettingsContent { + enabled: Some(true), + edit_debounce_ms: Some(0), + scroll_debounce_ms: Some(0), + show_type_hints: Some(true), + show_parameter_hints: Some(true), + show_other_hints: Some(true), + ..InlayHintSettingsContent::default() + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": "fn main() { let x = 1; } // padding to keep hints from being trimmed", + "other.rs": "// Test file", + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + + // Server A returns a hint labeled "server_a". 
+ let server_a_request_count = Arc::new(AtomicU32::new(0)); + let mut fake_servers_a = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "rust-analyzer", + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new({ + let server_a_request_count = server_a_request_count.clone(); + move |fake_server| { + let server_a_request_count = server_a_request_count.clone(); + fake_server.set_request_handler::( + move |_params, _| { + let count = + server_a_request_count.fetch_add(1, Ordering::Release) + 1; + async move { + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, 9), + label: lsp::InlayHintLabel::String(format!( + "server_a_{count}" + )), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..FakeLspAdapter::default() + }, + ); + + // Server B returns a hint labeled "server_b" at a different position. 
+ let server_b_request_count = Arc::new(AtomicU32::new(0)); + let mut fake_servers_b = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "secondary-ls", + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..lsp::ServerCapabilities::default() + }, + initializer: Some(Box::new({ + let server_b_request_count = server_b_request_count.clone(); + move |fake_server| { + let server_b_request_count = server_b_request_count.clone(); + fake_server.set_request_handler::( + move |_params, _| { + let count = + server_b_request_count.fetch_add(1, Ordering::Release) + 1; + async move { + Ok(Some(vec![lsp::InlayHint { + position: lsp::Position::new(0, 22), + label: lsp::InlayHintLabel::String(format!( + "server_b_{count}" + )), + kind: Some(lsp::InlayHintKind::TYPE), + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..FakeLspAdapter::default() + }, + ); + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/a/main.rs"), cx) + }) + .await + .unwrap(); + let editor = + cx.add_window(|window, cx| Editor::for_buffer(buffer, Some(project), window, cx)); + cx.executor().run_until_parked(); + + let fake_server_a = fake_servers_a.next().await.unwrap(); + let _fake_server_b = fake_servers_b.next().await.unwrap(); + + editor + .update(cx, |editor, window, cx| { + editor.set_visible_line_count(50.0, window, cx); + editor.set_visible_column_count(120.0); + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + // Verify both servers' hints are present initially. 
+ editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + let has_a = visible.iter().any(|h| h.starts_with("server_a")); + let has_b = visible.iter().any(|h| h.starts_with("server_b")); + assert!( + has_a && has_b, + "Both servers should have hints initially. Got: {visible:?}" + ); + }) + .unwrap(); + + // Trigger RefreshRequested from server A. This should re-fetch server A's + // hints while keeping server B's hints intact. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints( + InlayHintRefreshReason::RefreshRequested { + server_id: fake_server_a.server.server_id(), + request_id: Some(1), + }, + cx, + ); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + // Also trigger NewLinesShown to give the system a chance to recover + // any chunks that might have been cleared. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + let has_a = visible.iter().any(|h| h.starts_with("server_a")); + let has_b = visible.iter().any(|h| h.starts_with("server_b")); + assert!( + has_a, + "Server A hints should be present after its own refresh. Got: {visible:?}" + ); + assert!( + has_b, + "Server B hints should NOT be lost when server A triggers \ + RefreshRequested. Bug 2: clear() wipes all tracking, then \ + LspStore filters out server B's cached hints via the for_server \ + guard, and apply_fetched_hints removes all visible hints but only \ + adds back server A's. 
Got: {visible:?}" + ); + }) + .unwrap(); + } + + #[gpui::test] + async fn test_multi_language_multibuffer_no_duplicate_hints(cx: &mut gpui::TestAppContext) { + init_test(cx, &|settings| { + settings.defaults.inlay_hints = Some(InlayHintSettingsContent { + show_value_hints: Some(true), + enabled: Some(true), + edit_debounce_ms: Some(0), + scroll_debounce_ms: Some(0), + show_type_hints: Some(true), + show_parameter_hints: Some(true), + show_other_hints: Some(true), + show_background: Some(false), + toggle_on_modifiers_press: None, + }) + }); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/a"), + json!({ + "main.rs": "fn main() { let x = 1; } // padding to keep hints from being trimmed", + "index.ts": "const y = 2; // padding to keep hints from being trimmed in typescript", + }), + ) + .await; + + let project = Project::test(fs, [path!("/a").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + + let mut rs_fake_servers = None; + let mut ts_fake_servers = None; + for (name, path_suffix) in [("Rust", "rs"), ("TypeScript", "ts")] { + language_registry.add(Arc::new(Language::new( + LanguageConfig { + name: name.into(), + matcher: LanguageMatcher { + path_suffixes: vec![path_suffix.to_string()], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_rust::LANGUAGE.into()), + ))); + let fake_servers = language_registry.register_fake_lsp( + name, + FakeLspAdapter { + name, + capabilities: lsp::ServerCapabilities { + inlay_hint_provider: Some(lsp::OneOf::Left(true)), + ..Default::default() + }, + initializer: Some(Box::new({ + move |fake_server| { + let request_count = Arc::new(AtomicU32::new(0)); + fake_server + .set_request_handler::( + move |params, _| { + let count = + request_count.fetch_add(1, Ordering::Release) + 1; + let prefix = match name { + "Rust" => "rs_hint", + "TypeScript" => "ts_hint", + other => panic!("Unexpected language: {other}"), + }; + 
async move { + Ok(Some(vec![lsp::InlayHint { + position: params.range.start, + label: lsp::InlayHintLabel::String(format!( + "{prefix}_{count}" + )), + kind: None, + text_edits: None, + tooltip: None, + padding_left: None, + padding_right: None, + data: None, + }])) + } + }, + ); + } + })), + ..Default::default() + }, + ); + match name { + "Rust" => rs_fake_servers = Some(fake_servers), + "TypeScript" => ts_fake_servers = Some(fake_servers), + _ => unreachable!(), + } + } + + let (rs_buffer, _rs_handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx) + }) + .await + .unwrap(); + let (ts_buffer, _ts_handle) = project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/a/index.ts"), cx) + }) + .await + .unwrap(); + + let multi_buffer = cx.new(|cx| { + let mut multibuffer = MultiBuffer::new(Capability::ReadWrite); + multibuffer.set_excerpts_for_path( + PathKey::sorted(0), + rs_buffer.clone(), + [Point::new(0, 0)..Point::new(1, 0)], + 0, + cx, + ); + multibuffer.set_excerpts_for_path( + PathKey::sorted(1), + ts_buffer.clone(), + [Point::new(0, 0)..Point::new(1, 0)], + 0, + cx, + ); + multibuffer + }); + + cx.executor().run_until_parked(); + let editor = cx.add_window(|window, cx| { + Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx) + }); + + let _rs_fake_server = rs_fake_servers.unwrap().next().await.unwrap(); + let _ts_fake_server = ts_fake_servers.unwrap().next().await.unwrap(); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + // Verify initial state: both languages have exactly one hint each + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + let rs_hints: Vec<_> = visible + .iter() + .filter(|h| h.starts_with("rs_hint")) + .collect(); + let ts_hints: Vec<_> = visible + .iter() + .filter(|h| h.starts_with("ts_hint")) + .collect(); + assert_eq!( + rs_hints.len(), + 1, + 
"Should have exactly 1 Rust hint initially, got: {rs_hints:?}" + ); + assert_eq!( + ts_hints.len(), + 1, + "Should have exactly 1 TypeScript hint initially, got: {ts_hints:?}" + ); + }) + .unwrap(); + + // Edit the Rust buffer — triggers BufferEdited(rust_buffer_id). + // The language filter in refresh_inlay_hints excludes TypeScript excerpts + // from processing, but the global clear() wipes added_hints for ALL buffers. + editor + .update(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(0)..MultiBufferOffset(0)]) + }); + editor.handle_input("x", window, cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + + // Trigger NewLinesShown — this causes TypeScript chunks to be re-fetched + // because hint_chunk_fetching was wiped by clear(). The cached hints pass + // the added_hints.insert(...).is_none() filter (also wiped) and get inserted + // alongside the still-displayed copies, causing duplicates. + editor + .update(cx, |editor, _window, cx| { + editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + }) + .unwrap(); + cx.executor().run_until_parked(); + + // Assert: TypeScript hints must NOT be duplicated + editor + .update(cx, |editor, _window, cx| { + let visible = visible_hint_labels(editor, cx); + let ts_hints: Vec<_> = visible + .iter() + .filter(|h| h.starts_with("ts_hint")) + .collect(); + assert_eq!( + ts_hints.len(), + 1, + "TypeScript hints should NOT be duplicated after editing Rust buffer \ + and triggering NewLinesShown. Got: {ts_hints:?}" + ); + + let rs_hints: Vec<_> = visible + .iter() + .filter(|h| h.starts_with("rs_hint")) + .collect(); + assert_eq!( + rs_hints.len(), + 1, + "Rust hints should still be present after editing. 
Got: {rs_hints:?}" + ); + }) + .unwrap(); + } + pub(crate) fn init_test(cx: &mut TestAppContext, f: &dyn Fn(&mut AllLanguageSettingsContent)) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); @@ -4264,9 +4904,7 @@ let c = 3;"# } pub fn visible_hint_labels(editor: &Editor, cx: &Context) -> Vec { - editor - .visible_inlay_hints(cx) - .into_iter() + Editor::visible_inlay_hints(editor.display_map.read(cx)) .map(|hint| hint.text().to_string()) .collect() } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 676fd06f495ed6a69b246cc6a0df2ca6ca60a6b0..e2fab975cf455677ff0c92c2902151cc6712b6e0 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7030,6 +7030,21 @@ impl LspStore { .collect() } else { for (chunk, range_to_query) in ranges_to_query.into_iter().flatten() { + // When a server refresh was requested, other servers' cached hints + // are unaffected by the refresh and must be included in the result. + // Otherwise apply_fetched_hints (with should_invalidate()=true) + // removes all visible hints but only adds back the requesting + // server's new hints, permanently losing other servers' hints. 
+ let other_servers_cached: CacheInlayHints = if lsp_refresh_requested { + lsp_data + .inlay_hints + .cached_hints(&chunk) + .cloned() + .unwrap_or_default() + } else { + HashMap::default() + }; + let next_hint_id = next_hint_id.clone(); let buffer = buffer.clone(); let query_version = query_version.clone(); @@ -7048,33 +7063,32 @@ impl LspStore { if update_cache { lsp_data.inlay_hints.invalidate_for_chunk(chunk); } - HashMap::default() + other_servers_cached } else { - new_hints_by_server - .into_iter() - .map(|(server_id, new_hints)| { - let new_hints = new_hints - .into_iter() - .map(|new_hint| { - ( - InlayId::Hint(next_hint_id.fetch_add( - 1, - atomic::Ordering::AcqRel, - )), - new_hint, - ) - }) - .collect::>(); - if update_cache { - lsp_data.inlay_hints.insert_new_hints( - chunk, - server_id, - new_hints.clone(), - ); - } - (server_id, new_hints) - }) - .collect() + let mut result = other_servers_cached; + for (server_id, new_hints) in new_hints_by_server { + let new_hints = new_hints + .into_iter() + .map(|new_hint| { + ( + InlayId::Hint(next_hint_id.fetch_add( + 1, + atomic::Ordering::AcqRel, + )), + new_hint, + ) + }) + .collect::>(); + if update_cache { + lsp_data.inlay_hints.insert_new_hints( + chunk, + server_id, + new_hints.clone(), + ); + } + result.insert(server_id, new_hints); + } + result } }) }) diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 7ab273be7bfa3fa84a608c69174cfcc6a038eac5..8b5ea03c66945519c955a7d43324b8f5e4b32d1b 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -548,6 +548,43 @@ impl Rope { } } + pub fn starts_with(&self, pattern: &str) -> bool { + if pattern.len() > self.len() { + return false; + } + let mut remaining = pattern; + for chunk in self.chunks_in_range(0..pattern.len()) { + if remaining.starts_with(chunk) { + remaining = &remaining[chunk.len()..]; + if remaining.is_empty() { + return true; + } + } else { + return false; + } + } + remaining.is_empty() + } + + pub fn ends_with(&self, 
pattern: &str) -> bool { + let len = self.len(); + if pattern.len() > len { + return false; + } + let mut remaining = pattern; + for chunk in self.reversed_chunks_in_range(len - pattern.len()..len) { + if remaining.ends_with(chunk) { + remaining = &remaining[..remaining.len() - chunk.len()]; + if remaining.is_empty() { + return true; + } + } else { + return false; + } + } + remaining.is_empty() + } + pub fn line_len(&self, row: u32) -> u32 { self.clip_point(Point::new(row, u32::MAX), Bias::Left) .column @@ -2168,6 +2205,74 @@ mod tests { assert!(!rope.reversed_chunks_in_range(0..0).equals_str("foo")); } + #[test] + fn test_starts_with() { + let text = "Hello, world! 🌍🌎🌏"; + let rope = Rope::from(text); + + assert!(rope.starts_with("")); + assert!(rope.starts_with("H")); + assert!(rope.starts_with("Hello")); + assert!(rope.starts_with("Hello, world! 🌍🌎🌏")); + assert!(!rope.starts_with("ello")); + assert!(!rope.starts_with("Hello, world! 🌍🌎🌏!")); + + let empty_rope = Rope::from(""); + assert!(empty_rope.starts_with("")); + assert!(!empty_rope.starts_with("a")); + } + + #[test] + fn test_ends_with() { + let text = "Hello, world! 🌍🌎🌏"; + let rope = Rope::from(text); + + assert!(rope.ends_with("")); + assert!(rope.ends_with("🌏")); + assert!(rope.ends_with("🌍🌎🌏")); + assert!(rope.ends_with("Hello, world! 🌍🌎🌏")); + assert!(!rope.ends_with("🌎")); + assert!(!rope.ends_with("!Hello, world! 
🌍🌎🌏")); + + let empty_rope = Rope::from(""); + assert!(empty_rope.ends_with("")); + assert!(!empty_rope.ends_with("a")); + } + + #[test] + fn test_starts_with_ends_with_random() { + let mut rng = StdRng::seed_from_u64(0); + for _ in 0..100 { + let len = rng.random_range(0..100); + let text: String = RandomCharIter::new(&mut rng).take(len).collect(); + let rope = Rope::from(text.as_str()); + + for _ in 0..10 { + let start = rng.random_range(0..=text.len()); + let start = text.ceil_char_boundary(start); + let end = rng.random_range(start..=text.len()); + let end = text.ceil_char_boundary(end); + let prefix = &text[..end]; + let suffix = &text[start..]; + + assert_eq!( + rope.starts_with(prefix), + text.starts_with(prefix), + "starts_with mismatch for {:?} in {:?}", + prefix, + text + ); + assert_eq!( + rope.ends_with(suffix), + text.ends_with(suffix), + "ends_with mismatch for {:?} in {:?}", + suffix, + text + ); + } + } + } + #[test] fn test_is_char_boundary() { let fixture = "地"; From 6401cc361968adc0df1010793166249af62373c8 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 28 Feb 2026 10:58:32 +0100 Subject: [PATCH 181/548] Revert "settings: Await config worktree before opening settings.json" (#50380) Reverts zed-industries/zed#47199 Closes https://github.com/zed-industries/zed/issues/50237 Release Notes: - Fixed a bug that prevented settings files from opening on remotes --- crates/zed/src/zed.rs | 59 +++++++++++++++++++++---------------------- 1 file changed, 29 insertions(+), 30 deletions(-) diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index bbbce4986607aa5b64453e8bceb61375a49a7122..aa1870ba2a9cb07cd7ffee040c68ffa73759e728 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -2060,40 +2060,39 @@ fn open_settings_file( cx: &mut Context, ) { cx.spawn_in(window, async move |workspace, cx| { - let settings_open_task = workspace + let (worktree_creation_task, settings_open_task) = workspace .update_in(cx, |workspace, window, cx| { - 
workspace.with_local_workspace(window, cx, move |_workspace, window, cx| { - cx.spawn_in(window, async move |workspace, cx| { - let worktree_creation_task = - workspace.update_in(cx, |workspace, _window, cx| { - workspace.project().update(cx, |project, cx| { - // Set up a dedicated worktree for settings, since - // otherwise we're dropping and re-starting LSP servers - // for each file inside on every settings file - // close/open - - // TODO: Do note that all other external files (e.g. - // drag and drop from OS) still have their worktrees - // released on file close, causing LSP servers' - // restarts. - project.find_or_create_worktree( - paths::config_dir().as_path(), - false, - cx, - ) - }) - })?; - let _ = worktree_creation_task.await?; - let settings_open_task = - workspace.update_in(cx, |_workspace, window, cx| { - create_and_open_local_file(abs_path, window, cx, default_content) - })?; - let _ = settings_open_task.await?; - anyhow::Ok(()) - }) + workspace.with_local_or_wsl_workspace(window, cx, move |workspace, window, cx| { + let project = workspace.project().clone(); + + let worktree_creation_task = cx.spawn_in(window, async move |_, cx| { + let config_dir = project + .update(cx, |project, cx| { + project.try_windows_path_to_wsl(paths::config_dir().as_path(), cx) + }) + .await?; + // Set up a dedicated worktree for settings, since + // otherwise we're dropping and re-starting LSP servers + // for each file inside on every settings file + // close/open + + // TODO: Do note that all other external files (e.g. + // drag and drop from OS) still have their worktrees + // released on file close, causing LSP servers' + // restarts. + project + .update(cx, |project, cx| { + project.find_or_create_worktree(&config_dir, false, cx) + }) + .await + }); + let settings_open_task = + create_and_open_local_file(abs_path, window, cx, default_content); + (worktree_creation_task, settings_open_task) }) })? 
.await?; + let _ = worktree_creation_task.await?; let _ = settings_open_task.await?; anyhow::Ok(()) }) From 746ecb082d3d88eabc9bfb74695e4af77b1aa85e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 28 Feb 2026 12:53:06 +0100 Subject: [PATCH 182/548] buffer_diff: Do not block on parsing in `set_snapshot_with_secondary_inner` (#50385) Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/buffer_diff/src/buffer_diff.rs | 2 ++ .../src/edit_prediction_context.rs | 14 ++++++-------- crates/editor/src/inlays.rs | 6 ++++-- crates/editor/src/inlays/inlay_hints.rs | 2 +- crates/language/src/buffer.rs | 14 ++++++++++++-- crates/language_tools/src/highlights_tree_view.rs | 8 +++----- crates/rope/src/rope.rs | 13 +++++++++---- 7 files changed, 37 insertions(+), 22 deletions(-) diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index 2c9a68d5526f2cb0f03bc3da7ab611233091b143..8e61a9b633930655e296433711013645ea873dfd 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -1753,6 +1753,7 @@ impl BufferDiff { let should_compare_hunks = update.base_text_edits.is_some() || !base_text_changed; let parsing_idle = if let Some(diff) = update.base_text_edits { state.base_text.update(cx, |base_text, cx| { + base_text.set_sync_parse_timeout(None); base_text.set_capability(Capability::ReadWrite, cx); base_text.apply_diff(diff, cx); base_text.set_capability(Capability::ReadOnly, cx); @@ -1760,6 +1761,7 @@ impl BufferDiff { }) } else if update.base_text_changed { state.base_text.update(cx, |base_text, cx| { + base_text.set_sync_parse_timeout(None); base_text.set_capability(Capability::ReadWrite, cx); base_text.set_text(new_state.base_text.clone(), cx); base_text.set_capability(Capability::ReadOnly, cx); diff --git a/crates/edit_prediction_context/src/edit_prediction_context.rs b/crates/edit_prediction_context/src/edit_prediction_context.rs index 
b93fef49296e493b4f06e93e8d855d6a8e111e97..a44ff8b2e3e873c23c2eaa914298a4d50aee3bdc 100644 --- a/crates/edit_prediction_context/src/edit_prediction_context.rs +++ b/crates/edit_prediction_context/src/edit_prediction_context.rs @@ -644,14 +644,12 @@ fn identifiers_for_position( let outer_range = ranges.first().map_or(0, |r| r.start)..ranges.last().map_or(buffer.len(), |r| r.end); - let mut captures = buffer - .syntax - .captures(outer_range.clone(), &buffer.text, |grammar| { - grammar - .highlights_config - .as_ref() - .map(|config| &config.query) - }); + let mut captures = buffer.captures(outer_range.clone(), |grammar| { + grammar + .highlights_config + .as_ref() + .map(|config| &config.query) + }); for range in ranges { captures.set_byte_range(range.start..outer_range.end); diff --git a/crates/editor/src/inlays.rs b/crates/editor/src/inlays.rs index a240837918340f3a2540491a175d13e90de2931e..8c46e797cada703c9101fd91e670cbdd4ea713ac 100644 --- a/crates/editor/src/inlays.rs +++ b/crates/editor/src/inlays.rs @@ -58,10 +58,12 @@ pub enum InlayContent { impl Inlay { pub fn hint(id: InlayId, position: Anchor, hint: &InlayHint) -> Self { let mut text = hint.text(); - if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') { + let needs_right_padding = hint.padding_right && !text.ends_with(" "); + let needs_left_padding = hint.padding_left && !text.starts_with(" "); + if needs_right_padding { text.push(" "); } - if hint.padding_left && text.chars_at(0).next() != Some(' ') { + if needs_left_padding { text.push_front(" "); } Self { diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 23c97ced906844c1ac6c8fa5ce6932631284384a..d7a116065101dcc5070a7280ba7c3424e74685fe 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -369,7 +369,7 @@ impl Editor { if invalidate_cache.should_invalidate() { if invalidate_hints_for_buffers.is_empty() { inlay_hints.clear(); - } 
else if invalidate_cache.should_invalidate() { + } else { inlay_hints.clear_for_buffers( &invalidate_hints_for_buffers, Self::visible_inlay_hints(self.display_map.read(cx)), diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs index 1449052983a49a539201360ec48dd37c04a4ccae..eb9bb0827a7be9f4a725246c6d38777e340eee2c 100644 --- a/crates/language/src/buffer.rs +++ b/crates/language/src/buffer.rs @@ -187,7 +187,7 @@ struct BufferBranchState { /// state of a buffer. pub struct BufferSnapshot { pub text: text::BufferSnapshot, - pub syntax: SyntaxSnapshot, + pub(crate) syntax: SyntaxSnapshot, tree_sitter_data: Arc, diagnostics: TreeMap, remote_selections: TreeMap, @@ -1776,7 +1776,9 @@ impl Buffer { self.syntax_map.lock().contains_unknown_injections() } - #[cfg(any(test, feature = "test-support"))] + /// Sets the sync parse timeout for this buffer. + /// + /// Setting this to `None` disables sync parsing entirely. pub fn set_sync_parse_timeout(&mut self, timeout: Option) { self.sync_parse_timeout = timeout; } @@ -3706,6 +3708,14 @@ impl BufferSnapshot { None } + pub fn captures( + &self, + range: Range, + query: fn(&Grammar) -> Option<&tree_sitter::Query>, + ) -> SyntaxMapCaptures<'_> { + self.syntax.captures(range, &self.text, query) + } + #[ztracing::instrument(skip_all)] fn get_highlights(&self, range: Range) -> (SyntaxMapCaptures<'_>, Vec) { let captures = self.syntax.captures(range, &self.text, |grammar| { diff --git a/crates/language_tools/src/highlights_tree_view.rs b/crates/language_tools/src/highlights_tree_view.rs index 9796c1c07375956184bdd28fbd8f5bb52bff2a32..fb92e21ab33eb3b6a3cd498a6ffbdd764947ea9e 100644 --- a/crates/language_tools/src/highlights_tree_view.rs +++ b/crates/language_tools/src/highlights_tree_view.rs @@ -397,11 +397,9 @@ impl HighlightsTreeView { let end_offset = excerpt_range.context.end.to_offset(buffer_snapshot); let range = start_offset..end_offset; - let captures = buffer_snapshot - .syntax - .captures(range, 
buffer_snapshot, |grammar| { - grammar.highlights_config.as_ref().map(|c| &c.query) - }); + let captures = buffer_snapshot.captures(range, |grammar| { + grammar.highlights_config.as_ref().map(|c| &c.query) + }); let grammars: Vec<_> = captures.grammars().to_vec(); let highlight_maps: Vec<_> = grammars.iter().map(|g| g.highlight_map()).collect(); diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 8b5ea03c66945519c955a7d43324b8f5e4b32d1b..9d54f1ba5302291e20289dcdf2c71cceb2e6e349 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -553,7 +553,10 @@ impl Rope { return false; } let mut remaining = pattern; - for chunk in self.chunks_in_range(0..pattern.len()) { + for chunk in self.chunks_in_range(0..self.len()) { + let Some(chunk) = chunk.get(..remaining.len().min(chunk.len())) else { + return false; + }; if remaining.starts_with(chunk) { remaining = &remaining[chunk.len()..]; if remaining.is_empty() { @@ -567,12 +570,14 @@ impl Rope { } pub fn ends_with(&self, pattern: &str) -> bool { - let len = self.len(); - if pattern.len() > len { + if pattern.len() > self.len() { return false; } let mut remaining = pattern; - for chunk in self.reversed_chunks_in_range(len - pattern.len()..len) { + for chunk in self.reversed_chunks_in_range(0..self.len()) { + let Some(chunk) = chunk.get(chunk.len() - remaining.len().min(chunk.len())..) else { + return false; + }; if remaining.ends_with(chunk) { remaining = &remaining[..remaining.len() - chunk.len()]; if remaining.is_empty() { From e4937eb02262bbf16c606effbf13e64a28119661 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 28 Feb 2026 12:54:09 +0100 Subject: [PATCH 183/548] rope: Improve prepend performance for small inputs on small ropes (#50389) Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/rope/src/chunk.rs | 45 +++++++++++ crates/rope/src/rope.rs | 135 ++++++++++++++++++++++++++++++++ crates/sum_tree/src/sum_tree.rs | 45 +++++++++++ 3 files changed, 225 insertions(+) diff --git a/crates/rope/src/chunk.rs b/crates/rope/src/chunk.rs index e5a3ed045a7e44e2208941e908718bdf7ee5b00a..594f8f5c67e2e151c1ba933b59344d8542f381e1 100644 --- a/crates/rope/src/chunk.rs +++ b/crates/rope/src/chunk.rs @@ -102,6 +102,11 @@ impl Chunk { self.append(Chunk::new(text).as_slice()); } + #[inline(always)] + pub fn prepend_str(&mut self, text: &str) { + self.prepend(Chunk::new(text).as_slice()); + } + #[inline(always)] pub fn append(&mut self, slice: ChunkSlice) { if slice.is_empty() { @@ -116,6 +121,28 @@ impl Chunk { self.text.push_str(slice.text); } + #[inline(always)] + pub fn prepend(&mut self, slice: ChunkSlice) { + if slice.is_empty() { + return; + } + if self.text.is_empty() { + *self = Chunk::new(slice.text); + return; + } + + let shift = slice.text.len(); + self.chars = slice.chars | (self.chars << shift); + self.chars_utf16 = slice.chars_utf16 | (self.chars_utf16 << shift); + self.newlines = slice.newlines | (self.newlines << shift); + self.tabs = slice.tabs | (self.tabs << shift); + + let mut new_text = ArrayString::::new(); + new_text.push_str(slice.text); + new_text.push_str(&self.text); + self.text = new_text; + } + #[inline(always)] pub fn as_slice(&self) -> ChunkSlice<'_> { ChunkSlice { @@ -890,6 +917,24 @@ mod tests { verify_chunk(chunk1.as_slice(), &(str1 + &str2[start_offset..end_offset])); } + #[gpui::test(iterations = 1000)] + fn test_prepend_random_strings(mut rng: StdRng) { + let len1 = rng.random_range(0..=MAX_BASE); + let len2 = rng.random_range(0..=MAX_BASE).saturating_sub(len1); + let str1 = random_string_with_utf8_len(&mut rng, len1); + let str2 = random_string_with_utf8_len(&mut rng, len2); + let mut chunk1 = Chunk::new(&str1); + let chunk2 = Chunk::new(&str2); + let char_offsets = char_offsets_with_end(&str2); + let start_index = 
rng.random_range(0..char_offsets.len()); + let start_offset = char_offsets[start_index]; + let end_offset = char_offsets[rng.random_range(start_index..char_offsets.len())]; + let slice = chunk2.slice(start_offset..end_offset); + let prefix_text = &str2[start_offset..end_offset]; + chunk1.prepend(slice); + verify_chunk(chunk1.as_slice(), &(prefix_text.to_owned() + &str1)); + } + /// Return the byte offsets for each character in a string. /// /// These are valid offsets to split the string. diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 9d54f1ba5302291e20289dcdf2c71cceb2e6e349..5b599bad51c2f571cca11625be0b290e7e748504 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -167,6 +167,11 @@ impl Rope { (), ); + if text.is_empty() { + self.check_invariants(); + return; + } + #[cfg(all(test, not(rust_analyzer)))] const NUM_CHUNKS: usize = 16; #[cfg(not(all(test, not(rust_analyzer))))] @@ -269,6 +274,23 @@ impl Rope { } pub fn push_front(&mut self, text: &str) { + if text.is_empty() { + return; + } + if self.is_empty() { + self.push(text); + return; + } + if self + .chunks + .first() + .is_some_and(|c| c.text.len() + text.len() <= chunk::MAX_BASE) + { + self.chunks + .update_first(|first_chunk| first_chunk.prepend_str(text), ()); + self.check_invariants(); + return; + } let suffix = mem::replace(self, Rope::from(text)); self.append(suffix); } @@ -2339,6 +2361,119 @@ mod tests { } } + #[test] + fn test_push_front_empty_text_on_empty_rope() { + let mut rope = Rope::new(); + rope.push_front(""); + assert_eq!(rope.text(), ""); + assert_eq!(rope.len(), 0); + } + + #[test] + fn test_push_front_empty_text_on_nonempty_rope() { + let mut rope = Rope::from("hello"); + rope.push_front(""); + assert_eq!(rope.text(), "hello"); + } + + #[test] + fn test_push_front_on_empty_rope() { + let mut rope = Rope::new(); + rope.push_front("hello"); + assert_eq!(rope.text(), "hello"); + assert_eq!(rope.len(), 5); + assert_eq!(rope.max_point(), Point::new(0, 
5)); + } + + #[test] + fn test_push_front_single_space() { + let mut rope = Rope::from("hint"); + rope.push_front(" "); + assert_eq!(rope.text(), " hint"); + assert_eq!(rope.len(), 5); + } + + #[gpui::test(iterations = 50)] + fn test_push_front_random(mut rng: StdRng) { + let initial_len = rng.random_range(0..=64); + let initial_text: String = RandomCharIter::new(&mut rng).take(initial_len).collect(); + let mut rope = Rope::from(initial_text.as_str()); + + let mut expected = initial_text; + + for _ in 0..rng.random_range(1..=10) { + let prefix_len = rng.random_range(0..=32); + let prefix: String = RandomCharIter::new(&mut rng).take(prefix_len).collect(); + + rope.push_front(&prefix); + expected.insert_str(0, &prefix); + + assert_eq!( + rope.text(), + expected, + "text mismatch after push_front({:?})", + prefix + ); + assert_eq!(rope.len(), expected.len()); + + let actual_summary = rope.summary(); + let expected_summary = TextSummary::from(expected.as_str()); + assert_eq!( + actual_summary.len, expected_summary.len, + "len mismatch for {:?}", + expected + ); + assert_eq!( + actual_summary.lines, expected_summary.lines, + "lines mismatch for {:?}", + expected + ); + assert_eq!( + actual_summary.chars, expected_summary.chars, + "chars mismatch for {:?}", + expected + ); + assert_eq!( + actual_summary.longest_row, expected_summary.longest_row, + "longest_row mismatch for {:?}", + expected + ); + + // Verify offset-to-point and point-to-offset round-trip at boundaries. 
+ for (ix, _) in expected.char_indices().chain(Some((expected.len(), '\0'))) { + assert_eq!( + rope.point_to_offset(rope.offset_to_point(ix)), + ix, + "offset round-trip failed at {} for {:?}", + ix, + expected + ); + } + } + } + + #[gpui::test(iterations = 50)] + fn test_push_front_large_prefix(mut rng: StdRng) { + let initial_len = rng.random_range(0..=32); + let initial_text: String = RandomCharIter::new(&mut rng).take(initial_len).collect(); + let mut rope = Rope::from(initial_text.as_str()); + + let prefix_len = rng.random_range(64..=256); + let prefix: String = RandomCharIter::new(&mut rng).take(prefix_len).collect(); + + rope.push_front(&prefix); + let expected = format!("{}{}", prefix, initial_text); + + assert_eq!(rope.text(), expected); + assert_eq!(rope.len(), expected.len()); + + let actual_summary = rope.summary(); + let expected_summary = TextSummary::from(expected.as_str()); + assert_eq!(actual_summary.len, expected_summary.len); + assert_eq!(actual_summary.lines, expected_summary.lines); + assert_eq!(actual_summary.chars, expected_summary.chars); + } + fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize { while !text.is_char_boundary(offset) { match bias { diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index fa83dd937489f0c52e6c02b83b52112b5ff52ec1..068bc4bce56816962a3b75d6f6497b033a9209a5 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -661,6 +661,51 @@ impl SumTree { } } + pub fn update_first( + &mut self, + f: impl FnOnce(&mut T), + cx: ::Context<'_>, + ) { + self.update_first_recursive(f, cx); + } + + fn update_first_recursive( + &mut self, + f: impl FnOnce(&mut T), + cx: ::Context<'_>, + ) -> Option { + match Arc::make_mut(&mut self.0) { + Node::Internal { + summary, + child_summaries, + child_trees, + .. 
+ } => { + let first_summary = child_summaries.first_mut().unwrap(); + let first_child = child_trees.first_mut().unwrap(); + *first_summary = first_child.update_first_recursive(f, cx).unwrap(); + *summary = sum(child_summaries.iter(), cx); + Some(summary.clone()) + } + Node::Leaf { + summary, + items, + item_summaries, + } => { + if let Some((item, item_summary)) = + items.first_mut().zip(item_summaries.first_mut()) + { + (f)(item); + *item_summary = item.summary(cx); + *summary = sum(item_summaries.iter(), cx); + Some(summary.clone()) + } else { + None + } + } + } + } + pub fn extent<'a, D: Dimension<'a, T::Summary>>( &'a self, cx: ::Context<'_>, From 2eea301b62b1673908f223d9abf1ce65650970ef Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Sat, 28 Feb 2026 20:59:33 +0100 Subject: [PATCH 184/548] ci: Fix bundling linux (#50375) Release Notes: - N/A --- .github/workflows/release.yml | 4 +++ .github/workflows/release_nightly.yml | 4 +++ .github/workflows/run_bundling.yml | 4 +++ Cargo.lock | 14 ++++---- Cargo.toml | 4 +-- .../xtask/src/tasks/workflows/run_bundling.rs | 34 +++++++++++-------- .../xtask/src/tasks/workflows/run_tests.rs | 9 ++--- tooling/xtask/src/tasks/workflows/steps.rs | 5 +++ 8 files changed, 47 insertions(+), 31 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4fe1d4dd7d698b624e9dc2391d371acc4335cdcb..69bb80b40d7e7f21db21562e7aceb5a98706801f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -299,6 +299,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -339,6 +341,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: 
clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index d3f01447e52f418713499b84ad454085fd3cb646..faf7788eeafa856985ba5bdf21a1a37c5fdd8506 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -103,6 +103,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -149,6 +151,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index 2b536425a1dc4b9663c726fd9259c95e0626efda..683d74a264e53e621e77730a91cdd01adff17316 100644 --- a/.github/workflows/run_bundling.yml +++ b/.github/workflows/run_bundling.yml @@ -19,6 +19,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -58,6 +60,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 diff --git a/Cargo.lock b/Cargo.lock index 
6ae4f57301f2882e7f5e66c5960078393d2ac2de..c28d777c3f4723fc5a498e3047de759e711dafad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9698,7 +9698,7 @@ dependencies = [ [[package]] name = "libwebrtc" version = "0.3.26" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" dependencies = [ "cxx", "glib", @@ -9796,7 +9796,7 @@ checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" [[package]] name = "livekit" version = "0.7.32" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" dependencies = [ "base64 0.22.1", "bmrng", @@ -9822,7 +9822,7 @@ dependencies = [ [[package]] name = "livekit-api" version = "0.4.14" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" dependencies = [ "base64 0.21.7", "futures-util", @@ -9849,7 +9849,7 @@ dependencies = [ [[package]] name = "livekit-protocol" version = "0.7.1" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" dependencies = [ "futures-util", "livekit-runtime", @@ -9865,7 +9865,7 @@ dependencies = [ 
[[package]] name = "livekit-runtime" version = "0.4.0" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" dependencies = [ "tokio", "tokio-stream", @@ -19882,7 +19882,7 @@ dependencies = [ [[package]] name = "webrtc-sys" version = "0.3.23" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" dependencies = [ "cc", "cxx", @@ -19896,7 +19896,7 @@ dependencies = [ [[package]] name = "webrtc-sys-build" version = "0.3.13" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=9c38d9a0a91951967f8fa84ed86e193626436774#9c38d9a0a91951967f8fa84ed86e193626436774" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" dependencies = [ "anyhow", "fs2", diff --git a/Cargo.toml b/Cargo.toml index 3b63808046e98c5e314cb5ab0bb731c32fce716b..148a909ccc8edb8f37ea7fd992ea6464c46ce0d5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -837,8 +837,8 @@ notify = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24c notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24cad542c28e04ced02e20325a4ec28a31d" } windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" } calloop = { git = "https://github.com/zed-industries/calloop" } -livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "9c38d9a0a91951967f8fa84ed86e193626436774" } -libwebrtc 
= { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "9c38d9a0a91951967f8fa84ed86e193626436774" } +livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" } +libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" } [profile.dev] split-debuginfo = "unpacked" diff --git a/tooling/xtask/src/tasks/workflows/run_bundling.rs b/tooling/xtask/src/tasks/workflows/run_bundling.rs index 7fa82e80c52b9e6faec6a377d906269e7a3dbb77..2de7000360b2be564efb8107da47964dbab0ceb6 100644 --- a/tooling/xtask/src/tasks/workflows/run_bundling.rs +++ b/tooling/xtask/src/tasks/workflows/run_bundling.rs @@ -4,7 +4,9 @@ use crate::tasks::workflows::{ nix_build::build_nix, release::ReleaseBundleJobs, runners::{Arch, Platform, ReleaseChannel}, - steps::{DEFAULT_REPOSITORY_OWNER_GUARD, FluentBuilder, NamedJob, dependant_job, named}, + steps::{ + DEFAULT_REPOSITORY_OWNER_GUARD, FluentBuilder, NamedJob, dependant_job, named, use_clang, + }, vars::{assets, bundle_envs}, }; @@ -143,20 +145,22 @@ pub(crate) fn bundle_linux( }; NamedJob { name: format!("bundle_linux_{arch}"), - job: bundle_job(deps) - .runs_on(arch.linux_bundler()) - .envs(bundle_envs(platform)) - .add_step(steps::checkout_repo()) - .when_some(release_channel, |job, release_channel| { - job.add_step(set_release_channel(platform, release_channel)) - }) - .add_step(steps::setup_sentry()) - .map(steps::install_linux_dependencies) - .add_step(steps::script("./script/bundle-linux")) - .add_step(upload_artifact(&format!("target/release/{artifact_name}"))) - .add_step(upload_artifact(&format!( - "target/{remote_server_artifact_name}" - ))), + job: use_clang( + bundle_job(deps) + .runs_on(arch.linux_bundler()) + .envs(bundle_envs(platform)), + ) + .add_step(steps::checkout_repo()) + .when_some(release_channel, |job, release_channel| { + job.add_step(set_release_channel(platform, 
release_channel)) + }) + .add_step(steps::setup_sentry()) + .map(steps::install_linux_dependencies) + .add_step(steps::script("./script/bundle-linux")) + .add_step(upload_artifact(&format!("target/release/{artifact_name}"))) + .add_step(upload_artifact(&format!( + "target/{remote_server_artifact_name}" + ))), } } diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 378af0eba5dcea9c98f23d57ced5951073fb9cc2..9c5529cc5361d9581b1bd59de5bfb4201298c692 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -1,12 +1,12 @@ use gh_workflow::{ - Concurrency, Container, Env, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use, + Concurrency, Container, Event, Expression, Job, Port, PullRequest, Push, Run, Step, Use, Workflow, }; use indexmap::IndexMap; use indoc::formatdoc; use crate::tasks::workflows::{ - steps::{CommonJobConditions, repository_owner_guard_expression}, + steps::{CommonJobConditions, repository_owner_guard_expression, use_clang}, vars::{self, PathCondition}, }; @@ -15,11 +15,6 @@ use super::{ steps::{self, FluentBuilder, NamedJob, named, release_job}, }; -fn use_clang(job: Job) -> Job { - job.add_env(Env::new("CC", "clang")) - .add_env(Env::new("CXX", "clang++")) -} - pub(crate) fn run_tests() -> Workflow { // Specify anything which should potentially skip full test suite in this regex: // - docs/ diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index 8220d8311ff7ee0ee3a955dabacb067701bb8d51..9e54452424dba36d64a209c71b281e3b72eaafc8 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -3,6 +3,11 @@ use serde_json::Value; use crate::tasks::workflows::{runners::Platform, vars, vars::StepOutput}; +pub(crate) fn use_clang(job: Job) -> Job { + job.add_env(Env::new("CC", "clang")) + .add_env(Env::new("CXX", "clang++")) +} + 
const SCCACHE_R2_BUCKET: &str = "sccache-zed"; const BASH_SHELL: &str = "bash -euxo pipefail {0}"; From 92c99c6bfd9791381c0a11a9dba69d6bc2fe3d75 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Sun, 1 Mar 2026 02:42:35 +0530 Subject: [PATCH 185/548] markdown: Fix horizontal autoscroll for code blocks in Agent Panel (#50416) Release Notes: - Fixed Agent Panel code blocks not scrolling horizontally during text selection. --- crates/markdown/src/markdown.rs | 34 ++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs index 1cd19ffb8f7cfa16ab1aa95af9425690aba78707..087b7153704c215ec27eae653879ffe9f11ebf09 100644 --- a/crates/markdown/src/markdown.rs +++ b/crates/markdown/src/markdown.rs @@ -15,6 +15,7 @@ use ui::Checkbox; use ui::CopyButton; use std::borrow::Cow; +use std::collections::BTreeMap; use std::iter; use std::mem; use std::ops::Range; @@ -246,7 +247,7 @@ pub struct Markdown { fallback_code_block_language: Option, options: Options, copied_code_blocks: HashSet, - code_block_scroll_handles: HashMap, + code_block_scroll_handles: BTreeMap, context_menu_selected_text: Option, } @@ -316,7 +317,7 @@ impl Markdown { parse_links_only: false, }, copied_code_blocks: HashSet::default(), - code_block_scroll_handles: HashMap::default(), + code_block_scroll_handles: BTreeMap::default(), context_menu_selected_text: None, }; this.parse(cx); @@ -341,7 +342,7 @@ impl Markdown { parse_links_only: true, }, copied_code_blocks: HashSet::default(), - code_block_scroll_handles: HashMap::default(), + code_block_scroll_handles: BTreeMap::default(), context_menu_selected_text: None, }; this.parse(cx); @@ -364,6 +365,32 @@ impl Markdown { self.code_block_scroll_handles.clear(); } + fn autoscroll_code_block(&self, source_index: usize, cursor_position: Point) { + let Some((_, scroll_handle)) = self + .code_block_scroll_handles + .range(..=source_index) + .next_back() + else { + return; 
+ }; + + let bounds = scroll_handle.bounds(); + if cursor_position.y < bounds.top() || cursor_position.y > bounds.bottom() { + return; + } + + let horizontal_delta = if cursor_position.x < bounds.left() { + bounds.left() - cursor_position.x + } else if cursor_position.x > bounds.right() { + bounds.right() - cursor_position.x + } else { + return; + }; + + let offset = scroll_handle.offset(); + scroll_handle.set_offset(point(offset.x + horizontal_delta, offset.y)); + } + pub fn is_parsing(&self) -> bool { self.pending_parse.is_some() } @@ -902,6 +929,7 @@ impl MarkdownElement { Ok(ix) | Err(ix) => ix, }; markdown.selection.set_head(source_index, &rendered_text); + markdown.autoscroll_code_block(source_index, event.position); markdown.autoscroll_request = Some(source_index); cx.notify(); } else { From 56a88a848be09cbcb66bcb3d85ec1f5644909f72 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Sat, 28 Feb 2026 23:48:54 +0100 Subject: [PATCH 186/548] Format proto files (#50413) This fixes a problem where editing a proto file would make the proto LSP show a ton of warnings and avoids the giant diff that formatting a .proto file would cause. I also added a lint check when our clippy script is ran locally Release Notes: - N/A *or* Added/Fixed/Improved ... 
--------- Co-authored-by: MrSubidubi --- crates/proto/proto/ai.proto | 298 ++++----- crates/proto/proto/app.proto | 77 ++- crates/proto/proto/buffer.proto | 389 ++++++------ crates/proto/proto/call.proto | 456 +++++++------- crates/proto/proto/channel.proto | 270 ++++---- crates/proto/proto/core.proto | 30 +- crates/proto/proto/debugger.proto | 648 ++++++++++--------- crates/proto/proto/download.proto | 32 +- crates/proto/proto/git.proto | 721 +++++++++++---------- crates/proto/proto/image.proto | 32 +- crates/proto/proto/lsp.proto | 2 - crates/proto/proto/notification.proto | 28 +- crates/proto/proto/task.proto | 58 +- crates/proto/proto/toolchain.proto | 60 +- crates/proto/proto/zed.proto | 870 +++++++++++++------------- 15 files changed, 1981 insertions(+), 1990 deletions(-) diff --git a/crates/proto/proto/ai.proto b/crates/proto/proto/ai.proto index b2a8a371c4422e80ad5edd677f2b75288f69ebd4..428d971c536f6e830e0c056372d311dc7ed7028f 100644 --- a/crates/proto/proto/ai.proto +++ b/crates/proto/proto/ai.proto @@ -5,245 +5,245 @@ import "buffer.proto"; import "task.proto"; message Context { - repeated ContextOperation operations = 1; + repeated ContextOperation operations = 1; } message ContextMetadata { - string context_id = 1; - optional string summary = 2; + string context_id = 1; + optional string summary = 2; } message ContextMessageStatus { - oneof variant { - Done done = 1; - Pending pending = 2; - Error error = 3; - Canceled canceled = 4; - } + oneof variant { + Done done = 1; + Pending pending = 2; + Error error = 3; + Canceled canceled = 4; + } - message Done {} + message Done {} - message Pending {} + message Pending {} - message Error { - string message = 1; - } + message Error { + string message = 1; + } - message Canceled {} + message Canceled {} } message ContextMessage { - LamportTimestamp id = 1; - Anchor start = 2; - LanguageModelRole role = 3; - ContextMessageStatus status = 4; + LamportTimestamp id = 1; + Anchor start = 2; + LanguageModelRole role 
= 3; + ContextMessageStatus status = 4; } message SlashCommandOutputSection { - AnchorRange range = 1; - string icon_name = 2; - string label = 3; - optional string metadata = 4; + AnchorRange range = 1; + string icon_name = 2; + string label = 3; + optional string metadata = 4; } message ThoughtProcessOutputSection { - AnchorRange range = 1; + AnchorRange range = 1; } message ContextOperation { - oneof variant { - InsertMessage insert_message = 1; - UpdateMessage update_message = 2; - UpdateSummary update_summary = 3; - BufferOperation buffer_operation = 5; - SlashCommandStarted slash_command_started = 6; - SlashCommandOutputSectionAdded slash_command_output_section_added = 7; - SlashCommandCompleted slash_command_completed = 8; - ThoughtProcessOutputSectionAdded thought_process_output_section_added = 9; - } - - reserved 4; - - message InsertMessage { - ContextMessage message = 1; - repeated VectorClockEntry version = 2; - } - - message UpdateMessage { - LamportTimestamp message_id = 1; - LanguageModelRole role = 2; - ContextMessageStatus status = 3; - LamportTimestamp timestamp = 4; - repeated VectorClockEntry version = 5; - } - - message UpdateSummary { - string summary = 1; - bool done = 2; - LamportTimestamp timestamp = 3; - repeated VectorClockEntry version = 4; - } - - message SlashCommandStarted { - LamportTimestamp id = 1; - AnchorRange output_range = 2; - string name = 3; - repeated VectorClockEntry version = 4; - } - - message SlashCommandOutputSectionAdded { - LamportTimestamp timestamp = 1; - SlashCommandOutputSection section = 2; - repeated VectorClockEntry version = 3; - } - - message SlashCommandCompleted { - LamportTimestamp id = 1; - LamportTimestamp timestamp = 3; - optional string error_message = 4; - repeated VectorClockEntry version = 5; - } - - message ThoughtProcessOutputSectionAdded { - LamportTimestamp timestamp = 1; - ThoughtProcessOutputSection section = 2; - repeated VectorClockEntry version = 3; - } - - message BufferOperation { - 
Operation operation = 1; - } + oneof variant { + InsertMessage insert_message = 1; + UpdateMessage update_message = 2; + UpdateSummary update_summary = 3; + BufferOperation buffer_operation = 5; + SlashCommandStarted slash_command_started = 6; + SlashCommandOutputSectionAdded slash_command_output_section_added = 7; + SlashCommandCompleted slash_command_completed = 8; + ThoughtProcessOutputSectionAdded thought_process_output_section_added = 9; + } + + reserved 4; + + message InsertMessage { + ContextMessage message = 1; + repeated VectorClockEntry version = 2; + } + + message UpdateMessage { + LamportTimestamp message_id = 1; + LanguageModelRole role = 2; + ContextMessageStatus status = 3; + LamportTimestamp timestamp = 4; + repeated VectorClockEntry version = 5; + } + + message UpdateSummary { + string summary = 1; + bool done = 2; + LamportTimestamp timestamp = 3; + repeated VectorClockEntry version = 4; + } + + message SlashCommandStarted { + LamportTimestamp id = 1; + AnchorRange output_range = 2; + string name = 3; + repeated VectorClockEntry version = 4; + } + + message SlashCommandOutputSectionAdded { + LamportTimestamp timestamp = 1; + SlashCommandOutputSection section = 2; + repeated VectorClockEntry version = 3; + } + + message SlashCommandCompleted { + LamportTimestamp id = 1; + LamportTimestamp timestamp = 3; + optional string error_message = 4; + repeated VectorClockEntry version = 5; + } + + message ThoughtProcessOutputSectionAdded { + LamportTimestamp timestamp = 1; + ThoughtProcessOutputSection section = 2; + repeated VectorClockEntry version = 3; + } + + message BufferOperation { + Operation operation = 1; + } } message AdvertiseContexts { - uint64 project_id = 1; - repeated ContextMetadata contexts = 2; + uint64 project_id = 1; + repeated ContextMetadata contexts = 2; } message OpenContext { - uint64 project_id = 1; - string context_id = 2; + uint64 project_id = 1; + string context_id = 2; } message OpenContextResponse { - Context context = 1; + 
Context context = 1; } message CreateContext { - uint64 project_id = 1; + uint64 project_id = 1; } message CreateContextResponse { - string context_id = 1; - Context context = 2; + string context_id = 1; + Context context = 2; } message UpdateContext { - uint64 project_id = 1; - string context_id = 2; - ContextOperation operation = 3; + uint64 project_id = 1; + string context_id = 2; + ContextOperation operation = 3; } message ContextVersion { - string context_id = 1; - repeated VectorClockEntry context_version = 2; - repeated VectorClockEntry buffer_version = 3; + string context_id = 1; + repeated VectorClockEntry context_version = 2; + repeated VectorClockEntry buffer_version = 3; } message SynchronizeContexts { - uint64 project_id = 1; - repeated ContextVersion contexts = 2; + uint64 project_id = 1; + repeated ContextVersion contexts = 2; } message SynchronizeContextsResponse { - repeated ContextVersion contexts = 1; + repeated ContextVersion contexts = 1; } enum LanguageModelRole { - LanguageModelUser = 0; - LanguageModelAssistant = 1; - LanguageModelSystem = 2; - reserved 3; + LanguageModelUser = 0; + LanguageModelAssistant = 1; + LanguageModelSystem = 2; + reserved 3; } message GetAgentServerCommand { - uint64 project_id = 1; - string name = 2; - optional string root_dir = 3; + uint64 project_id = 1; + string name = 2; + optional string root_dir = 3; } message GetContextServerCommand { - uint64 project_id = 1; - string server_id = 2; - optional string root_dir = 3; + uint64 project_id = 1; + string server_id = 2; + optional string root_dir = 3; } message ContextServerCommand { - string path = 1; - repeated string args = 2; - map env = 3; + string path = 1; + repeated string args = 2; + map env = 3; } message AgentServerCommand { - string path = 1; - repeated string args = 2; - map env = 3; - string root_dir = 4; + string path = 1; + repeated string args = 2; + map env = 3; + string root_dir = 4; - optional SpawnInTerminal login = 5; + optional SpawnInTerminal 
login = 5; } message ExternalAgentsUpdated { - uint64 project_id = 1; - repeated string names = 2; + uint64 project_id = 1; + repeated string names = 2; } message ExternalExtensionAgentTarget { - string archive = 1; - string cmd = 2; - repeated string args = 3; - optional string sha256 = 4; - map env = 5; + string archive = 1; + string cmd = 2; + repeated string args = 3; + optional string sha256 = 4; + map env = 5; } message ExternalExtensionAgent { - string name = 1; - optional string icon_path = 2; - string extension_id = 3; - map targets = 4; - map env = 5; + string name = 1; + optional string icon_path = 2; + string extension_id = 3; + map targets = 4; + map env = 5; } message ExternalExtensionAgentsUpdated { - uint64 project_id = 1; - repeated ExternalExtensionAgent agents = 2; + uint64 project_id = 1; + repeated ExternalExtensionAgent agents = 2; } message ExternalAgentLoadingStatusUpdated { - uint64 project_id = 1; - string name = 2; - string status = 3; + uint64 project_id = 1; + string name = 2; + string status = 3; } message NewExternalAgentVersionAvailable { - uint64 project_id = 1; - string name = 2; - string version = 3; + uint64 project_id = 1; + string name = 2; + string version = 3; } message ShareAgentThread { - string session_id = 1; // Client-generated UUID (acp::SessionId) - string title = 2; - bytes thread_data = 3; + string session_id = 1; // Client-generated UUID (acp::SessionId) + string title = 2; + bytes thread_data = 3; } message GetSharedAgentThread { - string session_id = 1; // UUID string + string session_id = 1; // UUID string } message GetSharedAgentThreadResponse { - string title = 1; - bytes thread_data = 2; - string sharer_username = 3; - string created_at = 4; + string title = 1; + bytes thread_data = 2; + string sharer_username = 3; + string created_at = 4; } diff --git a/crates/proto/proto/app.proto b/crates/proto/proto/app.proto index 3aa3b23a889228903e14755e90eecfa168702f0c..2ced6a16d4441c11c124b73115a41a9e7008843a 100644 
--- a/crates/proto/proto/app.proto +++ b/crates/proto/proto/app.proto @@ -4,60 +4,59 @@ package zed.messages; message ShutdownRemoteServer {} message Toast { - uint64 project_id = 1; - string notification_id = 2; - string message = 3; + uint64 project_id = 1; + string notification_id = 2; + string message = 3; } message HideToast { - uint64 project_id = 1; - string notification_id = 2; + uint64 project_id = 1; + string notification_id = 2; } message OpenServerSettings { - uint64 project_id = 1; + uint64 project_id = 1; } -message GetCrashFiles { -} +message GetCrashFiles {} message GetCrashFilesResponse { - repeated CrashReport crashes = 1; - reserved 2; // old panics + repeated CrashReport crashes = 1; + reserved 2; // old panics } message CrashReport { - reserved 1, 2; - string metadata = 3; - bytes minidump_contents = 4; + reserved 1, 2; + string metadata = 3; + bytes minidump_contents = 4; } message Extension { - string id = 1; - string version = 2; - bool dev = 3; + string id = 1; + string version = 2; + bool dev = 3; } message SyncExtensions { - repeated Extension extensions = 1; + repeated Extension extensions = 1; } message SyncExtensionsResponse { - string tmp_dir = 1; - repeated Extension missing_extensions = 2; + string tmp_dir = 1; + repeated Extension missing_extensions = 2; } message InstallExtension { - Extension extension = 1; - string tmp_dir = 2; + Extension extension = 1; + string tmp_dir = 2; } message AskPassRequest { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - uint64 askpass_id = 4; - string prompt = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + uint64 askpass_id = 4; + string prompt = 5; } message AskPassResponse { @@ -65,29 +64,29 @@ message AskPassResponse { } message GetRemoteProfilingData { - uint64 project_id = 1; - bool foreground_only = 2; + uint64 project_id = 1; + bool foreground_only = 2; } message GetRemoteProfilingDataResponse { - repeated RemoteProfilingThread threads = 1; - 
uint64 now_nanos = 2; + repeated RemoteProfilingThread threads = 1; + uint64 now_nanos = 2; } message RemoteProfilingThread { - optional string thread_name = 1; - uint64 thread_id = 2; - repeated RemoteProfilingTiming timings = 3; + optional string thread_name = 1; + uint64 thread_id = 2; + repeated RemoteProfilingTiming timings = 3; } message RemoteProfilingTiming { - RemoteProfilingLocation location = 1; - uint64 start_nanos = 2; - uint64 duration_nanos = 3; + RemoteProfilingLocation location = 1; + uint64 start_nanos = 2; + uint64 duration_nanos = 3; } message RemoteProfilingLocation { - string file = 1; - uint32 line = 2; - uint32 column = 3; + string file = 1; + uint32 line = 2; + uint32 column = 3; } diff --git a/crates/proto/proto/buffer.proto b/crates/proto/proto/buffer.proto index 4cd83af2aab8a44feb9f9646ec85d343b8875f82..01f4bda9e9f450ed65d4f6cb8dc9abc7c35451dd 100644 --- a/crates/proto/proto/buffer.proto +++ b/crates/proto/proto/buffer.proto @@ -5,313 +5,312 @@ import "core.proto"; import "worktree.proto"; message OpenNewBuffer { - uint64 project_id = 1; + uint64 project_id = 1; } message OpenBufferResponse { - uint64 buffer_id = 1; + uint64 buffer_id = 1; } message CreateBufferForPeer { - uint64 project_id = 1; - PeerId peer_id = 2; - oneof variant { - BufferState state = 3; - BufferChunk chunk = 4; - } + uint64 project_id = 1; + PeerId peer_id = 2; + oneof variant { + BufferState state = 3; + BufferChunk chunk = 4; + } } message UpdateBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated Operation operations = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated Operation operations = 3; } message OpenBufferByPath { - uint64 project_id = 1; - uint64 worktree_id = 2; - string path = 3; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; } message OpenBufferById { - uint64 project_id = 1; - uint64 id = 2; + uint64 project_id = 1; + uint64 id = 2; } message UpdateBufferFile { - uint64 project_id = 1; - uint64 
buffer_id = 2; - File file = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + File file = 3; } message SaveBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - optional ProjectPath new_path = 4; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + optional ProjectPath new_path = 4; } message CloseBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 buffer_id = 2; } message BufferSaved { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - Timestamp mtime = 4; - reserved 5; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + Timestamp mtime = 4; + reserved 5; } message BufferReloaded { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; - Timestamp mtime = 4; - reserved 5; - LineEnding line_ending = 6; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; + Timestamp mtime = 4; + reserved 5; + LineEnding line_ending = 6; } message ReloadBuffers { - uint64 project_id = 1; - repeated uint64 buffer_ids = 2; + uint64 project_id = 1; + repeated uint64 buffer_ids = 2; } message ReloadBuffersResponse { - ProjectTransaction transaction = 1; + ProjectTransaction transaction = 1; } message SynchronizeBuffers { - uint64 project_id = 1; - repeated BufferVersion buffers = 2; + uint64 project_id = 1; + repeated BufferVersion buffers = 2; } message SynchronizeBuffersResponse { - repeated BufferVersion buffers = 1; + repeated BufferVersion buffers = 1; } message BufferVersion { - uint64 id = 1; - repeated VectorClockEntry version = 2; + uint64 id = 1; + repeated VectorClockEntry version = 2; } message BufferState { - uint64 id = 1; - optional File file = 2; - string base_text = 3; - LineEnding line_ending = 5; - repeated VectorClockEntry saved_version = 6; - Timestamp saved_mtime = 8; + uint64 id = 1; + 
optional File file = 2; + string base_text = 3; + LineEnding line_ending = 5; + repeated VectorClockEntry saved_version = 6; + Timestamp saved_mtime = 8; - reserved 7; - reserved 4; + reserved 7; + reserved 4; } message BufferChunk { - uint64 buffer_id = 1; - repeated Operation operations = 2; - bool is_last = 3; + uint64 buffer_id = 1; + repeated Operation operations = 2; + bool is_last = 3; } enum LineEnding { - Unix = 0; - Windows = 1; + Unix = 0; + Windows = 1; } message VectorClockEntry { - uint32 replica_id = 1; - uint32 timestamp = 2; + uint32 replica_id = 1; + uint32 timestamp = 2; } message UndoMapEntry { - uint32 replica_id = 1; - uint32 local_timestamp = 2; - repeated UndoCount counts = 3; + uint32 replica_id = 1; + uint32 local_timestamp = 2; + repeated UndoCount counts = 3; } message UndoCount { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - uint32 count = 3; + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + uint32 count = 3; } message Operation { - oneof variant { - Edit edit = 1; - Undo undo = 2; - UpdateSelections update_selections = 3; - UpdateDiagnostics update_diagnostics = 4; - UpdateCompletionTriggers update_completion_triggers = 5; - UpdateLineEnding update_line_ending = 6; - } - - message Edit { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated VectorClockEntry version = 3; - repeated Range ranges = 4; - repeated string new_text = 5; - } - - message Undo { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated VectorClockEntry version = 3; - repeated UndoCount counts = 4; - } - - message UpdateSelections { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated Selection selections = 3; - bool line_mode = 4; - CursorShape cursor_shape = 5; - } - - message UpdateCompletionTriggers { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - repeated string triggers = 3; - uint64 language_server_id = 4; - } - - message UpdateLineEnding { - uint32 replica_id = 1; - uint32 
lamport_timestamp = 2; - LineEnding line_ending = 3; - } + oneof variant { + Edit edit = 1; + Undo undo = 2; + UpdateSelections update_selections = 3; + UpdateDiagnostics update_diagnostics = 4; + UpdateCompletionTriggers update_completion_triggers = 5; + UpdateLineEnding update_line_ending = 6; + } + + message Edit { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated VectorClockEntry version = 3; + repeated Range ranges = 4; + repeated string new_text = 5; + } + + message Undo { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated VectorClockEntry version = 3; + repeated UndoCount counts = 4; + } + + message UpdateSelections { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated Selection selections = 3; + bool line_mode = 4; + CursorShape cursor_shape = 5; + } + + message UpdateCompletionTriggers { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + repeated string triggers = 3; + uint64 language_server_id = 4; + } + + message UpdateLineEnding { + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + LineEnding line_ending = 3; + } } message ProjectTransaction { - repeated uint64 buffer_ids = 1; - repeated Transaction transactions = 2; + repeated uint64 buffer_ids = 1; + repeated Transaction transactions = 2; } message Transaction { - LamportTimestamp id = 1; - repeated LamportTimestamp edit_ids = 2; - repeated VectorClockEntry start = 3; + LamportTimestamp id = 1; + repeated LamportTimestamp edit_ids = 2; + repeated VectorClockEntry start = 3; } message LamportTimestamp { - uint32 replica_id = 1; - uint32 value = 2; + uint32 replica_id = 1; + uint32 value = 2; } message Range { - uint64 start = 1; - uint64 end = 2; + uint64 start = 1; + uint64 end = 2; } message Selection { - uint64 id = 1; - EditorAnchor start = 2; - EditorAnchor end = 3; - bool reversed = 4; + uint64 id = 1; + EditorAnchor start = 2; + EditorAnchor end = 3; + bool reversed = 4; } message EditorAnchor { - uint64 excerpt_id = 1; - Anchor 
anchor = 2; + uint64 excerpt_id = 1; + Anchor anchor = 2; } enum CursorShape { - CursorBar = 0; - CursorBlock = 1; - CursorUnderscore = 2; - CursorHollow = 3; + CursorBar = 0; + CursorBlock = 1; + CursorUnderscore = 2; + CursorHollow = 3; } message UpdateDiagnostics { - uint32 replica_id = 1; - uint32 lamport_timestamp = 2; - uint64 server_id = 3; - repeated Diagnostic diagnostics = 4; + uint32 replica_id = 1; + uint32 lamport_timestamp = 2; + uint64 server_id = 3; + repeated Diagnostic diagnostics = 4; } message Anchor { - uint32 replica_id = 1; - uint32 timestamp = 2; - uint64 offset = 3; - Bias bias = 4; - optional uint64 buffer_id = 5; + uint32 replica_id = 1; + uint32 timestamp = 2; + uint64 offset = 3; + Bias bias = 4; + optional uint64 buffer_id = 5; } message AnchorRange { - Anchor start = 1; - Anchor end = 2; + Anchor start = 1; + Anchor end = 2; } message Location { - uint64 buffer_id = 1; - Anchor start = 2; - Anchor end = 3; + uint64 buffer_id = 1; + Anchor start = 2; + Anchor end = 3; } enum Bias { - Left = 0; - Right = 1; + Left = 0; + Right = 1; } message Diagnostic { - Anchor start = 1; - Anchor end = 2; - optional string source = 3; - optional string registration_id = 17; - - enum SourceKind { - Pulled = 0; - Pushed = 1; - Other = 2; - } - - SourceKind source_kind = 16; - Severity severity = 4; - string message = 5; - optional string code = 6; - uint64 group_id = 7; - bool is_primary = 8; - - reserved 9; - - bool is_disk_based = 10; - bool is_unnecessary = 11; - bool underline = 15; - - enum Severity { - None = 0; - Error = 1; - Warning = 2; - Information = 3; - Hint = 4; - } - optional string data = 12; - optional string code_description = 13; - optional string markdown = 14; + Anchor start = 1; + Anchor end = 2; + optional string source = 3; + optional string registration_id = 17; + + enum SourceKind { + Pulled = 0; + Pushed = 1; + Other = 2; + } + + SourceKind source_kind = 16; + Severity severity = 4; + string message = 5; + optional string 
code = 6; + uint64 group_id = 7; + bool is_primary = 8; + + reserved 9; + + bool is_disk_based = 10; + bool is_unnecessary = 11; + bool underline = 15; + + enum Severity { + None = 0; + Error = 1; + Warning = 2; + Information = 3; + Hint = 4; + } + optional string data = 12; + optional string code_description = 13; + optional string markdown = 14; } message SearchQuery { - string query = 2; - bool regex = 3; - bool whole_word = 4; - bool case_sensitive = 5; - repeated string files_to_include = 10; - repeated string files_to_exclude = 11; - bool match_full_paths = 9; - bool include_ignored = 8; - string files_to_include_legacy = 6; - string files_to_exclude_legacy = 7; + string query = 2; + bool regex = 3; + bool whole_word = 4; + bool case_sensitive = 5; + repeated string files_to_include = 10; + repeated string files_to_exclude = 11; + bool match_full_paths = 9; + bool include_ignored = 8; + string files_to_include_legacy = 6; + string files_to_exclude_legacy = 7; } message FindSearchCandidates { - uint64 project_id = 1; - SearchQuery query = 2; - uint64 limit = 3; - uint64 handle = 4; + uint64 project_id = 1; + SearchQuery query = 2; + uint64 limit = 3; + uint64 handle = 4; } - message FindSearchCandidatesDone {} message FindSearchCandidatesMatches { @@ -330,6 +329,6 @@ message FindSearchCandidatesChunk { } message FindSearchCandidatesCancelled { - uint64 project_id = 1; - uint64 handle = 2; + uint64 project_id = 1; + uint64 handle = 2; } diff --git a/crates/proto/proto/call.proto b/crates/proto/proto/call.proto index a7fe607bb5aaaff53518652186f46bcb6529e661..31448a8819d13f50088aa7eafcd6af8b6d52bc17 100644 --- a/crates/proto/proto/call.proto +++ b/crates/proto/proto/call.proto @@ -1,424 +1,424 @@ syntax = "proto3"; package zed.messages; -import "core.proto"; -import "worktree.proto"; import "buffer.proto"; -import "lsp.proto"; import "channel.proto"; +import "core.proto"; import "git.proto"; +import "lsp.proto"; +import "worktree.proto"; message CreateRoom {} 
message CreateRoomResponse { - Room room = 1; - optional LiveKitConnectionInfo live_kit_connection_info = 2; + Room room = 1; + optional LiveKitConnectionInfo live_kit_connection_info = 2; } message JoinRoom { - uint64 id = 1; + uint64 id = 1; } message JoinRoomResponse { - Room room = 1; - optional uint64 channel_id = 2; - optional LiveKitConnectionInfo live_kit_connection_info = 3; + Room room = 1; + optional uint64 channel_id = 2; + optional LiveKitConnectionInfo live_kit_connection_info = 3; } message RejoinRoom { - uint64 id = 1; - repeated UpdateProject reshared_projects = 2; - repeated RejoinProject rejoined_projects = 3; + uint64 id = 1; + repeated UpdateProject reshared_projects = 2; + repeated RejoinProject rejoined_projects = 3; } message RejoinRemoteProjects { - repeated RejoinProject rejoined_projects = 1; + repeated RejoinProject rejoined_projects = 1; } message RejoinRemoteProjectsResponse { - repeated RejoinedProject rejoined_projects = 1; + repeated RejoinedProject rejoined_projects = 1; } message RejoinProject { - uint64 id = 1; - repeated RejoinWorktree worktrees = 2; - repeated RejoinRepository repositories = 3; + uint64 id = 1; + repeated RejoinWorktree worktrees = 2; + repeated RejoinRepository repositories = 3; } message RejoinWorktree { - uint64 id = 1; - uint64 scan_id = 2; + uint64 id = 1; + uint64 scan_id = 2; } message RejoinRepository { - uint64 id = 1; - uint64 scan_id = 2; + uint64 id = 1; + uint64 scan_id = 2; } message RejoinRoomResponse { - Room room = 1; - repeated ResharedProject reshared_projects = 2; - repeated RejoinedProject rejoined_projects = 3; + Room room = 1; + repeated ResharedProject reshared_projects = 2; + repeated RejoinedProject rejoined_projects = 3; } message ResharedProject { - uint64 id = 1; - repeated Collaborator collaborators = 2; + uint64 id = 1; + repeated Collaborator collaborators = 2; } message RejoinedProject { - uint64 id = 1; - repeated WorktreeMetadata worktrees = 2; - repeated Collaborator 
collaborators = 3; - repeated LanguageServer language_servers = 4; - repeated string language_server_capabilities = 5; + uint64 id = 1; + repeated WorktreeMetadata worktrees = 2; + repeated Collaborator collaborators = 3; + repeated LanguageServer language_servers = 4; + repeated string language_server_capabilities = 5; } message LeaveRoom {} message Room { - uint64 id = 1; - repeated Participant participants = 2; - repeated PendingParticipant pending_participants = 3; - repeated Follower followers = 4; - string livekit_room = 5; + uint64 id = 1; + repeated Participant participants = 2; + repeated PendingParticipant pending_participants = 3; + repeated Follower followers = 4; + string livekit_room = 5; } message Participant { - uint64 user_id = 1; - PeerId peer_id = 2; - repeated ParticipantProject projects = 3; - ParticipantLocation location = 4; - uint32 participant_index = 5; - ChannelRole role = 6; - reserved 7; + uint64 user_id = 1; + PeerId peer_id = 2; + repeated ParticipantProject projects = 3; + ParticipantLocation location = 4; + uint32 participant_index = 5; + ChannelRole role = 6; + reserved 7; } message PendingParticipant { - uint64 user_id = 1; - uint64 calling_user_id = 2; - optional uint64 initial_project_id = 3; + uint64 user_id = 1; + uint64 calling_user_id = 2; + optional uint64 initial_project_id = 3; } message ParticipantProject { - uint64 id = 1; - repeated string worktree_root_names = 2; + uint64 id = 1; + repeated string worktree_root_names = 2; } message Follower { - PeerId leader_id = 1; - PeerId follower_id = 2; - uint64 project_id = 3; + PeerId leader_id = 1; + PeerId follower_id = 2; + uint64 project_id = 3; } message ParticipantLocation { - oneof variant { - SharedProject shared_project = 1; - UnsharedProject unshared_project = 2; - External external = 3; - } + oneof variant { + SharedProject shared_project = 1; + UnsharedProject unshared_project = 2; + External external = 3; + } - message SharedProject { - uint64 id = 1; - } + message 
SharedProject { + uint64 id = 1; + } - message UnsharedProject {} + message UnsharedProject {} - message External {} + message External {} } message Call { - uint64 room_id = 1; - uint64 called_user_id = 2; - optional uint64 initial_project_id = 3; + uint64 room_id = 1; + uint64 called_user_id = 2; + optional uint64 initial_project_id = 3; } message IncomingCall { - uint64 room_id = 1; - uint64 calling_user_id = 2; - repeated uint64 participant_user_ids = 3; - optional ParticipantProject initial_project = 4; + uint64 room_id = 1; + uint64 calling_user_id = 2; + repeated uint64 participant_user_ids = 3; + optional ParticipantProject initial_project = 4; } message CallCanceled { - uint64 room_id = 1; + uint64 room_id = 1; } message CancelCall { - uint64 room_id = 1; - uint64 called_user_id = 2; + uint64 room_id = 1; + uint64 called_user_id = 2; } message DeclineCall { - uint64 room_id = 1; + uint64 room_id = 1; } message UpdateParticipantLocation { - uint64 room_id = 1; - ParticipantLocation location = 2; + uint64 room_id = 1; + ParticipantLocation location = 2; } message RoomUpdated { - Room room = 1; + Room room = 1; } message LiveKitConnectionInfo { - string server_url = 1; - string token = 2; - bool can_publish = 3; + string server_url = 1; + string token = 2; + bool can_publish = 3; } message ShareProject { - uint64 room_id = 1; - repeated WorktreeMetadata worktrees = 2; - reserved 3; - bool is_ssh_project = 4; - optional bool windows_paths = 5; + uint64 room_id = 1; + repeated WorktreeMetadata worktrees = 2; + reserved 3; + bool is_ssh_project = 4; + optional bool windows_paths = 5; } message ShareProjectResponse { - uint64 project_id = 1; + uint64 project_id = 1; } message UnshareProject { - uint64 project_id = 1; + uint64 project_id = 1; } message UpdateProject { - uint64 project_id = 1; - repeated WorktreeMetadata worktrees = 2; + uint64 project_id = 1; + repeated WorktreeMetadata worktrees = 2; } message JoinProject { - uint64 project_id = 1; - optional 
string committer_email = 2; - optional string committer_name = 3; + uint64 project_id = 1; + optional string committer_email = 2; + optional string committer_name = 3; } message JoinProjectResponse { - uint64 project_id = 5; - uint32 replica_id = 1; - repeated WorktreeMetadata worktrees = 2; - repeated Collaborator collaborators = 3; - repeated LanguageServer language_servers = 4; - repeated string language_server_capabilities = 8; - ChannelRole role = 6; - bool windows_paths = 9; - reserved 7; + uint64 project_id = 5; + uint32 replica_id = 1; + repeated WorktreeMetadata worktrees = 2; + repeated Collaborator collaborators = 3; + repeated LanguageServer language_servers = 4; + repeated string language_server_capabilities = 8; + ChannelRole role = 6; + bool windows_paths = 9; + reserved 7; } message LeaveProject { - uint64 project_id = 1; + uint64 project_id = 1; } message UpdateWorktree { - uint64 project_id = 1; - uint64 worktree_id = 2; - string root_name = 3; - repeated Entry updated_entries = 4; - repeated uint64 removed_entries = 5; - repeated RepositoryEntry updated_repositories = 6; // deprecated - repeated uint64 removed_repositories = 7; // deprecated - uint64 scan_id = 8; - bool is_last_update = 9; - string abs_path = 10; + uint64 project_id = 1; + uint64 worktree_id = 2; + string root_name = 3; + repeated Entry updated_entries = 4; + repeated uint64 removed_entries = 5; + repeated RepositoryEntry updated_repositories = 6; // deprecated + repeated uint64 removed_repositories = 7; // deprecated + uint64 scan_id = 8; + bool is_last_update = 9; + string abs_path = 10; } // deprecated message RepositoryEntry { - uint64 repository_id = 1; - reserved 2; - repeated StatusEntry updated_statuses = 3; - repeated string removed_statuses = 4; - repeated string current_merge_conflicts = 5; - optional Branch branch_summary = 6; + uint64 repository_id = 1; + reserved 2; + repeated StatusEntry updated_statuses = 3; + repeated string removed_statuses = 4; + repeated 
string current_merge_conflicts = 5; + optional Branch branch_summary = 6; } message AddProjectCollaborator { - uint64 project_id = 1; - Collaborator collaborator = 2; + uint64 project_id = 1; + Collaborator collaborator = 2; } message UpdateProjectCollaborator { - uint64 project_id = 1; - PeerId old_peer_id = 2; - PeerId new_peer_id = 3; + uint64 project_id = 1; + PeerId old_peer_id = 2; + PeerId new_peer_id = 3; } message RemoveProjectCollaborator { - uint64 project_id = 1; - PeerId peer_id = 2; + uint64 project_id = 1; + PeerId peer_id = 2; } message GetUsers { - repeated uint64 user_ids = 1; + repeated uint64 user_ids = 1; } message FuzzySearchUsers { - string query = 1; + string query = 1; } message UsersResponse { - repeated User users = 1; + repeated User users = 1; } message RequestContact { - uint64 responder_id = 1; + uint64 responder_id = 1; } message RemoveContact { - uint64 user_id = 1; + uint64 user_id = 1; } message RespondToContactRequest { - uint64 requester_id = 1; - ContactRequestResponse response = 2; + uint64 requester_id = 1; + ContactRequestResponse response = 2; } enum ContactRequestResponse { - Accept = 0; - Decline = 1; - Block = 2; - Dismiss = 3; + Accept = 0; + Decline = 1; + Block = 2; + Dismiss = 3; } message UpdateContacts { - repeated Contact contacts = 1; - repeated uint64 remove_contacts = 2; - repeated IncomingContactRequest incoming_requests = 3; - repeated uint64 remove_incoming_requests = 4; - repeated uint64 outgoing_requests = 5; - repeated uint64 remove_outgoing_requests = 6; + repeated Contact contacts = 1; + repeated uint64 remove_contacts = 2; + repeated IncomingContactRequest incoming_requests = 3; + repeated uint64 remove_incoming_requests = 4; + repeated uint64 outgoing_requests = 5; + repeated uint64 remove_outgoing_requests = 6; } message ShowContacts {} message IncomingContactRequest { - uint64 requester_id = 1; + uint64 requester_id = 1; } message Follow { - uint64 room_id = 1; - optional uint64 project_id = 2; - 
PeerId leader_id = 3; + uint64 room_id = 1; + optional uint64 project_id = 2; + PeerId leader_id = 3; } message FollowResponse { - View active_view = 3; - reserved 1; - repeated View views = 2; + View active_view = 3; + reserved 1; + repeated View views = 2; } message UpdateFollowers { - uint64 room_id = 1; - optional uint64 project_id = 2; - reserved 3; - oneof variant { - View create_view = 5; - UpdateActiveView update_active_view = 4; - UpdateView update_view = 6; - } + uint64 room_id = 1; + optional uint64 project_id = 2; + reserved 3; + oneof variant { + View create_view = 5; + UpdateActiveView update_active_view = 4; + UpdateView update_view = 6; + } } message Unfollow { - uint64 room_id = 1; - optional uint64 project_id = 2; - PeerId leader_id = 3; + uint64 room_id = 1; + optional uint64 project_id = 2; + PeerId leader_id = 3; } message ViewId { - PeerId creator = 1; - uint64 id = 2; + PeerId creator = 1; + uint64 id = 2; } message UpdateActiveView { - reserved 1, 2; - View view = 3; + reserved 1, 2; + View view = 3; } enum PanelId { - AssistantPanel = 0; - DebugPanel = 1; + AssistantPanel = 0; + DebugPanel = 1; } message UpdateView { - ViewId id = 1; - optional PeerId leader_id = 2; - - oneof variant { - Editor editor = 3; - } - - message Editor { - repeated ExcerptInsertion inserted_excerpts = 1; - repeated uint64 deleted_excerpts = 2; - repeated Selection selections = 3; - optional Selection pending_selection = 4; - EditorAnchor scroll_top_anchor = 5; - reserved 6; - reserved 7; - double scroll_x = 8; - double scroll_y = 9; - } + ViewId id = 1; + optional PeerId leader_id = 2; + + oneof variant { + Editor editor = 3; + } + + message Editor { + repeated ExcerptInsertion inserted_excerpts = 1; + repeated uint64 deleted_excerpts = 2; + repeated Selection selections = 3; + optional Selection pending_selection = 4; + EditorAnchor scroll_top_anchor = 5; + reserved 6; + reserved 7; + double scroll_x = 8; + double scroll_y = 9; + } } message View { - ViewId id = 
1; - optional PeerId leader_id = 2; - optional PanelId panel_id = 6; - - oneof variant { - Editor editor = 3; - ChannelView channel_view = 4; - ContextEditor context_editor = 5; - } - - message Editor { - bool singleton = 1; - optional string title = 2; - repeated Excerpt excerpts = 3; - repeated Selection selections = 4; - optional Selection pending_selection = 5; - EditorAnchor scroll_top_anchor = 6; - reserved 7; - reserved 8; - double scroll_x = 9; - double scroll_y = 10; - } - - message ChannelView { - uint64 channel_id = 1; - Editor editor = 2; - } - - message ContextEditor { - string context_id = 1; - Editor editor = 2; - } + ViewId id = 1; + optional PeerId leader_id = 2; + optional PanelId panel_id = 6; + + oneof variant { + Editor editor = 3; + ChannelView channel_view = 4; + ContextEditor context_editor = 5; + } + + message Editor { + bool singleton = 1; + optional string title = 2; + repeated Excerpt excerpts = 3; + repeated Selection selections = 4; + optional Selection pending_selection = 5; + EditorAnchor scroll_top_anchor = 6; + reserved 7; + reserved 8; + double scroll_x = 9; + double scroll_y = 10; + } + + message ChannelView { + uint64 channel_id = 1; + Editor editor = 2; + } + + message ContextEditor { + string context_id = 1; + Editor editor = 2; + } } message ExcerptInsertion { - Excerpt excerpt = 1; - optional uint64 previous_excerpt_id = 2; + Excerpt excerpt = 1; + optional uint64 previous_excerpt_id = 2; } message Excerpt { - uint64 id = 1; - uint64 buffer_id = 2; - Anchor context_start = 3; - Anchor context_end = 4; - Anchor primary_start = 5; - Anchor primary_end = 6; + uint64 id = 1; + uint64 buffer_id = 2; + Anchor context_start = 3; + Anchor context_end = 4; + Anchor primary_start = 5; + Anchor primary_end = 6; } message Contact { - uint64 user_id = 1; - bool online = 2; - bool busy = 3; + uint64 user_id = 1; + bool online = 2; + bool busy = 3; } message SetRoomParticipantRole { - uint64 room_id = 1; - uint64 user_id = 2; - ChannelRole 
role = 3; + uint64 room_id = 1; + uint64 user_id = 2; + ChannelRole role = 3; } diff --git a/crates/proto/proto/channel.proto b/crates/proto/proto/channel.proto index cada21cd5b7ede4730f2f4e71e98fb9a3dc12ff0..f1238b20a37815c9c6db999b8031a8eff2ba6cea 100644 --- a/crates/proto/proto/channel.proto +++ b/crates/proto/proto/channel.proto @@ -1,294 +1,294 @@ syntax = "proto3"; package zed.messages; -import "core.proto"; import "buffer.proto"; +import "core.proto"; message Channel { - uint64 id = 1; - string name = 2; - ChannelVisibility visibility = 3; - int32 channel_order = 4; - repeated uint64 parent_path = 5; + uint64 id = 1; + string name = 2; + ChannelVisibility visibility = 3; + int32 channel_order = 4; + repeated uint64 parent_path = 5; } enum ChannelVisibility { - Public = 0; - Members = 1; + Public = 0; + Members = 1; } message UpdateChannels { - repeated Channel channels = 1; - repeated uint64 delete_channels = 4; - repeated Channel channel_invitations = 5; - repeated uint64 remove_channel_invitations = 6; - repeated ChannelParticipants channel_participants = 7; - repeated ChannelBufferVersion latest_channel_buffer_versions = 9; + repeated Channel channels = 1; + repeated uint64 delete_channels = 4; + repeated Channel channel_invitations = 5; + repeated uint64 remove_channel_invitations = 6; + repeated ChannelParticipants channel_participants = 7; + repeated ChannelBufferVersion latest_channel_buffer_versions = 9; - reserved 8; - reserved 10 to 15; + reserved 8; + reserved 10 to 15; } message UpdateUserChannels { - repeated ChannelBufferVersion observed_channel_buffer_version = 2; - repeated ChannelMembership channel_memberships = 3; + repeated ChannelBufferVersion observed_channel_buffer_version = 2; + repeated ChannelMembership channel_memberships = 3; - reserved 1; + reserved 1; } message ChannelMembership { - uint64 channel_id = 1; - ChannelRole role = 2; + uint64 channel_id = 1; + ChannelRole role = 2; } message ChannelMessageId { - uint64 channel_id = 1; 
- uint64 message_id = 2; + uint64 channel_id = 1; + uint64 message_id = 2; } message ChannelPermission { - uint64 channel_id = 1; - ChannelRole role = 3; + uint64 channel_id = 1; + ChannelRole role = 3; } message ChannelParticipants { - uint64 channel_id = 1; - repeated uint64 participant_user_ids = 2; + uint64 channel_id = 1; + repeated uint64 participant_user_ids = 2; } message JoinChannel { - uint64 channel_id = 1; + uint64 channel_id = 1; } message DeleteChannel { - uint64 channel_id = 1; + uint64 channel_id = 1; } message GetChannelMembers { - uint64 channel_id = 1; - string query = 2; - uint64 limit = 3; + uint64 channel_id = 1; + string query = 2; + uint64 limit = 3; } message GetChannelMembersResponse { - repeated ChannelMember members = 1; - repeated User users = 2; + repeated ChannelMember members = 1; + repeated User users = 2; } message ChannelMember { - uint64 user_id = 1; - Kind kind = 3; - ChannelRole role = 4; + uint64 user_id = 1; + Kind kind = 3; + ChannelRole role = 4; - enum Kind { - Member = 0; - Invitee = 1; - } + enum Kind { + Member = 0; + Invitee = 1; + } } message SubscribeToChannels {} message CreateChannel { - string name = 1; - optional uint64 parent_id = 2; + string name = 1; + optional uint64 parent_id = 2; } message CreateChannelResponse { - Channel channel = 1; - optional uint64 parent_id = 2; + Channel channel = 1; + optional uint64 parent_id = 2; } message InviteChannelMember { - uint64 channel_id = 1; - uint64 user_id = 2; - ChannelRole role = 4; + uint64 channel_id = 1; + uint64 user_id = 2; + ChannelRole role = 4; } message RemoveChannelMember { - uint64 channel_id = 1; - uint64 user_id = 2; + uint64 channel_id = 1; + uint64 user_id = 2; } enum ChannelRole { - Admin = 0; - Member = 1; - Guest = 2; - Banned = 3; - Talker = 4; + Admin = 0; + Member = 1; + Guest = 2; + Banned = 3; + Talker = 4; } message SetChannelMemberRole { - uint64 channel_id = 1; - uint64 user_id = 2; - ChannelRole role = 3; + uint64 channel_id = 1; + uint64 
user_id = 2; + ChannelRole role = 3; } message SetChannelVisibility { - uint64 channel_id = 1; - ChannelVisibility visibility = 2; + uint64 channel_id = 1; + ChannelVisibility visibility = 2; } message RenameChannel { - uint64 channel_id = 1; - string name = 2; + uint64 channel_id = 1; + string name = 2; } message RenameChannelResponse { - Channel channel = 1; + Channel channel = 1; } message JoinChannelChat { - uint64 channel_id = 1; + uint64 channel_id = 1; } message JoinChannelChatResponse { - repeated ChannelMessage messages = 1; - bool done = 2; + repeated ChannelMessage messages = 1; + bool done = 2; } message LeaveChannelChat { - uint64 channel_id = 1; + uint64 channel_id = 1; } message SendChannelMessage { - uint64 channel_id = 1; - string body = 2; - Nonce nonce = 3; - repeated ChatMention mentions = 4; - optional uint64 reply_to_message_id = 5; + uint64 channel_id = 1; + string body = 2; + Nonce nonce = 3; + repeated ChatMention mentions = 4; + optional uint64 reply_to_message_id = 5; } message RemoveChannelMessage { - uint64 channel_id = 1; - uint64 message_id = 2; + uint64 channel_id = 1; + uint64 message_id = 2; } message UpdateChannelMessage { - uint64 channel_id = 1; - uint64 message_id = 2; - Nonce nonce = 4; - string body = 5; - repeated ChatMention mentions = 6; + uint64 channel_id = 1; + uint64 message_id = 2; + Nonce nonce = 4; + string body = 5; + repeated ChatMention mentions = 6; } message AckChannelMessage { - uint64 channel_id = 1; - uint64 message_id = 2; + uint64 channel_id = 1; + uint64 message_id = 2; } message SendChannelMessageResponse { - ChannelMessage message = 1; + ChannelMessage message = 1; } message ChannelMessageSent { - uint64 channel_id = 1; - ChannelMessage message = 2; + uint64 channel_id = 1; + ChannelMessage message = 2; } message ChannelMessageUpdate { - uint64 channel_id = 1; - ChannelMessage message = 2; + uint64 channel_id = 1; + ChannelMessage message = 2; } message GetChannelMessages { - uint64 channel_id = 1; - 
uint64 before_message_id = 2; + uint64 channel_id = 1; + uint64 before_message_id = 2; } message GetChannelMessagesResponse { - repeated ChannelMessage messages = 1; - bool done = 2; + repeated ChannelMessage messages = 1; + bool done = 2; } message GetChannelMessagesById { - repeated uint64 message_ids = 1; + repeated uint64 message_ids = 1; } message MoveChannel { - uint64 channel_id = 1; - uint64 to = 2; + uint64 channel_id = 1; + uint64 to = 2; } message ReorderChannel { - uint64 channel_id = 1; - enum Direction { - Up = 0; - Down = 1; - } - Direction direction = 2; + uint64 channel_id = 1; + enum Direction { + Up = 0; + Down = 1; + } + Direction direction = 2; } message JoinChannelBuffer { - uint64 channel_id = 1; + uint64 channel_id = 1; } message ChannelBufferVersion { - uint64 channel_id = 1; - repeated VectorClockEntry version = 2; - uint64 epoch = 3; + uint64 channel_id = 1; + repeated VectorClockEntry version = 2; + uint64 epoch = 3; } message UpdateChannelBufferCollaborators { - uint64 channel_id = 1; - repeated Collaborator collaborators = 2; + uint64 channel_id = 1; + repeated Collaborator collaborators = 2; } message UpdateChannelBuffer { - uint64 channel_id = 1; - repeated Operation operations = 2; + uint64 channel_id = 1; + repeated Operation operations = 2; } message ChannelMessage { - uint64 id = 1; - string body = 2; - uint64 timestamp = 3; - uint64 sender_id = 4; - Nonce nonce = 5; - repeated ChatMention mentions = 6; - optional uint64 reply_to_message_id = 7; - optional uint64 edited_at = 8; + uint64 id = 1; + string body = 2; + uint64 timestamp = 3; + uint64 sender_id = 4; + Nonce nonce = 5; + repeated ChatMention mentions = 6; + optional uint64 reply_to_message_id = 7; + optional uint64 edited_at = 8; } message ChatMention { - Range range = 1; - uint64 user_id = 2; + Range range = 1; + uint64 user_id = 2; } message RejoinChannelBuffers { - repeated ChannelBufferVersion buffers = 1; + repeated ChannelBufferVersion buffers = 1; } message 
RejoinChannelBuffersResponse { - repeated RejoinedChannelBuffer buffers = 1; + repeated RejoinedChannelBuffer buffers = 1; } message AckBufferOperation { - uint64 buffer_id = 1; - uint64 epoch = 2; - repeated VectorClockEntry version = 3; + uint64 buffer_id = 1; + uint64 epoch = 2; + repeated VectorClockEntry version = 3; } message JoinChannelBufferResponse { - uint64 buffer_id = 1; - uint32 replica_id = 2; - string base_text = 3; - repeated Operation operations = 4; - repeated Collaborator collaborators = 5; - uint64 epoch = 6; + uint64 buffer_id = 1; + uint32 replica_id = 2; + string base_text = 3; + repeated Operation operations = 4; + repeated Collaborator collaborators = 5; + uint64 epoch = 6; } message RejoinedChannelBuffer { - uint64 channel_id = 1; - repeated VectorClockEntry version = 2; - repeated Operation operations = 3; - repeated Collaborator collaborators = 4; + uint64 channel_id = 1; + repeated VectorClockEntry version = 2; + repeated Operation operations = 3; + repeated Collaborator collaborators = 4; } message LeaveChannelBuffer { - uint64 channel_id = 1; + uint64 channel_id = 1; } message RespondToChannelInvite { - uint64 channel_id = 1; - bool accept = 2; + uint64 channel_id = 1; + bool accept = 2; } diff --git a/crates/proto/proto/core.proto b/crates/proto/proto/core.proto index 121ea749127d7af4bbc34da2a1edbad78b7763df..c721ab62a11620895f8d54e69b4eb0bf168e43d0 100644 --- a/crates/proto/proto/core.proto +++ b/crates/proto/proto/core.proto @@ -2,28 +2,28 @@ syntax = "proto3"; package zed.messages; message PeerId { - uint32 owner_id = 1; - uint32 id = 2; + uint32 owner_id = 1; + uint32 id = 2; } message User { - reserved 4; - uint64 id = 1; - string github_login = 2; - string avatar_url = 3; - optional string name = 5; + reserved 4; + uint64 id = 1; + string github_login = 2; + string avatar_url = 3; + optional string name = 5; } message Nonce { - uint64 upper_half = 1; - uint64 lower_half = 2; + uint64 upper_half = 1; + uint64 lower_half = 2; } 
message Collaborator { - PeerId peer_id = 1; - uint32 replica_id = 2; - uint64 user_id = 3; - bool is_host = 4; - optional string committer_name = 5; - optional string committer_email = 6; + PeerId peer_id = 1; + uint32 replica_id = 2; + uint64 user_id = 3; + bool is_host = 4; + optional string committer_name = 5; + optional string committer_email = 6; } diff --git a/crates/proto/proto/debugger.proto b/crates/proto/proto/debugger.proto index dcfb91c77dd0004bfb248d4e4c23dcf269b7bc11..bf29411f96a45a26265650727d1529e9351245d2 100644 --- a/crates/proto/proto/debugger.proto +++ b/crates/proto/proto/debugger.proto @@ -1,555 +1,553 @@ syntax = "proto3"; package zed.messages; -import "core.proto"; import "buffer.proto"; import "task.proto"; enum BreakpointState { - Enabled = 0; - Disabled = 1; + Enabled = 0; + Disabled = 1; } message Breakpoint { - Anchor position = 1; - BreakpointState state = 2; - reserved 3; - optional string message = 4; - optional string condition = 5; - optional string hit_condition = 6; - map session_state = 7; + Anchor position = 1; + BreakpointState state = 2; + reserved 3; + optional string message = 4; + optional string condition = 5; + optional string hit_condition = 6; + map session_state = 7; } message BreakpointSessionState { - uint64 id = 1; - bool verified = 2; + uint64 id = 1; + bool verified = 2; } message BreakpointsForFile { - uint64 project_id = 1; - string path = 2; - repeated Breakpoint breakpoints = 3; + uint64 project_id = 1; + string path = 2; + repeated Breakpoint breakpoints = 3; } message ToggleBreakpoint { - uint64 project_id = 1; - string path = 2; - Breakpoint breakpoint = 3; + uint64 project_id = 1; + string path = 2; + Breakpoint breakpoint = 3; } enum DapThreadStatus { - Running = 0; - Stopped = 1; - Exited = 2; - Ended = 3; + Running = 0; + Stopped = 1; + Exited = 2; + Ended = 3; } enum VariablesArgumentsFilter { - Indexed = 0; - Named = 1; + Indexed = 0; + Named = 1; } message ValueFormat { - optional bool hex = 1; + 
optional bool hex = 1; } message VariablesRequest { - uint64 project_id = 1; - uint64 client_id = 2; - uint64 variables_reference = 3; - optional VariablesArgumentsFilter filter = 4; - optional uint64 start = 5; - optional uint64 count = 6; - optional ValueFormat format = 7; + uint64 project_id = 1; + uint64 client_id = 2; + uint64 variables_reference = 3; + optional VariablesArgumentsFilter filter = 4; + optional uint64 start = 5; + optional uint64 count = 6; + optional ValueFormat format = 7; } enum SteppingGranularity { - Statement = 0; - Line = 1; - Instruction = 2; + Statement = 0; + Line = 1; + Instruction = 2; } message DapLocationsRequest { - uint64 project_id = 1; - uint64 session_id = 2; - uint64 location_reference = 3; + uint64 project_id = 1; + uint64 session_id = 2; + uint64 location_reference = 3; } message DapLocationsResponse { - DapSource source = 1; - uint64 line = 2; - optional uint64 column = 3; - optional uint64 end_line = 4; - optional uint64 end_column = 5; + DapSource source = 1; + uint64 line = 2; + optional uint64 column = 3; + optional uint64 end_line = 4; + optional uint64 end_column = 5; } enum DapEvaluateContext { - Repl = 0; - Watch = 1; - Hover = 2; - Clipboard = 3; - EvaluateVariables = 4; - EvaluateUnknown = 5; + Repl = 0; + Watch = 1; + Hover = 2; + Clipboard = 3; + EvaluateVariables = 4; + EvaluateUnknown = 5; } message DapEvaluateRequest { - uint64 project_id = 1; - uint64 client_id = 2; - string expression = 3; - optional uint64 frame_id = 4; - optional DapEvaluateContext context = 5; + uint64 project_id = 1; + uint64 client_id = 2; + string expression = 3; + optional uint64 frame_id = 4; + optional DapEvaluateContext context = 5; } message DapEvaluateResponse { - string result = 1; - optional string evaluate_type = 2; - uint64 variable_reference = 3; - optional uint64 named_variables = 4; - optional uint64 indexed_variables = 5; - optional string memory_reference = 6; + string result = 1; + optional string evaluate_type = 2; + 
uint64 variable_reference = 3; + optional uint64 named_variables = 4; + optional uint64 indexed_variables = 5; + optional string memory_reference = 6; } - message DapCompletionRequest { - uint64 project_id = 1; - uint64 client_id = 2; - string query = 3; - optional uint64 frame_id = 4; - optional uint64 line = 5; - uint64 column = 6; + uint64 project_id = 1; + uint64 client_id = 2; + string query = 3; + optional uint64 frame_id = 4; + optional uint64 line = 5; + uint64 column = 6; } enum DapCompletionItemType { - Method = 0; - Function = 1; - Constructor = 2; - Field = 3; - Variable = 4; - Class = 5; - Interface = 6; - Module = 7; - Property = 8; - Unit = 9; - Value = 10; - Enum = 11; - Keyword = 12; - Snippet = 13; - Text = 14; - Color = 15; - CompletionItemFile = 16; - Reference = 17; - Customcolor = 19; + Method = 0; + Function = 1; + Constructor = 2; + Field = 3; + Variable = 4; + Class = 5; + Interface = 6; + Module = 7; + Property = 8; + Unit = 9; + Value = 10; + Enum = 11; + Keyword = 12; + Snippet = 13; + Text = 14; + Color = 15; + CompletionItemFile = 16; + Reference = 17; + Customcolor = 19; } message DapCompletionItem { - string label = 1; - optional string text = 2; - optional string sort_text = 3; - optional string detail = 4; - optional DapCompletionItemType typ = 5; - optional uint64 start = 6; - optional uint64 length = 7; - optional uint64 selection_start = 8; - optional uint64 selection_length = 9; + string label = 1; + optional string text = 2; + optional string sort_text = 3; + optional string detail = 4; + optional DapCompletionItemType typ = 5; + optional uint64 start = 6; + optional uint64 length = 7; + optional uint64 selection_start = 8; + optional uint64 selection_length = 9; } message DapCompletionResponse { - uint64 client_id = 1; - repeated DapCompletionItem completions = 2; + uint64 client_id = 1; + repeated DapCompletionItem completions = 2; } message DapScopesRequest { - uint64 project_id = 1; - uint64 client_id = 2; - uint64 
stack_frame_id = 3; + uint64 project_id = 1; + uint64 client_id = 2; + uint64 stack_frame_id = 3; } message DapScopesResponse { - repeated DapScope scopes = 1; + repeated DapScope scopes = 1; } message DapSetVariableValueRequest { - uint64 project_id = 1; - uint64 client_id = 2; - string name = 3; - string value = 4; - uint64 variables_reference = 5; + uint64 project_id = 1; + uint64 client_id = 2; + string name = 3; + string value = 4; + uint64 variables_reference = 5; } message DapSetVariableValueResponse { - uint64 client_id = 1; - string value = 2; - optional string variable_type = 3; - optional uint64 variables_reference = 4; - optional uint64 named_variables = 5; - optional uint64 indexed_variables = 6; - optional string memory_reference = 7; + uint64 client_id = 1; + string value = 2; + optional string variable_type = 3; + optional uint64 variables_reference = 4; + optional uint64 named_variables = 5; + optional uint64 indexed_variables = 6; + optional string memory_reference = 7; } message DapPauseRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; } message DapDisconnectRequest { - uint64 project_id = 1; - uint64 client_id = 2; - optional bool restart = 3; - optional bool terminate_debuggee = 4; - optional bool suspend_debuggee = 5; + uint64 project_id = 1; + uint64 client_id = 2; + optional bool restart = 3; + optional bool terminate_debuggee = 4; + optional bool suspend_debuggee = 5; } message DapTerminateThreadsRequest { - uint64 project_id = 1; - uint64 client_id = 2; - repeated int64 thread_ids = 3; + uint64 project_id = 1; + uint64 client_id = 2; + repeated int64 thread_ids = 3; } message DapThreadsRequest { - uint64 project_id = 1; - uint64 client_id = 2; + uint64 project_id = 1; + uint64 client_id = 2; } message DapThreadsResponse { - repeated DapThread threads = 1; + repeated DapThread threads = 1; } message DapTerminateRequest { - uint64 
project_id = 1; - uint64 client_id = 2; - optional bool restart = 3; + uint64 project_id = 1; + uint64 client_id = 2; + optional bool restart = 3; } message DapRestartRequest { - uint64 project_id = 1; - uint64 client_id = 2; - bytes raw_args = 3; + uint64 project_id = 1; + uint64 client_id = 2; + bytes raw_args = 3; } message DapRestartStackFrameRequest { - uint64 project_id = 1; - uint64 client_id = 2; - uint64 stack_frame_id = 3; + uint64 project_id = 1; + uint64 client_id = 2; + uint64 stack_frame_id = 3; } message ToggleIgnoreBreakpoints { - uint64 project_id = 1; - uint32 session_id = 2; + uint64 project_id = 1; + uint32 session_id = 2; } message IgnoreBreakpointState { - uint64 project_id = 1; - uint64 session_id = 2; - bool ignore = 3; + uint64 project_id = 1; + uint64 session_id = 2; + bool ignore = 3; } message DapNextRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; - optional SteppingGranularity granularity = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; + optional SteppingGranularity granularity = 5; } message DapStepInRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional uint64 target_id = 4; - optional bool single_thread = 5; - optional SteppingGranularity granularity = 6; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional uint64 target_id = 4; + optional bool single_thread = 5; + optional SteppingGranularity granularity = 6; } message DapStepOutRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; - optional SteppingGranularity granularity = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; + optional SteppingGranularity granularity = 5; } message DapStepBackRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 
thread_id = 3; - optional bool single_thread = 4; - optional SteppingGranularity granularity = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; + optional SteppingGranularity granularity = 5; } message DapContinueRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional bool single_thread = 4; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional bool single_thread = 4; } message DapContinueResponse { - uint64 client_id = 1; - optional bool all_threads_continued = 2; + uint64 client_id = 1; + optional bool all_threads_continued = 2; } message DapModulesRequest { - uint64 project_id = 1; - uint64 client_id = 2; + uint64 project_id = 1; + uint64 client_id = 2; } message DapModulesResponse { - uint64 client_id = 1; - repeated DapModule modules = 2; + uint64 client_id = 1; + repeated DapModule modules = 2; } message DapLoadedSourcesRequest { - uint64 project_id = 1; - uint64 client_id = 2; + uint64 project_id = 1; + uint64 client_id = 2; } message DapLoadedSourcesResponse { - uint64 client_id = 1; - repeated DapSource sources = 2; + uint64 client_id = 1; + repeated DapSource sources = 2; } message DapStackTraceRequest { - uint64 project_id = 1; - uint64 client_id = 2; - int64 thread_id = 3; - optional uint64 start_frame = 4; - optional uint64 stack_trace_levels = 5; + uint64 project_id = 1; + uint64 client_id = 2; + int64 thread_id = 3; + optional uint64 start_frame = 4; + optional uint64 stack_trace_levels = 5; } message DapStackTraceResponse { - repeated DapStackFrame frames = 1; + repeated DapStackFrame frames = 1; } message DapStackFrame { - uint64 id = 1; - string name = 2; - optional DapSource source = 3; - uint64 line = 4; - uint64 column = 5; - optional uint64 end_line = 6; - optional uint64 end_column = 7; - optional bool can_restart = 8; - optional string instruction_pointer_reference = 9; - optional DapModuleId module_id = 10; - 
optional DapStackPresentationHint presentation_hint = 11; + uint64 id = 1; + string name = 2; + optional DapSource source = 3; + uint64 line = 4; + uint64 column = 5; + optional uint64 end_line = 6; + optional uint64 end_column = 7; + optional bool can_restart = 8; + optional string instruction_pointer_reference = 9; + optional DapModuleId module_id = 10; + optional DapStackPresentationHint presentation_hint = 11; } message DebuggerLoadedSourceList { - uint64 client_id = 1; - repeated DapSource sources = 2; + uint64 client_id = 1; + repeated DapSource sources = 2; } message DapVariables { - uint64 client_id = 1; - repeated DapVariable variables = 2; + uint64 client_id = 1; + repeated DapVariable variables = 2; } // Remote Debugging: Dap Types message DapVariable { - string name = 1; - string value = 2; - optional string type = 3; - // optional DapVariablePresentationHint presentation_hint = 4; - optional string evaluate_name = 5; - uint64 variables_reference = 6; - optional uint64 named_variables = 7; - optional uint64 indexed_variables = 8; - optional string memory_reference = 9; + string name = 1; + string value = 2; + optional string type = 3; + // optional DapVariablePresentationHint presentation_hint = 4; + optional string evaluate_name = 5; + uint64 variables_reference = 6; + optional uint64 named_variables = 7; + optional uint64 indexed_variables = 8; + optional string memory_reference = 9; } message DapThread { - int64 id = 1; - string name = 2; + int64 id = 1; + string name = 2; } message DapScope { - string name = 1; - optional DapScopePresentationHint presentation_hint = 2; - uint64 variables_reference = 3; - optional uint64 named_variables = 4; - optional uint64 indexed_variables = 5; - bool expensive = 6; - optional DapSource source = 7; - optional uint64 line = 8; - optional uint64 column = 9; - optional uint64 end_line = 10; - optional uint64 end_column = 11; + string name = 1; + optional DapScopePresentationHint presentation_hint = 2; + uint64 
variables_reference = 3; + optional uint64 named_variables = 4; + optional uint64 indexed_variables = 5; + bool expensive = 6; + optional DapSource source = 7; + optional uint64 line = 8; + optional uint64 column = 9; + optional uint64 end_line = 10; + optional uint64 end_column = 11; } message DapSource { - optional string name = 1; - optional string path = 2; - optional uint64 source_reference = 3; - optional DapSourcePresentationHint presentation_hint = 4; - optional string origin = 5; - repeated DapSource sources = 6; - optional bytes adapter_data = 7; - repeated DapChecksum checksums = 8; + optional string name = 1; + optional string path = 2; + optional uint64 source_reference = 3; + optional DapSourcePresentationHint presentation_hint = 4; + optional string origin = 5; + repeated DapSource sources = 6; + optional bytes adapter_data = 7; + repeated DapChecksum checksums = 8; } enum DapOutputCategory { - ConsoleOutput = 0; - Important = 1; - Stdout = 2; - Stderr = 3; - Unknown = 4; + ConsoleOutput = 0; + Important = 1; + Stdout = 2; + Stderr = 3; + Unknown = 4; } enum DapOutputEventGroup { - Start = 0; - StartCollapsed = 1; - End = 2; + Start = 0; + StartCollapsed = 1; + End = 2; } message DapOutputEvent { - string output = 1; - optional DapOutputCategory category = 2; - optional uint64 variables_reference = 3; - optional DapOutputEventGroup group = 4; - optional DapSource source = 5; - optional uint32 line = 6; - optional uint32 column = 7; + string output = 1; + optional DapOutputCategory category = 2; + optional uint64 variables_reference = 3; + optional DapOutputEventGroup group = 4; + optional DapSource source = 5; + optional uint32 line = 6; + optional uint32 column = 7; } enum DapChecksumAlgorithm { - CHECKSUM_ALGORITHM_UNSPECIFIED = 0; - MD5 = 1; - SHA1 = 2; - SHA256 = 3; - TIMESTAMP = 4; + CHECKSUM_ALGORITHM_UNSPECIFIED = 0; + MD5 = 1; + SHA1 = 2; + SHA256 = 3; + TIMESTAMP = 4; } message DapChecksum { - DapChecksumAlgorithm algorithm = 1; - string 
checksum = 2; + DapChecksumAlgorithm algorithm = 1; + string checksum = 2; } enum DapScopePresentationHint { - Arguments = 0; - Locals = 1; - Registers = 2; - ReturnValue = 3; - ScopeUnknown = 4; + Arguments = 0; + Locals = 1; + Registers = 2; + ReturnValue = 3; + ScopeUnknown = 4; } enum DapSourcePresentationHint { - SourceNormal = 0; - Emphasize = 1; - Deemphasize = 2; - SourceUnknown = 3; + SourceNormal = 0; + Emphasize = 1; + Deemphasize = 2; + SourceUnknown = 3; } enum DapStackPresentationHint { - StackNormal = 0; - Label = 1; - Subtle = 2; - StackUnknown = 3; + StackNormal = 0; + Label = 1; + Subtle = 2; + StackUnknown = 3; } message DapModule { - DapModuleId id = 1; - string name = 2; - optional string path = 3; - optional bool is_optimized = 4; - optional bool is_user_code = 5; - optional string version = 6; - optional string symbol_status = 7; - optional string symbol_file_path = 8; - optional string date_time_stamp = 9; - optional string address_range = 10; + DapModuleId id = 1; + string name = 2; + optional string path = 3; + optional bool is_optimized = 4; + optional bool is_user_code = 5; + optional string version = 6; + optional string symbol_status = 7; + optional string symbol_file_path = 8; + optional string date_time_stamp = 9; + optional string address_range = 10; } message DebugTaskDefinition { - string adapter = 1; - string label = 2; - string config = 3; - optional TcpHost tcp_connection = 4; + string adapter = 1; + string label = 2; + string config = 3; + optional TcpHost tcp_connection = 4; } message TcpHost { - optional uint32 port = 1; - optional string host = 2; - optional uint64 timeout = 3; + optional uint32 port = 1; + optional string host = 2; + optional uint64 timeout = 3; } message DebugLaunchRequest { - string program = 1; - optional string cwd = 2; - repeated string args = 3; - map env = 4; + string program = 1; + optional string cwd = 2; + repeated string args = 3; + map env = 4; } message DebugAttachRequest { - uint32 process_id 
= 1; + uint32 process_id = 1; } message DapModuleId { - oneof id { - uint32 number = 1; - string string = 2; - } + oneof id { + uint32 number = 1; + string string = 2; + } } message GetDebugAdapterBinary { - uint64 project_id = 1; - uint64 session_id = 3; - DebugTaskDefinition definition = 2; - uint64 worktree_id = 4; + uint64 project_id = 1; + uint64 session_id = 3; + DebugTaskDefinition definition = 2; + uint64 worktree_id = 4; } message DebugAdapterBinary { - optional string command = 1; - repeated string arguments = 2; - map envs = 3; - optional string cwd = 4; - optional TcpHost connection = 5; - string configuration = 7; - LaunchType launch_type = 8; - enum LaunchType { - Attach = 0; - Launch = 1; - } + optional string command = 1; + repeated string arguments = 2; + map envs = 3; + optional string cwd = 4; + optional TcpHost connection = 5; + string configuration = 7; + LaunchType launch_type = 8; + enum LaunchType { + Attach = 0; + Launch = 1; + } } message RunDebugLocators { - uint64 project_id = 1; - SpawnInTerminal build_command = 2; - string locator = 3; + uint64 project_id = 1; + SpawnInTerminal build_command = 2; + string locator = 3; } message DebugRequest { - oneof request { - DebugLaunchRequest debug_launch_request = 1; - DebugAttachRequest debug_attach_request = 2; - } + oneof request { + DebugLaunchRequest debug_launch_request = 1; + DebugAttachRequest debug_attach_request = 2; + } } message DebugScenario { - string label = 1; - string adapter = 2; - reserved 3; - DebugRequest request = 4; - optional TcpHost connection = 5; - optional bool stop_on_entry = 6; - optional string configuration = 7; + string label = 1; + string adapter = 2; + reserved 3; + DebugRequest request = 4; + optional TcpHost connection = 5; + optional bool stop_on_entry = 6; + optional string configuration = 7; } message LogToDebugConsole { - uint64 project_id = 1; - uint64 session_id = 2; - string message = 3; + uint64 project_id = 1; + uint64 session_id = 2; + string message 
= 3; } message GetProcesses { - uint64 project_id = 1; + uint64 project_id = 1; } message GetProcessesResponse { - repeated ProcessInfo processes = 1; + repeated ProcessInfo processes = 1; } message ProcessInfo { - uint32 pid = 1; - string name = 2; - repeated string command = 3; + uint32 pid = 1; + string name = 2; + repeated string command = 3; } diff --git a/crates/proto/proto/download.proto b/crates/proto/proto/download.proto index fd1d63e78db581866981cb90372f84716be8a958..44b1da3389abc2996e2fb9acf6e42d2b3ae54f44 100644 --- a/crates/proto/proto/download.proto +++ b/crates/proto/proto/download.proto @@ -5,32 +5,32 @@ import "core.proto"; import "worktree.proto"; message DownloadFileByPath { - uint64 project_id = 1; - uint64 worktree_id = 2; - string path = 3; - uint64 file_id = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; + uint64 file_id = 4; } message DownloadFileResponse { - uint64 file_id = 1; + uint64 file_id = 1; } message CreateFileForPeer { - uint64 project_id = 1; - PeerId peer_id = 2; - oneof variant { - FileState state = 3; - FileChunk chunk = 4; - } + uint64 project_id = 1; + PeerId peer_id = 2; + oneof variant { + FileState state = 3; + FileChunk chunk = 4; + } } message FileState { - uint64 id = 1; - optional File file = 2; - uint64 content_size = 3; + uint64 id = 1; + optional File file = 2; + uint64 content_size = 3; } message FileChunk { - uint64 file_id = 1; - bytes data = 2; + uint64 file_id = 1; + bytes data = 2; } diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index facaf43fd5ae3e7ff655f0b4006dc1661d503e10..86f3d4c328af06e1a3f4f7cc406ac84272577cd0 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -1,277 +1,276 @@ syntax = "proto3"; package zed.messages; -import "worktree.proto"; import "buffer.proto"; +import "worktree.proto"; message GitBranchesResponse { - repeated Branch branches = 1; + repeated Branch branches = 1; } message UpdateDiffBases { - uint64 
project_id = 1; - uint64 buffer_id = 2; - - enum Mode { - // No collaborator is using the unstaged diff. - HEAD_ONLY = 0; - // No collaborator is using the diff from HEAD. - INDEX_ONLY = 1; - // Both the unstaged and uncommitted diffs are demanded, - // and the contents of the index and HEAD are the same for this path. - INDEX_MATCHES_HEAD = 2; - // Both the unstaged and uncommitted diffs are demanded, - // and the contents of the index and HEAD differ for this path, - // where None means the path doesn't exist in that state of the repo. - INDEX_AND_HEAD = 3; - } - - optional string staged_text = 3; - optional string committed_text = 4; - Mode mode = 5; + uint64 project_id = 1; + uint64 buffer_id = 2; + + enum Mode { + // No collaborator is using the unstaged diff. + HEAD_ONLY = 0; + // No collaborator is using the diff from HEAD. + INDEX_ONLY = 1; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD are the same for this path. + INDEX_MATCHES_HEAD = 2; + // Both the unstaged and uncommitted diffs are demanded, + // and the contents of the index and HEAD differ for this path, + // where None means the path doesn't exist in that state of the repo. 
+ INDEX_AND_HEAD = 3; + } + + optional string staged_text = 3; + optional string committed_text = 4; + Mode mode = 5; } message OpenUnstagedDiff { - uint64 project_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 buffer_id = 2; } message OpenUnstagedDiffResponse { - optional string staged_text = 1; + optional string staged_text = 1; } message OpenUncommittedDiff { - uint64 project_id = 1; - uint64 buffer_id = 2; + uint64 project_id = 1; + uint64 buffer_id = 2; } message OpenUncommittedDiffResponse { - enum Mode { - INDEX_MATCHES_HEAD = 0; - INDEX_AND_HEAD = 1; - } - optional string staged_text = 1; - optional string committed_text = 2; - Mode mode = 3; + enum Mode { + INDEX_MATCHES_HEAD = 0; + INDEX_AND_HEAD = 1; + } + optional string staged_text = 1; + optional string committed_text = 2; + Mode mode = 3; } message SetIndexText { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string path = 4; - optional string text = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string path = 4; + optional string text = 5; } message GetPermalinkToLine { - uint64 project_id = 1; - uint64 buffer_id = 2; - Range selection = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + Range selection = 3; } message GetPermalinkToLineResponse { - string permalink = 1; + string permalink = 1; } message Branch { - bool is_head = 1; - string ref_name = 2; - optional uint64 unix_timestamp = 3; - optional GitUpstream upstream = 4; - optional CommitSummary most_recent_commit = 5; + bool is_head = 1; + string ref_name = 2; + optional uint64 unix_timestamp = 3; + optional GitUpstream upstream = 4; + optional CommitSummary most_recent_commit = 5; } message GitUpstream { - string ref_name = 1; - optional UpstreamTracking tracking = 2; + string ref_name = 1; + optional UpstreamTracking tracking = 2; } message UpstreamTracking { - uint64 ahead = 1; - uint64 behind = 2; + uint64 ahead = 1; + uint64 behind = 2; } message CommitSummary { - 
string sha = 1; - string subject = 2; - int64 commit_timestamp = 3; - string author_name = 4; + string sha = 1; + string subject = 2; + int64 commit_timestamp = 3; + string author_name = 4; } message GitBranches { - uint64 project_id = 1; - ProjectPath repository = 2; + uint64 project_id = 1; + ProjectPath repository = 2; } - message UpdateGitBranch { - uint64 project_id = 1; - string branch_name = 2; - ProjectPath repository = 3; + uint64 project_id = 1; + string branch_name = 2; + ProjectPath repository = 3; } message UpdateRepository { - uint64 project_id = 1; - uint64 id = 2; - string abs_path = 3; - repeated uint64 entry_ids = 4; - optional Branch branch_summary = 5; - repeated StatusEntry updated_statuses = 6; - repeated string removed_statuses = 7; - repeated string current_merge_conflicts = 8; - uint64 scan_id = 9; - bool is_last_update = 10; - optional GitCommitDetails head_commit_details = 11; - optional string merge_message = 12; - repeated StashEntry stash_entries = 13; - optional string remote_upstream_url = 14; - optional string remote_origin_url = 15; + uint64 project_id = 1; + uint64 id = 2; + string abs_path = 3; + repeated uint64 entry_ids = 4; + optional Branch branch_summary = 5; + repeated StatusEntry updated_statuses = 6; + repeated string removed_statuses = 7; + repeated string current_merge_conflicts = 8; + uint64 scan_id = 9; + bool is_last_update = 10; + optional GitCommitDetails head_commit_details = 11; + optional string merge_message = 12; + repeated StashEntry stash_entries = 13; + optional string remote_upstream_url = 14; + optional string remote_origin_url = 15; } message RemoveRepository { - uint64 project_id = 1; - uint64 id = 2; + uint64 project_id = 1; + uint64 id = 2; } enum GitStatus { - Added = 0; - Modified = 1; - Conflict = 2; - Deleted = 3; - Updated = 4; - TypeChanged = 5; - Renamed = 6; - Copied = 7; - Unmodified = 8; + Added = 0; + Modified = 1; + Conflict = 2; + Deleted = 3; + Updated = 4; + TypeChanged = 5; + Renamed = 
6; + Copied = 7; + Unmodified = 8; } message GitFileStatus { - oneof variant { - Untracked untracked = 1; - Ignored ignored = 2; - Unmerged unmerged = 3; - Tracked tracked = 4; - } - - message Untracked {} - message Ignored {} - message Unmerged { - GitStatus first_head = 1; - GitStatus second_head = 2; - } - message Tracked { - GitStatus index_status = 1; - GitStatus worktree_status = 2; - } + oneof variant { + Untracked untracked = 1; + Ignored ignored = 2; + Unmerged unmerged = 3; + Tracked tracked = 4; + } + + message Untracked {} + message Ignored {} + message Unmerged { + GitStatus first_head = 1; + GitStatus second_head = 2; + } + message Tracked { + GitStatus index_status = 1; + GitStatus worktree_status = 2; + } } message GitGetBranches { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; } message GitCreateBranch { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string branch_name = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string branch_name = 4; } message GitChangeBranch { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string branch_name = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string branch_name = 4; } message GitRenameBranch { - uint64 project_id = 1; - uint64 repository_id = 2; - string branch = 3; - string new_name = 4; + uint64 project_id = 1; + uint64 repository_id = 2; + string branch = 3; + string new_name = 4; } message GitCreateRemote { - uint64 project_id = 1; - uint64 repository_id = 2; - string remote_name = 3; - string remote_url = 4; + uint64 project_id = 1; + uint64 repository_id = 2; + string remote_name = 3; + string remote_url = 4; } message GitRemoveRemote { - uint64 project_id = 1; - uint64 repository_id = 2; - string remote_name = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + string remote_name = 3; } message GitDeleteBranch { - 
uint64 project_id = 1; - uint64 repository_id = 2; - string branch_name = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + string branch_name = 3; } message GitDiff { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - DiffType diff_type = 4; - optional string merge_base_ref = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + DiffType diff_type = 4; + optional string merge_base_ref = 5; - enum DiffType { - HEAD_TO_WORKTREE = 0; - HEAD_TO_INDEX = 1; - MERGE_BASE = 2; - } + enum DiffType { + HEAD_TO_WORKTREE = 0; + HEAD_TO_INDEX = 1; + MERGE_BASE = 2; + } } message GitDiffResponse { - string diff = 1; + string diff = 1; } message GitDiffStat { - uint64 project_id = 1; - uint64 repository_id = 2; - DiffType diff_type = 3; - optional string merge_base_ref = 4; + uint64 project_id = 1; + uint64 repository_id = 2; + DiffType diff_type = 3; + optional string merge_base_ref = 4; - enum DiffType { - HEAD_TO_WORKTREE = 0; - HEAD_TO_INDEX = 1; - MERGE_BASE = 2; - } + enum DiffType { + HEAD_TO_WORKTREE = 0; + HEAD_TO_INDEX = 1; + MERGE_BASE = 2; + } } message GitDiffStatResponse { - repeated GitDiffStatEntry entries = 1; + repeated GitDiffStatEntry entries = 1; } message GitDiffStatEntry { - string path = 1; - uint32 added = 2; - uint32 deleted = 3; + string path = 1; + uint32 added = 2; + uint32 deleted = 3; } message GitInit { - uint64 project_id = 1; - string abs_path = 2; - string fallback_branch_name = 3; + uint64 project_id = 1; + string abs_path = 2; + string fallback_branch_name = 3; } message GitClone { - uint64 project_id = 1; - string abs_path = 2; - string remote_repo = 3; + uint64 project_id = 1; + string abs_path = 2; + string remote_repo = 3; } message GitCloneResponse { - bool success = 1; + bool success = 1; } message CheckForPushedCommits { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; } message CheckForPushedCommitsResponse 
{ @@ -279,338 +278,338 @@ message CheckForPushedCommitsResponse { } message GitShow { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; } message GitCommitDetails { - string sha = 1; - string message = 2; - int64 commit_timestamp = 3; - string author_email = 4; - string author_name = 5; + string sha = 1; + string message = 2; + int64 commit_timestamp = 3; + string author_email = 4; + string author_name = 5; } message LoadCommitDiff { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; } message LoadCommitDiffResponse { - repeated CommitFile files = 1; + repeated CommitFile files = 1; } message CommitFile { - string path = 1; - optional string old_text = 2; - optional string new_text = 3; - bool is_binary = 4; + string path = 1; + optional string old_text = 2; + optional string new_text = 3; + bool is_binary = 4; } message GitReset { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; - ResetMode mode = 5; - enum ResetMode { - SOFT = 0; - MIXED = 1; - } + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; + ResetMode mode = 5; + enum ResetMode { + SOFT = 0; + MIXED = 1; + } } message GitCheckoutFiles { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string commit = 4; - repeated string paths = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string commit = 4; + repeated string paths = 5; } message GitFileHistory { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string path = 4; - uint64 skip = 5; - optional uint64 limit = 6; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string path = 4; + uint64 skip = 5; + optional uint64 limit = 6; } message 
GitFileHistoryResponse { - repeated FileHistoryEntry entries = 1; - string path = 2; + repeated FileHistoryEntry entries = 1; + string path = 2; } message FileHistoryEntry { - string sha = 1; - string subject = 2; - string message = 3; - int64 commit_timestamp = 4; - string author_name = 5; - string author_email = 6; + string sha = 1; + string subject = 2; + string message = 3; + int64 commit_timestamp = 4; + string author_name = 5; + string author_email = 6; } // Move to `git.proto` once collab's min version is >=0.171.0. message StatusEntry { - string repo_path = 1; - // Can be removed once collab's min version is >=0.171.0. - GitStatus simple_status = 2; - GitFileStatus status = 3; + string repo_path = 1; + // Can be removed once collab's min version is >=0.171.0. + GitStatus simple_status = 2; + GitFileStatus status = 3; } message StashEntry { - bytes oid = 1; - string message = 2; - optional string branch = 3; - uint64 index = 4; - int64 timestamp = 5; + bytes oid = 1; + string message = 2; + optional string branch = 3; + uint64 index = 4; + int64 timestamp = 5; } message Stage { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - repeated string paths = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + repeated string paths = 4; } message Unstage { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - repeated string paths = 4; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + repeated string paths = 4; } message Stash { - uint64 project_id = 1; - uint64 repository_id = 2; - repeated string paths = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + repeated string paths = 3; } message StashPop { - uint64 project_id = 1; - uint64 repository_id = 2; - optional uint64 stash_index = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + optional uint64 stash_index = 3; } message StashApply { - uint64 project_id = 1; - uint64 repository_id = 2; - optional uint64 stash_index = 3; + 
uint64 project_id = 1; + uint64 repository_id = 2; + optional uint64 stash_index = 3; } message StashDrop { - uint64 project_id = 1; - uint64 repository_id = 2; - optional uint64 stash_index = 3; + uint64 project_id = 1; + uint64 repository_id = 2; + optional uint64 stash_index = 3; } message Commit { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - optional string name = 4; - optional string email = 5; - string message = 6; - optional CommitOptions options = 7; - reserved 8; - uint64 askpass_id = 9; - - message CommitOptions { - bool amend = 1; - bool signoff = 2; - } + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + optional string name = 4; + optional string email = 5; + string message = 6; + optional CommitOptions options = 7; + reserved 8; + uint64 askpass_id = 9; + + message CommitOptions { + bool amend = 1; + bool signoff = 2; + } } message OpenCommitMessageBuffer { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; } message Push { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string remote_name = 4; - string branch_name = 5; - optional PushOptions options = 6; - uint64 askpass_id = 7; - string remote_branch_name = 8; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string remote_name = 4; + string branch_name = 5; + optional PushOptions options = 6; + uint64 askpass_id = 7; + string remote_branch_name = 8; - enum PushOptions { - SET_UPSTREAM = 0; - FORCE = 1; - } + enum PushOptions { + SET_UPSTREAM = 0; + FORCE = 1; + } } message Fetch { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - uint64 askpass_id = 4; - optional string remote = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + uint64 askpass_id = 4; + optional string remote = 5; } message GetRemotes { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - optional string branch_name 
= 4; - bool is_push = 5; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + optional string branch_name = 4; + bool is_push = 5; } message GetRemotesResponse { - repeated Remote remotes = 1; + repeated Remote remotes = 1; - message Remote { - string name = 1; - } + message Remote { + string name = 1; + } } message Pull { - uint64 project_id = 1; - reserved 2; - uint64 repository_id = 3; - string remote_name = 4; - optional string branch_name = 5; - uint64 askpass_id = 6; - bool rebase = 7; + uint64 project_id = 1; + reserved 2; + uint64 repository_id = 3; + string remote_name = 4; + optional string branch_name = 5; + uint64 askpass_id = 6; + bool rebase = 7; } message RemoteMessageResponse { - string stdout = 1; - string stderr = 2; + string stdout = 1; + string stderr = 2; } message BlameBuffer { - uint64 project_id = 1; - uint64 buffer_id = 2; - repeated VectorClockEntry version = 3; + uint64 project_id = 1; + uint64 buffer_id = 2; + repeated VectorClockEntry version = 3; } message BlameEntry { - bytes sha = 1; + bytes sha = 1; - uint32 start_line = 2; - uint32 end_line = 3; - uint32 original_line_number = 4; + uint32 start_line = 2; + uint32 end_line = 3; + uint32 original_line_number = 4; - optional string author = 5; - optional string author_mail = 6; - optional int64 author_time = 7; - optional string author_tz = 8; + optional string author = 5; + optional string author_mail = 6; + optional int64 author_time = 7; + optional string author_tz = 8; - optional string committer = 9; - optional string committer_mail = 10; - optional int64 committer_time = 11; - optional string committer_tz = 12; + optional string committer = 9; + optional string committer_mail = 10; + optional int64 committer_time = 11; + optional string committer_tz = 12; - optional string summary = 13; - optional string previous = 14; + optional string summary = 13; + optional string previous = 14; - string filename = 15; + string filename = 15; } message CommitMessage { - bytes 
oid = 1; - string message = 2; + bytes oid = 1; + string message = 2; } message CommitPermalink { - bytes oid = 1; - string permalink = 2; + bytes oid = 1; + string permalink = 2; } message BlameBufferResponse { - message BlameResponse { - repeated BlameEntry entries = 1; - repeated CommitMessage messages = 2; - reserved 3; - reserved 4; - } + message BlameResponse { + repeated BlameEntry entries = 1; + repeated CommitMessage messages = 2; + reserved 3; + reserved 4; + } - optional BlameResponse blame_response = 5; + optional BlameResponse blame_response = 5; - reserved 1 to 4; + reserved 1 to 4; } message GetDefaultBranch { - uint64 project_id = 1; - uint64 repository_id = 2; + uint64 project_id = 1; + uint64 repository_id = 2; } message GetDefaultBranchResponse { - optional string branch = 1; + optional string branch = 1; } message GetTreeDiff { - uint64 project_id = 1; - uint64 repository_id = 2; - bool is_merge = 3; - string base = 4; - string head = 5; + uint64 project_id = 1; + uint64 repository_id = 2; + bool is_merge = 3; + string base = 4; + string head = 5; } message GetTreeDiffResponse { - repeated TreeDiffStatus entries = 1; + repeated TreeDiffStatus entries = 1; } message TreeDiffStatus { - enum Status { - ADDED = 0; - MODIFIED = 1; - DELETED = 2; - } + enum Status { + ADDED = 0; + MODIFIED = 1; + DELETED = 2; + } - Status status = 1; - string path = 2; - optional string oid = 3; + Status status = 1; + string path = 2; + optional string oid = 3; } message GetBlobContent { - uint64 project_id = 1; - uint64 repository_id = 2; - string oid =3; + uint64 project_id = 1; + uint64 repository_id = 2; + string oid = 3; } message GetBlobContentResponse { - string content = 1; + string content = 1; } message GitGetWorktrees { - uint64 project_id = 1; - uint64 repository_id = 2; + uint64 project_id = 1; + uint64 repository_id = 2; } message GitWorktreesResponse { - repeated Worktree worktrees = 1; + repeated Worktree worktrees = 1; } message Worktree { - string 
path = 1; - string ref_name = 2; - string sha = 3; + string path = 1; + string ref_name = 2; + string sha = 3; } message GitCreateWorktree { - uint64 project_id = 1; - uint64 repository_id = 2; - string name = 3; - string directory = 4; - optional string commit = 5; + uint64 project_id = 1; + uint64 repository_id = 2; + string name = 3; + string directory = 4; + optional string commit = 5; } message RunGitHook { - enum GitHook { - PRE_COMMIT = 0; - reserved 1; - } - - uint64 project_id = 1; - uint64 repository_id = 2; - GitHook hook = 3; + enum GitHook { + PRE_COMMIT = 0; + reserved 1; + } + + uint64 project_id = 1; + uint64 repository_id = 2; + GitHook hook = 3; } diff --git a/crates/proto/proto/image.proto b/crates/proto/proto/image.proto index e3232e6847cbc719280bc3ccd5254e5e368dbeb6..ff791e1f87b6089e6e87ec746fad173b180f10ef 100644 --- a/crates/proto/proto/image.proto +++ b/crates/proto/proto/image.proto @@ -5,32 +5,32 @@ import "core.proto"; import "worktree.proto"; message OpenImageByPath { - uint64 project_id = 1; - uint64 worktree_id = 2; - string path = 3; + uint64 project_id = 1; + uint64 worktree_id = 2; + string path = 3; } message OpenImageResponse { - uint64 image_id = 1; + uint64 image_id = 1; } message CreateImageForPeer { - uint64 project_id = 1; - PeerId peer_id = 2; - oneof variant { - ImageState state = 3; - ImageChunk chunk = 4; - } + uint64 project_id = 1; + PeerId peer_id = 2; + oneof variant { + ImageState state = 3; + ImageChunk chunk = 4; + } } message ImageState { - uint64 id = 1; - optional File file = 2; - uint64 content_size = 3; - string format = 4; // e.g., "png", "jpeg", "webp", etc. + uint64 id = 1; + optional File file = 2; + uint64 content_size = 3; + string format = 4; // e.g., "png", "jpeg", "webp", etc. 
} message ImageChunk { - uint64 image_id = 1; - bytes data = 2; + uint64 image_id = 1; + bytes data = 2; } diff --git a/crates/proto/proto/lsp.proto b/crates/proto/proto/lsp.proto index 9132dafbd42be8e1f7d0de2b1278d7bf757aa9ac..226373a111b6e29e4731edd638a5317dcd244273 100644 --- a/crates/proto/proto/lsp.proto +++ b/crates/proto/proto/lsp.proto @@ -2,8 +2,6 @@ syntax = "proto3"; package zed.messages; import "buffer.proto"; -import "core.proto"; -import "worktree.proto"; message GetDefinition { uint64 project_id = 1; diff --git a/crates/proto/proto/notification.proto b/crates/proto/proto/notification.proto index ebd3d7fe447991c38c9d616fc944f366f51782c0..8a41854ac161100c60d66d0b27b49bc4b2182a22 100644 --- a/crates/proto/proto/notification.proto +++ b/crates/proto/proto/notification.proto @@ -2,36 +2,36 @@ syntax = "proto3"; package zed.messages; message GetNotifications { - optional uint64 before_id = 1; + optional uint64 before_id = 1; } message AddNotification { - Notification notification = 1; + Notification notification = 1; } message GetNotificationsResponse { - repeated Notification notifications = 1; - bool done = 2; + repeated Notification notifications = 1; + bool done = 2; } message DeleteNotification { - uint64 notification_id = 1; + uint64 notification_id = 1; } message UpdateNotification { - Notification notification = 1; + Notification notification = 1; } message MarkNotificationRead { - uint64 notification_id = 1; + uint64 notification_id = 1; } message Notification { - uint64 id = 1; - uint64 timestamp = 2; - string kind = 3; - optional uint64 entity_id = 4; - string content = 5; - bool is_read = 6; - optional bool response = 7; + uint64 id = 1; + uint64 timestamp = 2; + string kind = 3; + optional uint64 entity_id = 4; + string content = 5; + bool is_read = 6; + optional bool response = 7; } diff --git a/crates/proto/proto/task.proto b/crates/proto/proto/task.proto index 
1844087d623cc3eac0e5d7500a50dfb31028f304..8d941c2438c55045d8d38cb4c97d918be8abbeb4 100644 --- a/crates/proto/proto/task.proto +++ b/crates/proto/proto/task.proto @@ -4,57 +4,57 @@ package zed.messages; import "buffer.proto"; message TaskContextForLocation { - uint64 project_id = 1; - Location location = 2; - map task_variables = 3; + uint64 project_id = 1; + Location location = 2; + map task_variables = 3; } message TaskContext { - optional string cwd = 1; - map task_variables = 2; - map project_env = 3; + optional string cwd = 1; + map task_variables = 2; + map project_env = 3; } message Shell { - message WithArguments { - string program = 1; - repeated string args = 2; - } + message WithArguments { + string program = 1; + repeated string args = 2; + } - oneof shell_type { - System system = 1; - string program = 2; - WithArguments with_arguments = 3; - } + oneof shell_type { + System system = 1; + string program = 2; + WithArguments with_arguments = 3; + } } message System {} enum RevealStrategy { - RevealAlways = 0; - RevealNever = 1; + RevealAlways = 0; + RevealNever = 1; } enum HideStrategy { - HideAlways = 0; - HideNever = 1; - HideOnSuccess = 2; + HideAlways = 0; + HideNever = 1; + HideOnSuccess = 2; } message SpawnInTerminal { - string label = 1; - optional string command = 2; - repeated string args = 3; - map env = 4; - optional string cwd = 5; + string label = 1; + optional string command = 2; + repeated string args = 3; + map env = 4; + optional string cwd = 5; } message GetDirectoryEnvironment { - uint64 project_id = 1; - Shell shell = 2; - string directory = 3; + uint64 project_id = 1; + Shell shell = 2; + string directory = 3; } message DirectoryEnvironment { - map environment = 1; + map environment = 1; } diff --git a/crates/proto/proto/toolchain.proto b/crates/proto/proto/toolchain.proto index b190322ca0602078ea28d00fe970e4958fb17fb0..a91948148e64eb9eff7f1ca657dab203a9ca7f1f 100644 --- a/crates/proto/proto/toolchain.proto +++ 
b/crates/proto/proto/toolchain.proto @@ -2,58 +2,58 @@ syntax = "proto3"; package zed.messages; message ListToolchains { - uint64 project_id = 1; - uint64 worktree_id = 2; - string language_name = 3; - optional string path = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; + optional string path = 4; } message Toolchain { - string name = 1; - string path = 2; - string raw_json = 3; + string name = 1; + string path = 2; + string raw_json = 3; } message ToolchainGroup { - uint64 start_index = 1; - string name = 2; + uint64 start_index = 1; + string name = 2; } message ListToolchainsResponse { - repeated Toolchain toolchains = 1; - bool has_values = 2; - repeated ToolchainGroup groups = 3; - optional string relative_worktree_path = 4; + repeated Toolchain toolchains = 1; + bool has_values = 2; + repeated ToolchainGroup groups = 3; + optional string relative_worktree_path = 4; } message ActivateToolchain { - uint64 project_id = 1; - uint64 worktree_id = 2; - Toolchain toolchain = 3; - string language_name = 4; - optional string path = 5; + uint64 project_id = 1; + uint64 worktree_id = 2; + Toolchain toolchain = 3; + string language_name = 4; + optional string path = 5; } message ActiveToolchain { - uint64 project_id = 1; - uint64 worktree_id = 2; - string language_name = 3; - optional string path = 4; + uint64 project_id = 1; + uint64 worktree_id = 2; + string language_name = 3; + optional string path = 4; } message ActiveToolchainResponse { - optional Toolchain toolchain = 1; + optional Toolchain toolchain = 1; } message ResolveToolchain { - uint64 project_id = 1; - string abs_path = 2; - string language_name = 3; + uint64 project_id = 1; + string abs_path = 2; + string language_name = 3; } message ResolveToolchainResponse { - oneof response { - Toolchain toolchain = 1; - string error = 2; - } + oneof response { + Toolchain toolchain = 1; + string error = 2; + } } diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto 
index fa55e1f27330fb5fee88fb19296f607b1bf9f3a6..d6139f5342d153221d13917e26565a4c0eb5a707 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -18,495 +18,493 @@ import "toolchain.proto"; import "worktree.proto"; // Looking for a number? Search "// current max" - message Envelope { - uint32 id = 1; - optional uint32 responding_to = 2; - optional PeerId original_sender_id = 3; - optional uint32 ack_id = 266; - - oneof payload { - Hello hello = 4; - Ack ack = 5; - Error error = 6; - Ping ping = 7; - Test test = 8; - EndStream end_stream = 165; - - CreateRoom create_room = 9; - CreateRoomResponse create_room_response = 10; - JoinRoom join_room = 11; - JoinRoomResponse join_room_response = 12; - RejoinRoom rejoin_room = 13; - RejoinRoomResponse rejoin_room_response = 14; - LeaveRoom leave_room = 15; - Call call = 16; - IncomingCall incoming_call = 17; - CallCanceled call_canceled = 18; - CancelCall cancel_call = 19; - DeclineCall decline_call = 20; - UpdateParticipantLocation update_participant_location = 21; - RoomUpdated room_updated = 22; - - ShareProject share_project = 23; - ShareProjectResponse share_project_response = 24; - UnshareProject unshare_project = 25; - JoinProject join_project = 26; - JoinProjectResponse join_project_response = 27; - LeaveProject leave_project = 28; - AddProjectCollaborator add_project_collaborator = 29; - UpdateProjectCollaborator update_project_collaborator = 30; - RemoveProjectCollaborator remove_project_collaborator = 31; - - GetDefinition get_definition = 32; - GetDefinitionResponse get_definition_response = 33; - GetDeclaration get_declaration = 237; - GetDeclarationResponse get_declaration_response = 238; - GetTypeDefinition get_type_definition = 34; - GetTypeDefinitionResponse get_type_definition_response = 35; - - GetReferences get_references = 36; - GetReferencesResponse get_references_response = 37; - GetDocumentHighlights get_document_highlights = 38; - GetDocumentHighlightsResponse 
get_document_highlights_response = 39; - GetProjectSymbols get_project_symbols = 40; - GetProjectSymbolsResponse get_project_symbols_response = 41; - OpenBufferForSymbol open_buffer_for_symbol = 42; - OpenBufferForSymbolResponse open_buffer_for_symbol_response = 43; - - UpdateProject update_project = 44; - UpdateWorktree update_worktree = 45; - - CreateProjectEntry create_project_entry = 46; - RenameProjectEntry rename_project_entry = 47; - CopyProjectEntry copy_project_entry = 48; - DeleteProjectEntry delete_project_entry = 49; - ProjectEntryResponse project_entry_response = 50; - ExpandProjectEntry expand_project_entry = 51; - ExpandProjectEntryResponse expand_project_entry_response = 52; - ExpandAllForProjectEntry expand_all_for_project_entry = 291; - ExpandAllForProjectEntryResponse expand_all_for_project_entry_response = 292; - UpdateDiagnosticSummary update_diagnostic_summary = 53; - StartLanguageServer start_language_server = 54; - UpdateLanguageServer update_language_server = 55; - - OpenBufferById open_buffer_by_id = 56; - OpenBufferByPath open_buffer_by_path = 57; - OpenBufferResponse open_buffer_response = 58; - CreateBufferForPeer create_buffer_for_peer = 59; - UpdateBuffer update_buffer = 60; - UpdateBufferFile update_buffer_file = 61; - SaveBuffer save_buffer = 62; - BufferSaved buffer_saved = 63; - BufferReloaded buffer_reloaded = 64; - ReloadBuffers reload_buffers = 65; - ReloadBuffersResponse reload_buffers_response = 66; - SynchronizeBuffers synchronize_buffers = 67; - SynchronizeBuffersResponse synchronize_buffers_response = 68; - FormatBuffers format_buffers = 69; - FormatBuffersResponse format_buffers_response = 70; - GetCompletions get_completions = 71; - GetCompletionsResponse get_completions_response = 72; - ResolveCompletionDocumentation resolve_completion_documentation = 73; - ResolveCompletionDocumentationResponse resolve_completion_documentation_response = 74; - ApplyCompletionAdditionalEdits apply_completion_additional_edits = 75; - 
ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 76; - GetCodeActions get_code_actions = 77; - GetCodeActionsResponse get_code_actions_response = 78; - GetHover get_hover = 79; - GetHoverResponse get_hover_response = 80; - ApplyCodeAction apply_code_action = 81; - ApplyCodeActionResponse apply_code_action_response = 82; - PrepareRename prepare_rename = 83; - PrepareRenameResponse prepare_rename_response = 84; - PerformRename perform_rename = 85; - PerformRenameResponse perform_rename_response = 86; - - UpdateContacts update_contacts = 89; - ShowContacts show_contacts = 91; - - GetUsers get_users = 92; - FuzzySearchUsers fuzzy_search_users = 93; - UsersResponse users_response = 94; - RequestContact request_contact = 95; - RespondToContactRequest respond_to_contact_request = 96; - RemoveContact remove_contact = 97; - - Follow follow = 98; - FollowResponse follow_response = 99; - UpdateFollowers update_followers = 100; - Unfollow unfollow = 101; - UpdateDiffBases update_diff_bases = 104; - - OnTypeFormatting on_type_formatting = 105; - OnTypeFormattingResponse on_type_formatting_response = 106; - - UpdateWorktreeSettings update_worktree_settings = 107; - - InlayHints inlay_hints = 108; - InlayHintsResponse inlay_hints_response = 109; - ResolveInlayHint resolve_inlay_hint = 110; - ResolveInlayHintResponse resolve_inlay_hint_response = 111; - RefreshInlayHints refresh_inlay_hints = 112; - - CreateChannel create_channel = 113; - CreateChannelResponse create_channel_response = 114; - InviteChannelMember invite_channel_member = 115; - RemoveChannelMember remove_channel_member = 116; - RespondToChannelInvite respond_to_channel_invite = 117; - UpdateChannels update_channels = 118; - JoinChannel join_channel = 119; - DeleteChannel delete_channel = 120; - GetChannelMembers get_channel_members = 121; - GetChannelMembersResponse get_channel_members_response = 122; - SetChannelMemberRole set_channel_member_role = 123; - RenameChannel 
rename_channel = 124; - RenameChannelResponse rename_channel_response = 125; - SubscribeToChannels subscribe_to_channels = 207; - - JoinChannelBuffer join_channel_buffer = 126; - JoinChannelBufferResponse join_channel_buffer_response = 127; - UpdateChannelBuffer update_channel_buffer = 128; - LeaveChannelBuffer leave_channel_buffer = 129; - UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 130; - RejoinChannelBuffers rejoin_channel_buffers = 131; - RejoinChannelBuffersResponse rejoin_channel_buffers_response = 132; - AckBufferOperation ack_buffer_operation = 133; - - JoinChannelChat join_channel_chat = 134; - JoinChannelChatResponse join_channel_chat_response = 135; - LeaveChannelChat leave_channel_chat = 136; - SendChannelMessage send_channel_message = 137; - SendChannelMessageResponse send_channel_message_response = 138; - ChannelMessageSent channel_message_sent = 139; - GetChannelMessages get_channel_messages = 140; - GetChannelMessagesResponse get_channel_messages_response = 141; - RemoveChannelMessage remove_channel_message = 142; - AckChannelMessage ack_channel_message = 143; - GetChannelMessagesById get_channel_messages_by_id = 144; - - MoveChannel move_channel = 147; - ReorderChannel reorder_channel = 349; - SetChannelVisibility set_channel_visibility = 148; - - AddNotification add_notification = 149; - GetNotifications get_notifications = 150; - GetNotificationsResponse get_notifications_response = 151; - DeleteNotification delete_notification = 152; - MarkNotificationRead mark_notification_read = 153; - LspExtExpandMacro lsp_ext_expand_macro = 154; - LspExtExpandMacroResponse lsp_ext_expand_macro_response = 155; - SetRoomParticipantRole set_room_participant_role = 156; - - UpdateUserChannels update_user_channels = 157; - - GetImplementation get_implementation = 162; - GetImplementationResponse get_implementation_response = 163; - - UpdateChannelMessage update_channel_message = 170; - ChannelMessageUpdate channel_message_update = 171; 
- - BlameBuffer blame_buffer = 172; - BlameBufferResponse blame_buffer_response = 173; - - UpdateNotification update_notification = 174; - - RestartLanguageServers restart_language_servers = 208; - - RejoinRemoteProjects rejoin_remote_projects = 186; - RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187; + uint32 id = 1; + optional uint32 responding_to = 2; + optional PeerId original_sender_id = 3; + optional uint32 ack_id = 266; + + oneof payload { + Hello hello = 4; + Ack ack = 5; + Error error = 6; + Ping ping = 7; + Test test = 8; + EndStream end_stream = 165; + + CreateRoom create_room = 9; + CreateRoomResponse create_room_response = 10; + JoinRoom join_room = 11; + JoinRoomResponse join_room_response = 12; + RejoinRoom rejoin_room = 13; + RejoinRoomResponse rejoin_room_response = 14; + LeaveRoom leave_room = 15; + Call call = 16; + IncomingCall incoming_call = 17; + CallCanceled call_canceled = 18; + CancelCall cancel_call = 19; + DeclineCall decline_call = 20; + UpdateParticipantLocation update_participant_location = 21; + RoomUpdated room_updated = 22; + + ShareProject share_project = 23; + ShareProjectResponse share_project_response = 24; + UnshareProject unshare_project = 25; + JoinProject join_project = 26; + JoinProjectResponse join_project_response = 27; + LeaveProject leave_project = 28; + AddProjectCollaborator add_project_collaborator = 29; + UpdateProjectCollaborator update_project_collaborator = 30; + RemoveProjectCollaborator remove_project_collaborator = 31; + + GetDefinition get_definition = 32; + GetDefinitionResponse get_definition_response = 33; + GetDeclaration get_declaration = 237; + GetDeclarationResponse get_declaration_response = 238; + GetTypeDefinition get_type_definition = 34; + GetTypeDefinitionResponse get_type_definition_response = 35; + + GetReferences get_references = 36; + GetReferencesResponse get_references_response = 37; + GetDocumentHighlights get_document_highlights = 38; + GetDocumentHighlightsResponse 
get_document_highlights_response = 39; + GetProjectSymbols get_project_symbols = 40; + GetProjectSymbolsResponse get_project_symbols_response = 41; + OpenBufferForSymbol open_buffer_for_symbol = 42; + OpenBufferForSymbolResponse open_buffer_for_symbol_response = 43; + + UpdateProject update_project = 44; + UpdateWorktree update_worktree = 45; + + CreateProjectEntry create_project_entry = 46; + RenameProjectEntry rename_project_entry = 47; + CopyProjectEntry copy_project_entry = 48; + DeleteProjectEntry delete_project_entry = 49; + ProjectEntryResponse project_entry_response = 50; + ExpandProjectEntry expand_project_entry = 51; + ExpandProjectEntryResponse expand_project_entry_response = 52; + ExpandAllForProjectEntry expand_all_for_project_entry = 291; + ExpandAllForProjectEntryResponse expand_all_for_project_entry_response = 292; + UpdateDiagnosticSummary update_diagnostic_summary = 53; + StartLanguageServer start_language_server = 54; + UpdateLanguageServer update_language_server = 55; + + OpenBufferById open_buffer_by_id = 56; + OpenBufferByPath open_buffer_by_path = 57; + OpenBufferResponse open_buffer_response = 58; + CreateBufferForPeer create_buffer_for_peer = 59; + UpdateBuffer update_buffer = 60; + UpdateBufferFile update_buffer_file = 61; + SaveBuffer save_buffer = 62; + BufferSaved buffer_saved = 63; + BufferReloaded buffer_reloaded = 64; + ReloadBuffers reload_buffers = 65; + ReloadBuffersResponse reload_buffers_response = 66; + SynchronizeBuffers synchronize_buffers = 67; + SynchronizeBuffersResponse synchronize_buffers_response = 68; + FormatBuffers format_buffers = 69; + FormatBuffersResponse format_buffers_response = 70; + GetCompletions get_completions = 71; + GetCompletionsResponse get_completions_response = 72; + ResolveCompletionDocumentation resolve_completion_documentation = 73; + ResolveCompletionDocumentationResponse resolve_completion_documentation_response = 74; + ApplyCompletionAdditionalEdits apply_completion_additional_edits = 75; + 
ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 76; + GetCodeActions get_code_actions = 77; + GetCodeActionsResponse get_code_actions_response = 78; + GetHover get_hover = 79; + GetHoverResponse get_hover_response = 80; + ApplyCodeAction apply_code_action = 81; + ApplyCodeActionResponse apply_code_action_response = 82; + PrepareRename prepare_rename = 83; + PrepareRenameResponse prepare_rename_response = 84; + PerformRename perform_rename = 85; + PerformRenameResponse perform_rename_response = 86; + + UpdateContacts update_contacts = 89; + ShowContacts show_contacts = 91; + + GetUsers get_users = 92; + FuzzySearchUsers fuzzy_search_users = 93; + UsersResponse users_response = 94; + RequestContact request_contact = 95; + RespondToContactRequest respond_to_contact_request = 96; + RemoveContact remove_contact = 97; + + Follow follow = 98; + FollowResponse follow_response = 99; + UpdateFollowers update_followers = 100; + Unfollow unfollow = 101; + UpdateDiffBases update_diff_bases = 104; + + OnTypeFormatting on_type_formatting = 105; + OnTypeFormattingResponse on_type_formatting_response = 106; + + UpdateWorktreeSettings update_worktree_settings = 107; + + InlayHints inlay_hints = 108; + InlayHintsResponse inlay_hints_response = 109; + ResolveInlayHint resolve_inlay_hint = 110; + ResolveInlayHintResponse resolve_inlay_hint_response = 111; + RefreshInlayHints refresh_inlay_hints = 112; + + CreateChannel create_channel = 113; + CreateChannelResponse create_channel_response = 114; + InviteChannelMember invite_channel_member = 115; + RemoveChannelMember remove_channel_member = 116; + RespondToChannelInvite respond_to_channel_invite = 117; + UpdateChannels update_channels = 118; + JoinChannel join_channel = 119; + DeleteChannel delete_channel = 120; + GetChannelMembers get_channel_members = 121; + GetChannelMembersResponse get_channel_members_response = 122; + SetChannelMemberRole set_channel_member_role = 123; + RenameChannel 
rename_channel = 124; + RenameChannelResponse rename_channel_response = 125; + SubscribeToChannels subscribe_to_channels = 207; + + JoinChannelBuffer join_channel_buffer = 126; + JoinChannelBufferResponse join_channel_buffer_response = 127; + UpdateChannelBuffer update_channel_buffer = 128; + LeaveChannelBuffer leave_channel_buffer = 129; + UpdateChannelBufferCollaborators update_channel_buffer_collaborators = 130; + RejoinChannelBuffers rejoin_channel_buffers = 131; + RejoinChannelBuffersResponse rejoin_channel_buffers_response = 132; + AckBufferOperation ack_buffer_operation = 133; + + JoinChannelChat join_channel_chat = 134; + JoinChannelChatResponse join_channel_chat_response = 135; + LeaveChannelChat leave_channel_chat = 136; + SendChannelMessage send_channel_message = 137; + SendChannelMessageResponse send_channel_message_response = 138; + ChannelMessageSent channel_message_sent = 139; + GetChannelMessages get_channel_messages = 140; + GetChannelMessagesResponse get_channel_messages_response = 141; + RemoveChannelMessage remove_channel_message = 142; + AckChannelMessage ack_channel_message = 143; + GetChannelMessagesById get_channel_messages_by_id = 144; + + MoveChannel move_channel = 147; + ReorderChannel reorder_channel = 349; + SetChannelVisibility set_channel_visibility = 148; + + AddNotification add_notification = 149; + GetNotifications get_notifications = 150; + GetNotificationsResponse get_notifications_response = 151; + DeleteNotification delete_notification = 152; + MarkNotificationRead mark_notification_read = 153; + LspExtExpandMacro lsp_ext_expand_macro = 154; + LspExtExpandMacroResponse lsp_ext_expand_macro_response = 155; + SetRoomParticipantRole set_room_participant_role = 156; + + UpdateUserChannels update_user_channels = 157; + + GetImplementation get_implementation = 162; + GetImplementationResponse get_implementation_response = 163; + + UpdateChannelMessage update_channel_message = 170; + ChannelMessageUpdate channel_message_update = 171; 
+ + BlameBuffer blame_buffer = 172; + BlameBufferResponse blame_buffer_response = 173; + + UpdateNotification update_notification = 174; + + RestartLanguageServers restart_language_servers = 208; + + RejoinRemoteProjects rejoin_remote_projects = 186; + RejoinRemoteProjectsResponse rejoin_remote_projects_response = 187; - OpenNewBuffer open_new_buffer = 196; + OpenNewBuffer open_new_buffer = 196; - TaskContextForLocation task_context_for_location = 203; - TaskContext task_context = 204; + TaskContextForLocation task_context_for_location = 203; + TaskContext task_context = 204; - LinkedEditingRange linked_editing_range = 209; - LinkedEditingRangeResponse linked_editing_range_response = 210; + LinkedEditingRange linked_editing_range = 209; + LinkedEditingRangeResponse linked_editing_range_response = 210; - AdvertiseContexts advertise_contexts = 211; - OpenContext open_context = 212; - OpenContextResponse open_context_response = 213; - CreateContext create_context = 232; - CreateContextResponse create_context_response = 233; - UpdateContext update_context = 214; - SynchronizeContexts synchronize_contexts = 215; - SynchronizeContextsResponse synchronize_contexts_response = 216; + AdvertiseContexts advertise_contexts = 211; + OpenContext open_context = 212; + OpenContextResponse open_context_response = 213; + CreateContext create_context = 232; + CreateContextResponse create_context_response = 233; + UpdateContext update_context = 214; + SynchronizeContexts synchronize_contexts = 215; + SynchronizeContextsResponse synchronize_contexts_response = 216; - GetSignatureHelp get_signature_help = 217; - GetSignatureHelpResponse get_signature_help_response = 218; + GetSignatureHelp get_signature_help = 217; + GetSignatureHelpResponse get_signature_help_response = 218; - ListRemoteDirectory list_remote_directory = 219; - ListRemoteDirectoryResponse list_remote_directory_response = 220; - AddWorktree add_worktree = 222; - AddWorktreeResponse add_worktree_response = 223; + 
ListRemoteDirectory list_remote_directory = 219; + ListRemoteDirectoryResponse list_remote_directory_response = 220; + AddWorktree add_worktree = 222; + AddWorktreeResponse add_worktree_response = 223; - LspExtSwitchSourceHeader lsp_ext_switch_source_header = 241; - LspExtSwitchSourceHeaderResponse lsp_ext_switch_source_header_response = 242; + LspExtSwitchSourceHeader lsp_ext_switch_source_header = 241; + LspExtSwitchSourceHeaderResponse lsp_ext_switch_source_header_response = 242; - FindSearchCandidates find_search_candidates = 243; + FindSearchCandidates find_search_candidates = 243; - CloseBuffer close_buffer = 245; + CloseBuffer close_buffer = 245; - ShutdownRemoteServer shutdown_remote_server = 257; + ShutdownRemoteServer shutdown_remote_server = 257; - RemoveWorktree remove_worktree = 258; + RemoveWorktree remove_worktree = 258; - LanguageServerLog language_server_log = 260; + LanguageServerLog language_server_log = 260; - Toast toast = 261; - HideToast hide_toast = 262; + Toast toast = 261; + HideToast hide_toast = 262; - OpenServerSettings open_server_settings = 263; + OpenServerSettings open_server_settings = 263; - GetPermalinkToLine get_permalink_to_line = 264; - GetPermalinkToLineResponse get_permalink_to_line_response = 265; + GetPermalinkToLine get_permalink_to_line = 264; + GetPermalinkToLineResponse get_permalink_to_line_response = 265; - FlushBufferedMessages flush_buffered_messages = 267; + FlushBufferedMessages flush_buffered_messages = 267; - LanguageServerPromptRequest language_server_prompt_request = 268; - LanguageServerPromptResponse language_server_prompt_response = 269; + LanguageServerPromptRequest language_server_prompt_request = 268; + LanguageServerPromptResponse language_server_prompt_response = 269; - GitBranchesResponse git_branches_response = 271; + GitBranchesResponse git_branches_response = 271; - UpdateGitBranch update_git_branch = 272; + UpdateGitBranch update_git_branch = 272; - ListToolchains list_toolchains = 273; - 
ListToolchainsResponse list_toolchains_response = 274; - ActivateToolchain activate_toolchain = 275; - ActiveToolchain active_toolchain = 276; - ActiveToolchainResponse active_toolchain_response = 277; + ListToolchains list_toolchains = 273; + ListToolchainsResponse list_toolchains_response = 274; + ActivateToolchain activate_toolchain = 275; + ActiveToolchain active_toolchain = 276; + ActiveToolchainResponse active_toolchain_response = 277; - GetPathMetadata get_path_metadata = 278; - GetPathMetadataResponse get_path_metadata_response = 279; + GetPathMetadata get_path_metadata = 278; + GetPathMetadataResponse get_path_metadata_response = 279; - CancelLanguageServerWork cancel_language_server_work = 282; + CancelLanguageServerWork cancel_language_server_work = 282; - LspExtOpenDocs lsp_ext_open_docs = 283; - LspExtOpenDocsResponse lsp_ext_open_docs_response = 284; + LspExtOpenDocs lsp_ext_open_docs = 283; + LspExtOpenDocsResponse lsp_ext_open_docs_response = 284; - SyncExtensions sync_extensions = 285; - SyncExtensionsResponse sync_extensions_response = 286; - InstallExtension install_extension = 287; + SyncExtensions sync_extensions = 285; + SyncExtensionsResponse sync_extensions_response = 286; + InstallExtension install_extension = 287; - OpenUnstagedDiff open_unstaged_diff = 288; - OpenUnstagedDiffResponse open_unstaged_diff_response = 289; + OpenUnstagedDiff open_unstaged_diff = 288; + OpenUnstagedDiffResponse open_unstaged_diff_response = 289; - RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; + RegisterBufferWithLanguageServers register_buffer_with_language_servers = 290; - Stage stage = 293; - Unstage unstage = 294; - Commit commit = 295; - OpenCommitMessageBuffer open_commit_message_buffer = 296; + Stage stage = 293; + Unstage unstage = 294; + Commit commit = 295; + OpenCommitMessageBuffer open_commit_message_buffer = 296; - OpenUncommittedDiff open_uncommitted_diff = 297; - OpenUncommittedDiffResponse 
open_uncommitted_diff_response = 298; + OpenUncommittedDiff open_uncommitted_diff = 297; + OpenUncommittedDiffResponse open_uncommitted_diff_response = 298; - SetIndexText set_index_text = 299; + SetIndexText set_index_text = 299; - GitShow git_show = 300; - GitReset git_reset = 301; - GitCommitDetails git_commit_details = 302; - GitCheckoutFiles git_checkout_files = 303; + GitShow git_show = 300; + GitReset git_reset = 301; + GitCommitDetails git_commit_details = 302; + GitCheckoutFiles git_checkout_files = 303; - Push push = 304; - Fetch fetch = 305; - GetRemotes get_remotes = 306; - GetRemotesResponse get_remotes_response = 307; - Pull pull = 308; + Push push = 304; + Fetch fetch = 305; + GetRemotes get_remotes = 306; + GetRemotesResponse get_remotes_response = 307; + Pull pull = 308; - ApplyCodeActionKind apply_code_action_kind = 309; - ApplyCodeActionKindResponse apply_code_action_kind_response = 310; + ApplyCodeActionKind apply_code_action_kind = 309; + ApplyCodeActionKindResponse apply_code_action_kind_response = 310; - RemoteMessageResponse remote_message_response = 311; + RemoteMessageResponse remote_message_response = 311; - GitGetBranches git_get_branches = 312; - GitCreateBranch git_create_branch = 313; - GitChangeBranch git_change_branch = 314; + GitGetBranches git_get_branches = 312; + GitCreateBranch git_create_branch = 313; + GitChangeBranch git_change_branch = 314; - CheckForPushedCommits check_for_pushed_commits = 315; - CheckForPushedCommitsResponse check_for_pushed_commits_response = 316; + CheckForPushedCommits check_for_pushed_commits = 315; + CheckForPushedCommitsResponse check_for_pushed_commits_response = 316; - AskPassRequest ask_pass_request = 317; - AskPassResponse ask_pass_response = 318; + AskPassRequest ask_pass_request = 317; + AskPassResponse ask_pass_response = 318; - GitDiff git_diff = 319; - GitDiffResponse git_diff_response = 320; - GitInit git_init = 321; + GitDiff git_diff = 319; + GitDiffResponse git_diff_response = 320; + 
GitInit git_init = 321; - CodeLens code_lens = 322; - GetCodeLens get_code_lens = 323; - GetCodeLensResponse get_code_lens_response = 324; - RefreshCodeLens refresh_code_lens = 325; + CodeLens code_lens = 322; + GetCodeLens get_code_lens = 323; + GetCodeLensResponse get_code_lens_response = 324; + RefreshCodeLens refresh_code_lens = 325; - ToggleBreakpoint toggle_breakpoint = 326; - BreakpointsForFile breakpoints_for_file = 327; + ToggleBreakpoint toggle_breakpoint = 326; + BreakpointsForFile breakpoints_for_file = 327; - UpdateRepository update_repository = 328; - RemoveRepository remove_repository = 329; + UpdateRepository update_repository = 328; + RemoveRepository remove_repository = 329; - GetDocumentSymbols get_document_symbols = 330; - GetDocumentSymbolsResponse get_document_symbols_response = 331; + GetDocumentSymbols get_document_symbols = 330; + GetDocumentSymbolsResponse get_document_symbols_response = 331; - LoadCommitDiff load_commit_diff = 334; - LoadCommitDiffResponse load_commit_diff_response = 335; + LoadCommitDiff load_commit_diff = 334; + LoadCommitDiffResponse load_commit_diff_response = 335; - StopLanguageServers stop_language_servers = 336; + StopLanguageServers stop_language_servers = 336; - LspExtRunnables lsp_ext_runnables = 337; - LspExtRunnablesResponse lsp_ext_runnables_response = 338; + LspExtRunnables lsp_ext_runnables = 337; + LspExtRunnablesResponse lsp_ext_runnables_response = 338; - GetDebugAdapterBinary get_debug_adapter_binary = 339; - DebugAdapterBinary debug_adapter_binary = 340; - RunDebugLocators run_debug_locators = 341; - DebugRequest debug_request = 342; + GetDebugAdapterBinary get_debug_adapter_binary = 339; + DebugAdapterBinary debug_adapter_binary = 340; + RunDebugLocators run_debug_locators = 341; + DebugRequest debug_request = 342; - LspExtGoToParentModule lsp_ext_go_to_parent_module = 343; - LspExtGoToParentModuleResponse lsp_ext_go_to_parent_module_response = 344; - LspExtCancelFlycheck lsp_ext_cancel_flycheck = 
345; - LspExtRunFlycheck lsp_ext_run_flycheck = 346; - LspExtClearFlycheck lsp_ext_clear_flycheck = 347; + LspExtGoToParentModule lsp_ext_go_to_parent_module = 343; + LspExtGoToParentModuleResponse lsp_ext_go_to_parent_module_response = 344; + LspExtCancelFlycheck lsp_ext_cancel_flycheck = 345; + LspExtRunFlycheck lsp_ext_run_flycheck = 346; + LspExtClearFlycheck lsp_ext_clear_flycheck = 347; - LogToDebugConsole log_to_debug_console = 348; + LogToDebugConsole log_to_debug_console = 348; - GetDocumentDiagnostics get_document_diagnostics = 350; - GetDocumentDiagnosticsResponse get_document_diagnostics_response = 351; - PullWorkspaceDiagnostics pull_workspace_diagnostics = 352; + GetDocumentDiagnostics get_document_diagnostics = 350; + GetDocumentDiagnosticsResponse get_document_diagnostics_response = 351; + PullWorkspaceDiagnostics pull_workspace_diagnostics = 352; - GetDocumentColor get_document_color = 353; - GetDocumentColorResponse get_document_color_response = 354; - GetColorPresentation get_color_presentation = 355; - GetColorPresentationResponse get_color_presentation_response = 356; + GetDocumentColor get_document_color = 353; + GetDocumentColorResponse get_document_color_response = 354; + GetColorPresentation get_color_presentation = 355; + GetColorPresentationResponse get_color_presentation_response = 356; - Stash stash = 357; - StashPop stash_pop = 358; + Stash stash = 357; + StashPop stash_pop = 358; - GetDefaultBranch get_default_branch = 359; - GetDefaultBranchResponse get_default_branch_response = 360; + GetDefaultBranch get_default_branch = 359; + GetDefaultBranchResponse get_default_branch_response = 360; - GetCrashFiles get_crash_files = 361; - GetCrashFilesResponse get_crash_files_response = 362; + GetCrashFiles get_crash_files = 361; + GetCrashFilesResponse get_crash_files_response = 362; - GitClone git_clone = 363; - GitCloneResponse git_clone_response = 364; + GitClone git_clone = 363; + GitCloneResponse git_clone_response = 364; - LspQuery 
lsp_query = 365; - LspQueryResponse lsp_query_response = 366; - ToggleLspLogs toggle_lsp_logs = 367; + LspQuery lsp_query = 365; + LspQueryResponse lsp_query_response = 366; + ToggleLspLogs toggle_lsp_logs = 367; - UpdateUserSettings update_user_settings = 368; + UpdateUserSettings update_user_settings = 368; - GetProcesses get_processes = 369; - GetProcessesResponse get_processes_response = 370; + GetProcesses get_processes = 369; + GetProcessesResponse get_processes_response = 370; - ResolveToolchain resolve_toolchain = 371; - ResolveToolchainResponse resolve_toolchain_response = 372; + ResolveToolchain resolve_toolchain = 371; + ResolveToolchainResponse resolve_toolchain_response = 372; - GetAgentServerCommand get_agent_server_command = 373; - AgentServerCommand agent_server_command = 374; + GetAgentServerCommand get_agent_server_command = 373; + AgentServerCommand agent_server_command = 374; - ExternalAgentsUpdated external_agents_updated = 375; - ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376; - NewExternalAgentVersionAvailable new_external_agent_version_available = 377; + ExternalAgentsUpdated external_agents_updated = 375; + ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376; + NewExternalAgentVersionAvailable new_external_agent_version_available = 377; - StashDrop stash_drop = 378; - StashApply stash_apply = 379; + StashDrop stash_drop = 378; + StashApply stash_apply = 379; - GitRenameBranch git_rename_branch = 380; + GitRenameBranch git_rename_branch = 380; - RemoteStarted remote_started = 381; + RemoteStarted remote_started = 381; - GetDirectoryEnvironment get_directory_environment = 382; - DirectoryEnvironment directory_environment = 383; + GetDirectoryEnvironment get_directory_environment = 382; + DirectoryEnvironment directory_environment = 383; - GetTreeDiff get_tree_diff = 384; - GetTreeDiffResponse get_tree_diff_response = 385; + GetTreeDiff get_tree_diff = 384; + GetTreeDiffResponse 
get_tree_diff_response = 385; - GetBlobContent get_blob_content = 386; - GetBlobContentResponse get_blob_content_response = 387; + GetBlobContent get_blob_content = 386; + GetBlobContentResponse get_blob_content_response = 387; - GitWorktreesResponse git_worktrees_response = 388; - GitGetWorktrees git_get_worktrees = 389; - GitCreateWorktree git_create_worktree = 390; + GitWorktreesResponse git_worktrees_response = 388; + GitGetWorktrees git_get_worktrees = 389; + GitCreateWorktree git_create_worktree = 390; - OpenImageByPath open_image_by_path = 391; - OpenImageResponse open_image_response = 392; - CreateImageForPeer create_image_for_peer = 393; + OpenImageByPath open_image_by_path = 391; + OpenImageResponse open_image_response = 392; + CreateImageForPeer create_image_for_peer = 393; + GitFileHistory git_file_history = 397; + GitFileHistoryResponse git_file_history_response = 398; - GitFileHistory git_file_history = 397; - GitFileHistoryResponse git_file_history_response = 398; + RunGitHook run_git_hook = 399; - RunGitHook run_git_hook = 399; + GitDeleteBranch git_delete_branch = 400; - GitDeleteBranch git_delete_branch = 400; + ExternalExtensionAgentsUpdated external_extension_agents_updated = 401; - ExternalExtensionAgentsUpdated external_extension_agents_updated = 401; + GitCreateRemote git_create_remote = 402; + GitRemoveRemote git_remove_remote = 403; - GitCreateRemote git_create_remote = 402; - GitRemoveRemote git_remove_remote = 403; + TrustWorktrees trust_worktrees = 404; + RestrictWorktrees restrict_worktrees = 405; - TrustWorktrees trust_worktrees = 404; - RestrictWorktrees restrict_worktrees = 405; + ShareAgentThread share_agent_thread = 406; + GetSharedAgentThread get_shared_agent_thread = 407; + GetSharedAgentThreadResponse get_shared_agent_thread_response = 408; - ShareAgentThread share_agent_thread = 406; - GetSharedAgentThread get_shared_agent_thread = 407; - GetSharedAgentThreadResponse get_shared_agent_thread_response = 408; + 
FindSearchCandidatesChunk find_search_candidates_chunk = 409; + FindSearchCandidatesCancelled find_search_candidates_cancelled = 410; + GetContextServerCommand get_context_server_command = 411; + ContextServerCommand context_server_command = 412; - FindSearchCandidatesChunk find_search_candidates_chunk = 409; - FindSearchCandidatesCancelled find_search_candidates_cancelled = 410; - GetContextServerCommand get_context_server_command = 411; - ContextServerCommand context_server_command = 412; + AllocateWorktreeId allocate_worktree_id = 413; + AllocateWorktreeIdResponse allocate_worktree_id_response = 414; - AllocateWorktreeId allocate_worktree_id = 413; - AllocateWorktreeIdResponse allocate_worktree_id_response = 414; + DownloadFileByPath download_file_by_path = 415; + DownloadFileResponse download_file_response = 416; + CreateFileForPeer create_file_for_peer = 417; - DownloadFileByPath download_file_by_path = 415; - DownloadFileResponse download_file_response = 416; - CreateFileForPeer create_file_for_peer = 417; + SemanticTokens semantic_tokens = 418; + SemanticTokensResponse semantic_tokens_response = 419; + RefreshSemanticTokens refresh_semantic_tokens = 420; + GetFoldingRanges get_folding_ranges = 421; + GetFoldingRangesResponse get_folding_ranges_response = 422; - SemanticTokens semantic_tokens = 418; - SemanticTokensResponse semantic_tokens_response = 419; - RefreshSemanticTokens refresh_semantic_tokens = 420; - GetFoldingRanges get_folding_ranges = 421; - GetFoldingRangesResponse get_folding_ranges_response = 422; + GetRemoteProfilingData get_remote_profiling_data = 423; + GetRemoteProfilingDataResponse get_remote_profiling_data_response = 424; - GetRemoteProfilingData get_remote_profiling_data = 423; - GetRemoteProfilingDataResponse get_remote_profiling_data_response = 424; - - SpawnKernel spawn_kernel = 426; - SpawnKernelResponse spawn_kernel_response = 427; - KillKernel kill_kernel = 428; - GitDiffStat git_diff_stat = 429; - GitDiffStatResponse 
git_diff_stat_response = 430; // current max - } + SpawnKernel spawn_kernel = 426; + SpawnKernelResponse spawn_kernel_response = 427; + KillKernel kill_kernel = 428; + GitDiffStat git_diff_stat = 429; + GitDiffStatResponse git_diff_stat_response = 430; // current max + } - reserved 87 to 88; - reserved 90; - reserved 102 to 103; - reserved 158 to 161; - reserved 164; - reserved 166 to 169; - reserved 175 to 185; - reserved 188 to 195; - reserved 197; - reserved 198 to 202; - reserved 205 to 206; - reserved 221; - reserved 224 to 231; - reserved 234 to 236; - reserved 239 to 240; - reserved 244; - reserved 246 to 256; - reserved 259; - reserved 270; - reserved 280 to 281; - reserved 332 to 333; - reserved 394 to 396; + reserved 87 to 88; + reserved 90; + reserved 102 to 103; + reserved 158 to 161; + reserved 164; + reserved 166 to 169; + reserved 175 to 185; + reserved 188 to 195; + reserved 197; + reserved 198 to 202; + reserved 205 to 206; + reserved 221; + reserved 224 to 231; + reserved 234 to 236; + reserved 239 to 240; + reserved 244; + reserved 246 to 256; + reserved 259; + reserved 270; + reserved 280 to 281; + reserved 332 to 333; + reserved 394 to 396; } message Hello { - PeerId peer_id = 1; + PeerId peer_id = 1; } message Ping {} @@ -514,37 +512,37 @@ message Ping {} message Ack {} message Error { - string message = 1; - ErrorCode code = 2; - repeated string tags = 3; + string message = 1; + ErrorCode code = 2; + repeated string tags = 3; } enum ErrorCode { - Internal = 0; - NoSuchChannel = 1; - Disconnected = 2; - SignedOut = 3; - UpgradeRequired = 4; - Forbidden = 5; - NeedsCla = 7; - NotARootChannel = 8; - BadPublicNesting = 9; - CircularNesting = 10; - WrongMoveTarget = 11; - UnsharedItem = 12; - NoSuchProject = 13; - DevServerProjectPathDoesNotExist = 16; - RemoteUpgradeRequired = 17; - RateLimitExceeded = 18; - CommitFailed = 19; - reserved 6; - reserved 14 to 15; + Internal = 0; + NoSuchChannel = 1; + Disconnected = 2; + SignedOut = 3; + 
UpgradeRequired = 4; + Forbidden = 5; + NeedsCla = 7; + NotARootChannel = 8; + BadPublicNesting = 9; + CircularNesting = 10; + WrongMoveTarget = 11; + UnsharedItem = 12; + NoSuchProject = 13; + DevServerProjectPathDoesNotExist = 16; + RemoteUpgradeRequired = 17; + RateLimitExceeded = 18; + CommitFailed = 19; + reserved 6; + reserved 14 to 15; } message EndStream {} message Test { - uint64 id = 1; + uint64 id = 1; } message FlushBufferedMessages {} @@ -554,19 +552,19 @@ message FlushBufferedMessagesResponse {} message RemoteStarted {} message SpawnKernel { - string kernel_name = 1; - string working_directory = 2; - uint64 project_id = 3; - string command = 4; - repeated string args = 5; + string kernel_name = 1; + string working_directory = 2; + uint64 project_id = 3; + string command = 4; + repeated string args = 5; } message SpawnKernelResponse { - string kernel_id = 1; - string connection_file = 2; + string kernel_id = 1; + string connection_file = 2; } message KillKernel { - string kernel_id = 1; - uint64 project_id = 2; + string kernel_id = 1; + uint64 project_id = 2; } From 30844b906383b07e01ec81baae118e2be52c5bf5 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Sun, 1 Mar 2026 00:50:33 +0100 Subject: [PATCH 187/548] Add PR 50413 to `.git-blame-ignore-revs` (#50421) This PR adds https://github.com/zed-industries/zed/pull/50413 to the `.git-blame-ignore-revs` file. 
Release Notes: - N/A --- .git-blame-ignore-revs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 041826ad8a676e154edac9c306cee4e5816e6f62..2650e36997655b1ab7376e8ed7052a8fc24b2fc6 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -38,3 +38,7 @@ ffdda588b41f7d9d270ffe76cab116f828ad545e # 2026-02-27 Format Tree-sitter query files # https://github.com/zed-industries/zed/pull/50138 5ed538f49c54ca464bb9d1e59446060a3a925668 + +# 2026-02-28 Format proto files +# https://github.com/zed-industries/zed/pull/50413 +56a88a848be09cbcb66bcb3d85ec1f5644909f72 From 4668aeb7284780cca830ba1173c4d3eb8bd11e2b Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Sun, 1 Mar 2026 00:59:00 +0100 Subject: [PATCH 188/548] ci: Install newer LLVM toolchain on Ubuntu 20.04 runners (#50414) Release Notes: - N/A --- .github/workflows/release.yml | 8 ++--- .github/workflows/release_nightly.yml | 8 ++--- .github/workflows/run_bundling.yml | 8 ++--- .../gpui_linux/src/linux/headless/client.rs | 1 + script/bundle-linux | 2 +- script/linux | 12 ++++++- .../xtask/src/tasks/workflows/run_bundling.rs | 36 +++++++++---------- 7 files changed, 42 insertions(+), 33 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 69bb80b40d7e7f21db21562e7aceb5a98706801f..8ac5eeb998f5102d5af9b2775a82093b6ea29858 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -299,8 +299,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - CC: clang - CXX: clang++ + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -341,8 +341,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - CC: 
clang - CXX: clang++ + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index faf7788eeafa856985ba5bdf21a1a37c5fdd8506..7f243411b4f540d6c7bc611df4883f5341d6a83b 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -103,8 +103,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - CC: clang - CXX: clang++ + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -151,8 +151,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - CC: clang - CXX: clang++ + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index 683d74a264e53e621e77730a91cdd01adff17316..7cb1665f9d0bd4fe3b0f3c05527bf39aab5f610a 100644 --- a/.github/workflows/run_bundling.yml +++ b/.github/workflows/run_bundling.yml @@ -19,8 +19,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - CC: clang - CXX: clang++ + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -60,8 +60,8 @@ jobs: CARGO_INCREMENTAL: 0 ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }} - CC: clang - CXX: clang++ + CC: clang-18 + CXX: clang++-18 steps: - name: steps::checkout_repo uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 diff --git a/crates/gpui_linux/src/linux/headless/client.rs b/crates/gpui_linux/src/linux/headless/client.rs index 6dbdc556751b27d144feb4a40c916910bc6ff5f7..56cc9e8df008abcb0904c7178e5b333eaade1d84 100644 --- a/crates/gpui_linux/src/linux/headless/client.rs +++ b/crates/gpui_linux/src/linux/headless/client.rs @@ -64,6 +64,7 @@ impl LinuxClient for HeadlessClient { None } + #[cfg(feature = "screen-capture")] fn screen_capture_sources( &self, ) -> futures::channel::oneshot::Receiver>>> diff --git a/script/bundle-linux b/script/bundle-linux index 4e58ac315bd231fd4ae9208abbc15007abc30631..c89d21082dd6c33a11ffcfc908ef87a91554dc18 100755 --- a/script/bundle-linux +++ b/script/bundle-linux @@ -71,7 +71,7 @@ if "$rustup_installed"; then rustup target add "$remote_server_triple" fi -export CC=$(which clang) +export CC=${CC:-$(which clang)} # Build binary in release mode export RUSTFLAGS="${RUSTFLAGS:-} -C link-args=-Wl,--disable-new-dtags,-rpath,\$ORIGIN/../lib" diff --git a/script/linux b/script/linux index 3f098ec21e3a84734d5f25c7b63b12d8588b8264..c20f154eaf2f7e6b79c7f8539e9e8c13271c3ecd 100755 --- a/script/linux +++ b/script/linux @@ -57,11 +57,21 @@ if [[ -n $apt ]]; then elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+12|Linux Mint 21|.+22\.04)' /etc/os-release); then deps+=( mold libstdc++-12-dev ) elif (grep -qP 'PRETTY_NAME="((Debian|Raspbian).+11|Linux Mint 20|.+20\.04)' /etc/os-release); then - deps+=( libstdc++-10-dev ) + # Ubuntu 20.04 ships clang-10 and libstdc++-10 which lack adequate C++20 + # support for building webrtc-sys (requires -std=c++20, lambdas in + # unevaluated contexts from clang 17+, and working std::ranges in the + # stdlib). clang-18 is available in focal-security/universe as an official + # backport, and libstdc++-11-dev from the ubuntu-toolchain-r PPA provides + # headers with working pointer_traits/contiguous_range. 
+ # Note: the prebuilt libwebrtc.a is compiled with libstdc++, so we must + # use libstdc++ (not libc++) to avoid ABI mismatches at link time. + $maysudo add-apt-repository -y ppa:ubuntu-toolchain-r/test + deps+=( clang-18 libstdc++-11-dev ) fi $maysudo "$apt" update $maysudo "$apt" install -y "${deps[@]}" + finalize exit 0 fi diff --git a/tooling/xtask/src/tasks/workflows/run_bundling.rs b/tooling/xtask/src/tasks/workflows/run_bundling.rs index 2de7000360b2be564efb8107da47964dbab0ceb6..6b9d3b9e36c3ba3b3de4b02a53e83ee4faaa4785 100644 --- a/tooling/xtask/src/tasks/workflows/run_bundling.rs +++ b/tooling/xtask/src/tasks/workflows/run_bundling.rs @@ -4,9 +4,7 @@ use crate::tasks::workflows::{ nix_build::build_nix, release::ReleaseBundleJobs, runners::{Arch, Platform, ReleaseChannel}, - steps::{ - DEFAULT_REPOSITORY_OWNER_GUARD, FluentBuilder, NamedJob, dependant_job, named, use_clang, - }, + steps::{DEFAULT_REPOSITORY_OWNER_GUARD, FluentBuilder, NamedJob, dependant_job, named}, vars::{assets, bundle_envs}, }; @@ -145,22 +143,22 @@ pub(crate) fn bundle_linux( }; NamedJob { name: format!("bundle_linux_{arch}"), - job: use_clang( - bundle_job(deps) - .runs_on(arch.linux_bundler()) - .envs(bundle_envs(platform)), - ) - .add_step(steps::checkout_repo()) - .when_some(release_channel, |job, release_channel| { - job.add_step(set_release_channel(platform, release_channel)) - }) - .add_step(steps::setup_sentry()) - .map(steps::install_linux_dependencies) - .add_step(steps::script("./script/bundle-linux")) - .add_step(upload_artifact(&format!("target/release/{artifact_name}"))) - .add_step(upload_artifact(&format!( - "target/{remote_server_artifact_name}" - ))), + job: bundle_job(deps) + .runs_on(arch.linux_bundler()) + .envs(bundle_envs(platform)) + .add_env(Env::new("CC", "clang-18")) + .add_env(Env::new("CXX", "clang++-18")) + .add_step(steps::checkout_repo()) + .when_some(release_channel, |job, release_channel| { + job.add_step(set_release_channel(platform, release_channel)) 
+ }) + .add_step(steps::setup_sentry()) + .map(steps::install_linux_dependencies) + .add_step(steps::script("./script/bundle-linux")) + .add_step(upload_artifact(&format!("target/release/{artifact_name}"))) + .add_step(upload_artifact(&format!( + "target/{remote_server_artifact_name}" + ))), } } From 6a4dfd46ba57bbb4bdabd45751790ac40af8e792 Mon Sep 17 00:00:00 2001 From: scuzqy <80660355+scuzqy@users.noreply.github.com> Date: Sun, 1 Mar 2026 10:47:30 +0800 Subject: [PATCH 189/548] time_format: Add Windows implementation (#50227) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 图片 Release Notes: - Date and time formatting on Windows now respects the system time formatting preferences. --------- Co-authored-by: John Tur --- Cargo.lock | 1 + Cargo.toml | 2 + crates/etw_tracing/Cargo.toml | 8 +- crates/time_format/Cargo.toml | 3 + crates/time_format/src/time_format.rs | 115 ++++++++++++++++++++++---- 5 files changed, 108 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c28d777c3f4723fc5a498e3047de759e711dafad..18fd93aed13bebee782d4204bfbf095e750d7096 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -17527,6 +17527,7 @@ dependencies = [ "core-foundation-sys", "sys-locale", "time", + "windows 0.61.3", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 148a909ccc8edb8f37ea7fd992ea6464c46ce0d5..39d331fd9ebde7ac0b861b6bf7dfc2ad28805c10 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -783,11 +783,13 @@ zstd = "0.11" version = "0.61" features = [ "Foundation_Numerics", + "Globalization_DateTimeFormatting", "Storage_Search", "Storage_Streams", "System_Threading", "UI_ViewManagement", "Wdk_System_SystemServices", + "Win32_Foundation", "Win32_Globalization", "Win32_Graphics_Direct3D", "Win32_Graphics_Direct3D11", diff --git a/crates/etw_tracing/Cargo.toml b/crates/etw_tracing/Cargo.toml index 7f287307bc90e4462257fbeae8d5716dc5056ee7..c46e3b820a950f30f991f7de3dd27510db8825f8 100644 --- a/crates/etw_tracing/Cargo.toml 
+++ b/crates/etw_tracing/Cargo.toml @@ -21,10 +21,4 @@ workspace.workspace = true [target.'cfg(target_os = "windows")'.dependencies] wprcontrol = { git = "https://github.com/zed-industries/wprcontrol", rev = "cd811f7" } windows-core = "0.61" -windows = { workspace = true, features = [ - "Win32_Foundation", - "Win32_System_Com", - "Win32_System_Ole", - "Win32_System_Variant", - "Win32_UI_Shell", -] } +windows.workspace = true diff --git a/crates/time_format/Cargo.toml b/crates/time_format/Cargo.toml index b598d19887e128a0c5951c1d1bd5ec42f27f975b..7f5f2d9f1b56666036816d43bfa3564bf9721f05 100644 --- a/crates/time_format/Cargo.toml +++ b/crates/time_format/Cargo.toml @@ -19,3 +19,6 @@ time.workspace = true [target.'cfg(target_os = "macos")'.dependencies] core-foundation.workspace = true core-foundation-sys.workspace = true + +[target.'cfg(target_os = "windows")'.dependencies] +windows.workspace = true diff --git a/crates/time_format/src/time_format.rs b/crates/time_format/src/time_format.rs index 25a7ae84232b69570e8c800c5955e684a13dc08a..bbf214623eb4b5b9dd978a675551c25f5e937a8d 100644 --- a/crates/time_format/src/time_format.rs +++ b/crates/time_format/src/time_format.rs @@ -86,10 +86,25 @@ fn format_absolute_date( macos::format_date(×tamp) } } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + if !enhanced_date_formatting { + return windows::format_date(×tamp); + } + + let timestamp_date = timestamp.date(); + let reference_date = reference.date(); + if timestamp_date == reference_date { + "Today".to_string() + } else if reference_date.previous_day() == Some(timestamp_date) { + "Yesterday".to_string() + } else { + windows::format_date(×tamp) + } + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); 
format_timestamp_naive_date( @@ -105,10 +120,13 @@ fn format_absolute_time(timestamp: OffsetDateTime) -> String { { macos::format_time(×tamp) } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + windows::format_time(×tamp) + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); format_timestamp_naive_time( @@ -123,7 +141,7 @@ fn format_absolute_timestamp( reference: OffsetDateTime, #[allow(unused_variables)] enhanced_date_formatting: bool, ) -> String { - #[cfg(target_os = "macos")] + #[cfg(any(target_os = "macos", target_os = "windows"))] { if !enhanced_date_formatting { return format!( @@ -147,10 +165,9 @@ fn format_absolute_timestamp( ) } } - #[cfg(not(target_os = "macos"))] + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences format_timestamp_fallback(timestamp, reference) } } @@ -176,10 +193,25 @@ fn format_absolute_date_medium( macos::format_date_medium(×tamp) } } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + if !enhanced_formatting { + return windows::format_date_medium(×tamp); + } + + let timestamp_date = timestamp.date(); + let reference_date = reference.date(); + if timestamp_date == reference_date { + "Today".to_string() + } else if reference_date.previous_day() == Some(timestamp_date) { + "Yesterday".to_string() + } else { + windows::format_date_medium(×tamp) + } + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences - // todo(windows) respect user's date/time preferences let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| 
String::from("en-US"))); if !enhanced_formatting { @@ -212,7 +244,11 @@ fn format_absolute_timestamp_medium( { format_absolute_date_medium(timestamp, reference, false) } - #[cfg(not(target_os = "macos"))] + #[cfg(target_os = "windows")] + { + format_absolute_date_medium(timestamp, reference, false) + } + #[cfg(not(any(target_os = "macos", target_os = "windows")))] { // todo(linux) respect user's date/time preferences // todo(windows) respect user's date/time preferences @@ -360,7 +396,7 @@ fn format_timestamp_naive_date( } } -#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] fn format_timestamp_naive_date_medium( timestamp_local: OffsetDateTime, is_12_hour_time: bool, @@ -415,10 +451,10 @@ pub fn format_timestamp_naive( } } -#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] static CURRENT_LOCALE: std::sync::OnceLock = std::sync::OnceLock::new(); -#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] fn format_timestamp_fallback(timestamp: OffsetDateTime, reference: OffsetDateTime) -> String { let current_locale = CURRENT_LOCALE .get_or_init(|| sys_locale::get_locale().unwrap_or_else(|| String::from("en-US"))); @@ -428,7 +464,7 @@ fn format_timestamp_fallback(timestamp: OffsetDateTime, reference: OffsetDateTim } /// Returns `true` if the locale is recognized as a 12-hour time locale. 
-#[cfg(not(target_os = "macos"))] +#[cfg(not(any(target_os = "macos", target_os = "windows")))] fn is_12_hour_time_by_locale(locale: &str) -> bool { [ "es-MX", "es-CO", "es-SV", "es-NI", @@ -522,6 +558,57 @@ mod macos { } } +#[cfg(target_os = "windows")] +mod windows { + use windows::Globalization::DateTimeFormatting::DateTimeFormatter; + + pub fn format_time(timestamp: &time::OffsetDateTime) -> String { + format_with_formatter(DateTimeFormatter::ShortTime(), timestamp, true) + } + + pub fn format_date(timestamp: &time::OffsetDateTime) -> String { + format_with_formatter(DateTimeFormatter::ShortDate(), timestamp, false) + } + + pub fn format_date_medium(timestamp: &time::OffsetDateTime) -> String { + format_with_formatter( + DateTimeFormatter::CreateDateTimeFormatter(windows::core::h!( + "month.abbreviated day year.full" + )), + timestamp, + false, + ) + } + + fn format_with_formatter( + formatter: windows::core::Result, + timestamp: &time::OffsetDateTime, + is_time: bool, + ) -> String { + formatter + .and_then(|formatter| formatter.Format(to_winrt_datetime(timestamp))) + .map(|hstring| hstring.to_string()) + .unwrap_or_else(|_| { + if is_time { + super::format_timestamp_naive_time(*timestamp, true) + } else { + super::format_timestamp_naive_date(*timestamp, *timestamp, true) + } + }) + } + + fn to_winrt_datetime(timestamp: &time::OffsetDateTime) -> windows::Foundation::DateTime { + // DateTime uses 100-nanosecond intervals since January 1, 1601 (UTC). 
+ const WINDOWS_EPOCH: time::OffsetDateTime = time::macros::datetime!(1601-01-01 0:00 UTC); + let duration_since_winrt_epoch = *timestamp - WINDOWS_EPOCH; + let universal_time = duration_since_winrt_epoch.whole_nanoseconds() / 100; + + windows::Foundation::DateTime { + UniversalTime: universal_time as i64, + } + } +} + #[cfg(test)] mod tests { use super::*; From ce1517a3a1e1b731d9adffbebd85aa815b718162 Mon Sep 17 00:00:00 2001 From: Xiaobo Liu Date: Sun, 1 Mar 2026 11:42:19 +0800 Subject: [PATCH 190/548] explorer_command_injector: Avoid COM out-pointer overwrite in class factory exports (#49210) Release Notes: - N/A Signed-off-by: Xiaobo Liu --- .../src/explorer_command_injector.rs | 25 ++++++++----------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/crates/explorer_command_injector/src/explorer_command_injector.rs b/crates/explorer_command_injector/src/explorer_command_injector.rs index bfa2a0326c9975037ed860acfdee7cd32e3075d8..1bd85339a9fd8958c496eccf2bedcb1610c56557 100644 --- a/crates/explorer_command_injector/src/explorer_command_injector.rs +++ b/crates/explorer_command_injector/src/explorer_command_injector.rs @@ -106,18 +106,17 @@ impl IClassFactory_Impl for ExplorerCommandInjectorFactory_Impl { riid: *const windows_core::GUID, ppvobject: *mut *mut core::ffi::c_void, ) -> Result<()> { + if ppvobject.is_null() || riid.is_null() { + return Err(windows::Win32::Foundation::E_POINTER.into()); + } + unsafe { *ppvobject = std::ptr::null_mut(); } + if punkouter.is_none() { let factory: IExplorerCommand = ExplorerCommandInjector {}.into(); - let ret = unsafe { factory.query(riid, ppvobject).ok() }; - if ret.is_ok() { - unsafe { - *ppvobject = factory.into_raw(); - } - } - ret + unsafe { factory.query(riid, ppvobject).ok() } } else { Err(E_INVALIDARG.into()) } @@ -145,19 +144,17 @@ extern "system" fn DllGetClassObject( iid: *const GUID, out: *mut *mut std::ffi::c_void, ) -> HRESULT { + if out.is_null() || class_id.is_null() || iid.is_null() { + 
return E_INVALIDARG; + } + unsafe { *out = std::ptr::null_mut(); } let class_id = unsafe { *class_id }; if class_id == MODULE_ID { let instance: IClassFactory = ExplorerCommandInjectorFactory {}.into(); - let ret = unsafe { instance.query(iid, out) }; - if ret.is_ok() { - unsafe { - *out = instance.into_raw(); - } - } - ret + unsafe { instance.query(iid, out) } } else { CLASS_E_CLASSNOTAVAILABLE } From 72b30223cd6f0500fe6125d1b39448afb591823b Mon Sep 17 00:00:00 2001 From: John Tur Date: Sat, 28 Feb 2026 23:03:49 -0500 Subject: [PATCH 191/548] Improve documentation for ETW profiling (#50426) Release Notes: - N/A --- docs/src/development.md | 26 ++++++++++---------------- 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/docs/src/development.md b/docs/src/development.md index d8ea0b3f980317ff1f38a8325534e57f321bd8de..b4c9ea387da020be8d2d0dd517b0c5998bde41e2 100644 --- a/docs/src/development.md +++ b/docs/src/development.md @@ -88,33 +88,27 @@ in-depth examples and explanations. ## ETW Profiling on Windows -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). +Zed supports performance profiling with Event Tracing for Windows (ETW) to capture detailed performance data, including CPU, GPU, memory, disk, and file I/O activity. Data is saved to an `.etl` file, which can be opened in standard profiling tools for analysis. -Zed supports Event Tracing for Windows (ETW) to capture detailed performance data. You can record CPU, GPU, disk I/O, and file I/O activity, with optional heap allocation tracking. +ETW recordings may contain personally identifiable or security-sensitive information, such as paths to files and registry keys accessed, as well as process names. Please keep this in mind when sharing traces with others. 
### Recording a trace -Open the command palette and run: +Open the command palette and run one of the following: -- **`etw_tracing: Record Etw Trace`** — Records CPU, GPU, and I/O activity -- **`etw_tracing: Record Etw Trace With Heap Tracing`** — Includes heap allocation data for the Zed process +- `zed: record etw trace`: records CPU, GPU, memory, and I/O activity +- `zed: record etw trace with heap tracing`: includes heap allocation data for the Zed process -Zed prompts you to choose a save location for the `.etl` trace file. +Zed will prompt you to choose a save location for the `.etl` file, then request administrator permission. Once granted, recording will begin. ### Saving or canceling -While recording: +While a trace is recording, open the command palette and run one of the following: -- **`etw_tracing: Save Etw Trace`** — Stops recording and saves the trace to disk -- **`etw_tracing: Cancel Etw Trace`** — Stops recording without saving +- `zed: save etw trace`: stops recording and saves the trace to disk +- `zed: cancel etw trace`: stops recording without saving -Zed buffers trace data in memory. Recordings automatically save after 60 seconds if you don't manually stop them. - -### Analyzing traces - -Open `.etl` files with [Windows Performance Analyzer](https://learn.microsoft.com/en-us/windows-hardware/test/wpt/windows-performance-analyzer) to inspect CPU stacks, GPU usage, disk I/O patterns, and heap allocations. - -**Note for existing keybindings**: The `etw_tracing::StopEtwTrace` action was renamed to `etw_tracing::SaveEtwTrace`. Update any custom keybindings. +Recordings automatically save after 60 seconds if not stopped manually. 
## Contributor links From c8e5494d6add89f20bb73cf82e62e15c1f3593ae Mon Sep 17 00:00:00 2001 From: Liffindra Angga Zaaldian <3760093+findrakecil@users.noreply.github.com> Date: Sun, 1 Mar 2026 16:52:30 +0700 Subject: [PATCH 192/548] docs: Improve Ansible docs (#49682) Small fixes to documentation: - split paragraph for better readability and context understanding. - add curly braces for settings example for consistency - capitalize abbreviation (YAML, JSON) - change note style for consistency Release Notes: - N/A --------- Co-authored-by: Kunall Banerjee --- docs/src/languages/ansible.md | 68 +++++++++++++++++++---------------- 1 file changed, 37 insertions(+), 31 deletions(-) diff --git a/docs/src/languages/ansible.md b/docs/src/languages/ansible.md index 99980a1a1642717d8306cf8d98ce81be33326207..fd595bc7e3391ab95d90c3d4e34742e6a8bd7c1f 100644 --- a/docs/src/languages/ansible.md +++ b/docs/src/languages/ansible.md @@ -14,10 +14,13 @@ Support for Ansible in Zed is provided via a community-maintained [Ansible exten ### File detection -To avoid mishandling non-Ansible YAML files, the Ansible Language is not associated with any file extensions by default. To change this behavior you can add a `"file_types"` section to Zed settings inside your project (`.zed/settings.json`) or your Zed user settings (`~/.config/zed/settings.json`) to match your folder/naming conventions. For example: +To avoid mishandling non-Ansible YAML files, the Ansible Language is not associated with any file extensions by default. + +To change this behavior, you can add a `"file_types"` section to Zed settings inside your project (`.zed/settings.json`) or your Zed user settings (`~/.config/zed/settings.json`) to match your folder/naming conventions. 
For example: ```json [settings] -"file_types": { +{ + "file_types": { "Ansible": [ "**.ansible.yml", "**.ansible.yaml", @@ -39,6 +42,7 @@ To avoid mishandling non-Ansible YAML files, the Ansible Language is not associa "**playbook*.yaml" ] } +} ``` Feel free to modify this list as per your needs. @@ -47,34 +51,36 @@ Feel free to modify this list as per your needs. If your inventory file is in the YAML format, you can either: -- Append the `ansible-lint` inventory json schema to it via the following comment at the top of your inventory file: +- Append the `ansible-lint` inventory JSON schema to it via the following comment at the top of your inventory file: ```yml # yaml-language-server: $schema=https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json ``` -- Or configure the yaml language server settings to set this schema for all your inventory files, that match your inventory pattern, under your Zed settings ([ref](https://zed.dev/docs/languages/yaml)): +- or, configure the YAML language server settings to set this schema for all your inventory files, that match your inventory pattern, under your Zed settings ([ref](https://zed.dev/docs/languages/yaml)): ```json [settings] -"lsp": { +{ + "lsp": { "yaml-language-server": { "settings": { "yaml": { "schemas": { "https://raw.githubusercontent.com/ansible/ansible-lint/main/src/ansiblelint/schemas/inventory.json": [ "./inventory/*.yaml", - "hosts.yml", + "hosts.yml" ] } } } } -}, + } +} ``` ### LSP Configuration -By default, the following default config is passed to the Ansible language server. It conveniently mirrors the defaults set by [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig/blob/03bc581e05e81d33808b42b2d7e76d70adb3b595/lua/lspconfig/configs/ansiblels.lua) for the Ansible language server: +By default, the following configuration is passed to the Ansible language server. 
It conveniently mirrors the defaults set by [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig/blob/03bc581e05e81d33808b42b2d7e76d70adb3b595/lua/lspconfig/configs/ansiblels.lua) for the Ansible language server: ```json { @@ -99,31 +105,32 @@ By default, the following default config is passed to the Ansible language serve } ``` -> [!NOTE] -> In order for linting to work, ensure that `ansible-lint` is installed and discoverable on your PATH +> **Note:** In order for linting to work, ensure that `ansible-lint` is installed and discoverable on your `$PATH`. When desired, any of the above default settings can be overridden under the `"lsp"` section of your Zed settings file. For example: ```json [settings] -"lsp": { - // Note, the Zed Ansible extension prefixes all settings with `ansible` - // so instead of using `ansible.ansible.path` use `ansible.path`. - "ansible-language-server": { - "settings": { - "ansible": { - "path": "ansible" - }, - "executionEnvironment": { - "enabled": false - }, - "python": { - "interpreterPath": "python3" - }, - "validation": { - "enabled": false, // disable validation - "lint": { - "enabled": false, // disable ansible-lint - "path": "ansible-lint" +{ + "lsp": { + // The Zed Ansible extension prefixes all settings with `ansible` + // so use `ansible.path` instead of `ansible.ansible.path`. + "ansible-language-server": { + "settings": { + "ansible": { + "path": "ansible" + }, + "executionEnvironment": { + "enabled": false + }, + "python": { + "interpreterPath": "python3" + }, + "validation": { + "enabled": false, + "lint": { + "enabled": false, + "path": "ansible-lint" + } } } } @@ -131,5 +138,4 @@ When desired, any of the above default settings can be overridden under the `"ls } ``` -A full list of options/settings, that can be passed to the server, can be found at the project's page [here](https://github.com/ansible/vscode-ansible/blob/5a89836d66d470fb9d20e7ea8aa2af96f12f61fb/docs/als/settings.md). 
-Feel free to modify option values as needed. +A full list of options/settings that can be passed to the server can be found at the project's page [here](https://github.com/ansible/vscode-ansible/blob/main/docs/als/settings.md). From 924c74bd3b7f1733543a482f86c1230df4663ba1 Mon Sep 17 00:00:00 2001 From: Dale Seo <5466341+DaleSeo@users.noreply.github.com> Date: Sun, 1 Mar 2026 05:02:33 -0500 Subject: [PATCH 193/548] docs: Update File History menu label from Open to View (#49859) Fixed the incorrect menu label from "Open File History" to "View File History" in the docs so it matches the actual UI. 2026-02-22 at 22 49 35 Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A Co-authored-by: Kunall Banerjee --- docs/.doc-examples/complex-feature.md | 8 ++++---- docs/src/git.md | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/.doc-examples/complex-feature.md b/docs/.doc-examples/complex-feature.md index 6a859ce5041f0e39834cc5f47f5b18248a15295e..745e6b3bcdc97dc35092bb651903f37435acc1ef 100644 --- a/docs/.doc-examples/complex-feature.md +++ b/docs/.doc-examples/complex-feature.md @@ -91,11 +91,11 @@ To disable word diff for specific languages only, add this to your settings.json File History shows the commit history for an individual file. Each entry displays the commit's author, timestamp, and message. Selecting a commit opens a diff view filtered to show only the changes made to that file in that commit. 
-To open File History: +To view File History: -- Right-click on a file in the Project Panel and select "Open File History" -- Right-click on a file in the Git Panel and select "Open File History" -- Right-click on an editor tab and select "Open File History" +- Right-click on a file in the Project Panel and select "View File History" +- Right-click on a file in the Git Panel and select "View File History" +- Right-click on an editor tab and select "View File History" - Use the Command Palette and search for "file history" ## Fetch, Push, and Pull {#fetch-push-pull} diff --git a/docs/src/git.md b/docs/src/git.md index b33aa0690cbad99f792729dd780ab03716d0dc4c..f7b524925195a80af05387ad1b063ceccff66436 100644 --- a/docs/src/git.md +++ b/docs/src/git.md @@ -99,11 +99,11 @@ You can switch between modes at any time. Your preference applies to [Project Di File History shows the commit history for an individual file. Each entry displays the commit's author, timestamp, and message. Selecting a commit opens a diff view filtered to show only the changes made to that file in that commit. -To open File History: +To view File History: -- Right-click on a file in the Project Panel and select "Open File History" -- Right-click on a file in the Git Panel and select "Open File History" -- Right-click on an editor tab and select "Open File History" +- Right-click on a file in the Project Panel and select "View File History" +- Right-click on a file in the Git Panel and select "View File History" +- Right-click on an editor tab and select "View File History" - Use the Command Palette and search for "file history" ## Fetch, Push, and Pull From 32bd95107ef87bb9d3a35e2b6441e768d802f1d5 Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Sun, 1 Mar 2026 05:11:15 -0500 Subject: [PATCH 194/548] search: Fix popover spacing for split diff buttons (#49655) Closes #49571. 
| Before | After | |--------|--------| | image | image | Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed popover spacing for split diff buttons --- crates/search/src/buffer_search.rs | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index 9ec4a8259f5d92b41ef8e3fc300bb23d8503b301..35cd25dc389d522fc2a3d0ed88b8e06a9e181e67 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -117,18 +117,17 @@ impl Render for BufferSearchBar { .toggle_state(!is_split) .tooltip(Tooltip::element(move |_, cx| { v_flex() - .gap_1() - .child(Label::new("Unified")) + .child("Unified") .child( h_flex() .gap_0p5() - .text_sm() + .text_ui_sm(cx) .text_color(Color::Muted.color(cx)) .children(render_modifiers( &gpui::Modifiers::secondary_key(), PlatformStyle::platform(), None, - Some(TextSize::Default.rems(cx).into()), + Some(TextSize::Small.rems(cx).into()), false, )) .child("click to set as default"), @@ -168,18 +167,17 @@ impl Render for BufferSearchBar { .toggle_state(is_split) .tooltip(Tooltip::element(move |_, cx| { v_flex() - .gap_1() - .child(Label::new("Split")) + .child("Split") .child( h_flex() .gap_0p5() - .text_sm() + .text_ui_sm(cx) .text_color(Color::Muted.color(cx)) .children(render_modifiers( &gpui::Modifiers::secondary_key(), PlatformStyle::platform(), None, - Some(TextSize::Default.rems(cx).into()), + Some(TextSize::Small.rems(cx).into()), false, )) .child("click to set as default"), From a66d37a44833e819fe70972046e8414783dbaab3 Mon Sep 17 00:00:00 2001 From: DeltaCalcium Date: Sun, 1 Mar 2026 07:16:26 -0300 
Subject: [PATCH 195/548] docs: Update VS Code's outdated Copilot instructions to match other editors (#49823) Updated outdated instructions for configuring GitHub Copilot to match those on other editor's migration guides Before you mark this PR as ready for review, make sure that you have: - [X] Added a solid test coverage and/or screenshots from doing manual testing - [X] Done a self-review taking into account security and performance aspects - [X] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A Co-authored-by: Kunall Banerjee --- docs/src/migrate/vs-code.md | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/docs/src/migrate/vs-code.md b/docs/src/migrate/vs-code.md index 367cab469acb1969909457edecba8a10c633bfc4..820158c73ffc1ec2f869ad88e34fea4697e4fbec 100644 --- a/docs/src/migrate/vs-code.md +++ b/docs/src/migrate/vs-code.md @@ -317,18 +317,12 @@ If you’re used to GitHub Copilot in VS Code, you can do the same in Zed. You c #### Configuring GitHub Copilot -You should be able to sign-in to GitHub Copilot by clicking on the Zeta icon in the status bar and following the setup instructions. -You can also add this to your settings: +1. Open Settings with `Cmd+,` (macOS) or `Ctrl+,` (Linux/Windows) +2. Navigate to **AI → Edit Predictions** +3. Click **Configure** next to "Configure Providers" +4. Under **GitHub Copilot**, click **Sign in to GitHub** -```json -{ - "features": { - "edit_prediction_provider": "copilot" - } -} -``` - -To invoke completions, just start typing. Zed will offer suggestions inline for you to accept. +Once signed in, just start typing. Zed will offer suggestions inline for you to accept. 
#### Additional AI Options From 14358b711c9d789a1b39fb4683477e25fde783dc Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Sun, 1 Mar 2026 13:18:09 +0100 Subject: [PATCH 196/548] ci: Add check for protobuf formatting (#50418) This adds more checks for the protobuf files to CI Release Notes: - N/A --------- Co-authored-by: Anthony Eid --- .github/workflows/run_tests.yml | 4 ++++ crates/proto/proto/buf.yaml | 10 ++++++++++ script/clippy | 4 ++++ tooling/xtask/src/tasks/workflows/run_tests.rs | 12 +++++++++++- 4 files changed, 29 insertions(+), 1 deletion(-) diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 12a0dc2db1b6019e5e1e163f282f80e9bfcd0c66..96c763045cb75906e613744fcfb13764f617a278 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -700,6 +700,10 @@ jobs: with: input: crates/proto/proto/ against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/ + - name: run_tests::check_postgres_and_protobuf_migrations::buf_lint + run: buf lint crates/proto/proto + - name: run_tests::check_postgres_and_protobuf_migrations::check_protobuf_formatting + run: buf format --diff --exit-code crates/proto/proto timeout-minutes: 60 tests_pass: needs: diff --git a/crates/proto/proto/buf.yaml b/crates/proto/proto/buf.yaml index 93e819b2f771c2f2e3c032e6c50c0d126758ac19..37436d8d80f9435729d54da4326000be05b085f7 100644 --- a/crates/proto/proto/buf.yaml +++ b/crates/proto/proto/buf.yaml @@ -2,3 +2,13 @@ version: v1 breaking: use: - WIRE +lint: + except: + # Since we use post_build instead of buf this doesn't matter + - PACKAGE_DIRECTORY_MATCH + # This is internal to Zed only so we don't enforce versions + - PACKAGE_VERSION_SUFFIX + # Style rules we don't enforce + - ENUM_VALUE_PREFIX + - ENUM_VALUE_UPPER_SNAKE_CASE + - ENUM_ZERO_VALUE_SUFFIX diff --git a/script/clippy b/script/clippy index 5c13b0b39cea3937a43ca54de074e5f65fae7c3b..617d99a5623e6406d1dc01247ea2f5b8e5c3b762 100755 
--- a/script/clippy +++ b/script/clippy @@ -16,4 +16,8 @@ if [[ -z "${GITHUB_ACTIONS+x}" ]]; then which typos >/dev/null 2>&1 || exit 0 typos --config typos.toml + + which buf >/dev/null 2>&1 || exit 0 + buf lint crates/proto/proto + buf format --diff --exit-code crates/proto/proto fi diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 9c5529cc5361d9581b1bd59de5bfb4201298c692..4130e53f724847b3f1bef5bf083f782cc7e9e0dc 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -533,6 +533,14 @@ pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob { .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/")) } + fn buf_lint() -> Step { + named::bash("buf lint crates/proto/proto") + } + + fn check_protobuf_formatting() -> Step { + named::bash("buf format --diff --exit-code crates/proto/proto") + } + named::job( release_job(&[]) .runs_on(runners::LINUX_DEFAULT) @@ -543,7 +551,9 @@ pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob { .add_step(steps::checkout_repo().with_full_history()) .add_step(ensure_fresh_merge()) .add_step(bufbuild_setup_action()) - .add_step(bufbuild_breaking_action()), + .add_step(bufbuild_breaking_action()) + .add_step(buf_lint()) + .add_step(check_protobuf_formatting()), ) } From ceb9d83dd73038246192be4bcdfffbecba457206 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Sun, 1 Mar 2026 10:20:32 -0600 Subject: [PATCH 197/548] ep: Add settled data fetching from snowflake (#50326) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI 
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/edit_prediction_cli/src/main.rs | 26 ++ .../edit_prediction_cli/src/pull_examples.rs | 348 +++++++++++++++++- 2 files changed, 372 insertions(+), 2 deletions(-) diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs index 1ab126d32ee19b2eb754f4ad31fbaf38ed5eaafc..207a69328fb07277c39463c0c6a460862c95fe42 100644 --- a/crates/edit_prediction_cli/src/main.rs +++ b/crates/edit_prediction_cli/src/main.rs @@ -55,6 +55,7 @@ use crate::load_project::run_load_project; use crate::paths::{FAILED_EXAMPLES_DIR, RUN_DIR}; use crate::predict::run_prediction; use crate::progress::Progress; +use crate::pull_examples::{fetch_settled_examples_after, parse_settled_after_input}; use crate::retrieve_context::run_context_retrieval; use crate::score::run_scoring; use crate::split_commit::SplitCommitArgs; @@ -132,6 +133,10 @@ Inputs can be file paths or special specifiers: Fetch rejected edit predictions from Snowflake after the given RFC3339 timestamp. These are predictions that were shown to users but rejected (useful for DPO training). + settled-after:{timestamp} + Fetch settled stream examples from Snowflake after the given RFC3339 timestamp. + These are examples from the edit prediction settled stream. + rated-after:{timestamp} Fetch user-rated edit predictions from Snowflake after the given RFC3339 timestamp. 
These are predictions that users explicitly rated as positive or negative via the @@ -166,6 +171,9 @@ Examples: # Read user-rated predictions ep read rated-after:2025-01-01T00:00:00Z -o rated.jsonl + # Read settled stream examples + ep read settled-after:2025-01-01T00:00:00Z -o settled.jsonl + # Read only positively rated predictions ep read rated-positive-after:2025-01-01T00:00:00Z -o positive.jsonl @@ -635,6 +643,7 @@ async fn load_examples( let mut captured_after_timestamps = Vec::new(); let mut rejected_after_timestamps = Vec::new(); let mut requested_after_timestamps = Vec::new(); + let mut settled_after_timestamps = Vec::new(); let mut rated_after_inputs: Vec<(String, Option)> = Vec::new(); let mut file_inputs = Vec::new(); @@ -651,6 +660,8 @@ async fn load_examples( pull_examples::parse_requested_after_input(input_string.as_ref()) { requested_after_timestamps.push(timestamp.to_string()); + } else if let Some(timestamp) = parse_settled_after_input(input_string.as_ref()) { + settled_after_timestamps.push(timestamp.to_string()); } else if let Some((timestamp, rating_filter)) = pull_examples::parse_rated_after_input(input_string.as_ref()) { @@ -718,6 +729,21 @@ async fn load_examples( examples.append(&mut requested_examples); } + if !settled_after_timestamps.is_empty() { + settled_after_timestamps.sort(); + + let mut settled_examples = fetch_settled_examples_after( + http_client.clone(), + &settled_after_timestamps, + max_rows_per_timestamp, + remaining_offset, + background_executor.clone(), + Some(MIN_CAPTURE_VERSION), + ) + .await?; + examples.append(&mut settled_examples); + } + if !rated_after_inputs.is_empty() { rated_after_inputs.sort(); diff --git a/crates/edit_prediction_cli/src/pull_examples.rs b/crates/edit_prediction_cli/src/pull_examples.rs index cacfc9bb679acdcb3c709736c6e4b5e79af861e8..e34fc62c031cbfa411d9d5a701a3e327d0be8166 100644 --- a/crates/edit_prediction_cli/src/pull_examples.rs +++ b/crates/edit_prediction_cli/src/pull_examples.rs @@ -5,24 
+5,25 @@ use http_client::{AsyncBody, HttpClient, Method, Request}; use indoc::indoc; use serde::Deserialize; use serde_json::{Value as JsonValue, json}; +use std::fmt::Write as _; use std::io::Read; use std::sync::Arc; use std::time::Duration; use telemetry_events::EditPredictionRating; -use zeta_prompt::ZetaPromptInput; +use zeta_prompt::{ZetaFormat, ZetaPromptInput, excerpt_range_for_format}; use crate::example::Example; use crate::progress::{InfoStyle, Progress, Step}; const EDIT_PREDICTION_DEPLOYMENT_EVENT: &str = "Edit Prediction Deployment"; use edit_prediction::example_spec::{ExampleSpec, TelemetrySource}; -use std::fmt::Write as _; pub(crate) const SNOWFLAKE_SUCCESS_CODE: &str = "090001"; pub(crate) const SNOWFLAKE_ASYNC_IN_PROGRESS_CODE: &str = "333334"; const PREDICTIVE_EDIT_REQUESTED_EVENT: &str = "Predictive Edit Requested"; const PREDICTIVE_EDIT_REJECTED_EVENT: &str = "Predictive Edit Rejected"; const EDIT_PREDICTION_RATED_EVENT: &str = "Edit Prediction Rated"; +const EDIT_PREDICTION_SETTLED_EVENT: &str = "Edit Prediction Settled"; /// Minimum Zed version for filtering captured examples. /// For example, `MinCaptureVersion { minor: 224, patch: 1 }` means only pull examples @@ -34,6 +35,7 @@ pub struct MinCaptureVersion { } const DEFAULT_STATEMENT_TIMEOUT_SECONDS: u64 = 120; +const SETTLED_STATEMENT_TIMEOUT_SECONDS: u64 = 240; pub(crate) const POLL_INTERVAL: Duration = Duration::from_secs(2); pub(crate) const MAX_POLL_ATTEMPTS: usize = 120; @@ -52,6 +54,11 @@ pub fn parse_requested_after_input(input: &str) -> Option<&str> { input.strip_prefix("requested-after:") } +/// Parse an input token of the form `settled-after:{timestamp}`. +pub fn parse_settled_after_input(input: &str) -> Option<&str> { + input.strip_prefix("settled-after:") +} + /// Parse an input token of the form `rated-after:{timestamp}`, `rated-positive-after:{timestamp}`, /// or `rated-negative-after:{timestamp}`. /// Returns `(timestamp, Option)` where `None` means all ratings. 
@@ -596,6 +603,163 @@ pub async fn fetch_requested_examples_after( Ok(all_examples) } +pub async fn fetch_settled_examples_after( + http_client: Arc, + after_timestamps: &[String], + max_rows_per_timestamp: usize, + offset: usize, + background_executor: BackgroundExecutor, + min_capture_version: Option, +) -> Result> { + if after_timestamps.is_empty() { + return Ok(Vec::new()); + } + + let progress = Progress::global(); + + let token = std::env::var("EP_SNOWFLAKE_API_KEY") + .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?; + let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context( + "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://.snowflakecomputing.com)", + )?; + let role = std::env::var("EP_SNOWFLAKE_ROLE").ok(); + + let mut all_examples = Vec::new(); + + for after_date in after_timestamps.iter() { + let step_progress_name = format!("settled>{after_date}"); + let step_progress = progress.start(Step::PullExamples, &step_progress_name); + step_progress.set_substatus("querying"); + + let statement = indoc! {r#" + WITH requested AS ( + SELECT + req.event_properties:request_id::string AS request_id, + req.device_id::string AS device_id, + req.time AS req_time, + req.time::string AS time, + req.event_properties:input AS input, + req.event_properties:format::string AS requested_format, + req.event_properties:output::string AS requested_output, + req.event_properties:zed_version::string AS zed_version + FROM events req + WHERE req.event_type = ? + AND req.event_properties:version = 'V3' + AND req.event_properties:input:can_collect_data = true + AND req.time > TRY_TO_TIMESTAMP_NTZ(?) 
+ ) + SELECT + req.request_id AS request_id, + req.device_id AS device_id, + req.time AS time, + req.input AS input, + req.requested_output AS requested_output, + settled.event_properties:settled_editable_region::string AS settled_editable_region, + req.requested_format AS requested_format, + req.zed_version AS zed_version + FROM requested req + INNER JOIN events settled + ON req.request_id = settled.event_properties:request_id::string + WHERE settled.event_type = ? + ORDER BY req.req_time ASC + LIMIT ? + OFFSET ? + "#}; + + let _ = min_capture_version; + let request = json!({ + "statement": statement, + "timeout": SETTLED_STATEMENT_TIMEOUT_SECONDS, + "database": "EVENTS", + "schema": "PUBLIC", + "warehouse": "DBT", + "role": role, + "bindings": { + "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, + "2": { "type": "TEXT", "value": after_date }, + "3": { "type": "TEXT", "value": EDIT_PREDICTION_SETTLED_EVENT }, + "4": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, + "5": { "type": "FIXED", "value": offset.to_string() } + } + }); + + let response = run_sql_with_polling( + http_client.clone(), + &base_url, + &token, + &request, + &step_progress, + background_executor.clone(), + ) + .await?; + + let total_rows = response + .result_set_meta_data + .as_ref() + .and_then(|m| m.num_rows) + .unwrap_or(response.data.len() as i64); + + let num_partitions = response + .result_set_meta_data + .as_ref() + .map(|m| m.partition_info.len()) + .unwrap_or(1) + .max(1); + + step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); + step_progress.set_substatus("parsing"); + + let column_indices = get_column_indices( + &response.result_set_meta_data, + &[ + "request_id", + "device_id", + "time", + "input", + "requested_output", + "settled_editable_region", + "requested_format", + "zed_version", + ], + ); + + all_examples.extend(settled_examples_from_response(&response, &column_indices)?); + + if num_partitions > 1 { + let 
statement_handle = response + .statement_handle + .as_ref() + .context("response has multiple partitions but no statementHandle")?; + + for partition in 1..num_partitions { + step_progress.set_substatus(format!( + "fetching partition {}/{}", + partition + 1, + num_partitions + )); + + let partition_response = fetch_partition( + http_client.clone(), + &base_url, + &token, + statement_handle, + partition, + ) + .await?; + + all_examples.extend(settled_examples_from_response( + &partition_response, + &column_indices, + )?); + } + } + + step_progress.set_substatus("done"); + } + + Ok(all_examples) +} + pub async fn fetch_rated_examples_after( http_client: Arc, inputs: &[(String, Option)], @@ -989,6 +1153,186 @@ fn requested_examples_from_response<'a>( Ok(iter) } +fn settled_examples_from_response<'a>( + response: &'a SnowflakeStatementResponse, + column_indices: &'a std::collections::HashMap, +) -> Result + 'a> { + if let Some(code) = &response.code { + if code != SNOWFLAKE_SUCCESS_CODE { + anyhow::bail!( + "snowflake sql api returned error code={code} message={}", + response.message.as_deref().unwrap_or("") + ); + } + } + + let iter = response + .data + .iter() + .enumerate() + .filter_map(move |(row_index, data_row)| { + let get_value = |name: &str| -> Option { + let index = column_indices.get(name).copied()?; + let value = data_row.get(index)?; + if value.is_null() { + None + } else { + Some(value.clone()) + } + }; + + let get_string = |name: &str| -> Option { + match get_value(name)? 
{ + JsonValue::String(s) => Some(s), + other => Some(other.to_string()), + } + }; + + let parse_json_value = |_: &str, raw: Option<&JsonValue>| -> Option { + let value = raw?; + match value { + JsonValue::String(s) => serde_json::from_str::(s).ok(), + other => Some(other.clone()), + } + }; + + let request_id_str = get_string("request_id"); + let device_id = get_string("device_id"); + let time = get_string("time"); + let input_raw = get_value("input"); + let input_json = parse_json_value("input", input_raw.as_ref()); + let input: Option = input_json + .as_ref() + .and_then(|parsed| serde_json::from_value(parsed.clone()).ok()); + let requested_output = get_string("requested_output"); + let settled_editable_region = get_string("settled_editable_region"); + let requested_format = + get_string("requested_format").and_then(|s| ZetaFormat::parse(&s).ok()); + let zed_version = get_string("zed_version"); + + match ( + request_id_str.clone(), + device_id.clone(), + time.clone(), + input.clone(), + requested_output.clone(), + settled_editable_region.clone(), + requested_format, + ) { + ( + Some(request_id), + Some(device_id), + Some(time), + Some(input), + Some(requested_output), + Some(settled_editable_region), + Some(requested_format), + ) => Some(build_settled_example( + request_id, + device_id, + time, + input, + requested_output, + settled_editable_region, + requested_format, + zed_version, + )), + _ => { + let mut missing_fields = Vec::new(); + + if request_id_str.is_none() { + missing_fields.push("request_id"); + } + if device_id.is_none() { + missing_fields.push("device_id"); + } + if time.is_none() { + missing_fields.push("time"); + } + if input_raw.is_none() || input_json.is_none() || input.is_none() { + missing_fields.push("input"); + } + if requested_output.is_none() { + missing_fields.push("requested_output"); + } + if settled_editable_region.is_none() { + missing_fields.push("settled_editable_region"); + } + if requested_format.is_none() { + 
missing_fields.push("requested_format"); + } + + log::warn!( + "skipping settled row {row_index}: [{}]", + missing_fields.join(", "), + ); + None + } + } + }); + + Ok(iter) +} + +fn build_settled_example( + request_id: String, + device_id: String, + time: String, + input: ZetaPromptInput, + requested_output: String, + settled_editable_region: String, + requested_format: ZetaFormat, + zed_version: Option, +) -> Example { + let requested_editable_range = input + .excerpt_ranges + .as_ref() + .map(|ranges| excerpt_range_for_format(requested_format, ranges).0) + .unwrap_or_else(|| input.editable_range_in_excerpt.clone()); + + let base_cursor_excerpt = input.cursor_excerpt.to_string(); + + let requested_range_is_valid = requested_editable_range.start <= requested_editable_range.end + && requested_editable_range.end <= base_cursor_excerpt.len(); + let mut example = build_example_from_snowflake( + request_id.clone(), + device_id, + time, + input, + vec!["settled".to_string()], + None, + zed_version, + ); + + if !requested_range_is_valid { + log::warn!( + "skipping malformed requested range for request {}: requested={:?} (base_len={})", + request_id, + requested_editable_range, + base_cursor_excerpt.len(), + ); + return example; + } + + let settled_replacement = settled_editable_region.as_str(); + let rejected_patch = build_output_patch( + &example.spec.cursor_path, + &base_cursor_excerpt, + &requested_editable_range, + &requested_output, + ); + let expected_patch = build_output_patch( + &example.spec.cursor_path, + &base_cursor_excerpt, + &requested_editable_range, + settled_replacement, + ); + + example.spec.expected_patches = vec![expected_patch]; + example.spec.rejected_patch = Some(rejected_patch); + example +} + fn rejected_examples_from_response<'a>( response: &'a SnowflakeStatementResponse, column_indices: &'a std::collections::HashMap, From 7eb7458c6249f23be994f3da4ff6df22a9783edf Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Sun, 1 Mar 2026 20:17:49 -0600 
Subject: [PATCH 198/548] ep_cli: Clean up snowflake fetching (#50450) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... --- .../edit_prediction_cli/src/pull_examples.rs | 517 ++++++------------ 1 file changed, 174 insertions(+), 343 deletions(-) diff --git a/crates/edit_prediction_cli/src/pull_examples.rs b/crates/edit_prediction_cli/src/pull_examples.rs index e34fc62c031cbfa411d9d5a701a3e327d0be8166..b53a3d5546e1a5697550ed24715f049c36c98178 100644 --- a/crates/edit_prediction_cli/src/pull_examples.rs +++ b/crates/edit_prediction_cli/src/pull_examples.rs @@ -152,6 +152,103 @@ async fn run_sql_with_polling( Ok(response) } +struct SnowflakeConfig { + token: String, + base_url: String, + role: Option, +} + +async fn fetch_examples_with_query( + http_client: Arc, + step_progress: &crate::progress::StepProgress, + background_executor: BackgroundExecutor, + statement: &str, + bindings: JsonValue, + timeout_seconds: u64, + required_columns: &[&str], + parse_response: for<'a> fn( + &'a SnowflakeStatementResponse, + &'a std::collections::HashMap, + ) -> Result + 'a>>, +) -> Result> { + let snowflake = SnowflakeConfig { + token: std::env::var("EP_SNOWFLAKE_API_KEY") + .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?, + base_url: std::env::var("EP_SNOWFLAKE_BASE_URL").context( + "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. 
https://.snowflakecomputing.com)", + )?, + role: std::env::var("EP_SNOWFLAKE_ROLE").ok(), + }; + let request = json!({ + "statement": statement, + "timeout": timeout_seconds, + "database": "EVENTS", + "schema": "PUBLIC", + "warehouse": "DBT", + "role": snowflake.role.as_deref(), + "bindings": bindings + }); + + let response = run_sql_with_polling( + http_client.clone(), + &snowflake.base_url, + &snowflake.token, + &request, + step_progress, + background_executor, + ) + .await?; + + let total_rows = response + .result_set_meta_data + .as_ref() + .and_then(|meta| meta.num_rows) + .unwrap_or(response.data.len() as i64); + let partition_count = response + .result_set_meta_data + .as_ref() + .map(|meta| meta.partition_info.len()) + .unwrap_or(1) + .max(1); + + step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); + step_progress.set_substatus("parsing"); + + let column_indices = get_column_indices(&response.result_set_meta_data, required_columns); + + let mut parsed_examples = Vec::with_capacity(total_rows as usize); + parsed_examples.extend(parse_response(&response, &column_indices)?); + + if partition_count > 1 { + let statement_handle = response + .statement_handle + .as_ref() + .context("response has multiple partitions but no statementHandle")?; + + for partition in 1..partition_count { + step_progress.set_substatus(format!( + "fetching partition {}/{}", + partition + 1, + partition_count + )); + + let partition_response = fetch_partition( + http_client.clone(), + &snowflake.base_url, + &snowflake.token, + statement_handle, + partition, + ) + .await?; + + parsed_examples.extend(parse_response(&partition_response, &column_indices)?); + } + } + + step_progress.set_substatus("done"); + Ok(parsed_examples) +} + pub(crate) async fn fetch_partition( http_client: Arc, base_url: &str, @@ -305,13 +402,6 @@ pub async fn fetch_rejected_examples_after( let progress = Progress::global(); - let token = std::env::var("EP_SNOWFLAKE_API_KEY") - 
.context("missing required environment variable EP_SNOWFLAKE_API_KEY")?; - let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context( - "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://.snowflakecomputing.com)", - )?; - let role = std::env::var("EP_SNOWFLAKE_ROLE").ok(); - let mut all_examples = Vec::new(); for after_date in after_timestamps.iter() { @@ -319,10 +409,11 @@ pub async fn fetch_rejected_examples_after( let step_progress = progress.start(Step::PullExamples, &step_progress_name); step_progress.set_substatus("querying"); - // Join rejected events with their corresponding request events to get the full context. - // We filter for V3 sampling data which contains the structured input we need. - // We also filter for predictions that were actually shown to the user (was_shown = true) - // to focus on explicit user rejections rather than implicit cancellations. + let min_minor_str = min_capture_version.map(|version| version.minor.to_string()); + let min_patch_str = min_capture_version.map(|version| version.patch.to_string()); + let min_minor_str_ref = min_minor_str.as_deref(); + let min_patch_str_ref = min_patch_str.as_deref(); + let statement = indoc! {r#" SELECT req.event_properties:request_id::string AS request_id, @@ -355,58 +446,25 @@ pub async fn fetch_rejected_examples_after( OFFSET ? 
"#}; - let min_minor_str = min_capture_version.map(|v| v.minor.to_string()); - let min_patch_str = min_capture_version.map(|v| v.patch.to_string()); - let min_minor_str_ref = min_minor_str.as_deref(); - let min_patch_str_ref = min_patch_str.as_deref(); - let request = json!({ - "statement": statement, - "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS, - "database": "EVENTS", - "schema": "PUBLIC", - "warehouse": "DBT", - "role": role, - "bindings": { - "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, - "2": { "type": "TEXT", "value": PREDICTIVE_EDIT_REJECTED_EVENT }, - "3": { "type": "TEXT", "value": after_date }, - "4": { "type": "FIXED", "value": min_minor_str_ref }, - "5": { "type": "FIXED", "value": min_minor_str_ref }, - "6": { "type": "FIXED", "value": min_minor_str_ref }, - "7": { "type": "FIXED", "value": min_patch_str_ref }, - "8": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, - "9": { "type": "FIXED", "value": offset.to_string() } - } + let bindings = json!({ + "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, + "2": { "type": "TEXT", "value": PREDICTIVE_EDIT_REJECTED_EVENT }, + "3": { "type": "TEXT", "value": after_date }, + "4": { "type": "FIXED", "value": min_minor_str_ref }, + "5": { "type": "FIXED", "value": min_minor_str_ref }, + "6": { "type": "FIXED", "value": min_minor_str_ref }, + "7": { "type": "FIXED", "value": min_patch_str_ref }, + "8": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, + "9": { "type": "FIXED", "value": offset.to_string() } }); - let response = run_sql_with_polling( + let examples = fetch_examples_with_query( http_client.clone(), - &base_url, - &token, - &request, &step_progress, background_executor.clone(), - ) - .await?; - - let total_rows = response - .result_set_meta_data - .as_ref() - .and_then(|m| m.num_rows) - .unwrap_or(response.data.len() as i64); - - let num_partitions = response - .result_set_meta_data - .as_ref() - .map(|m| m.partition_info.len()) 
- .unwrap_or(1) - .max(1); - - step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); - step_progress.set_substatus("parsing"); - - let column_indices = get_column_indices( - &response.result_set_meta_data, + statement, + bindings, + DEFAULT_STATEMENT_TIMEOUT_SECONDS, &[ "request_id", "device_id", @@ -418,40 +476,11 @@ pub async fn fetch_rejected_examples_after( "reason", "zed_version", ], - ); - - all_examples.extend(rejected_examples_from_response(&response, &column_indices)?); - - if num_partitions > 1 { - let statement_handle = response - .statement_handle - .as_ref() - .context("response has multiple partitions but no statementHandle")?; - - for partition in 1..num_partitions { - step_progress.set_substatus(format!( - "fetching partition {}/{}", - partition + 1, - num_partitions - )); - - let partition_response = fetch_partition( - http_client.clone(), - &base_url, - &token, - statement_handle, - partition, - ) - .await?; - - all_examples.extend(rejected_examples_from_response( - &partition_response, - &column_indices, - )?); - } - } + rejected_examples_from_response, + ) + .await?; - step_progress.set_substatus("done"); + all_examples.extend(examples); } Ok(all_examples) @@ -471,13 +500,6 @@ pub async fn fetch_requested_examples_after( let progress = Progress::global(); - let token = std::env::var("EP_SNOWFLAKE_API_KEY") - .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?; - let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context( - "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. 
https://.snowflakecomputing.com)", - )?; - let role = std::env::var("EP_SNOWFLAKE_ROLE").ok(); - let mut all_examples = Vec::new(); for after_date in after_timestamps.iter() { @@ -485,6 +507,11 @@ pub async fn fetch_requested_examples_after( let step_progress = progress.start(Step::PullExamples, &step_progress_name); step_progress.set_substatus("querying"); + let min_minor_str = min_capture_version.map(|version| version.minor.to_string()); + let min_patch_str = min_capture_version.map(|version| version.patch.to_string()); + let min_minor_str_ref = min_minor_str.as_deref(); + let min_patch_str_ref = min_patch_str.as_deref(); + let statement = indoc! {r#" SELECT req.event_properties:request_id::string AS request_id, @@ -509,95 +536,30 @@ pub async fn fetch_requested_examples_after( OFFSET ? "#}; - let min_minor_str = min_capture_version.map(|v| v.minor.to_string()); - let min_patch_str = min_capture_version.map(|v| v.patch.to_string()); - let min_minor_str_ref = min_minor_str.as_deref(); - let min_patch_str_ref = min_patch_str.as_deref(); - let request = json!({ - "statement": statement, - "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS, - "database": "EVENTS", - "schema": "PUBLIC", - "warehouse": "DBT", - "role": role, - "bindings": { - "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, - "2": { "type": "TEXT", "value": after_date }, - "3": { "type": "FIXED", "value": min_minor_str_ref }, - "4": { "type": "FIXED", "value": min_minor_str_ref }, - "5": { "type": "FIXED", "value": min_minor_str_ref }, - "6": { "type": "FIXED", "value": min_patch_str_ref }, - "7": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, - "8": { "type": "FIXED", "value": offset.to_string() } - } + let bindings = json!({ + "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, + "2": { "type": "TEXT", "value": after_date }, + "3": { "type": "FIXED", "value": min_minor_str_ref }, + "4": { "type": "FIXED", "value": min_minor_str_ref }, + "5": { "type": 
"FIXED", "value": min_minor_str_ref }, + "6": { "type": "FIXED", "value": min_patch_str_ref }, + "7": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, + "8": { "type": "FIXED", "value": offset.to_string() } }); - let response = run_sql_with_polling( + let examples = fetch_examples_with_query( http_client.clone(), - &base_url, - &token, - &request, &step_progress, background_executor.clone(), + statement, + bindings, + DEFAULT_STATEMENT_TIMEOUT_SECONDS, + &["request_id", "device_id", "time", "input", "zed_version"], + requested_examples_from_response, ) .await?; - let total_rows = response - .result_set_meta_data - .as_ref() - .and_then(|m| m.num_rows) - .unwrap_or(response.data.len() as i64); - - let num_partitions = response - .result_set_meta_data - .as_ref() - .map(|m| m.partition_info.len()) - .unwrap_or(1) - .max(1); - - step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); - step_progress.set_substatus("parsing"); - - let column_indices = get_column_indices( - &response.result_set_meta_data, - &["request_id", "device_id", "time", "input", "zed_version"], - ); - - all_examples.extend(requested_examples_from_response( - &response, - &column_indices, - )?); - - if num_partitions > 1 { - let statement_handle = response - .statement_handle - .as_ref() - .context("response has multiple partitions but no statementHandle")?; - - for partition in 1..num_partitions { - step_progress.set_substatus(format!( - "fetching partition {}/{}", - partition + 1, - num_partitions - )); - - let partition_response = fetch_partition( - http_client.clone(), - &base_url, - &token, - statement_handle, - partition, - ) - .await?; - - all_examples.extend(requested_examples_from_response( - &partition_response, - &column_indices, - )?); - } - } - - step_progress.set_substatus("done"); + all_examples.extend(examples); } Ok(all_examples) @@ -617,13 +579,6 @@ pub async fn fetch_settled_examples_after( let progress = Progress::global(); - let token = 
std::env::var("EP_SNOWFLAKE_API_KEY") - .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?; - let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context( - "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. https://.snowflakecomputing.com)", - )?; - let role = std::env::var("EP_SNOWFLAKE_ROLE").ok(); - let mut all_examples = Vec::new(); for after_date in after_timestamps.iter() { @@ -631,6 +586,8 @@ pub async fn fetch_settled_examples_after( let step_progress = progress.start(Step::PullExamples, &step_progress_name); step_progress.set_substatus("querying"); + let _ = min_capture_version; + let statement = indoc! {r#" WITH requested AS ( SELECT @@ -666,51 +623,21 @@ pub async fn fetch_settled_examples_after( OFFSET ? "#}; - let _ = min_capture_version; - let request = json!({ - "statement": statement, - "timeout": SETTLED_STATEMENT_TIMEOUT_SECONDS, - "database": "EVENTS", - "schema": "PUBLIC", - "warehouse": "DBT", - "role": role, - "bindings": { - "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, - "2": { "type": "TEXT", "value": after_date }, - "3": { "type": "TEXT", "value": EDIT_PREDICTION_SETTLED_EVENT }, - "4": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, - "5": { "type": "FIXED", "value": offset.to_string() } - } + let bindings = json!({ + "1": { "type": "TEXT", "value": PREDICTIVE_EDIT_REQUESTED_EVENT }, + "2": { "type": "TEXT", "value": after_date }, + "3": { "type": "TEXT", "value": EDIT_PREDICTION_SETTLED_EVENT }, + "4": { "type": "FIXED", "value": max_rows_per_timestamp.to_string() }, + "5": { "type": "FIXED", "value": offset.to_string() } }); - let response = run_sql_with_polling( + let examples = fetch_examples_with_query( http_client.clone(), - &base_url, - &token, - &request, &step_progress, background_executor.clone(), - ) - .await?; - - let total_rows = response - .result_set_meta_data - .as_ref() - .and_then(|m| m.num_rows) - .unwrap_or(response.data.len() as i64); - - 
let num_partitions = response - .result_set_meta_data - .as_ref() - .map(|m| m.partition_info.len()) - .unwrap_or(1) - .max(1); - - step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); - step_progress.set_substatus("parsing"); - - let column_indices = get_column_indices( - &response.result_set_meta_data, + statement, + bindings, + SETTLED_STATEMENT_TIMEOUT_SECONDS, &[ "request_id", "device_id", @@ -721,40 +648,11 @@ pub async fn fetch_settled_examples_after( "requested_format", "zed_version", ], - ); - - all_examples.extend(settled_examples_from_response(&response, &column_indices)?); - - if num_partitions > 1 { - let statement_handle = response - .statement_handle - .as_ref() - .context("response has multiple partitions but no statementHandle")?; - - for partition in 1..num_partitions { - step_progress.set_substatus(format!( - "fetching partition {}/{}", - partition + 1, - num_partitions - )); - - let partition_response = fetch_partition( - http_client.clone(), - &base_url, - &token, - statement_handle, - partition, - ) - .await?; - - all_examples.extend(settled_examples_from_response( - &partition_response, - &column_indices, - )?); - } - } + settled_examples_from_response, + ) + .await?; - step_progress.set_substatus("done"); + all_examples.extend(examples); } Ok(all_examples) @@ -774,13 +672,6 @@ pub async fn fetch_rated_examples_after( let progress = Progress::global(); - let token = std::env::var("EP_SNOWFLAKE_API_KEY") - .context("missing required environment variable EP_SNOWFLAKE_API_KEY")?; - let base_url = std::env::var("EP_SNOWFLAKE_BASE_URL").context( - "missing required environment variable EP_SNOWFLAKE_BASE_URL (e.g. 
https://.snowflakecomputing.com)", - )?; - let role = std::env::var("EP_SNOWFLAKE_ROLE").ok(); - let mut all_examples = Vec::new(); for (after_date, rating_filter) in inputs.iter() { @@ -793,7 +684,7 @@ pub async fn fetch_rated_examples_after( let step_progress = progress.start(Step::PullExamples, &step_progress_name); step_progress.set_substatus("querying"); - let rating_value = rating_filter.as_ref().map(|r| match r { + let rating_value = rating_filter.as_ref().map(|rating| match rating { EditPredictionRating::Positive => "Positive", EditPredictionRating::Negative => "Negative", }); @@ -841,44 +732,13 @@ pub async fn fetch_rated_examples_after( "8": { "type": "FIXED", "value": offset.to_string() } }); - let request = json!({ - "statement": statement, - "timeout": DEFAULT_STATEMENT_TIMEOUT_SECONDS, - "database": "EVENTS", - "schema": "PUBLIC", - "warehouse": "DBT", - "role": role, - "bindings": bindings - }); - - let response = run_sql_with_polling( + let examples = fetch_examples_with_query( http_client.clone(), - &base_url, - &token, - &request, &step_progress, background_executor.clone(), - ) - .await?; - - let total_rows = response - .result_set_meta_data - .as_ref() - .and_then(|m| m.num_rows) - .unwrap_or(response.data.len() as i64); - - let num_partitions = response - .result_set_meta_data - .as_ref() - .map(|m| m.partition_info.len()) - .unwrap_or(1) - .max(1); - - step_progress.set_info(format!("{} rows", total_rows), InfoStyle::Normal); - step_progress.set_substatus("parsing"); - - let column_indices = get_column_indices( - &response.result_set_meta_data, + statement, + bindings, + DEFAULT_STATEMENT_TIMEOUT_SECONDS, &[ "request_id", "inputs", @@ -891,40 +751,11 @@ pub async fn fetch_rated_examples_after( "environment", "zed_version", ], - ); - - all_examples.extend(rated_examples_from_response(&response, &column_indices)?); - - if num_partitions > 1 { - let statement_handle = response - .statement_handle - .as_ref() - .context("response has multiple 
partitions but no statementHandle")?; - - for partition in 1..num_partitions { - step_progress.set_substatus(format!( - "fetching partition {}/{}", - partition + 1, - num_partitions - )); - - let partition_response = fetch_partition( - http_client.clone(), - &base_url, - &token, - statement_handle, - partition, - ) - .await?; - - all_examples.extend(rated_examples_from_response( - &partition_response, - &column_indices, - )?); - } - } + rated_examples_from_response, + ) + .await?; - step_progress.set_substatus("done"); + all_examples.extend(examples); } Ok(all_examples) @@ -933,7 +764,7 @@ pub async fn fetch_rated_examples_after( fn rated_examples_from_response<'a>( response: &'a SnowflakeStatementResponse, column_indices: &'a std::collections::HashMap, -) -> Result + 'a> { +) -> Result + 'a>> { if let Some(code) = &response.code { if code != SNOWFLAKE_SUCCESS_CODE { anyhow::bail!( @@ -1021,7 +852,7 @@ fn rated_examples_from_response<'a>( } }); - Ok(iter) + Ok(Box::new(iter)) } fn build_rated_example( @@ -1081,7 +912,7 @@ fn build_rated_example( fn requested_examples_from_response<'a>( response: &'a SnowflakeStatementResponse, column_indices: &'a std::collections::HashMap, -) -> Result + 'a> { +) -> Result + 'a>> { if let Some(code) = &response.code { if code != SNOWFLAKE_SUCCESS_CODE { anyhow::bail!( @@ -1150,13 +981,13 @@ fn requested_examples_from_response<'a>( } }); - Ok(iter) + Ok(Box::new(iter)) } fn settled_examples_from_response<'a>( response: &'a SnowflakeStatementResponse, column_indices: &'a std::collections::HashMap, -) -> Result + 'a> { +) -> Result + 'a>> { if let Some(code) = &response.code { if code != SNOWFLAKE_SUCCESS_CODE { anyhow::bail!( @@ -1271,7 +1102,7 @@ fn settled_examples_from_response<'a>( } }); - Ok(iter) + Ok(Box::new(iter)) } fn build_settled_example( @@ -1336,7 +1167,7 @@ fn build_settled_example( fn rejected_examples_from_response<'a>( response: &'a SnowflakeStatementResponse, column_indices: &'a std::collections::HashMap, -) -> 
Result + 'a> { +) -> Result + 'a>> { if let Some(code) = &response.code { if code != SNOWFLAKE_SUCCESS_CODE { anyhow::bail!( @@ -1421,7 +1252,7 @@ fn rejected_examples_from_response<'a>( } }); - Ok(iter) + Ok(Box::new(iter)) } fn build_rejected_example( From ac4b14370c0254a27962e787bfa5c4c69e0ba1f0 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Mon, 2 Mar 2026 10:53:06 +0530 Subject: [PATCH 199/548] project_panel: Set diagnostic badges off by default (#50454) Release Notes: - N/A --- assets/settings/default.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 8f724f59b66486b6477fc19155d78c7dd89d33c8..b193c0f60d0087972381f4f85f2b864b52fdbc7d 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -800,7 +800,7 @@ // "files_first" "sort_mode": "directories_first", // Whether to show error and warning count badges next to file names in the project panel. - "diagnostic_badges": true, + "diagnostic_badges": false, // Whether to enable drag-and-drop operations in the project panel. 
"drag_and_drop": true, // Whether to hide the root entry when only one folder is open in the window; From da499966d2e222eac935a56f66cd7cfd3fe09b58 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 2 Mar 2026 09:23:08 +0100 Subject: [PATCH 200/548] editor: Fix sticky diff hunk controls not being flush to buffer headers (#50402) Release Notes: - Fixed a visual gap between diff hunk controls and buffer headers --- crates/editor/src/editor.rs | 1 + crates/editor/src/element.rs | 32 ++++++++++++++++++-------------- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index bc62d2a87a4fcd37f88fd013f779dc047a43f6e3..6820d598680de438f9244195acca0bcd5ff7476f 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -237,6 +237,7 @@ use crate::{ }; pub const FILE_HEADER_HEIGHT: u32 = 2; +pub const BUFFER_HEADER_PADDING: Rems = rems(0.25); pub const MULTI_BUFFER_EXCERPT_HEADER_HEIGHT: u32 = 1; const CURSOR_BLINK_INTERVAL: Duration = Duration::from_millis(500); const MAX_LINE_LEN: usize = 1024; diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 59086fbedf397e05fbec50481d04771f878eff7c..4779784ad75fbbe3740bf63572c2bd8cec06f1da 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1,15 +1,15 @@ use crate::{ - ActiveDiagnostic, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext, ChunkReplacement, - CodeActionSource, ColumnarMode, ConflictsOurs, ConflictsOursMarker, ConflictsOuter, - ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape, CustomBlockId, - DisplayDiffHunk, DisplayPoint, DisplayRow, EditDisplayMode, EditPrediction, Editor, EditorMode, - EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, FocusedBlock, - GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, InlayHintRefreshReason, - JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, MINIMAP_FONT_SIZE, - 
MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp, PhantomBreakpointIndicator, - PhantomDiffReviewIndicator, Point, RowExt, RowRangeExt, SelectPhase, Selection, - SelectionDragState, SelectionEffects, SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint, - ToggleFold, ToggleFoldAll, + ActiveDiagnostic, BUFFER_HEADER_PADDING, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext, + ChunkReplacement, CodeActionSource, ColumnarMode, ConflictsOurs, ConflictsOursMarker, + ConflictsOuter, ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape, + CustomBlockId, DisplayDiffHunk, DisplayPoint, DisplayRow, EditDisplayMode, EditPrediction, + Editor, EditorMode, EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, + FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, + InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, + MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp, + PhantomBreakpointIndicator, PhantomDiffReviewIndicator, Point, RowExt, RowRangeExt, + SelectPhase, Selection, SelectionDragState, SelectionEffects, SizingBehavior, SoftWrap, + StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll, code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP}, column_pixels, display_map::{ @@ -8254,7 +8254,7 @@ pub(crate) fn render_buffer_header( let header = div() .id(("buffer-header", for_excerpt.buffer_id.to_proto())) - .p_1() + .p(BUFFER_HEADER_PADDING) .w_full() .h(FILE_HEADER_HEIGHT as f32 * window.line_height()) .child( @@ -10959,7 +10959,9 @@ impl Element for EditorElement { .and_then(|headers| headers.lines.last()) .map_or(Pixels::ZERO, |last| last.offset + line_height); - let sticky_header_height = if sticky_buffer_header.is_some() { + let has_sticky_buffer_header = + sticky_buffer_header.is_some() || sticky_header_excerpt_id.is_some(); + let sticky_header_height = if 
has_sticky_buffer_header { let full_height = FILE_HEADER_HEIGHT as f32 * line_height; let display_row = blocks .iter() @@ -10978,7 +10980,9 @@ impl Element for EditorElement { } None => full_height, }; - sticky_scroll_header_height + offset + let header_bottom_padding = + BUFFER_HEADER_PADDING.to_pixels(window.rem_size()); + sticky_scroll_header_height + offset - header_bottom_padding } else { sticky_scroll_header_height }; From 0f8f71d7949b6f5fbb2eb9cca90639e70b129943 Mon Sep 17 00:00:00 2001 From: Owen Law <81528246+someone13574@users.noreply.github.com> Date: Mon, 2 Mar 2026 03:40:24 -0500 Subject: [PATCH 201/548] gpui(web): Fix crash when resizing on firefox-linux (#50419) Fixes a crash which occurred on firefox-linux when resizing (and was causing blanks during resize in other browsers) The issue was due to attempting to resize the drawing buffer while it was being rendered to, which causes a browser crash since firefox destroys that buffer while it is still being accessed. This essentially defers the resize to just before drawing, so that it won't be resized *while* being drawn to. 
Release Notes: - N/A --- crates/gpui_web/src/window.rs | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/crates/gpui_web/src/window.rs b/crates/gpui_web/src/window.rs index c29fa509dd206406b24069053dc71bdc4dc18e75..ab6d6fc857dfd092ea7e3c5d2dcb46f9ddc96cfb 100644 --- a/crates/gpui_web/src/window.rs +++ b/crates/gpui_web/src/window.rs @@ -54,6 +54,7 @@ pub(crate) struct WebWindowInner { pub(crate) last_physical_size: Cell<(u32, u32)>, pub(crate) notify_scale: Cell, mql_handle: RefCell>, + pending_physical_size: Cell>, } pub struct WebWindow { @@ -163,6 +164,7 @@ impl WebWindow { last_physical_size: Cell::new((0, 0)), notify_scale: Cell::new(false), mql_handle: RefCell::new(None), + pending_physical_size: Cell::new(None), }); let raf_closure = inner.create_raf_closure(); @@ -252,8 +254,9 @@ impl WebWindow { let clamped_width = physical_width.min(max_texture_dimension); let clamped_height = physical_height.min(max_texture_dimension); - inner.canvas.set_width(clamped_width); - inner.canvas.set_height(clamped_height); + inner + .pending_physical_size + .set(Some((clamped_width, clamped_height))); { let mut s = inner.state.borrow_mut(); @@ -262,10 +265,6 @@ impl WebWindow { height: px(logical_height), }; s.scale_factor = dpr_f32; - s.renderer.update_drawable_size(Size { - width: DevicePixels(clamped_width as i32), - height: DevicePixels(clamped_height as i32), - }); } let new_size = Size { @@ -637,6 +636,20 @@ impl PlatformWindow for WebWindow { } fn draw(&self, scene: &Scene) { + if let Some((width, height)) = self.inner.pending_physical_size.take() { + if self.inner.canvas.width() != width || self.inner.canvas.height() != height { + self.inner.canvas.set_width(width); + self.inner.canvas.set_height(height); + } + + let mut state = self.inner.state.borrow_mut(); + state.renderer.update_drawable_size(Size { + width: DevicePixels(width as i32), + height: DevicePixels(height as i32), + }); + drop(state); + } + 
self.inner.state.borrow_mut().renderer.draw(scene); } From 3f57cb0f149b2d615a8d1b1d48162199fd765448 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Mon, 2 Mar 2026 14:26:58 +0530 Subject: [PATCH 202/548] markdown_preview: Remove `recoverable_panic` and fix mermaid hex parsing panic (#50470) #50176 added `recoverable_panic` to swallow mermaid panics, then #50280 fixed it in upstream. This PR removes the workaround so future panics reach Sentry. Also bumps `mermaid-rs-renderer` to fix a hex parsing panic. Release Notes: - N/A --- Cargo.lock | 6 ++-- Cargo.toml | 2 +- crates/crashes/Cargo.toml | 1 - crates/crashes/src/crashes.rs | 36 +------------------ crates/markdown_preview/Cargo.toml | 1 - .../markdown_preview/src/markdown_renderer.rs | 4 +-- 6 files changed, 5 insertions(+), 45 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 18fd93aed13bebee782d4204bfbf095e750d7096..1eea7b57e3c1a35870d1369a6066c2d36b8e6d26 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4120,7 +4120,6 @@ dependencies = [ name = "crashes" version = "0.1.0" dependencies = [ - "anyhow", "bincode", "cfg-if", "crash-handler", @@ -8664,7 +8663,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - "hashbrown 0.16.1", + "hashbrown 0.15.5", "serde", "serde_core", ] @@ -10174,7 +10173,6 @@ dependencies = [ "anyhow", "async-recursion", "collections", - "crashes", "editor", "fs", "gpui", @@ -10400,7 +10398,7 @@ dependencies = [ [[package]] name = "mermaid-rs-renderer" version = "0.2.0" -source = "git+https://github.com/zed-industries/mermaid-rs-renderer?rev=9d8360d9cea10dc4bc86d7b8012cc6e9656e6cf2#9d8360d9cea10dc4bc86d7b8012cc6e9656e6cf2" +source = "git+https://github.com/zed-industries/mermaid-rs-renderer?rev=a1f8fc03bf7293018136fb8e60d83551d2dd5732#a1f8fc03bf7293018136fb8e60d83551d2dd5732" dependencies = [ "anyhow", "fontdb 0.16.2", diff --git a/Cargo.toml b/Cargo.toml index 
39d331fd9ebde7ac0b861b6bf7dfc2ad28805c10..235d5a1209621e6d31bcd1b0b747e2a596cebe45 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -370,7 +370,7 @@ markdown_preview = { path = "crates/markdown_preview" } svg_preview = { path = "crates/svg_preview" } media = { path = "crates/media" } menu = { path = "crates/menu" } -mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", rev = "9d8360d9cea10dc4bc86d7b8012cc6e9656e6cf2", default-features = false } +mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", rev = "a1f8fc03bf7293018136fb8e60d83551d2dd5732", default-features = false } migrator = { path = "crates/migrator" } mistral = { path = "crates/mistral" } multi_buffer = { path = "crates/multi_buffer" } diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index 3523426752e670c2c1023a1e0af221029f501070..5e451853a925d86ffcc1491a5c95af1f94e6ed05 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -6,7 +6,6 @@ edition.workspace = true license = "GPL-3.0-or-later" [dependencies] -anyhow.workspace = true bincode.workspace = true cfg-if.workspace = true crash-handler.workspace = true diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index 99ec14fc049d359f1720a3e0605bc4597ceadbbe..a1a43dbb88198b7afd4b89141f7578c0a5bc25ce 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -4,7 +4,6 @@ use log::info; use minidumper::{Client, LoopAction, MinidumpBinary}; use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; use serde::{Deserialize, Serialize}; -use std::cell::Cell; use std::mem; #[cfg(not(target_os = "windows"))] @@ -16,7 +15,7 @@ use std::{ env, fs::{self, File}, io, - panic::{self, AssertUnwindSafe, PanicHookInfo}, + panic::{self, PanicHookInfo}, path::{Path, PathBuf}, process::{self}, sync::{ @@ -27,31 +26,6 @@ use std::{ time::Duration, }; -thread_local! 
{ - static ALLOW_UNWIND: Cell = const { Cell::new(false) }; -} - -/// Catch a panic as an error instead of aborting the process. Unlike plain -/// `catch_unwind`, this bypasses the crash-reporting panic hook which would -/// normally abort before unwinding can occur. -/// -/// **Use sparingly.** Prefer this only for isolating third-party code -/// that is known to panic, where you want to handle the failure gracefully -/// instead of crashing. -pub fn recoverable_panic(closure: impl FnOnce() -> T) -> anyhow::Result { - ALLOW_UNWIND.with(|flag| flag.set(true)); - let result = panic::catch_unwind(AssertUnwindSafe(closure)); - ALLOW_UNWIND.with(|flag| flag.set(false)); - result.map_err(|payload| { - let message = payload - .downcast_ref::<&str>() - .map(|s| s.to_string()) - .or_else(|| payload.downcast_ref::().cloned()) - .unwrap_or_else(|| "unknown panic".to_string()); - anyhow::anyhow!("panic: {message}") - }) -} - // set once the crash handler has initialized and the client has connected to it pub static CRASH_HANDLER: OnceLock> = OnceLock::new(); // set when the first minidump request is made to avoid generating duplicate crash reports @@ -83,9 +57,6 @@ pub fn init(crash_init: InitCrashHandler, spawn: impl FnOnce(BoxFuture<'static, if !should_install_crash_handler() { let old_hook = panic::take_hook(); panic::set_hook(Box::new(move |info| { - if ALLOW_UNWIND.with(|flag| flag.get()) { - return; - } unsafe { env::set_var("RUST_BACKTRACE", "1") }; old_hook(info); // prevent the macOS crash dialog from popping up @@ -351,11 +322,6 @@ pub fn panic_hook(info: &PanicHookInfo) { let current_thread = std::thread::current(); let thread_name = current_thread.name().unwrap_or(""); - if ALLOW_UNWIND.with(|flag| flag.get()) { - log::error!("thread '{thread_name}' panicked at {span} (allowing unwind):\n{message}"); - return; - } - // wait 500ms for the crash handler process to start up // if it's still not there just write panic info and no minidump let retry_frequency = 
Duration::from_millis(100); diff --git a/crates/markdown_preview/Cargo.toml b/crates/markdown_preview/Cargo.toml index 1cfc1b4e59ef14b47ab5845dc67e2ad77c9232e5..55912c66a017fa22902f9b05e5fa924230710d69 100644 --- a/crates/markdown_preview/Cargo.toml +++ b/crates/markdown_preview/Cargo.toml @@ -18,7 +18,6 @@ test-support = [] anyhow.workspace = true async-recursion.workspace = true collections.workspace = true -crashes.workspace = true editor.workspace = true fs.workspace = true gpui.workspace = true diff --git a/crates/markdown_preview/src/markdown_renderer.rs b/crates/markdown_preview/src/markdown_renderer.rs index 67131a6b2cb81f82a2c550944c96fb4e1ed5a93a..4d26b7e8958a04f1bb64abc5be5502e23896f313 100644 --- a/crates/markdown_preview/src/markdown_renderer.rs +++ b/crates/markdown_preview/src/markdown_renderer.rs @@ -133,9 +133,7 @@ impl CachedMermaidDiagram { let _task = cx.spawn(async move |this, cx| { let value = cx .background_spawn(async move { - let svg_string = crashes::recoverable_panic(|| { - mermaid_rs_renderer::render(&contents.contents) - })??; + let svg_string = mermaid_rs_renderer::render(&contents.contents)?; let scale = contents.scale as f32 / 100.0; svg_renderer .render_single_frame(svg_string.as_bytes(), scale, true) From 18532995ecfe37f6f82861985e20f05e3c6f1ec0 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 2 Mar 2026 10:23:37 +0100 Subject: [PATCH 203/548] language: Defer dropping the `SyntaxSnapshot` to a background thread (#50386) Dropping deep tree-sitter Trees can be quite slow due to deallocating lots of memory (in the 10s of milliseconds for big diffs). To avoid blocking the main thread, we offload the drop operation to a background thread. Instead of a static thread we could also integrate this with the gpui app or use the executors, but both of that would require threading that through as a field which I don't think is too great either Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/language/src/syntax_map.rs | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index bd24424679f3e6cb02303c91e0d86db335cd0a26..c5931c474d2962fc7ceb66954f2f00d3bf14b4f8 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -13,7 +13,7 @@ use std::{ collections::BinaryHeap, fmt, iter, ops::{ControlFlow, Deref, DerefMut, Range}, - sync::Arc, + sync::{Arc, LazyLock}, time::{Duration, Instant}, }; use streaming_iterator::StreamingIterator; @@ -40,6 +40,27 @@ pub struct SyntaxSnapshot { update_count: usize, } +// Dropping deep treesitter Trees can be quite slow due to deallocating lots of memory. +// To avoid blocking the main thread, we offload the drop operation to a background thread. +impl Drop for SyntaxSnapshot { + fn drop(&mut self) { + static DROP_TX: LazyLock>> = + LazyLock::new(|| { + let (tx, rx) = std::sync::mpsc::channel(); + std::thread::Builder::new() + .name("SyntaxSnapshot::drop".into()) + .spawn(move || while let Ok(_) = rx.recv() {}) + .expect("failed to spawn drop thread"); + tx + }); + // This does allocate a new Arc, but it's cheap and avoids blocking the main thread without needing to use an `Option` or `MaybeUninit`. + let _ = DROP_TX.send(std::mem::replace( + &mut self.layers, + SumTree::from_summary(Default::default()), + )); + } +} + #[derive(Default)] pub struct SyntaxMapCaptures<'a> { layers: Vec>, From 7cff4b0998321628cf31d563e8663b4f9f53a171 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 2 Mar 2026 10:24:04 +0100 Subject: [PATCH 204/548] editor: Clean up tab_map (#49606) Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/buffer_diff/src/buffer_diff.rs | 2 +- crates/editor/src/display_map/inlay_map.rs | 9 +- crates/editor/src/display_map/tab_map.rs | 655 +++++++++++---------- crates/editor/src/editor.rs | 18 +- 4 files changed, 352 insertions(+), 332 deletions(-) diff --git a/crates/buffer_diff/src/buffer_diff.rs b/crates/buffer_diff/src/buffer_diff.rs index 8e61a9b633930655e296433711013645ea873dfd..82ab2736b8bc207aa30952ae9f79f161eb9db8db 100644 --- a/crates/buffer_diff/src/buffer_diff.rs +++ b/crates/buffer_diff/src/buffer_diff.rs @@ -1721,7 +1721,7 @@ impl BufferDiff { if let Some(language_registry) = language_registry { base_text.set_language_registry(language_registry); } - base_text.set_language(language, cx); + base_text.set_language_async(language, cx); base_text.parsing_idle() }); cx.spawn(async move |this, cx| { diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs index 63e315ab250d5ddbc0ffa9d37cb1c42b3803efac..122ca6f698115c2f5e6c194246f6a378825e5675 100644 --- a/crates/editor/src/display_map/inlay_map.rs +++ b/crates/editor/src/display_map/inlay_map.rs @@ -17,7 +17,7 @@ use multi_buffer::{ }; use project::InlayId; use std::{ - cmp, + cmp, iter, ops::{Add, AddAssign, Range, Sub, SubAssign}, sync::Arc, }; @@ -546,8 +546,11 @@ impl InlayMap { pub fn new(buffer: MultiBufferSnapshot) -> (Self, InlaySnapshot) { let version = 0; let snapshot = InlaySnapshot { - buffer: buffer.clone(), - transforms: SumTree::from_iter(Some(Transform::Isomorphic(buffer.text_summary())), ()), + transforms: SumTree::from_iter( + iter::once(Transform::Isomorphic(buffer.text_summary())), + (), + ), + buffer, version, }; diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs index 5ff3979e7da848ddba98f5b6f8d1ea26ad990a81..187ed8614e01ddb8dcdae930fd484de9594cf63f 100644 --- a/crates/editor/src/display_map/tab_map.rs +++ b/crates/editor/src/display_map/tab_map.rs @@ -44,121 +44,146 @@ impl TabMap { mut 
fold_edits: Vec, tab_size: NonZeroU32, ) -> (TabSnapshot, Vec) { - let old_snapshot = &mut self.0; - let mut new_snapshot = TabSnapshot { - fold_snapshot, - tab_size: tab_size.min(MAX_TABS), - max_expansion_column: old_snapshot.max_expansion_column, - version: old_snapshot.version, - }; - - if old_snapshot.fold_snapshot.version != new_snapshot.fold_snapshot.version { - new_snapshot.version += 1; + let tab_size = tab_size.min(MAX_TABS); + + if self.0.tab_size != tab_size { + let old_max_point = self.0.max_point(); + self.0.version += 1; + self.0.fold_snapshot = fold_snapshot; + self.0.tab_size = tab_size; + return ( + self.0.clone(), + vec![TabEdit { + old: TabPoint::zero()..old_max_point, + new: TabPoint::zero()..self.0.max_point(), + }], + ); } - let tab_edits = if old_snapshot.tab_size == new_snapshot.tab_size { - // Expand each edit to include the next tab on the same line as the edit, - // and any subsequent tabs on that line that moved across the tab expansion - // boundary. - for fold_edit in &mut fold_edits { - let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); - let old_end_row_successor_offset = cmp::min( - FoldPoint::new(old_end.row() + 1, 0), - old_snapshot.fold_snapshot.max_point(), - ) - .to_offset(&old_snapshot.fold_snapshot); - let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); + let old_snapshot = &mut self.0; + let mut new_version = old_snapshot.version; + if old_snapshot.fold_snapshot.version != fold_snapshot.version { + new_version += 1; + } - let mut offset_from_edit = 0; - let mut first_tab_offset = None; - let mut last_tab_with_changed_expansion_offset = None; - 'outer: for chunk in old_snapshot.fold_snapshot.chunks( - fold_edit.old.end..old_end_row_successor_offset, - false, - Highlights::default(), - ) { - let mut remaining_tabs = chunk.tabs; - while remaining_tabs != 0 { - let ix = remaining_tabs.trailing_zeros(); - let offset_from_edit = offset_from_edit + ix; - if first_tab_offset.is_none() { - 
first_tab_offset = Some(offset_from_edit); - } - - let old_column = old_end.column() + offset_from_edit; - let new_column = new_end.column() + offset_from_edit; - let was_expanded = old_column < old_snapshot.max_expansion_column; - let is_expanded = new_column < new_snapshot.max_expansion_column; - if was_expanded != is_expanded { - last_tab_with_changed_expansion_offset = Some(offset_from_edit); - } else if !was_expanded && !is_expanded { - break 'outer; - } - - remaining_tabs &= remaining_tabs - 1; + if fold_edits.is_empty() { + old_snapshot.version = new_version; + old_snapshot.fold_snapshot = fold_snapshot; + old_snapshot.tab_size = tab_size; + return (old_snapshot.clone(), vec![]); + } + // Expand each edit to include the next tab on the same line as the edit, + // and any subsequent tabs on that line that moved across the tab expansion + // boundary. + // + // This is necessary because a tab's display width depends on its column + // position: it expands to fill up to the next tab stop. When an edit + // shifts text on a line, any tab character after the edit may now render + // at a different width even though the tab byte itself wasn't touched. + // Additionally, tabs beyond `max_expansion_column` are rendered as a + // single space instead of expanding to the next tab stop. An edit that + // shifts a tab across that boundary changes its display width, so the + // edit must cover it. We scan forward from the edit end to the end of + // the line, extending the edit to include the first subsequent tab (whose + // rendered width may have changed) and the last tab that crossed the + // expansion boundary (transitioning between expanded and non-expanded). 
+ for fold_edit in &mut fold_edits { + let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); + let old_end_row_successor_offset = cmp::min( + FoldPoint::new(old_end.row() + 1, 0), + old_snapshot.fold_snapshot.max_point(), + ) + .to_offset(&old_snapshot.fold_snapshot); + let new_end = fold_edit.new.end.to_point(&fold_snapshot); + + let mut offset_from_edit = 0; + let mut first_tab_offset = None; + let mut last_tab_with_changed_expansion_offset = None; + 'outer: for chunk in old_snapshot.fold_snapshot.chunks( + fold_edit.old.end..old_end_row_successor_offset, + false, + Highlights::default(), + ) { + let mut remaining_tabs = chunk.tabs; + while remaining_tabs != 0 { + let ix = remaining_tabs.trailing_zeros(); + let offset_from_edit = offset_from_edit + ix; + if first_tab_offset.is_none() { + first_tab_offset = Some(offset_from_edit); } - offset_from_edit += chunk.text.len() as u32; - if old_end.column() + offset_from_edit >= old_snapshot.max_expansion_column - && new_end.column() + offset_from_edit >= new_snapshot.max_expansion_column - { - break; + let old_column = old_end.column() + offset_from_edit; + let new_column = new_end.column() + offset_from_edit; + let was_expanded = old_column < old_snapshot.max_expansion_column; + let is_expanded = new_column < old_snapshot.max_expansion_column; + if was_expanded != is_expanded { + last_tab_with_changed_expansion_offset = Some(offset_from_edit); + } else if !was_expanded && !is_expanded { + break 'outer; } + + remaining_tabs &= remaining_tabs - 1; } - if let Some(offset) = last_tab_with_changed_expansion_offset.or(first_tab_offset) { - fold_edit.old.end.0 += offset as usize + 1; - fold_edit.new.end.0 += offset as usize + 1; + offset_from_edit += chunk.text.len() as u32; + if old_end.column() + offset_from_edit >= old_snapshot.max_expansion_column + && new_end.column() + offset_from_edit >= old_snapshot.max_expansion_column + { + break; } } - let _old_alloc_ptr = fold_edits.as_ptr(); - // Combine any 
edits that overlap due to the expansion. - let mut fold_edits = fold_edits.into_iter(); - if let Some(mut first_edit) = fold_edits.next() { - // This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them. - #[allow(clippy::filter_map_identity)] - let mut v: Vec<_> = fold_edits - .scan(&mut first_edit, |state, edit| { - if state.old.end >= edit.old.start { - state.old.end = edit.old.end; - state.new.end = edit.new.end; - Some(None) // Skip this edit, it's merged - } else { - let new_state = edit; - let result = Some(Some(state.clone())); // Yield the previous edit - **state = new_state; - result - } - }) - .filter_map(|x| x) - .collect(); - v.push(first_edit); - debug_assert_eq!(v.as_ptr(), _old_alloc_ptr, "Fold edits were reallocated"); - v.into_iter() - .map(|fold_edit| { - let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot); - let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); - let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot); - let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); - TabEdit { - old: old_snapshot.fold_point_to_tab_point(old_start) - ..old_snapshot.fold_point_to_tab_point(old_end), - new: new_snapshot.fold_point_to_tab_point(new_start) - ..new_snapshot.fold_point_to_tab_point(new_end), - } - }) - .collect() - } else { - vec![] + if let Some(offset) = last_tab_with_changed_expansion_offset.or(first_tab_offset) { + fold_edit.old.end.0 += offset as usize + 1; + fold_edit.new.end.0 += offset as usize + 1; } - } else { - new_snapshot.version += 1; - vec![TabEdit { - old: TabPoint::zero()..old_snapshot.max_point(), - new: TabPoint::zero()..new_snapshot.max_point(), - }] + } + + let new_snapshot = TabSnapshot { + fold_snapshot, + tab_size, + max_expansion_column: old_snapshot.max_expansion_column, + version: new_version, }; + + let _old_alloc_ptr = fold_edits.as_ptr(); + // Combine any edits that overlap due to the 
expansion. + let mut fold_edits = fold_edits.into_iter(); + let mut first_edit = fold_edits.next().unwrap(); + // This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them. + #[allow(clippy::filter_map_identity)] + let mut v: Vec<_> = fold_edits + .scan(&mut first_edit, |state, edit| { + if state.old.end >= edit.old.start { + state.old.end = edit.old.end; + state.new.end = edit.new.end; + Some(None) // Skip this edit, it's merged + } else { + let new_state = edit; + let result = Some(Some(state.clone())); // Yield the previous edit + **state = new_state; + result + } + }) + .filter_map(|x| x) + .collect(); + v.push(first_edit); + debug_assert_eq!(v.as_ptr(), _old_alloc_ptr, "Fold edits were reallocated"); + let tab_edits = v + .into_iter() + .map(|fold_edit| { + let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot); + let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot); + let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot); + let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot); + TabEdit { + old: old_snapshot.fold_point_to_tab_point(old_start) + ..old_snapshot.fold_point_to_tab_point(old_end), + new: new_snapshot.fold_point_to_tab_point(new_start) + ..new_snapshot.fold_point_to_tab_point(new_end), + } + }) + .collect(); *old_snapshot = new_snapshot; (old_snapshot.clone(), tab_edits) } @@ -168,6 +193,8 @@ impl TabMap { pub struct TabSnapshot { pub fold_snapshot: FoldSnapshot, pub tab_size: NonZeroU32, + /// The maximum column up to which a tab can expand. + /// Any tab after this column will not expand. 
pub max_expansion_column: u32, pub version: usize, } @@ -365,10 +392,11 @@ impl TabSnapshot { } #[ztracing::instrument(skip_all)] - fn expand_tabs<'a, I>(&self, mut cursor: TabStopCursor<'a, I>, column: u32) -> u32 - where - I: Iterator>, - { + fn expand_tabs<'a>(&self, mut cursor: TabStopCursor<'a>, column: u32) -> u32 { + // we only ever act on a single row at a time + // so the main difference is that other layers build a transform sumtree, and can then just run through that + // we cant quite do this here, as we need to work with the previous layer chunk to understand the tabs of the corresponding row + // we can still do forward searches for this though, we search for a row, then traverse the column up to where we need to be let tab_size = self.tab_size.get(); let end_column = column.min(self.max_expansion_column); @@ -376,7 +404,7 @@ impl TabSnapshot { let mut tab_count = 0; let mut expanded_tab_len = 0; - while let Some(tab_stop) = cursor.seek(seek_target) { + while let Some(tab_stop) = cursor.seek_forward(seek_target) { let expanded_chars_old = tab_stop.char_offset + expanded_tab_len - tab_count; let tab_len = tab_size - ((expanded_chars_old - 1) % tab_size); tab_count += 1; @@ -399,22 +427,19 @@ impl TabSnapshot { } #[ztracing::instrument(skip_all)] - fn collapse_tabs<'a, I>( + fn collapse_tabs<'a>( &self, - mut cursor: TabStopCursor<'a, I>, + mut cursor: TabStopCursor<'a>, column: u32, bias: Bias, - ) -> (u32, u32, u32) - where - I: Iterator>, - { + ) -> (u32, u32, u32) { let tab_size = self.tab_size.get(); let mut collapsed_column = column; let mut seek_target = column.min(self.max_expansion_column); let mut tab_count = 0; let mut expanded_tab_len = 0; - while let Some(tab_stop) = cursor.seek(seek_target) { + while let Some(tab_stop) = cursor.seek_forward(seek_target) { // Calculate how much we want to expand this tab stop (into spaces) let expanded_chars_old = tab_stop.char_offset + expanded_tab_len - tab_count; let tab_len = tab_size - 
((expanded_chars_old - 1) % tab_size); @@ -617,13 +642,7 @@ impl<'a> Iterator for TabChunks<'a> { } } - let first_tab_ix = if self.chunk.tabs != 0 { - self.chunk.tabs.trailing_zeros() as usize - } else { - self.chunk.text.len() - }; - - if first_tab_ix == 0 { + if self.chunk.tabs & 1 != 0 { self.chunk.text = &self.chunk.text[1..]; self.chunk.tabs >>= 1; self.chunk.chars >>= 1; @@ -654,12 +673,46 @@ impl<'a> Iterator for TabChunks<'a> { }); } - let prefix_len = first_tab_ix; + // Fast path: no tabs in the remaining chunk, return it directly + if self.chunk.tabs == 0 { + let chunk = self.chunk.clone(); + self.chunk.text = ""; + self.chunk.tabs = 0; + self.chunk.chars = 0; + self.chunk.newlines = 0; + let chunk_len = chunk.text.len() as u32; + + let newline_count = chunk.newlines.count_ones(); + if newline_count > 0 { + let last_newline_bit = 128 - chunk.newlines.leading_zeros(); + let chars_after_last_newline = + chunk.chars.unbounded_shr(last_newline_bit).count_ones(); + let bytes_after_last_newline = chunk_len - last_newline_bit; + + self.column = chars_after_last_newline; + self.input_column = bytes_after_last_newline; + self.output_position = Point::new( + self.output_position.row + newline_count, + bytes_after_last_newline, + ); + } else { + let char_count = chunk.chars.count_ones(); + self.column += char_count; + if !self.inside_leading_tab { + self.input_column += chunk_len; + } + self.output_position.column += chunk_len; + } + + return Some(chunk); + } + + // Split at the next tab position + let prefix_len = self.chunk.tabs.trailing_zeros() as usize; let (prefix, suffix) = self.chunk.text.split_at(prefix_len); let mask = 1u128.unbounded_shl(prefix_len as u32).wrapping_sub(1); let prefix_chars = self.chunk.chars & mask; - let prefix_tabs = self.chunk.tabs & mask; let prefix_newlines = self.chunk.newlines & mask; self.chunk.text = suffix; @@ -692,13 +745,156 @@ impl<'a> Iterator for TabChunks<'a> { Some(Chunk { text: prefix, chars: prefix_chars, - tabs: 
prefix_tabs, + tabs: 0, newlines: prefix_newlines, ..self.chunk.clone() }) } } +struct TabStopCursor<'a> { + chunks: FoldChunks<'a>, + byte_offset: u32, + char_offset: u32, + /// Chunk + /// last tab position iterated through + current_chunk: Option<(TabStopChunk<'a>, u32)>, +} + +struct TabStopChunk<'a> { + chars: u128, + text: &'a str, + tabs: u128, +} + +impl<'a> TabStopCursor<'a> { + fn new(chunks: FoldChunks<'a>) -> Self { + Self { + chunks, + byte_offset: 0, + char_offset: 0, + current_chunk: None, + } + } + + fn bytes_until_next_char(&self) -> Option { + self.current_chunk.as_ref().map(|&(ref chunk, idx)| { + let higher_chars = chunk.chars.unbounded_shr(idx + 1); + + if higher_chars != 0 { + higher_chars.trailing_zeros() as usize + 1 + } else { + chunk.text.len() - idx as usize + } + }) + } + + fn is_char_boundary(&self) -> bool { + self.current_chunk + .as_ref() + .is_some_and(|&(ref chunk, idx)| { + (1u128.unbounded_shl(idx) & chunk.chars) != 0 || idx as usize == chunk.text.len() + }) + } + + /// distance: length to move forward while searching for the next tab stop + #[ztracing::instrument(skip_all)] + fn seek_forward(&mut self, distance: u32) -> Option { + if distance == 0 { + return None; + } + + let mut distance_remaining = distance; + + while let Some((mut chunk, chunk_position)) = self.current_chunk.take().or_else(|| { + self.chunks.next().map(|chunk| { + ( + TabStopChunk { + chars: chunk.chars, + text: chunk.text, + tabs: chunk.tabs, + }, + 0, + ) + }) + }) { + let chunk_len = chunk.text.len() as u32; + + if chunk.tabs == 0 { + let chunk_remaining = chunk_len - chunk_position; + if chunk_remaining >= distance_remaining { + let end = chunk_position + distance_remaining; + self.byte_offset += distance_remaining; + self.char_offset += + count_chars_in_byte_range(chunk_position..(end - 1), chunk.chars); + if end < 128 { + self.current_chunk = Some((chunk, end)); + } + return None; + } + + self.byte_offset += chunk_remaining; + self.char_offset += + 
count_chars_in_byte_range(chunk_position..(chunk_len - 1), chunk.chars); + distance_remaining -= chunk_remaining; + continue; + } + + let tab_end = chunk.tabs.trailing_zeros() + 1; + let bytes_to_tab = tab_end - chunk_position; + + if bytes_to_tab > distance_remaining { + let end = chunk_position + distance_remaining; + self.byte_offset += distance_remaining; + self.char_offset += + count_chars_in_byte_range(chunk_position..(end - 1), chunk.chars); + self.current_chunk = Some((chunk, end)); + return None; + } + + self.byte_offset += bytes_to_tab; + self.char_offset += + count_chars_in_byte_range(chunk_position..(tab_end - 1), chunk.chars); + + let tabstop = TabStop { + char_offset: self.char_offset, + byte_offset: self.byte_offset, + }; + + chunk.tabs = (chunk.tabs - 1) & chunk.tabs; + + if tab_end != chunk_len { + self.current_chunk = Some((chunk, tab_end)); + } + + return Some(tabstop); + } + + None + } + + fn byte_offset(&self) -> u32 { + self.byte_offset + } + + fn char_offset(&self) -> u32 { + self.char_offset + } +} + +#[inline(always)] +fn count_chars_in_byte_range(range: Range, bitmap: u128) -> u32 { + let low_mask = u128::MAX << range.start; + let high_mask = u128::MAX >> (127 - range.end); + (bitmap & low_mask & high_mask).count_ones() +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +struct TabStop { + char_offset: u32, + byte_offset: u32, +} + #[cfg(test)] mod tests { use std::mem; @@ -814,40 +1010,21 @@ mod tests { #[gpui::test] fn test_expand_tabs(cx: &mut gpui::App) { - let test_values = [ - ("κg🏀 f\nwo🏀❌by🍐❎β🍗c\tβ❎ \ncλ🎉", 17), - (" \twςe", 4), - ("fε", 1), - ("i❎\t", 3), - ]; - let buffer = MultiBuffer::build_simple("", cx); + let input = "A\tBC\tDEF\tG\tHI\tJ\tK\tL\tM"; + + let buffer = MultiBuffer::build_simple(input, cx); let buffer_snapshot = buffer.read(cx).snapshot(cx); let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot); let (_, fold_snapshot) = FoldMap::new(inlay_snapshot); let (_, tab_snapshot) = TabMap::new(fold_snapshot, 
4.try_into().unwrap()); - for (text, column) in test_values { - let mut tabs = 0u128; - let mut chars = 0u128; - for (idx, c) in text.char_indices() { - if c == '\t' { - tabs |= 1 << idx; - } - chars |= 1 << idx; - } - - let chunks = [Chunk { - text, - tabs, - chars, - ..Default::default() - }]; - - let cursor = TabStopCursor::new(chunks); + for (ix, _) in input.char_indices() { + let fold_point = FoldPoint::new(0, ix as u32); assert_eq!( - tab_snapshot.expected_expand_tabs(text.chars(), column), - tab_snapshot.expand_tabs(cursor, column) + tab_snapshot.expected_to_tab_point(fold_point), + tab_snapshot.fold_point_to_tab_point(fold_point), + "Failed with fold_point at column {ix}" ); } } @@ -1263,7 +1440,7 @@ mod tests { Default::default(), ); let mut cursor = TabStopCursor::new(chunks); - assert!(cursor.seek(0).is_none()); + assert!(cursor.seek_forward(0).is_none()); let mut tab_stops = Vec::new(); let mut all_tab_stops = Vec::new(); @@ -1279,7 +1456,7 @@ mod tests { } } - while let Some(tab_stop) = cursor.seek(u32::MAX) { + while let Some(tab_stop) = cursor.seek_forward(u32::MAX) { tab_stops.push(tab_stop); } pretty_assertions::assert_eq!(tab_stops.as_slice(), all_tab_stops.as_slice(),); @@ -1314,7 +1491,7 @@ mod tests { } } - while let Some(tab_stop) = cursor.seek(u32::MAX) { + while let Some(tab_stop) = cursor.seek_forward(u32::MAX) { actual_tab_stops.push(tab_stop); } pretty_assertions::assert_eq!(actual_tab_stops.as_slice(), expected_tab_stops.as_slice(),); @@ -1379,7 +1556,7 @@ mod tests { let mut found_tab_stops = Vec::new(); let mut position = distance; - while let Some(tab_stop) = cursor.seek(position) { + while let Some(tab_stop) = cursor.seek_forward(position) { found_tab_stops.push(tab_stop); position = distance - tab_stop.byte_offset; } @@ -1425,7 +1602,7 @@ mod tests { Default::default(), ); let mut cursor = TabStopCursor::new(chunks); - assert!(cursor.seek(0).is_none()); + assert!(cursor.seek_forward(0).is_none()); let mut expected_tab_stops = 
Vec::new(); let mut byte_offset = 0; @@ -1441,7 +1618,7 @@ mod tests { } let mut actual_tab_stops = Vec::new(); - while let Some(tab_stop) = cursor.seek(u32::MAX) { + while let Some(tab_stop) = cursor.seek_forward(u32::MAX) { actual_tab_stops.push(tab_stop); } @@ -1487,7 +1664,7 @@ mod tests { let mut found_tab_stops = Vec::new(); let mut position = distance; - while let Some(tab_stop) = cursor.seek(position) { + while let Some(tab_stop) = cursor.seek_forward(position) { found_tab_stops.push(tab_stop); position = distance - tab_stop.byte_offset; } @@ -1520,165 +1697,3 @@ mod tests { } } } - -struct TabStopCursor<'a, I> -where - I: Iterator>, -{ - chunks: I, - byte_offset: u32, - char_offset: u32, - /// Chunk - /// last tab position iterated through - current_chunk: Option<(Chunk<'a>, u32)>, -} - -impl<'a, I> TabStopCursor<'a, I> -where - I: Iterator>, -{ - #[ztracing::instrument(skip_all)] - fn new(chunks: impl IntoIterator, IntoIter = I>) -> Self { - Self { - chunks: chunks.into_iter(), - byte_offset: 0, - char_offset: 0, - current_chunk: None, - } - } - - #[ztracing::instrument(skip_all)] - fn bytes_until_next_char(&self) -> Option { - self.current_chunk.as_ref().and_then(|(chunk, idx)| { - let mut idx = *idx; - let mut diff = 0; - while idx > 0 && chunk.chars & (1u128.unbounded_shl(idx)) == 0 { - idx -= 1; - diff += 1; - } - - if chunk.chars & (1 << idx) != 0 { - Some( - (chunk.text[idx as usize..].chars().next()?) 
- .len_utf8() - .saturating_sub(diff), - ) - } else { - None - } - }) - } - - #[ztracing::instrument(skip_all)] - fn is_char_boundary(&self) -> bool { - self.current_chunk - .as_ref() - .is_some_and(|(chunk, idx)| (chunk.chars & 1u128.unbounded_shl(*idx)) != 0) - } - - /// distance: length to move forward while searching for the next tab stop - #[ztracing::instrument(skip_all)] - fn seek(&mut self, distance: u32) -> Option { - if distance == 0 { - return None; - } - - let mut distance_traversed = 0; - - while let Some((mut chunk, chunk_position)) = self - .current_chunk - .take() - .or_else(|| self.chunks.next().zip(Some(0))) - { - if chunk.tabs == 0 { - let chunk_distance = chunk.text.len() as u32 - chunk_position; - if chunk_distance + distance_traversed >= distance { - let overshoot = distance_traversed.abs_diff(distance); - - self.byte_offset += overshoot; - self.char_offset += get_char_offset( - chunk_position..(chunk_position + overshoot).saturating_sub(1), - chunk.chars, - ); - - if chunk_position + overshoot < 128 { - self.current_chunk = Some((chunk, chunk_position + overshoot)); - } - - return None; - } - - self.byte_offset += chunk_distance; - self.char_offset += get_char_offset( - chunk_position..(chunk_position + chunk_distance).saturating_sub(1), - chunk.chars, - ); - distance_traversed += chunk_distance; - continue; - } - let tab_position = chunk.tabs.trailing_zeros() + 1; - - if distance_traversed + tab_position - chunk_position > distance { - let cursor_position = distance_traversed.abs_diff(distance); - - self.char_offset += get_char_offset( - chunk_position..(chunk_position + cursor_position - 1), - chunk.chars, - ); - self.current_chunk = Some((chunk, cursor_position + chunk_position)); - self.byte_offset += cursor_position; - - return None; - } - - self.byte_offset += tab_position - chunk_position; - self.char_offset += get_char_offset(chunk_position..(tab_position - 1), chunk.chars); - - let tabstop = TabStop { - char_offset: self.char_offset, 
- byte_offset: self.byte_offset, - }; - - chunk.tabs = (chunk.tabs - 1) & chunk.tabs; - - if tab_position as usize != chunk.text.len() { - self.current_chunk = Some((chunk, tab_position)); - } - - return Some(tabstop); - } - - None - } - - fn byte_offset(&self) -> u32 { - self.byte_offset - } - - fn char_offset(&self) -> u32 { - self.char_offset - } -} - -#[inline(always)] -fn get_char_offset(range: Range, bit_map: u128) -> u32 { - if range.start == range.end { - return if (1u128 << range.start) & bit_map == 0 { - 0 - } else { - 1 - }; - } - let end_shift: u128 = 127u128 - range.end as u128; - let mut bit_mask = (u128::MAX >> range.start) << range.start; - bit_mask = (bit_mask << end_shift) >> end_shift; - let bit_map = bit_map & bit_mask; - - bit_map.count_ones() -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -struct TabStop { - char_offset: u32, - byte_offset: u32, -} diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 6820d598680de438f9244195acca0bcd5ff7476f..25bc9996604773bd67964dcb9f5196c41df6cdce 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3626,7 +3626,7 @@ impl Editor { self.refresh_document_highlights(cx); refresh_linked_ranges(self, window, cx); - self.refresh_selected_text_highlights(false, window, cx); + self.refresh_selected_text_highlights(&display_map, false, window, cx); self.refresh_matching_bracket_highlights(&display_map, cx); self.refresh_outline_symbols_at_cursor(cx); self.update_visible_edit_prediction(window, cx); @@ -7500,7 +7500,7 @@ impl Editor { fn prepare_highlight_query_from_selection( &mut self, - window: &Window, + snapshot: &DisplaySnapshot, cx: &mut Context, ) -> Option<(String, Range)> { if matches!(self.mode, EditorMode::SingleLine) { @@ -7512,7 +7512,6 @@ impl Editor { if self.selections.count() != 1 || self.selections.line_mode() { return None; } - let snapshot = self.snapshot(window, cx); let selection = self.selections.newest::(&snapshot); // If the selection spans 
multiple rows OR it is empty if selection.start.row != selection.end.row @@ -7534,6 +7533,7 @@ impl Editor { #[ztracing::instrument(skip_all)] fn update_selection_occurrence_highlights( &mut self, + multi_buffer_snapshot: MultiBufferSnapshot, query_text: String, query_range: Range, multi_buffer_range_to_query: Range, @@ -7541,7 +7541,6 @@ impl Editor { window: &mut Window, cx: &mut Context, ) -> Task<()> { - let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); cx.spawn_in(window, async move |editor, cx| { if use_debounce { cx.background_executor() @@ -7557,7 +7556,7 @@ impl Editor { .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()); let mut match_ranges = Vec::new(); let Ok(regex) = project::search::SearchQuery::text( - query_text.clone(), + query_text, false, false, false, @@ -7719,12 +7718,13 @@ impl Editor { #[ztracing::instrument(skip_all)] fn refresh_selected_text_highlights( &mut self, + snapshot: &DisplaySnapshot, on_buffer_edit: bool, window: &mut Window, cx: &mut Context, ) { let Some((query_text, query_range)) = - self.prepare_highlight_query_from_selection(window, cx) + self.prepare_highlight_query_from_selection(snapshot, cx) else { self.clear_background_highlights(HighlightKey::SelectedTextHighlight, cx); self.quick_selection_highlight_task.take(); @@ -7756,6 +7756,7 @@ impl Editor { self.quick_selection_highlight_task = Some(( query_range.clone(), self.update_selection_occurrence_highlights( + snapshot.buffer.clone(), query_text.clone(), query_range.clone(), multi_buffer_visible_range, @@ -7781,6 +7782,7 @@ impl Editor { self.debounced_selection_highlight_task = Some(( query_range.clone(), self.update_selection_occurrence_highlights( + snapshot.buffer.clone(), query_text, query_range, multi_buffer_full_range, @@ -24107,7 +24109,7 @@ impl Editor { self.update_lsp_data(Some(buffer_id), window, cx); self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); self.colorize_brackets(false, cx); - 
self.refresh_selected_text_highlights(true, window, cx); + self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); cx.emit(EditorEvent::ExcerptsAdded { buffer: buffer.clone(), predecessor: *predecessor, @@ -24166,7 +24168,7 @@ impl Editor { } multi_buffer::Event::Reparsed(buffer_id) => { self.tasks_update_task = Some(self.refresh_runnables(window, cx)); - self.refresh_selected_text_highlights(true, window, cx); + self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); self.colorize_brackets(true, cx); jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); From 6a63387790cae0a86722d6afa7c03d9bcdc8faa7 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Mon, 2 Mar 2026 10:27:01 +0100 Subject: [PATCH 205/548] extension_ci: Add formatting check for Tree-sitter queries (#50318) This rolls out the formatting check to extensions also. Release Notes: - N/A --- .github/workflows/extension_tests.yml | 15 ++++++ .github/workflows/run_tests.yml | 4 +- .../src/tasks/workflows/extension_tests.rs | 6 ++- .../xtask/src/tasks/workflows/run_tests.rs | 46 +++++++++---------- 4 files changed, 45 insertions(+), 26 deletions(-) diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index ef0e28715ce038c6ca9e38d4126b20e2276ce3c2..5160aba2869b1a3234c686a6508460784b0536b1 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -109,6 +109,21 @@ jobs: mkdir -p /tmp/ext-scratch mkdir -p /tmp/ext-output ./zed-extension --source-dir . 
--scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output + - name: run_tests::fetch_ts_query_ls + uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c + with: + repo: ribru17/ts_query_ls + version: tags/v3.15.1 + file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + - name: run_tests::run_ts_query_ls + run: |- + tar -xf ts_query_ls-x86_64-unknown-linux-gnu.tar.gz + ./ts_query_ls format --check . || { + echo "Found unformatted queries, please format them with ts_query_ls." + echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + } - id: compare-versions-check name: extension_bump::compare_versions run: | diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 96c763045cb75906e613744fcfb13764f617a278..29f888cbb596593052c6adebe2341171eac9055d 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -139,13 +139,13 @@ jobs: uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06 with: config: ./typos.toml - - name: run_tests::check_style::fetch_ts_query_ls + - name: run_tests::fetch_ts_query_ls uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c with: repo: ribru17/ts_query_ls version: tags/v3.15.1 file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz - - name: run_tests::check_style::run_ts_query_ls + - name: run_tests::run_ts_query_ls run: |- tar -xf ts_query_ls-x86_64-unknown-linux-gnu.tar.gz ./ts_query_ls format --check . 
|| { diff --git a/tooling/xtask/src/tasks/workflows/extension_tests.rs b/tooling/xtask/src/tasks/workflows/extension_tests.rs index a650013bacfcfc1ac89a60ccfe8674a5621fb1c7..de4f1dd94267a55dcc3e1555c1a5673ff813ad26 100644 --- a/tooling/xtask/src/tasks/workflows/extension_tests.rs +++ b/tooling/xtask/src/tasks/workflows/extension_tests.rs @@ -3,7 +3,9 @@ use indoc::{formatdoc, indoc}; use crate::tasks::workflows::{ extension_bump::compare_versions, - run_tests::{orchestrate_without_package_filter, tests_pass}, + run_tests::{ + fetch_ts_query_ls, orchestrate_without_package_filter, run_ts_query_ls, tests_pass, + }, runners, steps::{ self, CommonJobConditions, FluentBuilder, NamedJob, cache_rust_dependencies_namespace, @@ -94,6 +96,8 @@ pub(crate) fn check_extension() -> NamedJob { .add_step(download_zed_extension_cli(cache_hit)) .add_step(cache_rust_dependencies_namespace()) // Extensions can compile Rust, so provide the cache if needed. .add_step(check()) + .add_step(fetch_ts_query_ls()) + .add_step(run_ts_query_ls()) .add_step(check_version_job) .add_step(verify_version_did_not_change(version_changed)); diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index 4130e53f724847b3f1bef5bf083f782cc7e9e0dc..d617dda5af0ad51d0e86cfeeb69a035a53c07663 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -271,6 +271,29 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob { const TS_QUERY_LS_FILE: &str = "ts_query_ls-x86_64-unknown-linux-gnu.tar.gz"; const CI_TS_QUERY_RELEASE: &str = "tags/v3.15.1"; +pub(crate) fn fetch_ts_query_ls() -> Step { + named::uses( + "dsaltares", + "fetch-gh-release-asset", + "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c", + ) // v1.1.1 + .add_with(("repo", "ribru17/ts_query_ls")) + .add_with(("version", CI_TS_QUERY_RELEASE)) + .add_with(("file", TS_QUERY_LS_FILE)) +} + +pub(crate) fn run_ts_query_ls() -> Step { + named::bash(formatdoc!( 
+ r#"tar -xf {TS_QUERY_LS_FILE} + ./ts_query_ls format --check . || {{ + echo "Found unformatted queries, please format them with ts_query_ls." + echo "For easy use, install the Tree-sitter query extension:" + echo "zed://extension/tree-sitter-query" + false + }}"# + )) +} + fn check_style() -> NamedJob { fn check_for_typos() -> Step { named::uses( @@ -281,29 +304,6 @@ fn check_style() -> NamedJob { .with(("config", "./typos.toml")) } - fn fetch_ts_query_ls() -> Step { - named::uses( - "dsaltares", - "fetch-gh-release-asset", - "aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c", - ) // v1.1.1 - .add_with(("repo", "ribru17/ts_query_ls")) - .add_with(("version", CI_TS_QUERY_RELEASE)) - .add_with(("file", TS_QUERY_LS_FILE)) - } - - fn run_ts_query_ls() -> Step { - named::bash(formatdoc!( - r#"tar -xf {TS_QUERY_LS_FILE} - ./ts_query_ls format --check . || {{ - echo "Found unformatted queries, please format them with ts_query_ls." - echo "For easy use, install the Tree-sitter query extension:" - echo "zed://extension/tree-sitter-query" - false - }}"# - )) - } - named::job( release_job(&[]) .runs_on(runners::LINUX_MEDIUM) From 6a14388d385ae59f528bb54db0c10d2551cc1348 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Mon, 2 Mar 2026 10:33:48 +0100 Subject: [PATCH 206/548] languages: Add support for passing user settings to Go LSP adapter (#50472) Closes https://github.com/zed-industries/zed/issues/50276 Release Notes: - Added support for specifying settings for the Go LSP adapter --- crates/language/src/language.rs | 2 + .../src/extension_lsp_adapter.rs | 1 + crates/languages/src/css.rs | 1 + crates/languages/src/go.rs | 38 ++++++++++++++++--- crates/languages/src/json.rs | 1 + crates/languages/src/python.rs | 2 + crates/languages/src/tailwind.rs | 1 + crates/languages/src/tailwindcss.rs | 1 + crates/languages/src/typescript.rs | 1 + crates/project/src/lsp_store.rs | 5 ++- 10 files changed, 47 insertions(+), 6 deletions(-) diff --git a/crates/language/src/language.rs 
b/crates/language/src/language.rs index fd14f42a93179ae0423f5acfa6ede3cceec94935..fe5c5d09aa0765e2c305d88c65e86d6832443b1e 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -491,6 +491,7 @@ pub trait LspAdapter: 'static + Send + Sync + DynLspInstaller { async fn initialization_options( self: Arc, _: &Arc, + _cx: &mut AsyncApp, ) -> Result> { Ok(None) } @@ -2638,6 +2639,7 @@ impl LspAdapter for FakeLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _cx: &mut AsyncApp, ) -> Result> { Ok(self.initialization_options.clone()) } diff --git a/crates/language_extension/src/extension_lsp_adapter.rs b/crates/language_extension/src/extension_lsp_adapter.rs index c2062a294d75657b1421982974019454ecba4aa3..6f5300991fd8afbfaba710ed2bde068dd4d3a969 100644 --- a/crates/language_extension/src/extension_lsp_adapter.rs +++ b/crates/language_extension/src/extension_lsp_adapter.rs @@ -309,6 +309,7 @@ impl LspAdapter for ExtensionLspAdapter { async fn initialization_options( self: Arc, delegate: &Arc, + _: &mut AsyncApp, ) -> Result> { let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _; let json_options = self diff --git a/crates/languages/src/css.rs b/crates/languages/src/css.rs index c5c89a0c66431380cf9f500a23b74a19230f3046..6a8fb730a0faa6430d252cdd189d0620fcd07e4a 100644 --- a/crates/languages/src/css.rs +++ b/crates/languages/src/css.rs @@ -134,6 +134,7 @@ impl LspAdapter for CssLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true diff --git a/crates/languages/src/go.rs b/crates/languages/src/go.rs index abcb890566d9c0d0d6d9fe85b565c74825775250..581159503ce8aaf642b62789cb895858f1f963c2 100644 --- a/crates/languages/src/go.rs +++ b/crates/languages/src/go.rs @@ -8,8 +8,9 @@ pub use language::*; use language::{LanguageToolchainStore, LspAdapterDelegate, LspInstaller}; use lsp::{LanguageServerBinary, LanguageServerName}; +use 
project::lsp_store::language_server_settings; use regex::Regex; -use serde_json::json; +use serde_json::{Value, json}; use smol::fs; use std::{ borrow::Cow, @@ -24,7 +25,7 @@ use std::{ }, }; use task::{TaskTemplate, TaskTemplates, TaskVariables, VariableName}; -use util::{ResultExt, fs::remove_matching, maybe}; +use util::{ResultExt, fs::remove_matching, maybe, merge_json_value_into}; fn server_binary_arguments() -> Vec { vec!["-mode=stdio".into()] @@ -192,9 +193,10 @@ impl LspAdapter for GoLspAdapter { async fn initialization_options( self: Arc, - _: &Arc, + delegate: &Arc, + cx: &mut AsyncApp, ) -> Result> { - Ok(Some(json!({ + let mut default_config = json!({ "usePlaceholders": false, "hints": { "assignVariableTypes": true, @@ -205,7 +207,33 @@ impl LspAdapter for GoLspAdapter { "parameterNames": true, "rangeVariableTypes": true } - }))) + }); + + let project_initialization_options = cx.update(|cx| { + language_server_settings(delegate.as_ref(), &self.name(), cx) + .and_then(|s| s.initialization_options.clone()) + }); + + if let Some(override_options) = project_initialization_options { + merge_json_value_into(override_options, &mut default_config); + } + + Ok(Some(default_config)) + } + + async fn workspace_configuration( + self: Arc, + delegate: &Arc, + _: Option, + _: Option, + cx: &mut AsyncApp, + ) -> Result { + Ok(cx + .update(|cx| { + language_server_settings(delegate.as_ref(), &self.name(), cx) + .and_then(|settings| settings.settings.clone()) + }) + .unwrap_or_default()) } async fn label_for_completion( diff --git a/crates/languages/src/json.rs b/crates/languages/src/json.rs index b6c3954cf228d90714a5eb5676d86a204b47b88d..3d8ba972eb17b0fe7f9d5070b73a4fb9e94adef3 100644 --- a/crates/languages/src/json.rs +++ b/crates/languages/src/json.rs @@ -245,6 +245,7 @@ impl LspAdapter for JsonLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true diff --git 
a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 9eaf9764f100428b4bbbc80238f7da5847001470..722f4bb795ea857a9d399ef5b291beb8503f1c92 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -507,6 +507,7 @@ impl LspAdapter for PyrightLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { // Provide minimal initialization options // Virtual environment configuration will be handled through workspace configuration @@ -1972,6 +1973,7 @@ impl LspAdapter for BasedPyrightLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { // Provide minimal initialization options // Virtual environment configuration will be handled through workspace configuration diff --git a/crates/languages/src/tailwind.rs b/crates/languages/src/tailwind.rs index 72e4684ce0a0242e5381c118a9748e3d9718341d..a74275af9631eea603cc957d44867d7d53327682 100644 --- a/crates/languages/src/tailwind.rs +++ b/crates/languages/src/tailwind.rs @@ -139,6 +139,7 @@ impl LspAdapter for TailwindLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true, diff --git a/crates/languages/src/tailwindcss.rs b/crates/languages/src/tailwindcss.rs index 016c2956591a5140ab4b2d8313711382fee47d30..aa310fac3f57477b9c0ef85f24f51e619a893c87 100644 --- a/crates/languages/src/tailwindcss.rs +++ b/crates/languages/src/tailwindcss.rs @@ -135,6 +135,7 @@ impl LspAdapter for TailwindCssLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { Ok(Some(json!({ "provideFormatter": true diff --git a/crates/languages/src/typescript.rs b/crates/languages/src/typescript.rs index 2b2fb19c629f85c6b51eba64d154b43e716f6827..d15d01808137dd171cc7ee0ab440671bf58cac52 100644 --- a/crates/languages/src/typescript.rs +++ b/crates/languages/src/typescript.rs @@ -804,6 +804,7 @@ impl LspAdapter 
for TypeScriptLspAdapter { async fn initialization_options( self: Arc, adapter: &Arc, + _: &mut AsyncApp, ) -> Result> { let tsdk_path = self.tsdk_path(adapter).await; Ok(Some(json!({ diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index e2fab975cf455677ff0c92c2902151cc6712b6e0..5a5a13e9cbb4b17f333d29cedd16b1fe6366d204 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -548,6 +548,7 @@ impl LocalLspStore { let mut initialization_options = Self::initialization_options_for_adapter( adapter.adapter.clone(), &delegate, + cx, ) .await?; @@ -3771,9 +3772,10 @@ impl LocalLspStore { async fn initialization_options_for_adapter( adapter: Arc, delegate: &Arc, + cx: &mut AsyncApp, ) -> Result> { let Some(mut initialization_config) = - adapter.clone().initialization_options(delegate).await? + adapter.clone().initialization_options(delegate, cx).await? else { return Ok(None); }; @@ -13986,6 +13988,7 @@ impl LspAdapter for SshLspAdapter { async fn initialization_options( self: Arc, _: &Arc, + _: &mut AsyncApp, ) -> Result> { let Some(options) = &self.initialization_options else { return Ok(None); From e80a456eb042facca0a2cb1fd93631f589d401db Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 2 Mar 2026 11:05:00 +0100 Subject: [PATCH 207/548] agent: Do not include thinking blocks in subagent output (#50473) Release Notes: - N/A Co-authored-by: Ben Brandt --- crates/agent/src/agent.rs | 17 +++- crates/agent/src/tests/mod.rs | 154 ++++++++++++++++++++++++++++++++++ 2 files changed, 170 insertions(+), 1 deletion(-) diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 8de0aaee0c05c07e0b3c86a1b7570a1a61dc5332..d468d529c5ac672600d5280a11a45f4f6ad1b2f9 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -14,6 +14,7 @@ mod tools; use context_server::ContextServerId; pub use db::*; +use itertools::Itertools; pub use native_agent_server::NativeAgentServer; pub use 
pattern_extraction::*; pub use shell_command_parser::extract_commands; @@ -1819,7 +1820,21 @@ impl SubagentHandle for NativeSubagentHandle { SubagentPromptResult::Completed => thread.read_with(cx, |thread, _cx| { thread .last_message() - .map(|m| m.to_markdown()) + .and_then(|message| { + let content = message.as_agent_message()? + .content + .iter() + .filter_map(|c| match c { + AgentMessageContent::Text(text) => Some(text.as_str()), + _ => None, + }) + .join("\n\n"); + if content.is_empty() { + None + } else { + Some(content) + } + }) .context("No response from subagent") }), SubagentPromptResult::Cancelled => Err(anyhow!("User canceled")), diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 5262414631c7f1f329a7de941424e0a0dfa8b1b9..f62edb091463860f5fcf105f2383c352660166a6 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -4294,6 +4294,160 @@ async fn test_subagent_tool_call_end_to_end(cx: &mut TestAppContext) { subagent task response + ## Assistant + + Response + + "#}, + ); +} + +#[gpui::test] +async fn test_subagent_tool_output_does_not_include_thinking(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + LanguageModelRegistry::test(cx); + }); + cx.update(|cx| { + cx.update_flags(true, vec!["subagents".to_string()]); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/", + json!({ + "a": { + "b.md": "Lorem" + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let agent = NativeAgent::new( + project.clone(), + thread_store.clone(), + Templates::new(), + None, + fs.clone(), + &mut cx.to_async(), + ) + .await + .unwrap(); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection + .clone() + .new_session(project.clone(), Path::new(""), cx) + }) + .await + .unwrap(); + let session_id = acp_thread.read_with(cx, 
|thread, _| thread.session_id().clone()); + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + let model = Arc::new(FakeLanguageModel::default()); + + // Ensure empty threads are not saved, even if they get mutated. + thread.update(cx, |thread, cx| { + thread.set_model(model.clone(), cx); + }); + cx.run_until_parked(); + + let send = acp_thread.update(cx, |thread, cx| thread.send_raw("Prompt", cx)); + cx.run_until_parked(); + model.send_last_completion_stream_text_chunk("spawning subagent"); + let subagent_tool_input = SpawnAgentToolInput { + label: "label".to_string(), + message: "subagent task prompt".to_string(), + session_id: None, + }; + let subagent_tool_use = LanguageModelToolUse { + id: "subagent_1".into(), + name: SpawnAgentTool::NAME.into(), + raw_input: serde_json::to_string(&subagent_tool_input).unwrap(), + input: serde_json::to_value(&subagent_tool_input).unwrap(), + is_input_complete: true, + thought_signature: None, + }; + model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + subagent_tool_use, + )); + model.end_last_completion_stream(); + + cx.run_until_parked(); + + let subagent_session_id = thread.read_with(cx, |thread, cx| { + thread + .running_subagent_ids(cx) + .get(0) + .expect("subagent thread should be running") + .clone() + }); + + let subagent_thread = agent.read_with(cx, |agent, _cx| { + agent + .sessions + .get(&subagent_session_id) + .expect("subagent session should exist") + .acp_thread + .clone() + }); + + model.send_last_completion_stream_text_chunk("subagent task response 1"); + model.send_last_completion_stream_event(LanguageModelCompletionEvent::Thinking { + text: "thinking more about the subagent task".into(), + signature: None, + }); + model.send_last_completion_stream_text_chunk("subagent task response 2"); + model.end_last_completion_stream(); + + cx.run_until_parked(); + + assert_eq!( + subagent_thread.read_with(cx, |thread, cx| 
thread.to_markdown(cx)), + indoc! {" + ## User + + subagent task prompt + + ## Assistant + + subagent task response 1 + + + thinking more about the subagent task + + + subagent task response 2 + + "} + ); + + model.send_last_completion_stream_text_chunk("Response"); + model.end_last_completion_stream(); + + send.await.unwrap(); + + assert_eq!( + acp_thread.read_with(cx, |thread, cx| thread.to_markdown(cx)), + indoc! {r#" + ## User + + Prompt + + ## Assistant + + spawning subagent + + **Tool Call: label** + Status: Completed + + subagent task response 1 + + subagent task response 2 ## Assistant From 4052f98bd8098691eb0ed1ea978445581dd1d5ef Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 2 Mar 2026 11:26:29 +0100 Subject: [PATCH 208/548] agent: Fix subagent permission prompt showing up in multiple cards (#50475) Release Notes: - N/A --- crates/agent_ui/src/connection_view/thread_view.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 9f38ba9ba778b6c23f7a1ee4adecea501c98bfdb..5b40a8a9ae76728eabbf6d45b926407a3911c3da 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -6381,7 +6381,9 @@ impl ThreadView { let is_running = matches!( tool_call.status, - ToolCallStatus::Pending | ToolCallStatus::InProgress + ToolCallStatus::Pending + | ToolCallStatus::InProgress + | ToolCallStatus::WaitingForConfirmation { .. 
} ); let is_failed = matches!( @@ -6596,7 +6598,7 @@ impl ThreadView { .read(cx) .pending_tool_call(thread.read(cx).session_id(), cx); - if let Some((_, subagent_tool_call_id, _)) = pending_tool_call { + if is_running && let Some((_, subagent_tool_call_id, _)) = pending_tool_call { if let Some((entry_ix, tool_call)) = thread.read(cx).tool_call(&subagent_tool_call_id) { From c05e5859c688a8eeecedace728be4e31172cca09 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 2 Mar 2026 12:04:13 +0100 Subject: [PATCH 209/548] Detect leaked entities at the end of test runs (#50400) This does not yet allow for finding task <-> entity cycles unfortunately, but at least it does catch entity <-> entity cycles for the time being Release Notes: - N/A *or* Added/Fixed/Improved ... --- .../integration/randomized_test_helpers.rs | 16 +- .../incoming_call_notification.rs | 8 + crates/editor/src/editor.rs | 73 ++++-- crates/editor/src/semantic_tokens.rs | 5 +- crates/editor/src/test.rs | 2 - crates/git_ui/src/branch_picker.rs | 14 +- crates/gpui/src/app.rs | 31 +++ crates/gpui/src/app/async_context.rs | 14 +- crates/gpui/src/app/entity_map.rs | 226 ++++++++++++++++-- crates/gpui/src/app/test_context.rs | 18 +- crates/gpui/src/gpui.rs | 2 +- crates/gpui/src/platform/test/dispatcher.rs | 4 + crates/gpui_macros/src/test.rs | 41 +++- crates/project/src/agent_server_store.rs | 3 +- crates/project/src/buffer_store.rs | 1 - crates/project/src/project_settings.rs | 62 ++--- crates/recent_projects/src/remote_servers.rs | 4 + crates/scheduler/src/executor.rs | 5 +- crates/scheduler/src/test_scheduler.rs | 22 ++ 19 files changed, 446 insertions(+), 105 deletions(-) diff --git a/crates/collab/tests/integration/randomized_test_helpers.rs b/crates/collab/tests/integration/randomized_test_helpers.rs index e3e4a122d1df069385ef850aeccaa4c5788d253d..a6772019768ba19e2a92843a1e33b256f0eb8b0c 100644 --- a/crates/collab/tests/integration/randomized_test_helpers.rs +++ 
b/crates/collab/tests/integration/randomized_test_helpers.rs @@ -180,6 +180,13 @@ pub async fn run_randomized_test( T::on_quiesce(&mut server, &mut clients).await; for (client, cx) in clients { + cx.update(|cx| { + for window in cx.windows() { + window + .update(cx, |_, window, _| window.remove_window()) + .ok(); + } + }); cx.update(|cx| { let settings = cx.remove_global::(); cx.clear_globals(); @@ -187,8 +194,8 @@ pub async fn run_randomized_test( theme::init(theme::LoadThemes::JustBase, cx); drop(client); }); + executor.run_until_parked(); } - executor.run_until_parked(); if let Some(path) = plan_save_path() { eprintln!("saved test plan to path {:?}", path); @@ -556,6 +563,13 @@ impl TestPlan { log::info!("{} removed", client.username); plan.lock().user(removed_user_id).online = false; + client_cx.update(|cx| { + for window in cx.windows() { + window + .update(cx, |_, window, _| window.remove_window()) + .ok(); + } + }); client_cx.update(|cx| { cx.clear_globals(); drop(client); diff --git a/crates/collab_ui/src/notifications/incoming_call_notification.rs b/crates/collab_ui/src/notifications/incoming_call_notification.rs index aabb477072c97f829ab64971488ab66d2f6a79e4..164b91395a8853c330e2f7842b5676fff0916e63 100644 --- a/crates/collab_ui/src/notifications/incoming_call_notification.rs +++ b/crates/collab_ui/src/notifications/incoming_call_notification.rs @@ -42,6 +42,14 @@ pub fn init(app_state: &Arc, cx: &mut App) { } } } + + for window in notification_windows.drain(..) 
{ + window + .update(cx, |_, window, _| { + window.remove_window(); + }) + .log_err(); + } }) .detach(); } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 25bc9996604773bd67964dcb9f5196c41df6cdce..daeb355b048d649d638a8830bdf3d367ea9cd40b 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2398,7 +2398,9 @@ impl Editor { diagnostics_max_severity, hard_wrap: None, completion_provider: project.clone().map(|project| Rc::new(project) as _), - semantics_provider: project.clone().map(|project| Rc::new(project) as _), + semantics_provider: project + .as_ref() + .map(|project| Rc::new(project.downgrade()) as _), collaboration_hub: project.clone().map(|project| Box::new(project) as _), project, blink_manager: blink_manager.clone(), @@ -23944,7 +23946,7 @@ impl Editor { } pub fn refresh_inline_values(&mut self, cx: &mut Context) { - let Some(project) = self.project.clone() else { + let Some(semantics) = self.semantics_provider.clone() else { return; }; @@ -23979,7 +23981,7 @@ impl Editor { let range = buffer.read(cx).anchor_before(0)..current_execution_position.text_anchor; - project.inline_values(buffer, range, cx) + semantics.inline_values(buffer, range, cx) }) .ok() .flatten()? 
@@ -26808,7 +26810,7 @@ pub trait SemanticsProvider { buffer: Entity, refresh: Option, cx: &mut App, - ) -> Shared>>>; + ) -> Option>>>>; fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool; @@ -27290,14 +27292,15 @@ impl CompletionProvider for Entity { } } -impl SemanticsProvider for Entity { +impl SemanticsProvider for WeakEntity { fn hover( &self, buffer: &Entity, position: text::Anchor, cx: &mut App, ) -> Option>>> { - Some(self.update(cx, |project, cx| project.hover(buffer, position, cx))) + self.update(cx, |project, cx| project.hover(buffer, position, cx)) + .ok() } fn document_highlights( @@ -27306,9 +27309,10 @@ impl SemanticsProvider for Entity { position: text::Anchor, cx: &mut App, ) -> Option>>> { - Some(self.update(cx, |project, cx| { + self.update(cx, |project, cx| { project.document_highlights(buffer, position, cx) - })) + }) + .ok() } fn definitions( @@ -27318,12 +27322,13 @@ impl SemanticsProvider for Entity { kind: GotoDefinitionKind, cx: &mut App, ) -> Option>>>> { - Some(self.update(cx, |project, cx| match kind { + self.update(cx, |project, cx| match kind { GotoDefinitionKind::Symbol => project.definitions(buffer, position, cx), GotoDefinitionKind::Declaration => project.declarations(buffer, position, cx), GotoDefinitionKind::Type => project.type_definitions(buffer, position, cx), GotoDefinitionKind::Implementation => project.implementations(buffer, position, cx), - })) + }) + .ok() } fn supports_inlay_hints(&self, buffer: &Entity, cx: &mut App) -> bool { @@ -27339,6 +27344,7 @@ impl SemanticsProvider for Entity { project.any_language_server_supports_inlay_hints(buffer, cx) }) }) + .unwrap_or(false) } fn supports_semantic_tokens(&self, buffer: &Entity, cx: &mut App) -> bool { @@ -27347,6 +27353,7 @@ impl SemanticsProvider for Entity { project.any_language_server_supports_semantic_tokens(buffer, cx) }) }) + .unwrap_or(false) } fn inline_values( @@ -27360,6 +27367,8 @@ impl SemanticsProvider for Entity { 
Some(project.inline_values(session, active_stack_frame, buffer_handle, range, cx)) }) + .ok() + .flatten() } fn applicable_inlay_chunks( @@ -27368,15 +27377,21 @@ impl SemanticsProvider for Entity { ranges: &[Range], cx: &mut App, ) -> Vec> { - self.read(cx).lsp_store().update(cx, |lsp_store, cx| { - lsp_store.applicable_inlay_chunks(buffer, ranges, cx) + self.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.applicable_inlay_chunks(buffer, ranges, cx) + }) }) + .unwrap_or_default() } fn invalidate_inlay_hints(&self, for_buffers: &HashSet, cx: &mut App) { - self.read(cx).lsp_store().update(cx, |lsp_store, _| { - lsp_store.invalidate_inlay_hints(for_buffers) - }); + self.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, _| { + lsp_store.invalidate_inlay_hints(for_buffers) + }) + }) + .ok(); } fn inlay_hints( @@ -27387,9 +27402,12 @@ impl SemanticsProvider for Entity { known_chunks: Option<(clock::Global, HashSet>)>, cx: &mut App, ) -> Option, Task>>> { - Some(self.read(cx).lsp_store().update(cx, |lsp_store, cx| { - lsp_store.inlay_hints(invalidate, buffer, ranges, known_chunks, cx) - })) + self.update(cx, |project, cx| { + project.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.inlay_hints(invalidate, buffer, ranges, known_chunks, cx) + }) + }) + .ok() } fn semantic_tokens( @@ -27397,10 +27415,13 @@ impl SemanticsProvider for Entity { buffer: Entity, refresh: Option, cx: &mut App, - ) -> Shared>>> { - self.read(cx).lsp_store().update(cx, |lsp_store, cx| { - lsp_store.semantic_tokens(buffer, refresh, cx) + ) -> Option>>>> { + self.update(cx, |this, cx| { + this.lsp_store().update(cx, |lsp_store, cx| { + lsp_store.semantic_tokens(buffer, refresh, cx) + }) }) + .ok() } fn range_for_rename( @@ -27409,7 +27430,7 @@ impl SemanticsProvider for Entity { position: text::Anchor, cx: &mut App, ) -> Option>>>> { - Some(self.update(cx, |project, cx| { + self.update(cx, |project, cx| { let buffer = buffer.clone(); 
let task = project.prepare_rename(buffer.clone(), position, cx); cx.spawn(async move |_, cx| { @@ -27432,7 +27453,8 @@ impl SemanticsProvider for Entity { } }) }) - })) + }) + .ok() } fn perform_rename( @@ -27442,9 +27464,10 @@ impl SemanticsProvider for Entity { new_name: String, cx: &mut App, ) -> Option>> { - Some(self.update(cx, |project, cx| { + self.update(cx, |project, cx| { project.perform_rename(buffer.clone(), position, new_name, cx) - })) + }) + .ok() } } diff --git a/crates/editor/src/semantic_tokens.rs b/crates/editor/src/semantic_tokens.rs index 6270e1b37c289c6deb033ebcbf9ea93dba84af8c..31a573f04787e3759a6a21ec15f36ec148a80f30 100644 --- a/crates/editor/src/semantic_tokens.rs +++ b/crates/editor/src/semantic_tokens.rs @@ -217,8 +217,9 @@ impl Editor { }) { None } else { - let task = sema.semantic_tokens(buffer, for_server, cx); - Some(async move { (buffer_id, query_version, task.await) }) + sema.semantic_tokens(buffer, for_server, cx).map( + |task| async move { (buffer_id, query_version, task.await) }, + ) } }) .collect::>() diff --git a/crates/editor/src/test.rs b/crates/editor/src/test.rs index 8052cf215e7ec879dba939a2f66699827bb58aeb..bef2b3fc3ec2b949ffb8288d59b1201f6f3dde90 100644 --- a/crates/editor/src/test.rs +++ b/crates/editor/src/test.rs @@ -123,8 +123,6 @@ pub fn assert_text_with_selections( assert_eq!(actual, marked_text, "Selections don't match"); } -// RA thinks this is dead code even though it is used in a whole lot of tests -#[allow(dead_code)] #[cfg(any(test, feature = "test-support"))] pub(crate) fn build_editor( buffer: Entity, diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index 08290cb88a273d1f3f17da5c08a5b4a402aa74cd..d1ab60b9042fb06a3f049625f7c0a809957a1543 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -1390,7 +1390,9 @@ mod tests { (branch_list, cx) } - async fn init_fake_repository(cx: &mut TestAppContext) -> Entity { + async fn 
init_fake_repository( + cx: &mut TestAppContext, + ) -> (Entity, Entity) { let fs = FakeFs::new(cx.executor()); fs.insert_tree( path!("/dir"), @@ -1413,7 +1415,7 @@ mod tests { let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; let repository = cx.read(|cx| project.read(cx).active_repository(cx)); - repository.unwrap() + (project, repository.unwrap()) } #[gpui::test] @@ -1476,7 +1478,7 @@ mod tests { #[gpui::test] async fn test_delete_branch(cx: &mut TestAppContext) { init_test(cx); - let repository = init_fake_repository(cx).await; + let (_project, repository) = init_fake_repository(cx).await; let branches = create_test_branches(); @@ -1534,7 +1536,7 @@ mod tests { #[gpui::test] async fn test_delete_remote(cx: &mut TestAppContext) { init_test(cx); - let repository = init_fake_repository(cx).await; + let (_project, repository) = init_fake_repository(cx).await; let branches = vec![ create_test_branch("main", true, Some("origin"), Some(1000)), create_test_branch("feature-auth", false, Some("origin"), Some(900)), @@ -1721,7 +1723,7 @@ mod tests { const NEW_BRANCH: &str = "new-feature-branch"; init_test(test_cx); - let repository = init_fake_repository(test_cx).await; + let (_project, repository) = init_fake_repository(test_cx).await; let branches = vec![ create_test_branch(MAIN_BRANCH, true, None, Some(1000)), @@ -1785,7 +1787,7 @@ mod tests { #[gpui::test] async fn test_remote_url_detection_https(cx: &mut TestAppContext) { init_test(cx); - let repository = init_fake_repository(cx).await; + let (_project, repository) = init_fake_repository(cx).await; let branches = vec![create_test_branch("main", true, None, Some(1000))]; let (branch_list, mut ctx) = init_branch_list_test(repository.into(), branches, cx).await; diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 1bd5cd6b3c6a74ee840ac93b08554a82b1f050fa..f1fe264f4ef4ccb09081a6672c7c4ddb1d24dc97 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -753,6 +753,37 @@ 
impl App { app } + #[doc(hidden)] + pub fn ref_counts_drop_handle(&self) -> impl Sized + use<> { + self.entities.ref_counts_drop_handle() + } + + /// Captures a snapshot of all entities that currently have alive handles. + /// + /// The returned [`LeakDetectorSnapshot`] can later be passed to + /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no + /// entities created after the snapshot are still alive. + #[cfg(any(test, feature = "leak-detection"))] + pub fn leak_detector_snapshot(&self) -> LeakDetectorSnapshot { + self.entities.leak_detector_snapshot() + } + + /// Asserts that no entities created after `snapshot` still have alive handles. + /// + /// Entities that were already tracked at the time of the snapshot are ignored, + /// even if they still have handles. Only *new* entities (those whose + /// `EntityId` was not present in the snapshot) are considered leaks. + /// + /// # Panics + /// + /// Panics if any new entity handles exist. The panic message lists every + /// leaked entity with its type name, and includes allocation-site backtraces + /// when `LEAK_BACKTRACE` is set. + #[cfg(any(test, feature = "leak-detection"))] + pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) { + self.entities.assert_no_new_leaks(snapshot) + } + /// Quit the application gracefully. Handlers registered with [`Context::on_app_quit`] /// will be given 100ms to complete before exiting. 
pub fn shutdown(&mut self) { diff --git a/crates/gpui/src/app/async_context.rs b/crates/gpui/src/app/async_context.rs index ccd39dda89003cf90d51fae43102a565b2136dc2..e2fd203c78364a4d096f9792dcea7e6f7b8113ea 100644 --- a/crates/gpui/src/app/async_context.rs +++ b/crates/gpui/src/app/async_context.rs @@ -4,7 +4,7 @@ use crate::{ PromptLevel, Render, Reservation, Result, Subscription, Task, VisualContext, Window, WindowHandle, }; -use anyhow::Context as _; +use anyhow::{Context as _, bail}; use derive_more::{Deref, DerefMut}; use futures::channel::oneshot; use futures::future::FutureExt; @@ -88,6 +88,9 @@ impl AppContext for AsyncApp { { let app = self.app.upgrade().context("app was released")?; let mut lock = app.try_borrow_mut()?; + if lock.quitting { + bail!("app is quitting"); + } lock.update_window(window, f) } @@ -101,6 +104,9 @@ impl AppContext for AsyncApp { { let app = self.app.upgrade().context("app was released")?; let lock = app.borrow(); + if lock.quitting { + bail!("app is quitting"); + } lock.read_window(window, read) } @@ -174,6 +180,9 @@ impl AsyncApp { { let app = self.app(); let mut lock = app.borrow_mut(); + if lock.quitting { + bail!("app is quitting"); + } lock.open_window(options, build_root_view) } @@ -211,6 +220,9 @@ impl AsyncApp { pub fn try_read_global(&self, read: impl FnOnce(&G, &App) -> R) -> Option { let app = self.app(); let app = app.borrow_mut(); + if app.quitting { + return None; + } Some(read(app.try_global()?, &app)) } diff --git a/crates/gpui/src/app/entity_map.rs b/crates/gpui/src/app/entity_map.rs index b8d9e82680eb6978d073e3e51c420cef9f1f61ec..c12f952cc82ae8c161c5263ea47533bdef55e5e5 100644 --- a/crates/gpui/src/app/entity_map.rs +++ b/crates/gpui/src/app/entity_map.rs @@ -83,6 +83,32 @@ impl EntityMap { } } + #[doc(hidden)] + pub fn ref_counts_drop_handle(&self) -> impl Sized + use<> { + self.ref_counts.clone() + } + + /// Captures a snapshot of all entities that currently have alive handles. 
+ /// + /// The returned [`LeakDetectorSnapshot`] can later be passed to + /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no + /// entities created after the snapshot are still alive. + #[cfg(any(test, feature = "leak-detection"))] + pub fn leak_detector_snapshot(&self) -> LeakDetectorSnapshot { + self.ref_counts.read().leak_detector.snapshot() + } + + /// Asserts that no entities created after `snapshot` still have alive handles. + /// + /// See [`LeakDetector::assert_no_new_leaks`] for details. + #[cfg(any(test, feature = "leak-detection"))] + pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) { + self.ref_counts + .read() + .leak_detector + .assert_no_new_leaks(snapshot) + } + /// Reserve a slot for an entity, which you can subsequently use with `insert`. pub fn reserve(&self) -> Slot { let id = self.ref_counts.write().counts.insert(1.into()); @@ -225,7 +251,12 @@ pub struct AnyEntity { } impl AnyEntity { - fn new(id: EntityId, entity_type: TypeId, entity_map: Weak>) -> Self { + fn new( + id: EntityId, + entity_type: TypeId, + entity_map: Weak>, + #[cfg(any(test, feature = "leak-detection"))] type_name: &'static str, + ) -> Self { Self { entity_id: id, entity_type, @@ -236,7 +267,7 @@ impl AnyEntity { .unwrap() .write() .leak_detector - .handle_created(id), + .handle_created(id, Some(type_name)), entity_map, } } @@ -299,7 +330,7 @@ impl Clone for AnyEntity { .unwrap() .write() .leak_detector - .handle_created(self.entity_id), + .handle_created(self.entity_id, None), } } } @@ -395,7 +426,13 @@ impl Entity { T: 'static, { Self { - any_entity: AnyEntity::new(id, TypeId::of::(), entity_map), + any_entity: AnyEntity::new( + id, + TypeId::of::(), + entity_map, + #[cfg(any(test, feature = "leak-detection"))] + std::any::type_name::(), + ), entity_type: PhantomData, } } @@ -574,7 +611,7 @@ impl AnyWeakEntity { .unwrap() .write() .leak_detector - .handle_created(self.entity_id), + .handle_created(self.entity_id, None), }) } @@ 
-892,7 +929,23 @@ pub(crate) struct HandleId { #[cfg(any(test, feature = "leak-detection"))] pub(crate) struct LeakDetector { next_handle_id: u64, - entity_handles: HashMap>>, + entity_handles: HashMap, +} + +/// A snapshot of the set of alive entities at a point in time. +/// +/// Created by [`LeakDetector::snapshot`]. Can later be passed to +/// [`LeakDetector::assert_no_new_leaks`] to verify that no new entity +/// handles remain between the snapshot and the current state. +#[cfg(any(test, feature = "leak-detection"))] +pub struct LeakDetectorSnapshot { + entity_ids: collections::HashSet, +} + +#[cfg(any(test, feature = "leak-detection"))] +struct EntityLeakData { + handles: HashMap>, + type_name: &'static str, } #[cfg(any(test, feature = "leak-detection"))] @@ -903,11 +956,21 @@ impl LeakDetector { /// the handle is dropped. If `LEAK_BACKTRACE` is set, captures a backtrace /// at the allocation site. #[track_caller] - pub fn handle_created(&mut self, entity_id: EntityId) -> HandleId { + pub fn handle_created( + &mut self, + entity_id: EntityId, + type_name: Option<&'static str>, + ) -> HandleId { let id = gpui_util::post_inc(&mut self.next_handle_id); let handle_id = HandleId { id }; - let handles = self.entity_handles.entry(entity_id).or_default(); - handles.insert( + let handles = self + .entity_handles + .entry(entity_id) + .or_insert_with(|| EntityLeakData { + handles: HashMap::default(), + type_name: type_name.unwrap_or(""), + }); + handles.handles.insert( handle_id, LEAK_BACKTRACE.then(backtrace::Backtrace::new_unresolved), ); @@ -919,8 +982,14 @@ impl LeakDetector { /// This removes the handle from tracking. The `handle_id` should be the same /// one returned by `handle_created` when the handle was allocated. 
pub fn handle_released(&mut self, entity_id: EntityId, handle_id: HandleId) { - let handles = self.entity_handles.entry(entity_id).or_default(); - handles.remove(&handle_id); + if let std::collections::hash_map::Entry::Occupied(mut data) = + self.entity_handles.entry(entity_id) + { + data.get_mut().handles.remove(&handle_id); + if data.get().handles.is_empty() { + data.remove(); + } + } } /// Asserts that all handles to the given entity have been released. @@ -932,11 +1001,10 @@ impl LeakDetector { /// otherwise it suggests setting the environment variable to get more info. pub fn assert_released(&mut self, entity_id: EntityId) { use std::fmt::Write as _; - let handles = self.entity_handles.entry(entity_id).or_default(); - if !handles.is_empty() { + if let Some(data) = self.entity_handles.remove(&entity_id) { let mut out = String::new(); - for backtrace in handles.values_mut() { - if let Some(mut backtrace) = backtrace.take() { + for (_, backtrace) in data.handles { + if let Some(mut backtrace) = backtrace { backtrace.resolve(); writeln!(out, "Leaked handle:\n{:?}", backtrace).unwrap(); } else { @@ -950,6 +1018,96 @@ impl LeakDetector { panic!("{out}"); } } + + /// Captures a snapshot of all entity IDs that currently have alive handles. + /// + /// The returned [`LeakDetectorSnapshot`] can later be passed to + /// [`assert_no_new_leaks`](Self::assert_no_new_leaks) to verify that no + /// entities created after the snapshot are still alive. + pub fn snapshot(&self) -> LeakDetectorSnapshot { + LeakDetectorSnapshot { + entity_ids: self.entity_handles.keys().copied().collect(), + } + } + + /// Asserts that no entities created after `snapshot` still have alive handles. + /// + /// Entities that were already tracked at the time of the snapshot are ignored, + /// even if they still have handles. Only *new* entities (those whose + /// `EntityId` was not present in the snapshot) are considered leaks. + /// + /// # Panics + /// + /// Panics if any new entity handles exist. 
The panic message lists every + /// leaked entity with its type name, and includes allocation-site backtraces + /// when `LEAK_BACKTRACE` is set. + pub fn assert_no_new_leaks(&self, snapshot: &LeakDetectorSnapshot) { + use std::fmt::Write as _; + + let mut out = String::new(); + for (entity_id, data) in &self.entity_handles { + if snapshot.entity_ids.contains(entity_id) { + continue; + } + for (_, backtrace) in &data.handles { + if let Some(backtrace) = backtrace { + let mut backtrace = backtrace.clone(); + backtrace.resolve(); + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}):\n{:?}", + data.type_name, backtrace + ) + .unwrap(); + } else { + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}): (export LEAK_BACKTRACE to find allocation site)", + data.type_name + ) + .unwrap(); + } + } + } + + if !out.is_empty() { + panic!("New entity leaks detected since snapshot:\n{out}"); + } + } +} + +#[cfg(any(test, feature = "leak-detection"))] +impl Drop for LeakDetector { + fn drop(&mut self) { + use std::fmt::Write; + + if self.entity_handles.is_empty() || std::thread::panicking() { + return; + } + + let mut out = String::new(); + for (entity_id, data) in self.entity_handles.drain() { + for (_handle, backtrace) in data.handles { + if let Some(mut backtrace) = backtrace { + backtrace.resolve(); + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}):\n{:?}", + data.type_name, backtrace + ) + .unwrap(); + } else { + writeln!( + out, + "Leaked handle for entity {} ({entity_id:?}): (export LEAK_BACKTRACE to find allocation site)", + data.type_name + ) + .unwrap(); + } + } + } + panic!("Exited with leaked handles:\n{out}"); + } } #[cfg(test)] @@ -1007,4 +1165,42 @@ mod test { vec![1], ); } + + #[test] + fn test_leak_detector_snapshot_no_leaks() { + let mut entity_map = EntityMap::new(); + + let slot = entity_map.reserve::(); + let pre_existing = entity_map.insert(slot, TestEntity { i: 1 }); + + let snapshot = 
entity_map.leak_detector_snapshot(); + + let slot = entity_map.reserve::(); + let temporary = entity_map.insert(slot, TestEntity { i: 2 }); + drop(temporary); + + entity_map.assert_no_new_leaks(&snapshot); + + drop(pre_existing); + } + + #[test] + #[should_panic(expected = "New entity leaks detected since snapshot")] + fn test_leak_detector_snapshot_detects_new_leak() { + let mut entity_map = EntityMap::new(); + + let slot = entity_map.reserve::(); + let pre_existing = entity_map.insert(slot, TestEntity { i: 1 }); + + let snapshot = entity_map.leak_detector_snapshot(); + + let slot = entity_map.reserve::(); + let leaked = entity_map.insert(slot, TestEntity { i: 2 }); + + // `leaked` is still alive, so this should panic. + entity_map.assert_no_new_leaks(&snapshot); + + drop(pre_existing); + drop(leaked); + } } diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index dd4f37ed2a561f4259b41241c7cf4c83790a2b2f..0f0f0e14fbd8565d8f948579ed1ab23381c80108 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -5,7 +5,7 @@ use crate::{ ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Platform, Point, Render, Result, Size, Task, TestDispatcher, TestPlatform, TestScreenCaptureSource, TestWindow, TextSystem, VisualContext, Window, WindowBounds, - WindowHandle, WindowOptions, app::GpuiMode, + WindowHandle, WindowOptions, app::GpuiMode, window::ElementArenaScope, }; use anyhow::{anyhow, bail}; use futures::{Stream, StreamExt, channel::oneshot}; @@ -18,18 +18,17 @@ use std::{ /// an implementation of `Context` with additional methods that are useful in tests. 
#[derive(Clone)] pub struct TestAppContext { - #[doc(hidden)] - pub app: Rc, #[doc(hidden)] pub background_executor: BackgroundExecutor, #[doc(hidden)] pub foreground_executor: ForegroundExecutor, - #[doc(hidden)] - pub dispatcher: TestDispatcher, + dispatcher: TestDispatcher, test_platform: Rc, text_system: Arc, fn_name: Option<&'static str>, on_quit: Rc>>>, + #[doc(hidden)] + pub app: Rc, } impl AppContext for TestAppContext { @@ -402,8 +401,8 @@ impl TestAppContext { } /// Wait until there are no more pending tasks. - pub fn run_until_parked(&mut self) { - self.background_executor.run_until_parked() + pub fn run_until_parked(&self) { + self.dispatcher.run_until_parked(); } /// Simulate dispatching an action to the currently focused node in the window. @@ -819,6 +818,8 @@ impl VisualTestContext { E: Element, { self.update(|window, cx| { + let _arena_scope = ElementArenaScope::enter(&cx.element_arena); + window.invalidator.set_phase(DrawPhase::Prepaint); let mut element = Drawable::new(f(window, cx)); element.layout_as_root(space.into(), window, cx); @@ -830,6 +831,9 @@ impl VisualTestContext { window.invalidator.set_phase(DrawPhase::None); window.refresh(); + drop(element); + cx.element_arena.borrow_mut().clear(); + (request_layout_state, prepaint_state) }) } diff --git a/crates/gpui/src/gpui.rs b/crates/gpui/src/gpui.rs index af3fb51ce51f7df570a8e28faad23018ed7dc778..ff36dbce500b8e7472f3d7faa31d9e5cb17e087e 100644 --- a/crates/gpui/src/gpui.rs +++ b/crates/gpui/src/gpui.rs @@ -1,5 +1,5 @@ #![doc = include_str!("../README.md")] -#![deny(missing_docs)] +#![warn(missing_docs)] #![allow(clippy::type_complexity)] // Not useful, GPUI makes heavy use of callbacks #![allow(clippy::collapsible_else_if)] // False positives in platform specific code #![allow(unused_mut)] // False positives in platform specific code diff --git a/crates/gpui/src/platform/test/dispatcher.rs b/crates/gpui/src/platform/test/dispatcher.rs index 
081f5feab014b3712fa23290038f34d8ed4f5a92..c40ec8f669d1e2e58f8af3bcf0fbd64fbddbe4d8 100644 --- a/crates/gpui/src/platform/test/dispatcher.rs +++ b/crates/gpui/src/platform/test/dispatcher.rs @@ -48,6 +48,10 @@ impl TestDispatcher { self.session_id } + pub fn drain_tasks(&self) { + self.scheduler.drain_tasks(); + } + pub fn advance_clock(&self, by: Duration) { self.scheduler.advance_clock(by); } diff --git a/crates/gpui_macros/src/test.rs b/crates/gpui_macros/src/test.rs index 490ea07fee696908fad91410aa67ff124cdabe64..087e01740d2ba48392afee0ed7e31cf0779b180d 100644 --- a/crates/gpui_macros/src/test.rs +++ b/crates/gpui_macros/src/test.rs @@ -165,12 +165,13 @@ fn generate_test_function( dispatcher.clone(), Some(stringify!(#outer_fn_name)), ); + let _entity_refcounts = #cx_varname.app.borrow().ref_counts_drop_handle(); )); cx_teardowns.extend(quote!( - dispatcher.run_until_parked(); - #cx_varname.executor().forbid_parking(); - #cx_varname.quit(); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); + #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); }); + #cx_varname.run_until_parked(); + drop(#cx_varname); )); inner_fn_args.extend(quote!(&mut #cx_varname,)); continue; @@ -191,10 +192,17 @@ fn generate_test_function( &[#seeds], #max_retries, &mut |dispatcher, _seed| { - let foreground_executor = gpui::ForegroundExecutor::new(std::sync::Arc::new(dispatcher.clone())); + let exec = std::sync::Arc::new(dispatcher.clone()); #cx_vars - foreground_executor.block_test(#inner_fn_name(#inner_fn_args)); + gpui::ForegroundExecutor::new(exec.clone()).block_test(#inner_fn_name(#inner_fn_args)); + drop(exec); #cx_teardowns + // Ideally we would only drop cancelled tasks, that way we could detect leaks due to task <-> entity + // cycles as cancelled tasks will be dropped properly once the runnable gets run again + // + // async-task does not give us the power to do this just yet though + dispatcher.drain_tasks(); + drop(dispatcher); }, 
#on_failure_fn_name ); @@ -229,13 +237,15 @@ fn generate_test_function( Some(stringify!(#outer_fn_name)) ); let mut #cx_varname_lock = #cx_varname.app.borrow_mut(); + let _entity_refcounts = #cx_varname_lock.ref_counts_drop_handle(); )); inner_fn_args.extend(quote!(&mut #cx_varname_lock,)); cx_teardowns.extend(quote!( drop(#cx_varname_lock); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); }); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); + drop(#cx_varname); )); continue; } @@ -246,12 +256,13 @@ fn generate_test_function( dispatcher.clone(), Some(stringify!(#outer_fn_name)) ); + let _entity_refcounts = #cx_varname.app.borrow().ref_counts_drop_handle(); )); cx_teardowns.extend(quote!( - dispatcher.run_until_parked(); - #cx_varname.executor().forbid_parking(); - #cx_varname.quit(); - dispatcher.run_until_parked(); + #cx_varname.run_until_parked(); + #cx_varname.update(|cx| { cx.background_executor().forbid_parking(); cx.quit(); }); + #cx_varname.run_until_parked(); + drop(#cx_varname); )); inner_fn_args.extend(quote!(&mut #cx_varname,)); continue; @@ -277,6 +288,12 @@ fn generate_test_function( #cx_vars #inner_fn_name(#inner_fn_args); #cx_teardowns + // Ideally we would only drop cancelled tasks, that way we could detect leaks due to task <-> entity + // cycles as cancelled tasks will be dropped properly once the runnable gets run again + // + // async-task does not give us the power to do this just yet though + dispatcher.drain_tasks(); + drop(dispatcher); }, #on_failure_fn_name, ); diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index f12e4da5cd39847c94c32fd26c826dff886edbf7..b1dbefa15a3dcaf64c36d027d68060d18f533def 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -1,7 +1,6 @@ use remote::Interactive; use std::{ any::Any, - borrow::Borrow,
path::{Path, PathBuf}, sync::Arc, time::Duration, @@ -83,7 +82,7 @@ impl From for SharedString { } } -impl Borrow for ExternalAgentServerName { +impl std::borrow::Borrow for ExternalAgentServerName { fn borrow(&self) -> &str { &self.0 } diff --git a/crates/project/src/buffer_store.rs b/crates/project/src/buffer_store.rs index 9faf80b7ac00002c005df3a3b1e0674dcdd4cc81..b9d1105ad02415699fa6a9bd1be8ec1f9c71271a 100644 --- a/crates/project/src/buffer_store.rs +++ b/crates/project/src/buffer_store.rs @@ -869,7 +869,6 @@ impl BufferStore { entry .insert( - // todo(lw): hot foreground spawn cx.spawn(async move |this, cx| { let load_result = load_buffer.await; this.update(cx, |this, _cx| { diff --git a/crates/project/src/project_settings.rs b/crates/project/src/project_settings.rs index 75a3faf4f82d9e98e3c85a96222486cac217afd4..9258b16eef9f1c07cc44987f6608c2e0867c4154 100644 --- a/crates/project/src/project_settings.rs +++ b/crates/project/src/project_settings.rs @@ -1407,35 +1407,38 @@ impl SettingsObserver { let (mut user_tasks_file_rx, watcher_task) = watch_config_file(cx.background_executor(), fs, file_path.clone()); let user_tasks_content = cx.foreground_executor().block_on(user_tasks_file_rx.next()); - let weak_entry = cx.weak_entity(); cx.spawn(async move |settings_observer, cx| { let _watcher_task = watcher_task; let Ok(task_store) = settings_observer.read_with(cx, |settings_observer, _| { - settings_observer.task_store.clone() + settings_observer.task_store.downgrade() }) else { return; }; if let Some(user_tasks_content) = user_tasks_content { - task_store.update(cx, |task_store, cx| { - task_store - .update_user_tasks( - TaskSettingsLocation::Global(&file_path), - Some(&user_tasks_content), - cx, - ) - .log_err(); - }); + task_store + .update(cx, |task_store, cx| { + task_store + .update_user_tasks( + TaskSettingsLocation::Global(&file_path), + Some(&user_tasks_content), + cx, + ) + .log_err(); + }) + .ok(); } while let Some(user_tasks_content) = 
user_tasks_file_rx.next().await { - let result = task_store.update(cx, |task_store, cx| { + let Ok(result) = task_store.update(cx, |task_store, cx| { task_store.update_user_tasks( TaskSettingsLocation::Global(&file_path), Some(&user_tasks_content), cx, ) - }); + }) else { + continue; + }; - weak_entry + settings_observer .update(cx, |_, cx| match result { Ok(()) => cx.emit(SettingsObserverEvent::LocalTasksUpdated(Ok( file_path.clone() @@ -1459,35 +1462,38 @@ impl SettingsObserver { let (mut user_tasks_file_rx, watcher_task) = watch_config_file(cx.background_executor(), fs, file_path.clone()); let user_tasks_content = cx.foreground_executor().block_on(user_tasks_file_rx.next()); - let weak_entry = cx.weak_entity(); cx.spawn(async move |settings_observer, cx| { let _watcher_task = watcher_task; let Ok(task_store) = settings_observer.read_with(cx, |settings_observer, _| { - settings_observer.task_store.clone() + settings_observer.task_store.downgrade() }) else { return; }; if let Some(user_tasks_content) = user_tasks_content { - task_store.update(cx, |task_store, cx| { - task_store - .update_user_debug_scenarios( - TaskSettingsLocation::Global(&file_path), - Some(&user_tasks_content), - cx, - ) - .log_err(); - }); + task_store + .update(cx, |task_store, cx| { + task_store + .update_user_debug_scenarios( + TaskSettingsLocation::Global(&file_path), + Some(&user_tasks_content), + cx, + ) + .log_err(); + }) + .ok(); } while let Some(user_tasks_content) = user_tasks_file_rx.next().await { - let result = task_store.update(cx, |task_store, cx| { + let Ok(result) = task_store.update(cx, |task_store, cx| { task_store.update_user_debug_scenarios( TaskSettingsLocation::Global(&file_path), Some(&user_tasks_content), cx, ) - }); + }) else { + continue; + }; - weak_entry + settings_observer .update(cx, |_, cx| match result { Ok(()) => cx.emit(SettingsObserverEvent::LocalDebugScenariosUpdated(Ok( file_path.clone(), diff --git a/crates/recent_projects/src/remote_servers.rs 
b/crates/recent_projects/src/remote_servers.rs index 8bddcf37270e56932e75635fcd35616d12309b6e..6c0ce4b18854320fda8e72f291800049b07cec1a 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -1849,6 +1849,7 @@ impl RemoteServerProjects { ) { let replace_window = window.window_handle().downcast::(); + let app_state = Arc::downgrade(&app_state); cx.spawn_in(window, async move |entity, cx| { let (connection, starting_dir) = match start_dev_container_with_config(context, config).await { @@ -1882,6 +1883,9 @@ impl RemoteServerProjects { }) .log_err(); + let Some(app_state) = app_state.upgrade() else { + return; + }; let result = open_remote_project( connection.into(), vec![starting_dir].into_iter().map(PathBuf::from).collect(), diff --git a/crates/scheduler/src/executor.rs b/crates/scheduler/src/executor.rs index 05ea973c4ece53f996b732a7e8c3673487f3b8dc..76df2e69f66398e3709e1db58a847b1cd0079fc4 100644 --- a/crates/scheduler/src/executor.rs +++ b/crates/scheduler/src/executor.rs @@ -372,8 +372,9 @@ where impl Drop for Checked { fn drop(&mut self) { - assert!( - self.id == thread_id(), + assert_eq!( + self.id, + thread_id(), "local task dropped by a thread that didn't spawn it. Task spawned at {}", self.location ); diff --git a/crates/scheduler/src/test_scheduler.rs b/crates/scheduler/src/test_scheduler.rs index 03a8c0b90c77e4c17bd8a1130e5c82ccd935e80f..18ab4d09072fa25919e6e9e1858601d5173bd239 100644 --- a/crates/scheduler/src/test_scheduler.rs +++ b/crates/scheduler/src/test_scheduler.rs @@ -335,6 +335,28 @@ impl TestScheduler { false } + /// Drops all runnable tasks from the scheduler. + /// + /// This is used by the leak detector to ensure that all tasks have been dropped as tasks may keep entities alive otherwise. + /// Why do we even have tasks left when tests finish you may ask. The reason for that is simple, the scheduler itself is the executor and it retains the scheduled runnables. 
+ /// A lot of tasks, including every foreground task contain an executor handle that keeps the test scheduler alive, causing a reference cycle, thus the need for this function right now. + pub fn drain_tasks(&self) { + // dropping runnables may reschedule tasks + // due to drop impls with executors in them + // so drop until we reach a fixpoint + loop { + let mut state = self.state.lock(); + if state.runnables.is_empty() && state.timers.is_empty() { + break; + } + let runnables = std::mem::take(&mut state.runnables); + let timers = std::mem::take(&mut state.timers); + drop(state); + drop(timers); + drop(runnables); + } + } + pub fn advance_clock_to_next_timer(&self) -> bool { if let Some(timer) = self.state.lock().timers.first() { self.clock.advance(timer.expiration - self.clock.now()); From 183d1a41f79c2297af47a2b4477e36f29c5c4c1d Mon Sep 17 00:00:00 2001 From: Lena <241371603+zelenenka@users.noreply.github.com> Date: Mon, 2 Mar 2026 12:16:00 +0100 Subject: [PATCH 210/548] Fix duplicate bot versioning in false neg:s reporting (#50479) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The case of false negatives (“Missed opportunities”) was not considered when the bot versioning was added. Now we're also automatically attributing the false negatives to the bot version that made them. 
Release Notes: - N/A --- script/github-track-duplicate-bot-effectiveness.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/script/github-track-duplicate-bot-effectiveness.py b/script/github-track-duplicate-bot-effectiveness.py index 18bad6bbdabc6d6f6dc91c42ddf56e1115dc55c5..05e64026d9538606927da2c7e5cfbf211eb42d2e 100644 --- a/script/github-track-duplicate-bot-effectiveness.py +++ b/script/github-track-duplicate-bot-effectiveness.py @@ -92,6 +92,7 @@ def fetch_issue(issue_number): "node_id": data["node_id"], "author": (data.get("user") or {}).get("login", ""), "type_name": (data.get("type") or {}).get("name"), + "created_at": data.get("created_at", ""), } @@ -419,7 +420,8 @@ def classify_as_missed_opportunity(issue): """Issue closed as duplicate but the bot never commented.""" print(" -> Missed opportunity") add_or_update_project_item( - issue["node_id"], outcome="Missed opportunity", closed_as="duplicate", status="Auto-classified") + issue["node_id"], outcome="Missed opportunity", closed_as="duplicate", status="Auto-classified", + bot_comment_time=issue["created_at"]) def classify_open(): From 6ab7898a5e0a83b1b936b79b2b13d625d2570681 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20Duarte?= Date: Mon, 2 Mar 2026 12:34:01 +0000 Subject: [PATCH 211/548] agent: Add full-path tooltips to chat mentions (#50087) In text box Screenshot 2026-02-25 at 14 32 01 In chat Screenshot 2026-02-25 at 14 32 15 In chat (light theme) Screenshot 2026-02-25 at 14 35 26 Release Notes: - N/A --------- Co-authored-by: Danilo Leal --- crates/acp_thread/src/mention.rs | 35 +++++++++++++++++++++ crates/agent_ui/src/completion_provider.rs | 1 + crates/agent_ui/src/inline_prompt_editor.rs | 1 + crates/agent_ui/src/mention_set.rs | 20 ++++++++++-- crates/agent_ui/src/message_editor.rs | 4 +++ crates/agent_ui/src/ui/mention_crease.rs | 26 ++++++++++++--- 6 files changed, 80 insertions(+), 7 deletions(-) diff --git a/crates/acp_thread/src/mention.rs 
b/crates/acp_thread/src/mention.rs index 5769d13860f2466f95fe7dd67c1f908812e40c2d..b63eec154a40de8909d13de2a4e1bd3e9d1e06f3 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -254,6 +254,41 @@ impl MentionUri { } } + pub fn tooltip_text(&self) -> Option { + match self { + MentionUri::File { abs_path } | MentionUri::Directory { abs_path } => { + Some(abs_path.to_string_lossy().into_owned().into()) + } + MentionUri::Symbol { + abs_path, + line_range, + .. + } => Some( + format!( + "{}:{}-{}", + abs_path.display(), + line_range.start(), + line_range.end() + ) + .into(), + ), + MentionUri::Selection { + abs_path: Some(path), + line_range, + .. + } => Some( + format!( + "{}:{}-{}", + path.display(), + line_range.start(), + line_range.end() + ) + .into(), + ), + _ => None, + } + } + pub fn icon_path(&self, cx: &mut App) -> SharedString { match self { MentionUri::File { abs_path } => { diff --git a/crates/agent_ui/src/completion_provider.rs b/crates/agent_ui/src/completion_provider.rs index 802af37eb7f6700eec376327c19da8aab9b9416f..30778909b2c9a91dab0b20417e973b7e83ea6a17 100644 --- a/crates/agent_ui/src/completion_provider.rs +++ b/crates/agent_ui/src/completion_provider.rs @@ -617,6 +617,7 @@ impl PromptCompletionProvider { let crease = crate::mention_set::crease_for_mention( mention_uri.name().into(), mention_uri.icon_path(cx), + None, range, editor.downgrade(), ); diff --git a/crates/agent_ui/src/inline_prompt_editor.rs b/crates/agent_ui/src/inline_prompt_editor.rs index 11e8e59999c59f2ca4eeaccb17a5674a9c1757d9..0450efc4b7ebf466d0b9b13f516249a2cba0ecfa 100644 --- a/crates/agent_ui/src/inline_prompt_editor.rs +++ b/crates/agent_ui/src/inline_prompt_editor.rs @@ -1632,6 +1632,7 @@ fn insert_message_creases( crease_for_mention( crease.label.clone(), crease.icon_path.clone(), + None, start..end, cx.weak_entity(), ) diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index 
3b2a65372de957ec57577108e4acea1ab2e9944e..58e7e4cdfc196862bb3b8936f8582ba1ad54bda5 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -233,6 +233,7 @@ impl MentionSet { content_len, mention_uri.name().into(), IconName::Image.path().into(), + mention_uri.tooltip_text(), Some(image), editor.clone(), window, @@ -245,6 +246,7 @@ impl MentionSet { content_len, crease_text, mention_uri.icon_path(cx), + mention_uri.tooltip_text(), None, editor.clone(), window, @@ -485,6 +487,7 @@ impl MentionSet { let crease = crease_for_mention( selection_name(abs_path.as_deref(), &line_range).into(), uri.icon_path(cx), + uri.tooltip_text(), range, editor.downgrade(), ); @@ -695,6 +698,7 @@ pub(crate) async fn insert_images_as_context( content_len, MentionUri::PastedImage.name().into(), IconName::Image.path().into(), + None, Some(Task::ready(Ok(image.clone())).shared()), editor.clone(), window, @@ -805,7 +809,7 @@ pub(crate) fn insert_crease_for_mention( content_len: usize, crease_label: SharedString, crease_icon: SharedString, - // abs_path: Option>, + crease_tooltip: Option, image: Option, String>>>>, editor: Entity, window: &mut Window, @@ -825,6 +829,7 @@ pub(crate) fn insert_crease_for_mention( render: render_mention_fold_button( crease_label.clone(), crease_icon.clone(), + crease_tooltip, start..end, rx, image, @@ -858,11 +863,12 @@ pub(crate) fn insert_crease_for_mention( pub(crate) fn crease_for_mention( label: SharedString, icon_path: SharedString, + tooltip: Option, range: Range, editor_entity: WeakEntity, ) -> Crease { let placeholder = FoldPlaceholder { - render: render_fold_icon_button(icon_path.clone(), label.clone(), editor_entity), + render: render_fold_icon_button(icon_path.clone(), label.clone(), tooltip, editor_entity), merge_adjacent: false, ..Default::default() }; @@ -876,6 +882,7 @@ pub(crate) fn crease_for_mention( fn render_fold_icon_button( icon_path: SharedString, label: SharedString, + tooltip: Option, editor: 
WeakEntity, ) -> Arc, &mut App) -> AnyElement> { Arc::new({ @@ -886,6 +893,9 @@ fn render_fold_icon_button( MentionCrease::new(fold_id, icon_path.clone(), label.clone()) .is_toggled(is_in_text_selection) + .when_some(tooltip.clone(), |this, tooltip_text| { + this.tooltip(tooltip_text) + }) .into_any_element() } }) @@ -1018,6 +1028,7 @@ fn render_directory_contents(entries: Vec<(Arc, String, String)>) -> St fn render_mention_fold_button( label: SharedString, icon: SharedString, + tooltip: Option, range: Range, mut loading_finished: postage::barrier::Receiver, image_task: Option, String>>>>, @@ -1037,6 +1048,7 @@ fn render_mention_fold_button( id: cx.entity_id(), label, icon, + tooltip, range, editor, loading: Some(loading), @@ -1050,6 +1062,7 @@ struct LoadingContext { id: EntityId, label: SharedString, icon: SharedString, + tooltip: Option, range: Range, editor: WeakEntity, loading: Option>, @@ -1068,6 +1081,9 @@ impl Render for LoadingContext { MentionCrease::new(id, self.icon.clone(), self.label.clone()) .is_toggled(is_in_text_selection) .is_loading(self.loading.is_some()) + .when_some(self.tooltip.clone(), |this, tooltip_text| { + this.tooltip(tooltip_text) + }) .when_some(self.image.clone(), |this, image_task| { this.image_preview(move |_, cx| { let image = image_task.peek().cloned().transpose().ok().flatten(); diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index bf6e753c884fa2434b4fa0f95ff7530cb0ab31bd..a24a5f5f65dae3f8bbce7d0a7b7f4988a1bd5e38 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -690,6 +690,7 @@ impl MessageEditor { content_len, crease_text.into(), mention_uri.icon_path(cx), + mention_uri.tooltip_text(), None, self.editor.clone(), window, @@ -800,6 +801,7 @@ impl MessageEditor { content_len, mention_uri.name().into(), mention_uri.icon_path(cx), + mention_uri.tooltip_text(), None, self.editor.clone(), window, @@ -980,6 +982,7 @@ impl MessageEditor { 
content_len, mention_uri.name().into(), mention_uri.icon_path(cx), + mention_uri.tooltip_text(), None, self.editor.clone(), window, @@ -1285,6 +1288,7 @@ impl MessageEditor { range.end - range.start, mention_uri.name().into(), mention_uri.icon_path(cx), + mention_uri.tooltip_text(), None, self.editor.clone(), window, diff --git a/crates/agent_ui/src/ui/mention_crease.rs b/crates/agent_ui/src/ui/mention_crease.rs index 013d6659493bd0930d132a662d374f60ca47961f..2d464039dc552203ad76979239673ec27d5568c7 100644 --- a/crates/agent_ui/src/ui/mention_crease.rs +++ b/crates/agent_ui/src/ui/mention_crease.rs @@ -3,7 +3,7 @@ use std::time::Duration; use gpui::{Animation, AnimationExt, AnyView, IntoElement, Window, pulsating_between}; use settings::Settings; use theme::ThemeSettings; -use ui::{ButtonLike, TintColor, prelude::*}; +use ui::{ButtonLike, TintColor, Tooltip, prelude::*}; #[derive(IntoElement)] pub struct MentionCrease { @@ -12,6 +12,7 @@ pub struct MentionCrease { label: SharedString, is_toggled: bool, is_loading: bool, + tooltip: Option, image_preview: Option AnyView + 'static>>, } @@ -27,6 +28,7 @@ impl MentionCrease { label: label.into(), is_toggled: false, is_loading: false, + tooltip: None, image_preview: None, } } @@ -41,6 +43,11 @@ impl MentionCrease { self } + pub fn tooltip(mut self, tooltip: impl Into) -> Self { + self.tooltip = Some(tooltip.into()); + self + } + pub fn image_preview( mut self, builder: impl Fn(&mut Window, &mut App) -> AnyView + 'static, @@ -55,6 +62,9 @@ impl RenderOnce for MentionCrease { let settings = ThemeSettings::get_global(cx); let font_size = settings.agent_buffer_font_size(cx); let buffer_font = settings.buffer_font.clone(); + let is_loading = self.is_loading; + let tooltip = self.tooltip; + let image_preview = self.image_preview; let button_height = DefiniteLength::Absolute(AbsoluteLength::Pixels( px(window.line_height().into()) - px(1.), @@ -66,9 +76,6 @@ impl RenderOnce for MentionCrease { .height(button_height) 
.selected_style(ButtonStyle::Tinted(TintColor::Accent)) .toggle_state(self.is_toggled) - .when_some(self.image_preview, |this, image_preview| { - this.hoverable_tooltip(image_preview) - }) .child( h_flex() .pb_px() @@ -82,7 +89,7 @@ impl RenderOnce for MentionCrease { ) .child(self.label.clone()) .map(|this| { - if self.is_loading { + if is_loading { this.with_animation( "loading-context-crease", Animation::new(Duration::from_secs(2)) @@ -96,5 +103,14 @@ impl RenderOnce for MentionCrease { } }), ) + .map(|button| { + if let Some(image_preview) = image_preview { + button.hoverable_tooltip(image_preview) + } else { + button.when_some(tooltip, |this, tooltip_text| { + this.tooltip(Tooltip::text(tooltip_text)) + }) + } + }) } } From 8cd192ec3a61682e78ee03820765909f2bf358de Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Mon, 2 Mar 2026 09:38:32 -0300 Subject: [PATCH 212/548] scheduler: Fix `many` with non-zero seed (#50482) The iteration range was `(seed..num_iterations)`, which produces an empty range whenever `seed >= num_iterations`. Changed it to `(seed..seed + num_iterations)` so the range always runs the correct number of iterations starting from the given seed. 
Release Notes: - N/A --- crates/scheduler/src/test_scheduler.rs | 2 +- crates/scheduler/src/tests.rs | 25 +++++++++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/crates/scheduler/src/test_scheduler.rs b/crates/scheduler/src/test_scheduler.rs index 18ab4d09072fa25919e6e9e1858601d5173bd239..e4c330dcd162ad6512da05c9e66449fd7da36083 100644 --- a/crates/scheduler/src/test_scheduler.rs +++ b/crates/scheduler/src/test_scheduler.rs @@ -57,7 +57,7 @@ impl TestScheduler { .map(|seed| seed.parse().unwrap()) .unwrap_or(0); - (seed..num_iterations as u64) + (seed..seed + num_iterations as u64) .map(|seed| { let mut unwind_safe_f = AssertUnwindSafe(&mut f); eprintln!("Running seed: {seed}"); diff --git a/crates/scheduler/src/tests.rs b/crates/scheduler/src/tests.rs index dc24fed68d7cb1c83953f4de38bb4392d3b61029..03fe8075f91fff2d72b9bb1c0d4d389a69d9c3bf 100644 --- a/crates/scheduler/src/tests.rs +++ b/crates/scheduler/src/tests.rs @@ -290,6 +290,31 @@ fn test_helper_methods() { assert_eq!(results, vec![10, 10, 10]); } +#[test] +fn test_many_with_arbitrary_seed() { + for seed in [0u64, 1, 5, 42] { + let mut seeds_seen = Vec::new(); + let iterations = 3usize; + + for current_seed in seed..seed + iterations as u64 { + let scheduler = Arc::new(TestScheduler::new(TestSchedulerConfig::with_seed( + current_seed, + ))); + let captured_seed = current_seed; + scheduler + .foreground() + .block_on(async { seeds_seen.push(captured_seed) }); + scheduler.run(); + } + + assert_eq!( + seeds_seen, + (seed..seed + iterations as u64).collect::>(), + "Expected {iterations} iterations starting at seed {seed}" + ); + } +} + #[test] fn test_block_with_timeout() { // Test case: future completes within timeout From ef60143e7a49afe6724f284b41ee283d817de680 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 2 Mar 2026 14:21:26 +0100 Subject: [PATCH 213/548] agent: Show full subagent output if no concurrent tool calls (#50478) Release Notes: - N/A --------- 
Co-authored-by: Ben Brandt --- crates/acp_thread/src/acp_thread.rs | 114 ++++++++-------- crates/agent/src/agent.rs | 6 +- crates/agent/src/tests/mod.rs | 49 ++++--- crates/agent/src/thread.rs | 16 ++- crates/agent/src/tools/spawn_agent_tool.rs | 123 +++++++++++------- crates/agent_ui/src/connection_view.rs | 5 +- .../src/connection_view/thread_view.rs | 102 +++++++++------ crates/agent_ui/src/entry_view_state.rs | 40 +++--- 8 files changed, 284 insertions(+), 171 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index be681a846f7963950370095f50095160649d1fcd..d0e8860084acd0a4dba7daadb000ed1f80033cf2 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -2,55 +2,23 @@ mod connection; mod diff; mod mention; mod terminal; - -/// Key used in ACP ToolCall meta to store the tool's programmatic name. -/// This is a workaround since ACP's ToolCall doesn't have a dedicated name field. -pub const TOOL_NAME_META_KEY: &str = "tool_name"; - -/// Key used in ACP ToolCall meta to store the session id when a subagent is spawned. 
-pub const SUBAGENT_SESSION_ID_META_KEY: &str = "subagent_session_id"; - -/// Helper to extract tool name from ACP meta -pub fn tool_name_from_meta(meta: &Option) -> Option { - meta.as_ref() - .and_then(|m| m.get(TOOL_NAME_META_KEY)) - .and_then(|v| v.as_str()) - .map(|s| SharedString::from(s.to_owned())) -} - -/// Helper to extract subagent session id from ACP meta -pub fn subagent_session_id_from_meta(meta: &Option) -> Option { - meta.as_ref() - .and_then(|m| m.get(SUBAGENT_SESSION_ID_META_KEY)) - .and_then(|v| v.as_str()) - .map(|s| acp::SessionId::from(s.to_string())) -} - -/// Helper to create meta with tool name -pub fn meta_with_tool_name(tool_name: &str) -> acp::Meta { - acp::Meta::from_iter([(TOOL_NAME_META_KEY.into(), tool_name.into())]) -} -use collections::HashSet; -pub use connection::*; -pub use diff::*; -use language::language_settings::FormatOnSave; -pub use mention::*; -use project::lsp_store::{FormatTrigger, LspFormatTarget}; -use serde::{Deserialize, Serialize}; -use serde_json::to_string_pretty; - -use task::{Shell, ShellBuilder}; -pub use terminal::*; - use action_log::{ActionLog, ActionLogTelemetry}; use agent_client_protocol::{self as acp}; use anyhow::{Context as _, Result, anyhow}; +use collections::HashSet; +pub use connection::*; +pub use diff::*; use futures::{FutureExt, channel::oneshot, future::BoxFuture}; use gpui::{AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity}; use itertools::Itertools; +use language::language_settings::FormatOnSave; use language::{Anchor, Buffer, BufferSnapshot, LanguageRegistry, Point, ToPoint, text_diff}; use markdown::Markdown; +pub use mention::*; +use project::lsp_store::{FormatTrigger, LspFormatTarget}; use project::{AgentLocation, Project, git_store::GitStoreCheckpoint}; +use serde::{Deserialize, Serialize}; +use serde_json::to_string_pretty; use std::collections::HashMap; use std::error::Error; use std::fmt::{Formatter, Write}; @@ -59,11 +27,51 @@ use 
std::process::ExitStatus; use std::rc::Rc; use std::time::{Duration, Instant}; use std::{fmt::Display, mem, path::PathBuf, sync::Arc}; +use task::{Shell, ShellBuilder}; +pub use terminal::*; use text::Bias; use ui::App; use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle}; use uuid::Uuid; +/// Key used in ACP ToolCall meta to store the tool's programmatic name. +/// This is a workaround since ACP's ToolCall doesn't have a dedicated name field. +pub const TOOL_NAME_META_KEY: &str = "tool_name"; + +/// Helper to extract tool name from ACP meta +pub fn tool_name_from_meta(meta: &Option) -> Option { + meta.as_ref() + .and_then(|m| m.get(TOOL_NAME_META_KEY)) + .and_then(|v| v.as_str()) + .map(|s| SharedString::from(s.to_owned())) +} + +/// Helper to create meta with tool name +pub fn meta_with_tool_name(tool_name: &str) -> acp::Meta { + acp::Meta::from_iter([(TOOL_NAME_META_KEY.into(), tool_name.into())]) +} + +/// Key used in ACP ToolCall meta to store the session id and message indexes +pub const SUBAGENT_SESSION_INFO_META_KEY: &str = "subagent_session_info"; + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct SubagentSessionInfo { + /// The session id of the subagent sessiont that was spawned + pub session_id: acp::SessionId, + /// The index of the message of the start of the "turn" run by this tool call + pub message_start_index: usize, + /// The index of the output of the message that the subagent has returned + #[serde(skip_serializing_if = "Option::is_none")] + pub message_end_index: Option, +} + +/// Helper to extract subagent session id from ACP meta +pub fn subagent_session_info_from_meta(meta: &Option) -> Option { + meta.as_ref() + .and_then(|m| m.get(SUBAGENT_SESSION_INFO_META_KEY)) + .and_then(|v| serde_json::from_value(v.clone()).ok()) +} + #[derive(Debug)] pub struct UserMessage { pub id: Option, @@ -223,7 +231,7 @@ pub struct ToolCall { pub raw_input_markdown: Option>, pub raw_output: Option, pub tool_name: Option, 
- pub subagent_session_id: Option, + pub subagent_session_info: Option, } impl ToolCall { @@ -262,7 +270,7 @@ impl ToolCall { let tool_name = tool_name_from_meta(&tool_call.meta); - let subagent_session = subagent_session_id_from_meta(&tool_call.meta); + let subagent_session_info = subagent_session_info_from_meta(&tool_call.meta); let result = Self { id: tool_call.tool_call_id, @@ -277,7 +285,7 @@ impl ToolCall { raw_input_markdown, raw_output: tool_call.raw_output, tool_name, - subagent_session_id: subagent_session, + subagent_session_info, }; Ok(result) } @@ -310,8 +318,8 @@ impl ToolCall { self.status = status.into(); } - if let Some(subagent_session_id) = subagent_session_id_from_meta(&meta) { - self.subagent_session_id = Some(subagent_session_id); + if let Some(subagent_session_info) = subagent_session_info_from_meta(&meta) { + self.subagent_session_info = Some(subagent_session_info); } if let Some(title) = title { @@ -402,7 +410,7 @@ impl ToolCall { pub fn is_subagent(&self) -> bool { self.tool_name.as_ref().is_some_and(|s| s == "spawn_agent") - || self.subagent_session_id.is_some() + || self.subagent_session_info.is_some() } pub fn to_markdown(&self, cx: &App) -> String { @@ -1528,7 +1536,7 @@ impl AcpThread { raw_input_markdown: None, raw_output: None, tool_name: None, - subagent_session_id: None, + subagent_session_info: None, }; self.push_entry(AgentThreadEntry::ToolCall(failed_tool_call), cx); return Ok(()); @@ -1690,10 +1698,14 @@ impl AcpThread { pub fn tool_call_for_subagent(&self, session_id: &acp::SessionId) -> Option<&ToolCall> { self.entries.iter().find_map(|entry| match entry { - AgentThreadEntry::ToolCall(tool_call) - if tool_call.subagent_session_id.as_ref() == Some(session_id) => - { - Some(tool_call) + AgentThreadEntry::ToolCall(tool_call) => { + if let Some(subagent_session_info) = &tool_call.subagent_session_info + && &subagent_session_info.session_id == session_id + { + Some(tool_call) + } else { + None + } } _ => None, }) diff --git 
a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index d468d529c5ac672600d5280a11a45f4f6ad1b2f9..f20c7a8f1d5c780ffd3214c8736e7c921f32d134 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -1748,6 +1748,10 @@ impl SubagentHandle for NativeSubagentHandle { self.session_id.clone() } + fn num_entries(&self, cx: &App) -> usize { + self.subagent_thread.read(cx).num_messages() + } + fn send(&self, message: String, cx: &AsyncApp) -> Task> { let thread = self.subagent_thread.clone(); let acp_thread = self.acp_thread.clone(); @@ -1832,7 +1836,7 @@ impl SubagentHandle for NativeSubagentHandle { if content.is_empty() { None } else { - Some(content) + Some( content) } }) .context("No response from subagent") diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index f62edb091463860f5fcf105f2383c352660166a6..8d75aae7e2948ef9c0934a72da112b926f633941 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -159,7 +159,7 @@ impl crate::TerminalHandle for FakeTerminalHandle { struct FakeSubagentHandle { session_id: acp::SessionId, - wait_for_summary_task: Shared>, + send_task: Shared>, } impl SubagentHandle for FakeSubagentHandle { @@ -167,8 +167,12 @@ impl SubagentHandle for FakeSubagentHandle { self.session_id.clone() } + fn num_entries(&self, _cx: &App) -> usize { + unimplemented!() + } + fn send(&self, _message: String, cx: &AsyncApp) -> Task> { - let task = self.wait_for_summary_task.clone(); + let task = self.send_task.clone(); cx.background_spawn(async move { Ok(task.await) }) } } @@ -273,8 +277,17 @@ async fn test_echo(cx: &mut TestAppContext) { let events = events.collect().await; thread.update(cx, |thread, _cx| { - assert_eq!(thread.last_message().unwrap().role(), Role::Assistant); - assert_eq!(thread.last_message().unwrap().to_markdown(), "Hello\n") + assert_eq!( + thread.last_received_or_pending_message().unwrap().role(), + Role::Assistant + ); + assert_eq!( + thread + 
.last_received_or_pending_message() + .unwrap() + .to_markdown(), + "Hello\n" + ) }); assert_eq!(stop_events(events), vec![acp::StopReason::EndTurn]); } @@ -426,9 +439,15 @@ async fn test_thinking(cx: &mut TestAppContext) { let events = events.collect().await; thread.update(cx, |thread, _cx| { - assert_eq!(thread.last_message().unwrap().role(), Role::Assistant); assert_eq!( - thread.last_message().unwrap().to_markdown(), + thread.last_received_or_pending_message().unwrap().role(), + Role::Assistant + ); + assert_eq!( + thread + .last_received_or_pending_message() + .unwrap() + .to_markdown(), indoc! {" Think Hello @@ -706,7 +725,7 @@ async fn test_basic_tool_calls(cx: &mut TestAppContext) { thread.update(cx, |thread, _cx| { assert!( thread - .last_message() + .last_received_or_pending_message() .unwrap() .as_agent_message() .unwrap() @@ -743,7 +762,7 @@ async fn test_streaming_tool_calls(cx: &mut TestAppContext) { if let Ok(ThreadEvent::ToolCall(tool_call)) = event { thread.update(cx, |thread, _cx| { // Look for a tool use in the thread's last message - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let last_content = agent_message.content.last().unwrap(); if let AgentMessageContent::ToolUse(last_tool_use) = last_content { @@ -1213,7 +1232,7 @@ async fn test_concurrent_tool_calls(cx: &mut TestAppContext) { assert_eq!(stop_reasons, vec![acp::StopReason::EndTurn]); thread.update(cx, |thread, _cx| { - let last_message = thread.last_message().unwrap(); + let last_message = thread.last_received_or_pending_message().unwrap(); let agent_message = last_message.as_agent_message().unwrap(); let text = agent_message .content @@ -1919,7 +1938,7 @@ async fn test_cancellation(cx: &mut TestAppContext) { .collect::>() .await; thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = 
thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); assert_eq!( agent_message.content, @@ -1988,7 +2007,7 @@ async fn test_terminal_tool_cancellation_captures_output(cx: &mut TestAppContext // Verify the tool result contains the terminal output, not just "Tool canceled by user" thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let tool_use = agent_message @@ -2144,7 +2163,7 @@ async fn verify_thread_recovery( let events = events.collect::>().await; thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); assert_eq!( agent_message.content, @@ -2453,7 +2472,7 @@ async fn test_terminal_tool_stopped_via_terminal_card_button(cx: &mut TestAppCon // Verify the tool result indicates user stopped thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let tool_use = agent_message @@ -2548,7 +2567,7 @@ async fn test_terminal_tool_timeout_expires(cx: &mut TestAppContext) { // Verify the tool result indicates timeout, not user stopped thread.update(cx, |thread, _cx| { - let message = thread.last_message().unwrap(); + let message = thread.last_received_or_pending_message().unwrap(); let agent_message = message.as_agent_message().unwrap(); let tool_use = agent_message @@ -3444,7 +3463,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) { events.collect::>().await; thread.read_with(cx, |thread, _cx| { assert_eq!( - thread.last_message(), + thread.last_received_or_pending_message(), Some(Message::Agent(AgentMessage { content: 
vec![AgentMessageContent::Text("Done".into())], tool_results: IndexMap::default(), diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 63e180e7a9686991ba67e813c51b65bcc5a8bedf..64ab5795e7dadcb86f4df45e08157c851d4c7c25 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -605,7 +605,12 @@ pub trait TerminalHandle { } pub trait SubagentHandle { + /// The session ID of this subagent thread fn id(&self) -> acp::SessionId; + /// The current number of entries in the thread. + /// Useful for knowing where the next turn will begin + fn num_entries(&self, cx: &App) -> usize; + /// Runs a turn for a given message and returns both the response and the index of that output message. fn send(&self, message: String, cx: &AsyncApp) -> Task>; } @@ -1324,7 +1329,16 @@ impl Thread { cx.notify(); } - pub fn last_message(&self) -> Option { + pub fn last_message(&self) -> Option<&Message> { + self.messages.last() + } + + pub fn num_messages(&self) -> usize { + self.messages.len() + } + + #[cfg(any(test, feature = "test-support"))] + pub fn last_received_or_pending_message(&self) -> Option { if let Some(message) = self.pending_message.clone() { Some(Message::Agent(message)) } else { diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index 7713da050996f6fb4c07d56f51a218dfb88d5db5..a54e19d6a6ebaa3422c43152ba91b03c12b16ce8 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -1,4 +1,4 @@ -use acp_thread::SUBAGENT_SESSION_ID_META_KEY; +use acp_thread::{SUBAGENT_SESSION_INFO_META_KEY, SubagentSessionInfo}; use agent_client_protocol as acp; use anyhow::Result; use gpui::{App, SharedString, Task}; @@ -24,6 +24,7 @@ use crate::{AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput}; /// /// - If spawning multiple agents that might write to the filesystem, provide guidance on how to avoid conflicts (e.g. 
assign each to different directories). #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +#[serde(rename_all = "snake_case")] pub struct SpawnAgentToolInput { /// Short label displayed in the UI while the agent runs (e.g., "Researching alternatives") pub label: String, @@ -34,26 +35,46 @@ pub struct SpawnAgentToolInput { pub session_id: Option, } -#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Clone, Serialize, Deserialize)] #[serde(untagged)] +#[serde(rename_all = "snake_case")] pub enum SpawnAgentToolOutput { Success { session_id: acp::SessionId, output: String, + session_info: SubagentSessionInfo, }, Error { #[serde(skip_serializing_if = "Option::is_none")] #[serde(default)] session_id: Option, error: String, + session_info: Option, }, } impl From for LanguageModelToolResultContent { fn from(output: SpawnAgentToolOutput) -> Self { - serde_json::to_string(&output) + match output { + SpawnAgentToolOutput::Success { + session_id, + output, + session_info: _, // Don't show this to the model + } => serde_json::to_string( + &serde_json::json!({ "session_id": session_id, "output": output }), + ) + .unwrap_or_else(|e| format!("Failed to serialize spawn_agent output: {e}")) + .into(), + SpawnAgentToolOutput::Error { + session_id, + error, + session_info: _, // Don't show this to the model + } => serde_json::to_string( + &serde_json::json!({ "session_id": session_id, "error": error }), + ) .unwrap_or_else(|e| format!("Failed to serialize spawn_agent output: {e}")) - .into() + .into(), + } } } @@ -106,9 +127,10 @@ impl AgentTool for SpawnAgentTool { .map_err(|e| SpawnAgentToolOutput::Error { session_id: None, error: format!("Failed to receive tool input: {e}"), + session_info: None, })?; - let (subagent, subagent_session_id) = cx.update(|cx| { + let (subagent, mut session_info) = cx.update(|cx| { let subagent = if let Some(session_id) = input.session_id { self.environment.resume_subagent(session_id, cx) } else { @@ -117,43 +139,48 @@ 
impl AgentTool for SpawnAgentTool { let subagent = subagent.map_err(|err| SpawnAgentToolOutput::Error { session_id: None, error: err.to_string(), + session_info: None, })?; - let subagent_session_id = subagent.id(); + let session_info = SubagentSessionInfo { + session_id: subagent.id(), + message_start_index: subagent.num_entries(cx), + message_end_index: None, + }; - event_stream.subagent_spawned(subagent_session_id.clone()); - let meta = acp::Meta::from_iter([( - SUBAGENT_SESSION_ID_META_KEY.into(), - subagent_session_id.to_string().into(), - )]); - event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta)); + event_stream.subagent_spawned(subagent.id()); + event_stream.update_fields_with_meta( + acp::ToolCallUpdateFields::new(), + Some(acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + serde_json::json!(&session_info), + )])), + ); - Ok((subagent, subagent_session_id)) + Ok((subagent, session_info)) })?; match subagent.send(input.message, cx).await { Ok(output) => { - event_stream.update_fields( + session_info.message_end_index = + cx.update(|cx| Some(subagent.num_entries(cx).saturating_sub(1))); + event_stream.update_fields_with_meta( acp::ToolCallUpdateFields::new().content(vec![output.clone().into()]), + Some(acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + serde_json::json!(&session_info), + )])), ); Ok(SpawnAgentToolOutput::Success { - session_id: subagent_session_id, + session_id: session_info.session_id.clone(), + session_info, output, }) } - Err(e) => { - let error = e.to_string(); - // workaround for now because the agent loop will always mark this as ToolCallStatus::Failed - let canceled = error == "User canceled"; - event_stream.update_fields(acp::ToolCallUpdateFields::new().content(vec![ - acp::ToolCallContent::Content(acp::Content::new(error.clone()).meta( - acp::Meta::from_iter([("cancelled".into(), canceled.into())]), - )), - ])); - Err(SpawnAgentToolOutput::Error { - session_id: 
Some(subagent_session_id), - error, - }) - } + Err(e) => Err(SpawnAgentToolOutput::Error { + session_id: Some(session_info.session_id.clone()), + error: e.to_string(), + session_info: Some(session_info), + }), } }) } @@ -165,25 +192,29 @@ impl AgentTool for SpawnAgentTool { event_stream: ToolCallEventStream, _cx: &mut App, ) -> Result<()> { - let session_id = match &output { - SpawnAgentToolOutput::Success { session_id, .. } => Some(session_id), - SpawnAgentToolOutput::Error { session_id, .. } => session_id.as_ref(), + let (content, session_info) = match output { + SpawnAgentToolOutput::Success { + output, + session_info, + .. + } => (output.into(), Some(session_info)), + SpawnAgentToolOutput::Error { + error, + session_info, + .. + } => (error.into(), session_info), }; - if let Some(session_id) = session_id { - event_stream.subagent_spawned(session_id.clone()); - let meta = acp::Meta::from_iter([( - SUBAGENT_SESSION_ID_META_KEY.into(), - session_id.to_string().into(), - )]); - event_stream.update_fields_with_meta(acp::ToolCallUpdateFields::new(), Some(meta)); - } - - let content = match &output { - SpawnAgentToolOutput::Success { output, .. } => output.into(), - SpawnAgentToolOutput::Error { error, .. 
} => error.into(), - }; - event_stream.update_fields(acp::ToolCallUpdateFields::new().content(vec![content])); + let meta = session_info.map(|session_info| { + acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + serde_json::json!(&session_info), + )]) + }); + event_stream.update_fields_with_meta( + acp::ToolCallUpdateFields::new().content(vec![content]), + meta, + ); Ok(()) } diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index 9b3c3cd4270722ca309de3f18c0a61894029c3df..96b4b69eb24339003d2ce31d33ccf15437b906f3 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -872,7 +872,10 @@ impl ConnectionView { .entries() .iter() .filter_map(|entry| match entry { - AgentThreadEntry::ToolCall(call) => call.subagent_session_id.clone(), + AgentThreadEntry::ToolCall(call) => call + .subagent_session_info + .as_ref() + .map(|i| i.session_id.clone()), _ => None, }) .collect::>(); diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 5b40a8a9ae76728eabbf6d45b926407a3911c3da..8a29d16b1acf165ba77093dced980a7f51fe2e37 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -3923,7 +3923,7 @@ impl ThreadView { let thread = self.thread.clone(); let comments_editor = self.thread_feedback.comments_editor.clone(); - let primary = if entry_ix == total_entries - 1 { + let primary = if entry_ix + 1 == total_entries { v_flex() .w_full() .child(primary) @@ -5002,15 +5002,20 @@ impl ThreadView { div().w_full().map(|this| { if tool_call.is_subagent() { - this.child(self.render_subagent_tool_call( - active_session_id, - entry_ix, - tool_call, - tool_call.subagent_session_id.clone(), - focus_handle, - window, - cx, - )) + this.child( + self.render_subagent_tool_call( + active_session_id, + entry_ix, + tool_call, + tool_call + .subagent_session_info + 
.as_ref() + .map(|i| i.session_id.clone()), + focus_handle, + window, + cx, + ), + ) } else if has_terminals { this.children(tool_call.terminals().map(|terminal| { self.render_terminal_tool_call( @@ -6667,6 +6672,34 @@ impl ThreadView { .into_any_element() } + /// This will return `true` if there were no other tool calls during the same turn as the given tool call (no concurrent tool calls). + fn should_show_subagent_fullscreen(&self, tool_call: &ToolCall, cx: &App) -> bool { + let parent_thread = self.thread.read(cx); + + let Some(tool_call_index) = parent_thread + .entries() + .iter() + .position(|e| matches!(e, AgentThreadEntry::ToolCall(tc) if tc.id == tool_call.id)) + else { + return false; + }; + + if let Some(AgentThreadEntry::ToolCall(_)) = + parent_thread.entries().get(tool_call_index + 1) + { + return false; + } + + if let Some(AgentThreadEntry::ToolCall(_)) = parent_thread + .entries() + .get(tool_call_index.saturating_sub(1)) + { + return false; + } + + true + } + fn render_subagent_expanded_content( &self, thread_view: &Entity, @@ -6677,29 +6710,7 @@ impl ThreadView { ) -> impl IntoElement { const MAX_PREVIEW_ENTRIES: usize = 8; - let parent_thread = self.thread.read(cx); - let mut started_subagent_count = 0usize; - let mut turn_has_our_call = false; - for entry in parent_thread.entries().iter() { - match entry { - AgentThreadEntry::UserMessage(_) => { - if turn_has_our_call { - break; - } - started_subagent_count = 0; - turn_has_our_call = false; - } - AgentThreadEntry::ToolCall(tc) - if tc.is_subagent() && !matches!(tc.status, ToolCallStatus::Pending) => - { - started_subagent_count += 1; - if tc.id == tool_call.id { - turn_has_our_call = true; - } - } - _ => {} - } - } + let should_show_subagent_fullscreen = self.should_show_subagent_fullscreen(tool_call, cx); let subagent_view = thread_view.read(cx); let session_id = subagent_view.thread.read(cx).session_id().clone(); @@ -6725,11 +6736,22 @@ impl ThreadView { let entries = 
subagent_view.thread.read(cx).entries(); let total_entries = entries.len(); - let start_ix = if started_subagent_count > 1 { - total_entries.saturating_sub(MAX_PREVIEW_ENTRIES) + let mut entry_range = if let Some(info) = tool_call.subagent_session_info.as_ref() { + info.message_start_index + ..info + .message_end_index + .map(|i| (i + 1).min(total_entries)) + .unwrap_or(total_entries) } else { - 0 + 0..total_entries + }; + if !should_show_subagent_fullscreen { + entry_range.start = entry_range + .end + .saturating_sub(MAX_PREVIEW_ENTRIES) + .max(entry_range.start); }; + let start_ix = entry_range.start; let scroll_handle = self .subagent_scroll_handles @@ -6741,12 +6763,14 @@ impl ThreadView { scroll_handle.scroll_to_bottom(); } - let rendered_entries: Vec = entries[start_ix..] + let rendered_entries: Vec = entries + .get(entry_range) + .unwrap_or_default() .iter() .enumerate() .map(|(i, entry)| { let actual_ix = start_ix + i; - subagent_view.render_entry(actual_ix, total_entries + 1, entry, window, cx) + subagent_view.render_entry(actual_ix, total_entries, entry, window, cx) }) .collect(); @@ -6764,7 +6788,7 @@ impl ThreadView { .track_scroll(&scroll_handle) .children(rendered_entries), ) - .when(started_subagent_count > 1, |this| { + .when(!should_show_subagent_fullscreen, |this| { this.h_56().child(overlay) }) .into_any_element() diff --git a/crates/agent_ui/src/entry_view_state.rs b/crates/agent_ui/src/entry_view_state.rs index b06d67f63b997e67ca891ab6238e0bd2ce94a304..071142f083bc94b5d057a366d124d11e7822d1fd 100644 --- a/crates/agent_ui/src/entry_view_state.rs +++ b/crates/agent_ui/src/entry_view_state.rs @@ -126,14 +126,19 @@ impl EntryViewState { let terminals = tool_call.terminals().cloned().collect::>(); let diffs = tool_call.diffs().cloned().collect::>(); - let views = if let Some(Entry::Content(views)) = self.entries.get_mut(index) { - views + let views = if let Some(Entry::ToolCall(tool_call)) = self.entries.get_mut(index) { + &mut tool_call.content } 
else { - self.set_entry(index, Entry::empty()); - let Some(Entry::Content(views)) = self.entries.get_mut(index) else { + self.set_entry( + index, + Entry::ToolCall(ToolCallEntry { + content: HashMap::default(), + }), + ); + let Some(Entry::ToolCall(tool_call)) = self.entries.get_mut(index) else { unreachable!() }; - views + &mut tool_call.content }; let is_tool_call_completed = @@ -250,8 +255,8 @@ impl EntryViewState { for entry in self.entries.iter() { match entry { Entry::UserMessage { .. } | Entry::AssistantMessage { .. } => {} - Entry::Content(response_views) => { - for view in response_views.values() { + Entry::ToolCall(ToolCallEntry { content }) => { + for view in content.values() { if let Ok(diff_editor) = view.clone().downcast::() { diff_editor.update(cx, |diff_editor, cx| { diff_editor.set_text_style_refinement( @@ -305,25 +310,30 @@ impl AssistantMessageEntry { } } +#[derive(Debug)] +pub struct ToolCallEntry { + content: HashMap, +} + #[derive(Debug)] pub enum Entry { UserMessage(Entity), AssistantMessage(AssistantMessageEntry), - Content(HashMap), + ToolCall(ToolCallEntry), } impl Entry { pub fn focus_handle(&self, cx: &App) -> Option { match self { Self::UserMessage(editor) => Some(editor.read(cx).focus_handle(cx)), - Self::AssistantMessage(_) | Self::Content(_) => None, + Self::AssistantMessage(_) | Self::ToolCall(_) => None, } } pub fn message_editor(&self) -> Option<&Entity> { match self { Self::UserMessage(editor) => Some(editor), - Self::AssistantMessage(_) | Self::Content(_) => None, + Self::AssistantMessage(_) | Self::ToolCall(_) => None, } } @@ -350,25 +360,21 @@ impl Entry { ) -> Option { match self { Self::AssistantMessage(message) => message.scroll_handle_for_chunk(chunk_ix), - Self::UserMessage(_) | Self::Content(_) => None, + Self::UserMessage(_) | Self::ToolCall(_) => None, } } fn content_map(&self) -> Option<&HashMap> { match self { - Self::Content(map) => Some(map), + Self::ToolCall(ToolCallEntry { content }) => Some(content), _ => None, 
} } - fn empty() -> Self { - Self::Content(HashMap::default()) - } - #[cfg(test)] pub fn has_content(&self) -> bool { match self { - Self::Content(map) => !map.is_empty(), + Self::ToolCall(ToolCallEntry { content }) => !content.is_empty(), Self::UserMessage(_) | Self::AssistantMessage(_) => false, } } From c0008cf0f8cffafe00ca186c6697e91c1563ab8b Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 2 Mar 2026 08:38:50 -0500 Subject: [PATCH 214/548] editor: Fix edit prediction popovers painting outside the editor's bounds (#50361) Deferred draws previously didn't use a content mask, so you could horizontally scroll an EP popover all the way out of the containing pane. This also affects other UI elements that use the deferred draw system; I think in practice it doesn't make much difference because most of those seem to require something in the editor to be hovered, so if you scroll horizontally the element goes away. Release Notes: - Fixed being able to scroll the edit prediction popover out of the containing pane. --- crates/editor/src/editor.rs | 9 +++++++- crates/editor/src/element.rs | 16 +++++++------- crates/gpui/src/elements/deferred.rs | 2 +- crates/gpui/src/window.rs | 31 ++++++++++++++++++++-------- 4 files changed, 39 insertions(+), 19 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index daeb355b048d649d638a8830bdf3d367ea9cd40b..a6d9e593cc4b2d8d593f48a7887e6308ff0e63cb 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -9893,7 +9893,14 @@ impl Editor { origin.x -= BORDER_WIDTH; - window.defer_draw(element, origin, 1); + window.with_content_mask( + Some(gpui::ContentMask { + bounds: *text_bounds, + }), + |window| { + window.defer_draw(element, origin, 1, Some(window.content_mask())); + }, + ); // Do not return an element, since it will already be drawn due to defer_draw. 
None diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 4779784ad75fbbe3740bf63572c2bd8cec06f1da..6fc2627533dde920c021b14d5d172cbef40d7a95 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -2863,7 +2863,7 @@ impl EditorElement { } }); - window.defer_draw(element, origin, 2); + window.defer_draw(element, origin, 2, None); } } @@ -5108,7 +5108,7 @@ impl EditorElement { current_position.y -= size.height; } let position = current_position; - window.defer_draw(element, current_position, 1); + window.defer_draw(element, current_position, 1, None); if !y_flipped { current_position.y += size.height + MENU_GAP; } else { @@ -5211,7 +5211,7 @@ impl EditorElement { // Skip drawing if it doesn't fit anywhere. if let Some((aside, position, size)) = positioned_aside { let aside_bounds = Bounds::new(position, size); - window.defer_draw(aside, position, 2); + window.defer_draw(aside, position, 2, None); return Some(aside_bounds); } @@ -5420,7 +5420,7 @@ impl EditorElement { .on_mouse_move(|_, _, cx| cx.stop_propagation()) .into_any_element(); occlusion.layout_as_root(size(width, HOVER_POPOVER_GAP).into(), window, cx); - window.defer_draw(occlusion, origin, 2); + window.defer_draw(occlusion, origin, 2, None); } fn place_popovers_above( @@ -5437,7 +5437,7 @@ impl EditorElement { current_y - size.height, ); - window.defer_draw(popover.element, popover_origin, 2); + window.defer_draw(popover.element, popover_origin, 2, None); if position != itertools::Position::Last { let origin = point(popover_origin.x, popover_origin.y - HOVER_POPOVER_GAP); draw_occluder(size.width, origin, window, cx); @@ -5459,7 +5459,7 @@ impl EditorElement { let size = popover.size; let popover_origin = point(hovered_point.x + popover.horizontal_offset, current_y); - window.defer_draw(popover.element, popover_origin, 2); + window.defer_draw(popover.element, popover_origin, 2, None); if position != itertools::Position::Last { let origin = 
point(popover_origin.x, popover_origin.y + size.height); draw_occluder(size.width, origin, window, cx); @@ -5561,7 +5561,7 @@ impl EditorElement { let size = popover.size; let popover_origin = point(origin.x, current_y); - window.defer_draw(popover.element, popover_origin, 2); + window.defer_draw(popover.element, popover_origin, 2, None); if position != itertools::Position::Last { let origin = point(popover_origin.x, popover_origin.y + size.height); draw_occluder(size.width, origin, window, cx); @@ -5893,7 +5893,7 @@ impl EditorElement { }) }; - window.defer_draw(element, final_origin, 2); + window.defer_draw(element, final_origin, 2, None); } fn paint_background(&self, layout: &EditorLayout, window: &mut Window, cx: &mut App) { diff --git a/crates/gpui/src/elements/deferred.rs b/crates/gpui/src/elements/deferred.rs index 9498734198dbe58798867ebe7f20138e5667777b..25245fa4b6ea70284658bf0b91b53ca395b750dd 100644 --- a/crates/gpui/src/elements/deferred.rs +++ b/crates/gpui/src/elements/deferred.rs @@ -62,7 +62,7 @@ impl Element for Deferred { ) { let child = self.child.take().unwrap(); let element_offset = window.element_offset(); - window.defer_draw(child, element_offset, self.priority) + window.defer_draw(child, element_offset, self.priority, None) } fn paint( diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index df5948cb99e75a1f15d5b9a63cb1c3a5a29fac03..3fcb911d2c58f8968bc6b0c66f26ed2de365dd53 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -726,6 +726,7 @@ pub(crate) struct DeferredDraw { parent_node: DispatchNodeId, element_id_stack: SmallVec<[ElementId; 32]>, text_style_stack: Vec, + content_mask: Option>, rem_size: Pixels, element: Option, absolute_offset: Point, @@ -2429,15 +2430,18 @@ impl Window { .set_active_node(deferred_draw.parent_node); let prepaint_start = self.prepaint_index(); + let content_mask = deferred_draw.content_mask.clone(); if let Some(element) = deferred_draw.element.as_mut() { 
self.with_rendered_view(deferred_draw.current_view, |window| { - window.with_rem_size(Some(deferred_draw.rem_size), |window| { - window.with_absolute_element_offset( - deferred_draw.absolute_offset, - |window| { - element.prepaint(window, cx); - }, - ); + window.with_content_mask(content_mask, |window| { + window.with_rem_size(Some(deferred_draw.rem_size), |window| { + window.with_absolute_element_offset( + deferred_draw.absolute_offset, + |window| { + element.prepaint(window, cx); + }, + ); + }); }); }) } else { @@ -2469,10 +2473,13 @@ impl Window { .set_active_node(deferred_draw.parent_node); let paint_start = self.paint_index(); + let content_mask = deferred_draw.content_mask.clone(); if let Some(element) = deferred_draw.element.as_mut() { self.with_rendered_view(deferred_draw.current_view, |window| { - window.with_rem_size(Some(deferred_draw.rem_size), |window| { - element.paint(window, cx); + window.with_content_mask(content_mask, |window| { + window.with_rem_size(Some(deferred_draw.rem_size), |window| { + element.paint(window, cx); + }); }) }) } else { @@ -2536,6 +2543,7 @@ impl Window { parent_node: reused_subtree.refresh_node_id(deferred_draw.parent_node), element_id_stack: deferred_draw.element_id_stack.clone(), text_style_stack: deferred_draw.text_style_stack.clone(), + content_mask: deferred_draw.content_mask.clone(), rem_size: deferred_draw.rem_size, priority: deferred_draw.priority, element: None, @@ -3019,12 +3027,16 @@ impl Window { /// at a later time. The `priority` parameter determines the drawing order relative to other deferred elements, /// with higher values being drawn on top. /// + /// When `content_mask` is provided, the deferred element will be clipped to that region during + /// both prepaint and paint. When `None`, no additional clipping is applied. + /// /// This method should only be called as part of the prepaint phase of element drawing. 
pub fn defer_draw( &mut self, element: AnyElement, absolute_offset: Point, priority: usize, + content_mask: Option>, ) { self.invalidator.debug_assert_prepaint(); let parent_node = self.next_frame.dispatch_tree.active_node_id().unwrap(); @@ -3033,6 +3045,7 @@ impl Window { parent_node, element_id_stack: self.element_id_stack.clone(), text_style_stack: self.text_style_stack.clone(), + content_mask, rem_size: self.rem_size(), priority, element: Some(element), From db02d093310ed1504df348ac2b38aff6417d4c4c Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 2 Mar 2026 08:40:34 -0500 Subject: [PATCH 215/548] git: Fix conflicted paths not getting cleared (#50327) Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed a bug where files would still be marked as having git conflicts after resolving them. 
--------- Co-authored-by: Bennet Bo Fenner --- crates/git_graph/src/git_graph.rs | 8 +- crates/git_ui/src/git_panel.rs | 4 +- crates/project/src/git_store.rs | 159 ++++++++--------- crates/project/tests/integration/git_store.rs | 164 +++++++++++++++++- .../tests/integration/project_tests.rs | 6 +- crates/sum_tree/src/tree_map.rs | 4 + 6 files changed, 245 insertions(+), 100 deletions(-) diff --git a/crates/git_graph/src/git_graph.rs b/crates/git_graph/src/git_graph.rs index 0052d58f5985a29f11043f0bd97edb76bb8d2124..90ccf94f5f91720972a52d85bc506d12c1a528cb 100644 --- a/crates/git_graph/src/git_graph.rs +++ b/crates/git_graph/src/git_graph.rs @@ -1024,7 +1024,7 @@ impl GitGraph { } } } - RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => { + RepositoryEvent::BranchChanged => { self.pending_select_sha = None; // Only invalidate if we scanned atleast once, // meaning we are not inside the initial repo loading state @@ -3174,12 +3174,6 @@ mod tests { .any(|event| matches!(event, RepositoryEvent::BranchChanged)), "initial repository scan should emit BranchChanged" ); - assert!( - observed_repository_events - .iter() - .any(|event| matches!(event, RepositoryEvent::MergeHeadsChanged)), - "initial repository scan should emit MergeHeadsChanged" - ); let commit_count_after = repository.read_with(cx, |repo, _| { repo.get_graph_data(crate::LogSource::default(), crate::LogOrder::default()) .map(|data| data.commit_data.len()) diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index b042d66ce9ac5c45af2e5701da2d83db3c3ab907..5131e1d144e2cee0cbdbb32a062d3f9c4ea4a08b 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -769,9 +769,7 @@ impl GitPanel { move |this, _git_store, event, window, cx| match event { GitStoreEvent::RepositoryUpdated( _, - RepositoryEvent::StatusesChanged - | RepositoryEvent::BranchChanged - | RepositoryEvent::MergeHeadsChanged, + RepositoryEvent::StatusesChanged | 
RepositoryEvent::BranchChanged, true, ) | GitStoreEvent::RepositoryAdded diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 3113163cbaec65d7b439e0cbf46603d60ac3fae0..45ba6817248929391dcc484b25879cf34e7506b9 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -72,7 +72,7 @@ use std::{ }, time::Instant, }; -use sum_tree::{Edit, SumTree, TreeSet}; +use sum_tree::{Edit, SumTree, TreeMap}; use task::Shell; use text::{Bias, BufferId}; use util::{ @@ -251,9 +251,8 @@ pub struct RepositoryId(pub u64); #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct MergeDetails { - pub conflicted_paths: TreeSet, + pub merge_heads_by_conflicted_path: TreeMap>>, pub message: Option, - pub heads: Vec>, } #[derive(Clone)] @@ -407,7 +406,6 @@ pub enum GitGraphEvent { #[derive(Clone, Debug, PartialEq, Eq)] pub enum RepositoryEvent { StatusesChanged, - MergeHeadsChanged, BranchChanged, StashEntriesChanged, PendingOpsChanged { pending_ops: SumTree }, @@ -3511,9 +3509,9 @@ impl RepositorySnapshot { removed_statuses: Default::default(), current_merge_conflicts: self .merge - .conflicted_paths + .merge_heads_by_conflicted_path .iter() - .map(|repo_path| repo_path.to_proto()) + .map(|(repo_path, _)| repo_path.to_proto()) .collect(), merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, @@ -3582,9 +3580,9 @@ impl RepositorySnapshot { removed_statuses, current_merge_conflicts: self .merge - .conflicted_paths + .merge_heads_by_conflicted_path .iter() - .map(|path| path.to_proto()) + .map(|(path, _)| path.to_proto()) .collect(), merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()), project_id, @@ -3640,12 +3638,16 @@ impl RepositorySnapshot { } pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool { - self.merge.conflicted_paths.contains(repo_path) + self.merge + .merge_heads_by_conflicted_path + .contains_key(repo_path) } pub fn has_conflict(&self, 
repo_path: &RepoPath) -> bool { - let had_conflict_on_last_merge_head_change = - self.merge.conflicted_paths.contains(repo_path); + let had_conflict_on_last_merge_head_change = self + .merge + .merge_heads_by_conflicted_path + .contains_key(repo_path); let has_conflict_currently = self .status_for_path(repo_path) .is_some_and(|entry| entry.status.is_conflicted()); @@ -3684,13 +3686,13 @@ pub fn proto_to_stash(entry: &proto::StashEntry) -> Result { } impl MergeDetails { - async fn load( + async fn update( + &mut self, backend: &Arc, - status: &SumTree, - prev_snapshot: &RepositorySnapshot, - ) -> Result<(MergeDetails, bool)> { + current_conflicted_paths: Vec, + ) -> Result { log::debug!("load merge details"); - let message = backend.merge_message().await; + self.message = backend.merge_message().await.map(SharedString::from); let heads = backend .revparse_batch(vec![ "MERGE_HEAD".into(), @@ -3705,44 +3707,31 @@ impl MergeDetails { .into_iter() .map(|opt| opt.map(SharedString::from)) .collect::>(); - let merge_heads_changed = heads != prev_snapshot.merge.heads; - let conflicted_paths = if merge_heads_changed { - let current_conflicted_paths = TreeSet::from_ordered_entries( - status - .iter() - .filter(|entry| entry.status.is_conflicted()) - .map(|entry| entry.repo_path.clone()), - ); - // It can happen that we run a scan while a lengthy merge is in progress - // that will eventually result in conflicts, but before those conflicts - // are reported by `git status`. Since for the moment we only care about - // the merge heads state for the purposes of tracking conflicts, don't update - // this state until we see some conflicts. 
- if heads.iter().any(Option::is_some) - && !prev_snapshot.merge.heads.iter().any(Option::is_some) - && current_conflicted_paths.is_empty() - { - log::debug!("not updating merge heads because no conflicts found"); - return Ok(( - MergeDetails { - message: message.map(SharedString::from), - ..prev_snapshot.merge.clone() - }, - false, - )); + let mut conflicts_changed = false; + + // Record the merge state for newly conflicted paths + for path in ¤t_conflicted_paths { + if self.merge_heads_by_conflicted_path.get(&path).is_none() { + conflicts_changed = true; + self.merge_heads_by_conflicted_path + .insert(path.clone(), heads.clone()); } + } - current_conflicted_paths - } else { - prev_snapshot.merge.conflicted_paths.clone() - }; - let details = MergeDetails { - conflicted_paths, - message: message.map(SharedString::from), - heads, - }; - Ok((details, merge_heads_changed)) + // Clear state for paths that are no longer conflicted and for which the merge heads have changed + self.merge_heads_by_conflicted_path + .retain(|path, old_merge_heads| { + let keep = current_conflicted_paths.contains(path) + || (old_merge_heads == &heads + && old_merge_heads.iter().any(|head| head.is_some())); + if !keep { + conflicts_changed = true; + } + keep + }); + + Ok(conflicts_changed) } } @@ -3798,7 +3787,7 @@ impl Repository { .shared(); cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event { - RepositoryEvent::BranchChanged | RepositoryEvent::MergeHeadsChanged => { + RepositoryEvent::BranchChanged => { if this.scan_id > 1 { this.initial_graph_data.clear(); } @@ -6004,12 +5993,6 @@ impl Repository { update: proto::UpdateRepository, cx: &mut Context, ) -> Result<()> { - let conflicted_paths = TreeSet::from_ordered_entries( - update - .current_merge_conflicts - .into_iter() - .filter_map(|path| RepoPath::from_proto(&path).log_err()), - ); let new_branch = update.branch_summary.as_ref().map(proto_to_branch); let new_head_commit = update .head_commit_details @@ -6021,7 
+6004,17 @@ impl Repository { self.snapshot.branch = new_branch; self.snapshot.head_commit = new_head_commit; - self.snapshot.merge.conflicted_paths = conflicted_paths; + // We don't store any merge head state for downstream projects; the upstream + // will track it and we will just get the updated conflicts + let new_merge_heads = TreeMap::from_ordered_entries( + update + .current_merge_conflicts + .into_iter() + .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))), + ); + let conflicts_changed = + self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads; + self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads; self.snapshot.merge.message = update.merge_message.map(SharedString::from); let new_stash_entries = GitStash { entries: update @@ -6054,7 +6047,7 @@ impl Repository { }), ) .collect::>(); - if !edits.is_empty() { + if conflicts_changed || !edits.is_empty() { cx.emit(RepositoryEvent::StatusesChanged); } self.snapshot.statuses_by_path.edit(edits, ()); @@ -6141,17 +6134,16 @@ impl Repository { let RepositoryState::Local(LocalRepositoryState { backend, .. 
}) = state else { bail!("not a local repository") }; - let (snapshot, events) = this - .update(&mut cx, |this, _| { - this.paths_needing_status_update.clear(); - compute_snapshot( - this.id, - this.work_directory_abs_path.clone(), - this.snapshot.clone(), - backend.clone(), - ) - }) - .await?; + let compute_snapshot = this.update(&mut cx, |this, _| { + this.paths_needing_status_update.clear(); + compute_snapshot( + this.id, + this.work_directory_abs_path.clone(), + this.snapshot.clone(), + backend.clone(), + ) + }); + let (snapshot, events) = cx.background_spawn(compute_snapshot).await?; this.update(&mut cx, |this, cx| { this.snapshot = snapshot.clone(); this.clear_pending_ops(cx); @@ -6759,25 +6751,24 @@ async fn compute_snapshot( )]) .await?; let stash_entries = backend.stash_entries().await?; + let mut conflicted_paths = Vec::new(); let statuses_by_path = SumTree::from_iter( - statuses - .entries - .iter() - .map(|(repo_path, status)| StatusEntry { + statuses.entries.iter().map(|(repo_path, status)| { + if status.is_conflicted() { + conflicted_paths.push(repo_path.clone()); + } + StatusEntry { repo_path: repo_path.clone(), status: *status, - }), + } + }), (), ); - let (merge_details, merge_heads_changed) = - MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?; - log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}"); - - if merge_heads_changed { - events.push(RepositoryEvent::MergeHeadsChanged); - } + let mut merge_details = prev_snapshot.merge; + let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?; + log::debug!("new merge details: {merge_details:?}"); - if statuses_by_path != prev_snapshot.statuses_by_path { + if conflicts_changed || statuses_by_path != prev_snapshot.statuses_by_path { events.push(RepositoryEvent::StatusesChanged) } diff --git a/crates/project/tests/integration/git_store.rs b/crates/project/tests/integration/git_store.rs index 
43704953e0d0bd3e81b9b63b5a797934970dcafa..802e0c072bf60466c32146d12cadd7c1e35c61ad 100644 --- a/crates/project/tests/integration/git_store.rs +++ b/crates/project/tests/integration/git_store.rs @@ -336,7 +336,7 @@ mod conflict_set_tests { second_head: UnmergedStatusCode::Updated, }, ); - // Cause the repository to emit MergeHeadsChanged. + // Cause the repository to update cached conflicts state.refs.insert("MERGE_HEAD".into(), "123".into()) }) .unwrap(); @@ -461,6 +461,168 @@ mod conflict_set_tests { assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0)); }); } + + #[gpui::test] + async fn test_conflict_updates_with_delayed_merge_head_conflicts( + executor: BackgroundExecutor, + cx: &mut TestAppContext, + ) { + zlog::init_test(); + cx.update(|cx| { + settings::init(cx); + }); + + let initial_text = " + one + two + three + four + " + .unindent(); + + let conflicted_text = " + one + <<<<<<< HEAD + two + ======= + TWO + >>>>>>> branch + three + four + " + .unindent(); + + let resolved_text = " + one + TWO + three + four + " + .unindent(); + + let fs = FakeFs::new(executor); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": initial_text, + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await; + let (git_store, buffer) = project.update(cx, |project, cx| { + ( + project.git_store().clone(), + project.open_local_buffer(path!("/project/a.txt"), cx), + ) + }); + let buffer = buffer.await.unwrap(); + let conflict_set = git_store.update(cx, |git_store, cx| { + git_store.open_conflict_set(buffer.clone(), cx) + }); + + let (events_tx, events_rx) = mpsc::channel::(); + let _conflict_set_subscription = cx.update(|cx| { + cx.subscribe(&conflict_set, move |_, event, _| { + events_tx.send(event.clone()).ok(); + }) + }); + + cx.run_until_parked(); + events_rx + .try_recv() + .expect_err("conflict set should start empty"); + + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + 
state.refs.insert("MERGE_HEAD".into(), "123".into()) + }) + .unwrap(); + + cx.run_until_parked(); + events_rx + .try_recv() + .expect_err("merge head without conflicted paths should not publish conflicts"); + conflict_set.update(cx, |conflict_set, _| { + assert!(!conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + + buffer.update(cx, |buffer, cx| { + buffer.set_text(conflicted_text.clone(), cx); + }); + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.unmerged_paths.insert( + repo_path("a.txt"), + UnmergedStatus { + first_head: UnmergedStatusCode::Updated, + second_head: UnmergedStatusCode::Updated, + }, + ); + }) + .unwrap(); + + cx.run_until_parked(); + let update = events_rx + .try_recv() + .expect("conflicts should appear once conflicted paths are visible"); + assert_eq!(update.old_range, 0..0); + assert_eq!(update.new_range, 0..1); + conflict_set.update(cx, |conflict_set, cx| { + assert!(conflict_set.has_conflict); + let conflict_range = conflict_set.snapshot().conflicts[0] + .range + .to_point(buffer.read(cx)); + assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0)); + }); + + buffer.update(cx, |buffer, cx| { + buffer.set_text(resolved_text.clone(), cx); + }); + + cx.run_until_parked(); + let update = events_rx + .try_recv() + .expect("resolved buffer text should clear visible conflict markers"); + assert_eq!(update.old_range, 0..1); + assert_eq!(update.new_range, 0..0); + conflict_set.update(cx, |conflict_set, _| { + assert!(conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.refs.insert("MERGE_HEAD".into(), "456".into()); + }) + .unwrap(); + + cx.run_until_parked(); + events_rx.try_recv().expect_err( + "merge-head change without unmerged-path changes should not emit marker updates", + ); + conflict_set.update(cx, |conflict_set, _| { + 
assert!(conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + + fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { + state.unmerged_paths.remove(&repo_path("a.txt")); + state.refs.remove("MERGE_HEAD"); + }) + .unwrap(); + + cx.run_until_parked(); + let update = events_rx.try_recv().expect( + "status catch-up should emit a no-op update when clearing stale conflict state", + ); + assert_eq!(update.old_range, 0..0); + assert_eq!(update.new_range, 0..0); + assert!(update.buffer_range.is_none()); + conflict_set.update(cx, |conflict_set, _| { + assert!(!conflict_set.has_conflict); + assert_eq!(conflict_set.snapshot.conflicts.len(), 0); + }); + } } mod git_traversal { diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index 9bd0be45ae3fa1e66e8af2c43657ba039045ecef..6092836c19ef280aa2d13abcb32932f3b47703b6 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -10409,10 +10409,7 @@ async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) { assert_eq!( repository_updates.lock().drain(..).collect::>(), - vec![ - RepositoryEvent::StatusesChanged, - RepositoryEvent::MergeHeadsChanged, - ], + vec![RepositoryEvent::StatusesChanged,], "Initial worktree scan should produce a repo update event" ); assert_eq!( @@ -10579,7 +10576,6 @@ async fn test_odd_events_for_ignored_dirs( assert_eq!( repository_updates.lock().drain(..).collect::>(), vec![ - RepositoryEvent::MergeHeadsChanged, RepositoryEvent::BranchChanged, RepositoryEvent::StatusesChanged, RepositoryEvent::StatusesChanged, diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index e58f7a65dd5d13ca67d4433bd25118ffb55d1169..004ec918514e0ad18b3c1e55178a6527866d1bb1 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -53,6 +53,10 @@ impl TreeMap { self.0.is_empty() } + pub fn 
contains_key(&self, key: &K) -> bool { + self.get(key).is_some() + } + pub fn get(&self, key: &K) -> Option<&V> { let (.., item) = self .0 From 2549db7dba3a3aea1e8b7537c6f75800ea6290bb Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Mon, 2 Mar 2026 19:21:34 +0530 Subject: [PATCH 216/548] markdown_preview: Fix multiple mermaid rendering issues (#50485) Closes #50172 Closes #50238 Closes #50243 Closes #50196 Updates `mermaid-rs-renderer` with the following fixes: - Fix ampersand in node labels splitting nodes incorrectly ([`374db9e`](https://github.com/zed-industries/mermaid-rs-renderer/commit/374db9e)) - Add gantt compact display mode via yaml frontmatter ([`d1bf282`](https://github.com/zed-industries/mermaid-rs-renderer/commit/d1bf282)) - Fix class diagram stereotype annotations rendered as members ([`6203d71`](https://github.com/zed-industries/mermaid-rs-renderer/commit/6203d71)) - Fix dotted lines being indistinguishable from solid ([`94fb543`](https://github.com/zed-industries/mermaid-rs-renderer/commit/94fb543)) - Fix class diagram arrowheads hidden under node boxes ([`cd79c56`](https://github.com/zed-industries/mermaid-rs-renderer/commit/cd79c56)) - Fix text not rendering in some cases ([`9534cb1`](https://github.com/zed-industries/mermaid-rs-renderer/commit/9534cb1)) Release Notes: - Fixed multiple mermaid diagram rendering issues in markdown preview. 
--- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1eea7b57e3c1a35870d1369a6066c2d36b8e6d26..030777000144d01de7653fbce314c14223e158d7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8663,7 +8663,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - "hashbrown 0.15.5", + "hashbrown 0.16.1", "serde", "serde_core", ] @@ -10398,7 +10398,7 @@ dependencies = [ [[package]] name = "mermaid-rs-renderer" version = "0.2.0" -source = "git+https://github.com/zed-industries/mermaid-rs-renderer?rev=a1f8fc03bf7293018136fb8e60d83551d2dd5732#a1f8fc03bf7293018136fb8e60d83551d2dd5732" +source = "git+https://github.com/zed-industries/mermaid-rs-renderer?rev=374db9ead5426697c6c2111151d9f246899bc638#374db9ead5426697c6c2111151d9f246899bc638" dependencies = [ "anyhow", "fontdb 0.16.2", diff --git a/Cargo.toml b/Cargo.toml index 235d5a1209621e6d31bcd1b0b747e2a596cebe45..d505a5ee14b9587c874c33c36fc4b154d900680f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -370,7 +370,7 @@ markdown_preview = { path = "crates/markdown_preview" } svg_preview = { path = "crates/svg_preview" } media = { path = "crates/media" } menu = { path = "crates/menu" } -mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", rev = "a1f8fc03bf7293018136fb8e60d83551d2dd5732", default-features = false } +mermaid-rs-renderer = { git = "https://github.com/zed-industries/mermaid-rs-renderer", rev = "374db9ead5426697c6c2111151d9f246899bc638", default-features = false } migrator = { path = "crates/migrator" } mistral = { path = "crates/mistral" } multi_buffer = { path = "crates/multi_buffer" } From ad017e03428b71f7602c608e8c5d8b4b28df05db Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 2 Mar 2026 09:14:12 -0500 Subject: [PATCH 217/548] git: Prevent crashes when looking index text for empty 
path (#50487) We were trying to mitigate these by passing `.` instead of ``, but it turns out that git2 also panics internally for that. It also just doesn't make sense to look up the index text (or committed text) for an empty path, because a file should always have a nonempty repo path. Closes ZED-560 Release Notes: - N/A --- crates/git/src/repository.rs | 57 +++++++++++++++++++++--------------- 1 file changed, 34 insertions(+), 23 deletions(-) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 1925e84735a8020c7e1896f3cf2e7ee20ae3f712..aed08e8dc850622cff4dc96631199a039c78ac3f 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1318,33 +1318,31 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { - // This check is required because index.get_path() unwraps internally :( let mut index = repo.index()?; index.read(false)?; const STAGE_NORMAL: i32 = 0; - let path = path.as_std_path(); - // `RepoPath` contains a `RelPath` which normalizes `.` into an empty path - // `get_path` unwraps on empty paths though, so undo that normalization here - let path = if path.components().next().is_none() { - ".".as_ref() + // git2 unwraps internally on empty paths or `.` + if path.is_empty() { + bail!("empty path has no index text"); + } + let entry = index + .get_path(path.as_std_path(), STAGE_NORMAL) + .with_context(|| format!("looking up {path:?} in index"))?; + let oid = if entry.mode != GIT_MODE_SYMLINK { + entry.id } else { - path - }; - let oid = match index.get_path(path, STAGE_NORMAL) { - Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id, - _ => return Ok(None), + return Ok(None); }; let content = repo.find_blob(oid)?.content().to_owned(); Ok(String::from_utf8(content).ok()) } - match logic(&repo.lock(), &path) { - Ok(value) => return value, - Err(err) => log::error!("Error loading index text: {:?}", err), - } - None + 
logic(&repo.lock(), &path) + .context("loading index text") + .log_err() + .flatten() }) .boxed() } @@ -1353,14 +1351,27 @@ impl GitRepository for RealGitRepository { let repo = self.repository.clone(); self.executor .spawn(async move { - let repo = repo.lock(); - let head = repo.head().ok()?.peel_to_tree().log_err()?; - let entry = head.get_path(path.as_std_path()).ok()?; - if entry.filemode() == i32::from(git2::FileMode::Link) { - return None; + fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { + let head = repo.head()?.peel_to_tree()?; + if path.is_empty() { + return Err(anyhow!("empty path has no committed text")); + } + // git2 unwraps internally on empty paths or `.` + let entry = head.get_path(path.as_std_path())?; + if entry.filemode() == i32::from(git2::FileMode::Link) { + bail!( + "symlink has no + committed text" + ); + } + let content = repo.find_blob(entry.id())?.content().to_owned(); + Ok(String::from_utf8(content).ok()) } - let content = repo.find_blob(entry.id()).log_err()?.content().to_owned(); - String::from_utf8(content).ok() + + logic(&repo.lock(), &path) + .context("loading committed text") + .log_err() + .flatten() }) .boxed() } From a09b6fba88125ce446fb00a5271161c36f933b58 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 2 Mar 2026 09:33:48 -0500 Subject: [PATCH 218/548] settings_ui: Defer `fetch_files` to avoid reentrant lease of `Project` (#50488) Closes ZED-5CA Release Notes: - Fixed a crash when opening the settings UI. 
--- crates/settings_ui/src/settings_ui.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index 9ac338f7b849a53c402a0cea6b79ddc6496df0f2..def4c7630869cae69c539e1d83660e8df9a18318 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -1574,8 +1574,10 @@ impl SettingsWindow { }; this_weak - .update(cx, |this, cx| { - this.fetch_files(window, cx); + .update(cx, |_, cx| { + cx.defer_in(window, |settings_window, window, cx| { + settings_window.fetch_files(window, cx) + }); cx.observe_release_in(&project, window, |_, _, window, cx| { cx.defer_in(window, |this, window, cx| this.fetch_files(window, cx)); }) From 8f329d1ee330f7d05a3fc36301b9988878e61aa2 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 2 Mar 2026 15:37:59 +0100 Subject: [PATCH 219/548] workspace: Properly flush effects in `send_keystrokes_impl` (#50486) Should fix the test issue observed in https://github.com/zed-industries/zed/pull/50068 Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/command_palette/src/command_palette.rs | 4 ++-- crates/workspace/src/workspace.rs | 19 +++++++++++++------ 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/crates/command_palette/src/command_palette.rs b/crates/command_palette/src/command_palette.rs index a6fc0193a4b18407c2f4473a0fbea471d91eb9a9..d13360a7c5403d997cfb2363f33cfe3b257dcef1 100644 --- a/crates/command_palette/src/command_palette.rs +++ b/crates/command_palette/src/command_palette.rs @@ -510,7 +510,7 @@ impl PickerDelegate for CommandPaletteDelegate { .delegate .matches_updated(query, commands, matches, intercept_result, cx) }) - .log_err(); + .ok(); }) } @@ -543,7 +543,7 @@ impl PickerDelegate for CommandPaletteDelegate { fn dismissed(&mut self, _window: &mut Window, cx: &mut Context>) { self.command_palette .update(cx, |_, cx| cx.emit(DismissEvent)) - .log_err(); + .ok(); } fn confirm(&mut self, secondary: bool, window: &mut Window, cx: &mut Context>) { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index cde04d987a015982006d283c17ee82ed9b7a7cb2..b62f6b5eb60eafb7177f7883b825a208e7c81d62 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -14,6 +14,7 @@ mod persistence; pub mod searchable; mod security_modal; pub mod shared_screen; +use db::smol::future::yield_now; pub use shared_screen::SharedScreen; mod status_bar; pub mod tasks; @@ -2820,13 +2821,15 @@ impl Workspace { .spawn(cx, async move |cx| { // limit to 100 keystrokes to avoid infinite recursion. 
for _ in 0..100 { - let mut state = keystrokes.borrow_mut(); - let Some(keystroke) = state.queue.pop_front() else { - state.dispatched.clear(); - state.task.take(); - return; + let keystroke = { + let mut state = keystrokes.borrow_mut(); + let Some(keystroke) = state.queue.pop_front() else { + state.dispatched.clear(); + state.task.take(); + return; + }; + keystroke }; - drop(state); cx.update(|window, cx| { let focused = window.focused(cx); window.dispatch_keystroke(keystroke.clone(), cx); @@ -2841,6 +2844,10 @@ impl Workspace { } }) .ok(); + + // Yield between synthetic keystrokes so deferred focus and + // other effects can settle before dispatching the next key. + yield_now().await; } *keystrokes.borrow_mut() = Default::default(); From 5e9ee9ea4ac856ebd34bbf5f1319ac9b0a0a3d60 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 2 Mar 2026 15:42:01 +0100 Subject: [PATCH 220/548] agent: More subagent fixes (#50489) - Skip agent location updates for subagent threads - Hide edits summary for subagent thread - Fix tool permission granularity selection from parent thread Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner --- crates/acp_thread/src/acp_thread.rs | 57 +++-- crates/agent/src/edit_agent.rs | 84 ++++--- crates/agent/src/edit_agent/evals.rs | 1 + crates/agent/src/tools/edit_file_tool.rs | 8 +- crates/agent/src/tools/read_file_tool.rs | 22 +- .../src/tools/streaming_edit_file_tool.rs | 12 +- crates/agent_ui/src/agent_ui.rs | 12 - crates/agent_ui/src/connection_view.rs | 208 +++--------------- .../src/connection_view/thread_view.rs | 87 ++++---- 9 files changed, 178 insertions(+), 313 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index d0e8860084acd0a4dba7daadb000ed1f80033cf2..46797130c0cebb605c6820f4d04fd2e989977617 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -1713,6 +1713,7 @@ impl AcpThread { pub fn resolve_locations(&mut self, id: 
acp::ToolCallId, cx: &mut Context) { let project = self.project.clone(); + let should_update_agent_location = self.parent_session_id.is_none(); let Some((_, tool_call)) = self.tool_call_mut(&id) else { return; }; @@ -1748,7 +1749,7 @@ impl AcpThread { } else { false }; - if !should_ignore { + if !should_ignore && should_update_agent_location { project.set_agent_location(Some(location.into()), cx); } }); @@ -1979,8 +1980,10 @@ impl AcpThread { .await?; this.update(cx, |this, cx| { - this.project - .update(cx, |project, cx| project.set_agent_location(None, cx)); + if this.parent_session_id.is_none() { + this.project + .update(cx, |project, cx| project.set_agent_location(None, cx)); + } let Ok(response) = response else { // tx dropped, just return return Ok(None); @@ -2252,6 +2255,7 @@ impl AcpThread { let limit = limit.unwrap_or(u32::MAX); let project = self.project.clone(); let action_log = self.action_log.clone(); + let should_update_agent_location = self.parent_session_id.is_none(); cx.spawn(async move |this, cx| { let load = project.update(cx, |project, cx| { let path = project @@ -2302,15 +2306,17 @@ impl AcpThread { let start = snapshot.anchor_before(start_position); let end = snapshot.anchor_before(Point::new(line.saturating_add(limit), 0)); - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: start, - }), - cx, - ); - }); + if should_update_agent_location { + project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: start, + }), + cx, + ); + }); + } Ok(snapshot.text_for_range(start..end).collect::()) }) @@ -2324,6 +2330,7 @@ impl AcpThread { ) -> Task> { let project = self.project.clone(); let action_log = self.action_log.clone(); + let should_update_agent_location = self.parent_session_id.is_none(); cx.spawn(async move |this, cx| { let load = project.update(cx, |project, cx| { let path = project @@ -2351,18 
+2358,20 @@ impl AcpThread { }) .await; - project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: edits - .last() - .map(|(range, _)| range.end) - .unwrap_or(Anchor::min_for_buffer(buffer.read(cx).remote_id())), - }), - cx, - ); - }); + if should_update_agent_location { + project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: edits + .last() + .map(|(range, _)| range.end) + .unwrap_or(Anchor::min_for_buffer(buffer.read(cx).remote_id())), + }), + cx, + ); + }); + } let format_on_save = cx.update(|cx| { action_log.update(cx, |action_log, cx| { diff --git a/crates/agent/src/edit_agent.rs b/crates/agent/src/edit_agent.rs index ef95eee07378438686aff688fdaf2d7fa98e036b..e122d6b2884a593daa819457835d3d00690f5a7d 100644 --- a/crates/agent/src/edit_agent.rs +++ b/crates/agent/src/edit_agent.rs @@ -84,6 +84,7 @@ pub struct EditAgent { templates: Arc, edit_format: EditFormat, thinking_allowed: bool, + update_agent_location: bool, } impl EditAgent { @@ -94,6 +95,7 @@ impl EditAgent { templates: Arc, edit_format: EditFormat, allow_thinking: bool, + update_agent_location: bool, ) -> Self { EditAgent { model, @@ -102,6 +104,7 @@ impl EditAgent { templates, edit_format, thinking_allowed: allow_thinking, + update_agent_location, } } @@ -170,15 +173,17 @@ impl EditAgent { ) -> Result<()> { let buffer_id = cx.update(|cx| { let buffer_id = buffer.read(cx).remote_id(); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::min_for_buffer(buffer_id), - }), - cx, - ) - }); + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: language::Anchor::min_for_buffer(buffer_id), + }), + cx, + ) + }); + } buffer_id }); @@ -190,15 +195,17 
@@ impl EditAgent { .ok() }; let set_agent_location = |cx: &mut _| { - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: language::Anchor::max_for_buffer(buffer_id), - }), - cx, - ) - }) + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: language::Anchor::max_for_buffer(buffer_id), + }), + cx, + ) + }) + } }; let mut first_chunk = true; while let Some(event) = parse_rx.next().await { @@ -302,15 +309,17 @@ impl EditAgent { if let Some(old_range) = old_range { let old_range = snapshot.anchor_before(old_range.start) ..snapshot.anchor_before(old_range.end); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: old_range.end, - }), - cx, - ); - }); + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: old_range.end, + }), + cx, + ); + }); + } output_events .unbounded_send(EditAgentOutputEvent::ResolvingEditRange(old_range)) .ok(); @@ -383,15 +392,17 @@ impl EditAgent { }); self.action_log .update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); - self.project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: max_edit_end, - }), - cx, - ); - }); + if self.update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: max_edit_end, + }), + cx, + ); + }); + } (min_edit_start, max_edit_end) }); output_events @@ -1390,6 +1401,7 @@ mod tests { Templates::new(), EditFormat::XmlTags, thinking_allowed, + true, ) } diff --git a/crates/agent/src/edit_agent/evals.rs b/crates/agent/src/edit_agent/evals.rs index 
cdf6c1c0b3f6440e4827c8b74b47a32d997b092f..2e8818b101995b374cf8172547c45b55c27c6f26 100644 --- a/crates/agent/src/edit_agent/evals.rs +++ b/crates/agent/src/edit_agent/evals.rs @@ -1469,6 +1469,7 @@ impl EditAgentTest { Templates::new(), edit_format, true, + true, ), project, judge_model, diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 3e1e0661f126d464c8d4611e2b3d85a9f668a5ca..b680e3b885f7d002657ee4b0bc384d6d9afaa055 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -253,7 +253,7 @@ impl AgentTool for EditFileTool { error: "thread was dropped".to_string(), })?; - let (project_path, abs_path, allow_thinking, authorize) = + let (project_path, abs_path, allow_thinking, update_agent_location, authorize) = cx.update(|cx| { let project_path = resolve_path(&input, project.clone(), cx).map_err(|err| { EditFileToolOutput::Error { @@ -271,8 +271,11 @@ impl AgentTool for EditFileTool { .thread .read_with(cx, |thread, _cx| thread.thinking_enabled()) .unwrap_or(true); + + let update_agent_location = self.thread.read_with(cx, |thread, _cx| !thread.is_subagent()).unwrap_or_default(); + let authorize = self.authorize(&input, &event_stream, cx); - Ok::<_, EditFileToolOutput>((project_path, abs_path, allow_thinking, authorize)) + Ok::<_, EditFileToolOutput>((project_path, abs_path, allow_thinking, update_agent_location, authorize)) })?; let result: anyhow::Result = async { @@ -293,6 +296,7 @@ impl AgentTool for EditFileTool { self.templates.clone(), edit_format, allow_thinking, + update_agent_location, ); let buffer = project diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs index bbc67cf68c7d104772c18ad222478621ce4d7a54..8cfc16ddf6174a190ffe7cc11921dc204b05b79d 100644 --- a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -212,7 +212,6 @@ impl AgentTool for ReadFileTool { }); if is_image 
{ - let image_entity: Entity = cx .update(|cx| { self.project.update(cx, |project, cx| { @@ -269,6 +268,9 @@ impl AgentTool for ReadFileTool { .ok(); } + + let update_agent_location = self.thread.read_with(cx, |thread, _cx| !thread.is_subagent()).unwrap_or_default(); + let mut anchor = None; // Check if specific line ranges are provided @@ -328,15 +330,17 @@ impl AgentTool for ReadFileTool { }; project.update(cx, |project, cx| { - project.set_agent_location( - Some(AgentLocation { - buffer: buffer.downgrade(), - position: anchor.unwrap_or_else(|| { - text::Anchor::min_for_buffer(buffer.read(cx).remote_id()) + if update_agent_location { + project.set_agent_location( + Some(AgentLocation { + buffer: buffer.downgrade(), + position: anchor.unwrap_or_else(|| { + text::Anchor::min_for_buffer(buffer.read(cx).remote_id()) + }), }), - }), - cx, - ); + cx, + ); + } if let Ok(LanguageModelToolResultContent::Text(text)) = &result { let text: &str = text; let markdown = MarkdownCodeBlock { diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 2658e372d77044b60648d8fab39e458f02dba23d..a0d6d3a374e3b64c6652e089efe8de31b645b052 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -220,9 +220,15 @@ impl StreamingEditFileTool { } fn set_agent_location(&self, buffer: WeakEntity, position: text::Anchor, cx: &mut App) { - self.project.update(cx, |project, cx| { - project.set_agent_location(Some(AgentLocation { buffer, position }), cx); - }); + let should_update_agent_location = self + .thread + .read_with(cx, |thread, _cx| !thread.is_subagent()) + .unwrap_or_default(); + if should_update_agent_location { + self.project.update(cx, |project, cx| { + project.set_agent_location(Some(AgentLocation { buffer, position }), cx); + }); + } } } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 
967b53bd200e6dc8e863a86602b2ac5f590406e2..ba188ccb592871c62c6f010f026a8948c8cf89fa 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -179,18 +179,6 @@ pub struct AuthorizeToolCall { pub option_kind: String, } -/// Action to select a permission granularity option from the dropdown. -/// This updates the selected granularity without triggering authorization. -#[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] -#[action(namespace = agent)] -#[serde(deny_unknown_fields)] -pub struct SelectPermissionGranularity { - /// The tool call ID for which to select the granularity. - pub tool_call_id: String, - /// The index of the selected granularity option. - pub index: usize, -} - /// Creates a new conversation thread, optionally based on an existing thread. #[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)] #[action(namespace = agent)] diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index 96b4b69eb24339003d2ce31d33ccf15437b906f3..a3a62459a2e98680b3910877cc9cd1e6e58ba056 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -74,9 +74,9 @@ use crate::{ AgentDiffPane, AgentInitialContent, AgentPanel, AllowAlways, AllowOnce, AuthorizeToolCall, ClearMessageQueue, CycleFavoriteModels, CycleModeSelector, CycleThinkingEffort, EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAddContextMenu, - OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, RemoveFirstQueuedMessage, - SelectPermissionGranularity, SendImmediately, SendNextQueuedMessage, ToggleFastMode, - ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject, + OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, RemoveFirstQueuedMessage, SendImmediately, + SendNextQueuedMessage, ToggleFastMode, ToggleProfileSelector, ToggleThinkingEffortMenu, + ToggleThinkingMode, UndoLastReject, }; const STOPWATCH_THRESHOLD: Duration = 
Duration::from_secs(30); @@ -155,6 +155,9 @@ pub(crate) struct Conversation { threads: HashMap>, permission_requests: IndexMap>, subscriptions: Vec, + /// Tracks the selected granularity index for each tool call's permission dropdown. + /// The index corresponds to the position in the allow_options list. + selected_permission_granularity: HashMap>, } impl Conversation { @@ -196,6 +199,29 @@ impl Conversation { .insert(thread.read(cx).session_id().clone(), thread); } + pub fn selected_permission_granularity( + &self, + session_id: &acp::SessionId, + tool_call_id: &acp::ToolCallId, + ) -> Option { + self.selected_permission_granularity + .get(session_id) + .and_then(|map| map.get(tool_call_id)) + .copied() + } + + pub fn set_selected_permission_granularity( + &mut self, + session_id: acp::SessionId, + tool_call_id: acp::ToolCallId, + granularity: usize, + ) { + self.selected_permission_granularity + .entry(session_id) + .or_default() + .insert(tool_call_id, granularity); + } + pub fn pending_tool_call<'a>( &'a self, session_id: &acp::SessionId, @@ -5580,182 +5606,6 @@ pub(crate) mod tests { }); } - #[gpui::test] - async fn test_granularity_selection_updates_state(cx: &mut TestAppContext) { - init_test(cx); - - let tool_call_id = acp::ToolCallId::new("granularity-test-1"); - let tool_call = - acp::ToolCall::new(tool_call_id.clone(), "Run `cargo build`").kind(acp::ToolKind::Edit); - - let permission_options = - ToolPermissionContext::new(TerminalTool::NAME, vec!["cargo build".to_string()]) - .build_permission_options(); - - let connection = - StubAgentConnection::new().with_permission_requests(HashMap::from_iter([( - tool_call_id.clone(), - permission_options.clone(), - )])); - - connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(tool_call)]); - - let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await; - add_to_workspace(thread_view.clone(), cx); - - cx.update(|_window, cx| { - AgentSettings::override_global( - 
AgentSettings { - notify_when_agent_waiting: NotifyWhenAgentWaiting::Never, - ..AgentSettings::get_global(cx).clone() - }, - cx, - ); - }); - - let message_editor = message_editor(&thread_view, cx); - message_editor.update_in(cx, |editor, window, cx| { - editor.set_text("Build the project", window, cx); - }); - - active_thread(&thread_view, cx).update_in(cx, |view, window, cx| view.send(window, cx)); - - cx.run_until_parked(); - - // Verify default granularity is the last option (index 2 = "Only this time") - thread_view.read_with(cx, |thread_view, cx| { - let state = thread_view.active_thread().unwrap(); - let selected = state - .read(cx) - .selected_permission_granularity - .get(&tool_call_id); - assert!( - selected.is_none(), - "Should have no selection initially (defaults to last)" - ); - }); - - // Select the first option (index 0 = "Always for terminal") - thread_view.update_in(cx, |_, window, cx| { - window.dispatch_action( - crate::SelectPermissionGranularity { - tool_call_id: "granularity-test-1".to_string(), - index: 0, - } - .boxed_clone(), - cx, - ); - }); - - cx.run_until_parked(); - - // Verify the selection was updated - thread_view.read_with(cx, |thread_view, cx| { - let state = thread_view.active_thread().unwrap(); - let selected = state - .read(cx) - .selected_permission_granularity - .get(&tool_call_id); - assert_eq!(selected, Some(&0), "Should have selected index 0"); - }); - } - - #[gpui::test] - async fn test_allow_button_uses_selected_granularity(cx: &mut TestAppContext) { - init_test(cx); - - let tool_call_id = acp::ToolCallId::new("allow-granularity-test-1"); - let tool_call = - acp::ToolCall::new(tool_call_id.clone(), "Run `npm install`").kind(acp::ToolKind::Edit); - - let permission_options = - ToolPermissionContext::new(TerminalTool::NAME, vec!["npm install".to_string()]) - .build_permission_options(); - - // Verify we have the expected options - let PermissionOptions::Dropdown(choices) = &permission_options else { - panic!("Expected 
dropdown permission options"); - }; - - assert_eq!(choices.len(), 3); - assert!( - choices[0] - .allow - .option_id - .0 - .contains("always_allow:terminal") - ); - assert!( - choices[1] - .allow - .option_id - .0 - .contains("always_allow_pattern:terminal") - ); - assert_eq!(choices[2].allow.option_id.0.as_ref(), "allow"); - - let connection = - StubAgentConnection::new().with_permission_requests(HashMap::from_iter([( - tool_call_id.clone(), - permission_options.clone(), - )])); - - connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(tool_call)]); - - let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await; - add_to_workspace(thread_view.clone(), cx); - - cx.update(|_window, cx| { - AgentSettings::override_global( - AgentSettings { - notify_when_agent_waiting: NotifyWhenAgentWaiting::Never, - ..AgentSettings::get_global(cx).clone() - }, - cx, - ); - }); - - let message_editor = message_editor(&thread_view, cx); - message_editor.update_in(cx, |editor, window, cx| { - editor.set_text("Install dependencies", window, cx); - }); - - active_thread(&thread_view, cx).update_in(cx, |view, window, cx| view.send(window, cx)); - - cx.run_until_parked(); - - // Select the pattern option (index 1 = "Always for `npm` commands") - thread_view.update_in(cx, |_, window, cx| { - window.dispatch_action( - crate::SelectPermissionGranularity { - tool_call_id: "allow-granularity-test-1".to_string(), - index: 1, - } - .boxed_clone(), - cx, - ); - }); - - cx.run_until_parked(); - - // Simulate clicking the Allow button by dispatching AllowOnce action - // which should use the selected granularity - active_thread(&thread_view, cx).update_in(cx, |view, window, cx| { - view.allow_once(&AllowOnce, window, cx) - }); - - cx.run_until_parked(); - - // Verify tool call was authorized - thread_view.read_with(cx, |thread_view, cx| { - let tool_call = thread_view.pending_tool_call(cx); - assert!( - tool_call.is_none(), - "Tool call should be authorized 
after Allow with pattern granularity" - ); - }); - } - #[gpui::test] async fn test_deny_button_uses_selected_granularity(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 8a29d16b1acf165ba77093dced980a7f51fe2e37..f9d5311983c5f1a0b53504fc88b97ef8f2e953c4 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -235,10 +235,6 @@ pub struct ThreadView { pub is_loading_contents: bool, pub new_server_version_available: Option, pub resumed_without_history: bool, - /// Tracks the selected granularity index for each tool call's permission dropdown. - /// The index corresponds to the position in the allow_options list. - /// Default is the last option (index pointing to "Only this time"). - pub selected_permission_granularity: HashMap, pub resume_thread_metadata: Option, pub _cancel_task: Option>, pub skip_queue_processing_count: usize, @@ -428,7 +424,6 @@ impl ThreadView { discarded_partial_edits: HashSet::default(), is_loading_contents: false, new_server_version_available: None, - selected_permission_granularity: HashMap::default(), _cancel_task: None, skip_queue_processing_count: 0, user_interrupted_generation: false, @@ -1385,19 +1380,6 @@ impl ThreadView { ); } - pub fn handle_select_permission_granularity( - &mut self, - action: &SelectPermissionGranularity, - _window: &mut Window, - cx: &mut Context, - ) { - let tool_call_id = acp::ToolCallId::new(action.tool_call_id.clone()); - self.selected_permission_granularity - .insert(tool_call_id, action.index); - - cx.notify(); - } - fn authorize_pending_with_granularity( &mut self, is_allow: bool, @@ -1417,9 +1399,9 @@ impl ThreadView { // Get selected index, defaulting to last option ("Only this time") let selected_index = self - .selected_permission_granularity - .get(&tool_call_id) - .copied() + .conversation + .read(cx) + 
.selected_permission_granularity(&session_id, &tool_call_id) .unwrap_or_else(|| choices.len().saturating_sub(1)); let selected_choice = choices.get(selected_index).or(choices.last())?; @@ -1817,23 +1799,26 @@ impl ThreadView { .when(!plan.is_empty() && !changed_buffers.is_empty(), |this| { this.child(Divider::horizontal().color(DividerColor::Border)) }) - .when(!changed_buffers.is_empty(), |this| { - this.child(self.render_edits_summary( - &changed_buffers, - edits_expanded, - pending_edits, - cx, - )) - .when(edits_expanded, |parent| { - parent.child(self.render_edited_files( - action_log, - telemetry.clone(), + .when( + !changed_buffers.is_empty() && thread.parent_session_id().is_none(), + |this| { + this.child(self.render_edits_summary( &changed_buffers, + edits_expanded, pending_edits, cx, )) - }) - }) + .when(edits_expanded, |parent| { + parent.child(self.render_edited_files( + action_log, + telemetry.clone(), + &changed_buffers, + pending_edits, + cx, + )) + }) + }, + ) .when(!queue_is_empty, |this| { this.when(!plan.is_empty() || !changed_buffers.is_empty(), |this| { this.child(Divider::horizontal().color(DividerColor::Border)) @@ -5518,9 +5503,9 @@ impl ThreadView { ) -> Div { // Get the selected granularity index, defaulting to the last option ("Only this time") let selected_index = self - .selected_permission_granularity - .get(&tool_call_id) - .copied() + .conversation + .read(cx) + .selected_permission_granularity(&session_id, &tool_call_id) .unwrap_or_else(|| choices.len().saturating_sub(1)); let selected_choice = choices.get(selected_index).or(choices.last()); @@ -5608,6 +5593,7 @@ impl ThreadView { ) }) .on_click(cx.listener({ + let session_id = session_id.clone(); let tool_call_id = tool_call_id.clone(); let option_id = deny_option_id; let option_kind = deny_option_kind; @@ -5628,6 +5614,7 @@ impl ThreadView { choices, dropdown_label, entry_ix, + session_id, tool_call_id, selected_index, is_first, @@ -5640,6 +5627,7 @@ impl ThreadView { choices: 
&[PermissionOptionChoice], current_label: SharedString, entry_ix: usize, + session_id: acp::SessionId, tool_call_id: acp::ToolCallId, selected_index: usize, is_first: bool, @@ -5653,6 +5641,8 @@ impl ThreadView { let permission_dropdown_handle = self.permission_dropdown_handle.clone(); + let conversation = self.conversation.clone(); + PopoverMenu::new(("permission-granularity", entry_ix)) .with_handle(permission_dropdown_handle) .trigger( @@ -5673,6 +5663,8 @@ impl ThreadView { }), ) .menu(move |window, cx| { + let session_id = session_id.clone(); + let conversation = conversation.clone(); let tool_call_id = tool_call_id.clone(); let options = menu_options.clone(); @@ -5680,23 +5672,23 @@ impl ThreadView { for (index, display_name) in options.iter() { let display_name = display_name.clone(); let index = *index; - let tool_call_id_for_entry = tool_call_id.clone(); + let session_id = session_id.clone(); + let conversation = conversation.clone(); + let tool_call_id = tool_call_id.clone(); let is_selected = index == selected_index; - menu = menu.toggleable_entry( display_name, is_selected, IconPosition::End, None, - move |window, cx| { - window.dispatch_action( - SelectPermissionGranularity { - tool_call_id: tool_call_id_for_entry.0.to_string(), + move |_window, cx| { + conversation.update(cx, |conversation, _cx| { + conversation.set_selected_permission_granularity( + session_id.clone(), + tool_call_id.clone(), index, - } - .boxed_clone(), - cx, - ); + ); + }); }, ); } @@ -7520,7 +7512,6 @@ impl Render for ThreadView { .on_action(cx.listener(Self::allow_once)) .on_action(cx.listener(Self::reject_once)) .on_action(cx.listener(Self::handle_authorize_tool_call)) - .on_action(cx.listener(Self::handle_select_permission_granularity)) .on_action(cx.listener(Self::open_permission_dropdown)) .on_action(cx.listener(Self::open_add_context_menu)) .on_action(cx.listener(|this, _: &ToggleFastMode, _window, cx| { From 065d85d850908acdd9453b092c3eea7347396275 Mon Sep 17 00:00:00 2001 
From: Lukas Wirth Date: Mon, 2 Mar 2026 16:15:08 +0100 Subject: [PATCH 221/548] editor: Do treesitter bracket colorization work on the background (#50068) This is pure computation work that is disconnected from main thread entity state yet it can still block for a couple milliseconds depending on the file Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/editor/src/bracket_colorization.rs | 277 +++++++++++++--------- crates/editor/src/editor.rs | 8 +- crates/editor/src/editor_tests.rs | 1 + crates/editor/src/inlays/inlay_hints.rs | 4 +- 4 files changed, 175 insertions(+), 115 deletions(-) diff --git a/crates/editor/src/bracket_colorization.rs b/crates/editor/src/bracket_colorization.rs index bf71d5c71c9580d04e0c61047215992c5cbd4a26..16fe29a7fa4aa066cf045a63c477fbb569d80334 100644 --- a/crates/editor/src/bracket_colorization.rs +++ b/crates/editor/src/bracket_colorization.rs @@ -5,10 +5,10 @@ use std::ops::Range; use crate::{Editor, HighlightKey}; -use collections::HashMap; -use gpui::{Context, HighlightStyle}; +use collections::{HashMap, HashSet}; +use gpui::{AppContext as _, Context, HighlightStyle}; use itertools::Itertools; -use language::language_settings; +use language::{BufferRow, BufferSnapshot, language_settings}; use multi_buffer::{Anchor, ExcerptId}; use ui::{ActiveTheme, utils::ensure_minimum_contrast}; @@ -19,22 +19,16 @@ impl Editor { } if invalidate { - self.fetched_tree_sitter_chunks.clear(); + self.bracket_fetched_tree_sitter_chunks.clear(); } let accents_count = cx.theme().accents().0.len(); let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx); - let anchors_in_multi_buffer = |current_excerpt: ExcerptId, - text_anchors: [text::Anchor; 4]| - -> Option<[Option<_>; 4]> { - multi_buffer_snapshot - .anchors_in_excerpt(current_excerpt, text_anchors)? 
- .collect_array() - }; - - let bracket_matches_by_accent = self.visible_excerpts(false, cx).into_iter().fold( - HashMap::default(), - |mut acc, (excerpt_id, (buffer, _, buffer_range))| { + + let visible_excerpts = self.visible_excerpts(false, cx); + let excerpt_data: Vec<(ExcerptId, BufferSnapshot, Range)> = visible_excerpts + .into_iter() + .filter_map(|(excerpt_id, (buffer, _, buffer_range))| { let buffer_snapshot = buffer.read(cx).snapshot(); if language_settings::language_settings( buffer_snapshot.language().map(|language| language.name()), @@ -43,112 +37,173 @@ impl Editor { ) .colorize_brackets { - let fetched_chunks = self - .fetched_tree_sitter_chunks - .entry(excerpt_id) - .or_default(); - - let brackets_by_accent = buffer_snapshot - .fetch_bracket_ranges( - buffer_range.start..buffer_range.end, - Some(fetched_chunks), - ) - .into_iter() - .flat_map(|(chunk_range, pairs)| { - if fetched_chunks.insert(chunk_range) { - pairs - } else { - Vec::new() - } - }) - .filter_map(|pair| { - let color_index = pair.color_index?; - - let buffer_open_range = - buffer_snapshot.anchor_range_around(pair.open_range); - let buffer_close_range = - buffer_snapshot.anchor_range_around(pair.close_range); - let [ - buffer_open_range_start, - buffer_open_range_end, - buffer_close_range_start, - buffer_close_range_end, - ] = anchors_in_multi_buffer( - excerpt_id, - [ - buffer_open_range.start, - buffer_open_range.end, - buffer_close_range.start, - buffer_close_range.end, - ], - )?; - let multi_buffer_open_range = - buffer_open_range_start.zip(buffer_open_range_end); - let multi_buffer_close_range = - buffer_close_range_start.zip(buffer_close_range_end); - - let mut ranges = Vec::with_capacity(2); - if let Some((open_start, open_end)) = multi_buffer_open_range { - ranges.push(open_start..open_end); - } - if let Some((close_start, close_end)) = multi_buffer_close_range { - ranges.push(close_start..close_end); - } - if ranges.is_empty() { - None - } else { - Some((color_index % 
accents_count, ranges)) - } - }); + Some((excerpt_id, buffer_snapshot, buffer_range)) + } else { + None + } + }) + .collect(); - for (accent_number, new_ranges) in brackets_by_accent { - let ranges = acc - .entry(accent_number) - .or_insert_with(Vec::>::new); + let mut fetched_tree_sitter_chunks = excerpt_data + .iter() + .filter_map(|(excerpt_id, ..)| { + Some(( + *excerpt_id, + self.bracket_fetched_tree_sitter_chunks + .get(excerpt_id) + .cloned()?, + )) + }) + .collect::>>>(); + + let bracket_matches_by_accent = cx.background_spawn(async move { + let anchors_in_multi_buffer = |current_excerpt: ExcerptId, + text_anchors: [text::Anchor; 4]| + -> Option<[Option<_>; 4]> { + multi_buffer_snapshot + .anchors_in_excerpt(current_excerpt, text_anchors)? + .collect_array() + }; - for new_range in new_ranges { - let i = ranges - .binary_search_by(|probe| { - probe.start.cmp(&new_range.start, &multi_buffer_snapshot) - }) - .unwrap_or_else(|i| i); - ranges.insert(i, new_range); + let bracket_matches_by_accent: HashMap>> = + excerpt_data.into_iter().fold( + HashMap::default(), + |mut acc, (excerpt_id, buffer_snapshot, buffer_range)| { + let fetched_chunks = + fetched_tree_sitter_chunks.entry(excerpt_id).or_default(); + + let brackets_by_accent = compute_bracket_ranges( + &buffer_snapshot, + buffer_range, + fetched_chunks, + excerpt_id, + accents_count, + &anchors_in_multi_buffer, + ); + + for (accent_number, new_ranges) in brackets_by_accent { + let ranges = acc + .entry(accent_number) + .or_insert_with(Vec::>::new); + + for new_range in new_ranges { + let i = ranges + .binary_search_by(|probe| { + probe.start.cmp(&new_range.start, &multi_buffer_snapshot) + }) + .unwrap_or_else(|i| i); + ranges.insert(i, new_range); + } } - } - } - acc - }, - ); + acc + }, + ); - if invalidate { - self.clear_highlights_with( - &mut |key| matches!(key, HighlightKey::ColorizeBracket(_)), - cx, - ); - } + (bracket_matches_by_accent, fetched_tree_sitter_chunks) + }); let editor_background = 
cx.theme().colors().editor_background; let accents = cx.theme().accents().clone(); - for (accent_number, bracket_highlights) in bracket_matches_by_accent { - let bracket_color = accents.color_for_index(accent_number as u32); - let adjusted_color = ensure_minimum_contrast(bracket_color, editor_background, 55.0); - let style = HighlightStyle { - color: Some(adjusted_color), - ..HighlightStyle::default() - }; - self.highlight_text_key( - HighlightKey::ColorizeBracket(accent_number), - bracket_highlights, - style, - true, - cx, - ); - } + self.colorize_brackets_task = cx.spawn(async move |editor, cx| { + if invalidate { + editor + .update(cx, |editor, cx| { + editor.clear_highlights_with( + &mut |key| matches!(key, HighlightKey::ColorizeBracket(_)), + cx, + ); + }) + .ok(); + } + + let (bracket_matches_by_accent, updated_chunks) = bracket_matches_by_accent.await; + + editor + .update(cx, |editor, cx| { + editor + .bracket_fetched_tree_sitter_chunks + .extend(updated_chunks); + for (accent_number, bracket_highlights) in bracket_matches_by_accent { + let bracket_color = accents.color_for_index(accent_number as u32); + let adjusted_color = + ensure_minimum_contrast(bracket_color, editor_background, 55.0); + let style = HighlightStyle { + color: Some(adjusted_color), + ..HighlightStyle::default() + }; + + editor.highlight_text_key( + HighlightKey::ColorizeBracket(accent_number), + bracket_highlights, + style, + true, + cx, + ); + } + }) + .ok(); + }); } } +fn compute_bracket_ranges( + buffer_snapshot: &BufferSnapshot, + buffer_range: Range, + fetched_chunks: &mut HashSet>, + excerpt_id: ExcerptId, + accents_count: usize, + anchors_in_multi_buffer: &impl Fn(ExcerptId, [text::Anchor; 4]) -> Option<[Option; 4]>, +) -> Vec<(usize, Vec>)> { + buffer_snapshot + .fetch_bracket_ranges(buffer_range.start..buffer_range.end, Some(fetched_chunks)) + .into_iter() + .flat_map(|(chunk_range, pairs)| { + if fetched_chunks.insert(chunk_range) { + pairs + } else { + Vec::new() + } + }) + 
.filter_map(|pair| { + let color_index = pair.color_index?; + + let buffer_open_range = buffer_snapshot.anchor_range_around(pair.open_range); + let buffer_close_range = buffer_snapshot.anchor_range_around(pair.close_range); + let [ + buffer_open_range_start, + buffer_open_range_end, + buffer_close_range_start, + buffer_close_range_end, + ] = anchors_in_multi_buffer( + excerpt_id, + [ + buffer_open_range.start, + buffer_open_range.end, + buffer_close_range.start, + buffer_close_range.end, + ], + )?; + let multi_buffer_open_range = buffer_open_range_start.zip(buffer_open_range_end); + let multi_buffer_close_range = buffer_close_range_start.zip(buffer_close_range_end); + + let mut ranges = Vec::with_capacity(2); + if let Some((open_start, open_end)) = multi_buffer_open_range { + ranges.push(open_start..open_end); + } + if let Some((close_start, close_end)) = multi_buffer_close_range { + ranges.push(close_start..close_end); + } + if ranges.is_empty() { + None + } else { + Some((color_index % accents_count, ranges)) + } + }) + .collect() +} + #[cfg(test)] mod tests { use std::{cmp, sync::Arc, time::Duration}; @@ -164,7 +219,7 @@ mod tests { }; use collections::HashSet; use fs::FakeFs; - use gpui::{AppContext as _, UpdateGlobal as _}; + use gpui::UpdateGlobal as _; use indoc::indoc; use itertools::Itertools; use language::{Capability, markdown_lang}; @@ -749,6 +804,7 @@ mod foo «1{ }); }); }); + cx.executor().run_until_parked(); assert_eq!( &separate_with_comment_lines( indoc! {r#" @@ -776,6 +832,7 @@ mod foo { }); }); }); + cx.executor().run_until_parked(); assert_eq!( &separate_with_comment_lines( indoc! 
{r#" diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index a6d9e593cc4b2d8d593f48a7887e6308ff0e63cb..93d87885babf6265ff4b12c9da2c4c0cc07ec9a9 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1347,7 +1347,7 @@ pub struct Editor { suppress_selection_callback: bool, applicable_language_settings: HashMap, LanguageSettings>, accent_data: Option, - fetched_tree_sitter_chunks: HashMap>>, + bracket_fetched_tree_sitter_chunks: HashMap>>, semantic_token_state: SemanticTokenState, pub(crate) refresh_matching_bracket_highlights_task: Task<()>, refresh_document_symbols_task: Shared>, @@ -1356,6 +1356,7 @@ pub struct Editor { outline_symbols_at_cursor: Option<(BufferId, Vec>)>, sticky_headers_task: Task<()>, sticky_headers: Option>>, + pub(crate) colorize_brackets_task: Task<()>, } #[derive(Debug, PartialEq)] @@ -2600,7 +2601,7 @@ impl Editor { applicable_language_settings: HashMap::default(), semantic_token_state: SemanticTokenState::new(cx, full_mode), accent_data: None, - fetched_tree_sitter_chunks: HashMap::default(), + bracket_fetched_tree_sitter_chunks: HashMap::default(), number_deleted_lines: false, refresh_matching_bracket_highlights_task: Task::ready(()), refresh_document_symbols_task: Task::ready(()).shared(), @@ -2609,6 +2610,7 @@ impl Editor { outline_symbols_at_cursor: None, sticky_headers_task: Task::ready(()), sticky_headers: None, + colorize_brackets_task: Task::ready(()), }; if is_minimap { @@ -24165,7 +24167,7 @@ impl Editor { self.refresh_document_highlights(cx); let snapshot = multibuffer.read(cx).snapshot(cx); for id in ids { - self.fetched_tree_sitter_chunks.remove(id); + self.bracket_fetched_tree_sitter_chunks.remove(id); if let Some(buffer) = snapshot.buffer_for_excerpt(*id) { self.semantic_token_state .invalidate_buffer(&buffer.remote_id()); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 
9a0033306a1032786535a188a9ea830cb44c3ca3..88be32d6d73d967ab34b287534308164b8623679 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -17284,6 +17284,7 @@ async fn test_no_duplicated_completion_requests(cx: &mut TestAppContext) { } }); + cx.executor().run_until_parked(); cx.condition(|editor, _| editor.context_menu_visible()) .await; cx.assert_editor_state("fn main() { let a = 2.ˇ; }"); diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index d7a116065101dcc5070a7280ba7c3424e74685fe..0b3f6bda09c2cf86b994682e2ed89c2614d72737 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -4501,9 +4501,9 @@ let c = 3;"# }, ); - let buffer = project + let (buffer, _buffer_handle) = project .update(cx, |project, cx| { - project.open_local_buffer(path!("/a/main.rs"), cx) + project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx) }) .await .unwrap(); From af5e26175a4cdc433a6a65cb6e3bed1c2e412ccf Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 2 Mar 2026 16:17:05 +0100 Subject: [PATCH 222/548] agent: Add subagent info to telemetry events (#50492) - **Add parent_session_id to agent thread feedback submission** - **Add more parent session ids to events** Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner --- crates/acp_thread/src/acp_thread.rs | 2 ++ crates/agent/src/thread.rs | 4 ++++ crates/agent_ui/src/connection_view/thread_view.rs | 11 +++++++++++ crates/cloud_api_types/src/cloud_api_types.rs | 1 + 4 files changed, 18 insertions(+) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 46797130c0cebb605c6820f4d04fd2e989977617..e6da8f3f901b41c0a59d73920c3036fc72d1b906 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -1600,6 +1600,7 @@ impl AcpThread { let agent_telemetry_id = self.connection().telemetry_id(); let session = self.session_id(); + let 
parent_session_id = self.parent_session_id(); if let ToolCallStatus::Completed | ToolCallStatus::Failed = status { let status = if matches!(status, ToolCallStatus::Completed) { "completed" @@ -1610,6 +1611,7 @@ impl AcpThread { "Agent Tool Call Completed", agent_telemetry_id, session, + parent_session_id, status ); } diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 64ab5795e7dadcb86f4df45e08157c851d4c7c25..0825910e78af2950b895c17699c8cd623b359727 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1739,6 +1739,9 @@ impl Thread { telemetry::event!( "Agent Thread Completion", thread_id = this.read_with(cx, |this, _| this.id.to_string())?, + parent_thread_id = this.read_with(cx, |this, _| this + .parent_thread_id() + .map(|id| id.to_string()))?, prompt_id = this.read_with(cx, |this, _| this.prompt_id.to_string())?, model = model.telemetry_id(), model_provider = model.provider_id().to_string(), @@ -1997,6 +2000,7 @@ impl Thread { telemetry::event!( "Agent Thread Completion Usage Updated", thread_id = self.id.to_string(), + parent_thread_id = self.parent_thread_id().map(|id| id.to_string()), prompt_id = self.prompt_id.to_string(), model = self.model.as_ref().map(|m| m.telemetry_id()), model_provider = self.model.as_ref().map(|m| m.provider_id().to_string()), diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index f9d5311983c5f1a0b53504fc88b97ef8f2e953c4..cd8becbded396afa5c8eb4a9136db18f63e4a535 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -45,6 +45,7 @@ impl ThreadFeedbackState { } } let session_id = thread.read(cx).session_id().clone(); + let parent_session_id = thread.read(cx).parent_session_id().cloned(); let agent_telemetry_id = thread.read(cx).connection().telemetry_id(); let task = telemetry.thread_data(&session_id, cx); let rating = match feedback { @@ -60,6 +61,7 @@ 
impl ThreadFeedbackState { organization_id: organization.map(|organization| organization.id.clone()), agent: agent_telemetry_id.to_string(), session_id: session_id.to_string(), + parent_session_id: parent_session_id.map(|id| id.to_string()), rating: rating.to_string(), thread, }) @@ -804,6 +806,7 @@ impl ThreadView { cx: &mut Context, ) { let session_id = self.thread.read(cx).session_id().clone(); + let parent_session_id = self.thread.read(cx).parent_session_id().cloned(); let agent_telemetry_id = self.thread.read(cx).connection().telemetry_id(); let thread = self.thread.downgrade(); @@ -858,6 +861,7 @@ impl ThreadView { "Agent Message Sent", agent = agent_telemetry_id, session = session_id, + parent_session_id = parent_session_id.as_ref().map(|id| id.to_string()), model = model_id, mode = mode_id ); @@ -877,6 +881,7 @@ impl ThreadView { "Agent Turn Completed", agent = agent_telemetry_id, session = session_id, + parent_session_id = parent_session_id.as_ref().map(|id| id.to_string()), model = model_id, mode = mode_id, status, @@ -985,11 +990,17 @@ impl ThreadView { let agent_telemetry_id = self.thread.read(cx).connection().telemetry_id(); let session_id = self.thread.read(cx).session_id().clone(); + let parent_session_id = self + .thread + .read(cx) + .parent_session_id() + .map(|id| id.to_string()); telemetry::event!( "Agent Panel Error Shown", agent = agent_telemetry_id, session_id = session_id, + parent_session_id = parent_session_id, kind = error_kind, acp_error_code = acp_error_code, message = message, diff --git a/crates/cloud_api_types/src/cloud_api_types.rs b/crates/cloud_api_types/src/cloud_api_types.rs index 5f86dce21eea6f76a426fa1bca735be87a513ee2..2d457fc6630d5b32f049e67a6a460047e925973a 100644 --- a/crates/cloud_api_types/src/cloud_api_types.rs +++ b/crates/cloud_api_types/src/cloud_api_types.rs @@ -62,6 +62,7 @@ pub struct SubmitAgentThreadFeedbackBody { pub organization_id: Option, pub agent: String, pub session_id: String, + pub parent_session_id: 
Option, pub rating: String, pub thread: serde_json::Value, } From e30720a781ad5e4bee9ab6e5c9f228baffef466c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 2 Mar 2026 16:18:01 +0100 Subject: [PATCH 223/548] gpui_web: Implement fetch based HTTP client (#50463) Can only be used in single threaded environments for now due to js futures being non-send. Release Notes: - N/A *or* Added/Fixed/Improved ... --- Cargo.lock | 2 + crates/gpui/Cargo.toml | 1 + crates/gpui/examples/image/image.rs | 90 ++++++---- crates/gpui/examples/image_gallery.rs | 26 ++- crates/gpui_platform/src/gpui_platform.rs | 8 +- crates/gpui_web/Cargo.toml | 12 +- crates/gpui_web/src/dispatcher.rs | 16 +- crates/gpui_web/src/gpui_web.rs | 2 + crates/gpui_web/src/http_client.rs | 199 ++++++++++++++++++++++ crates/gpui_web/src/platform.rs | 7 +- 10 files changed, 316 insertions(+), 47 deletions(-) create mode 100644 crates/gpui_web/src/http_client.rs diff --git a/Cargo.lock b/Cargo.lock index 030777000144d01de7653fbce314c14223e158d7..97cc166c14e099b57b74585277869052de0cff87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7572,6 +7572,7 @@ dependencies = [ "gpui_macros", "gpui_platform", "gpui_util", + "gpui_web", "http_client", "image", "inventory", @@ -7763,6 +7764,7 @@ dependencies = [ "futures 0.3.31", "gpui", "gpui_wgpu", + "http_client", "js-sys", "log", "parking_lot", diff --git a/crates/gpui/Cargo.toml b/crates/gpui/Cargo.toml index 4bd9510eac1710554f8eec52f22609db31c531ad..c80f97efb6dc8bf1450c08bfe85290096b44815b 100644 --- a/crates/gpui/Cargo.toml +++ b/crates/gpui/Cargo.toml @@ -156,6 +156,7 @@ reqwest_client = { workspace = true, features = ["test-support"] } [target.'cfg(target_family = "wasm")'.dev-dependencies] wasm-bindgen = { workspace = true } +gpui_web.workspace = true [build-dependencies] embed-resource = { version = "3.0", optional = true } diff --git a/crates/gpui/examples/image/image.rs b/crates/gpui/examples/image/image.rs index 
cf879ba281e18521883222fba54451bb143fae29..832cdf896a80e84c3ca8b591e0a0956af2cedcac 100644 --- a/crates/gpui/examples/image/image.rs +++ b/crates/gpui/examples/image/image.rs @@ -10,7 +10,7 @@ use gpui::{ SharedString, SharedUri, TitlebarOptions, Window, WindowBounds, WindowOptions, actions, div, img, prelude::*, px, rgb, size, }; -use gpui_platform::application; +#[cfg(not(target_family = "wasm"))] use reqwest_client::ReqwestClient; struct Assets { @@ -151,47 +151,63 @@ actions!(image, [Quit]); fn run_example() { let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - application() - .with_assets(Assets { - base: manifest_dir.join("examples"), - }) - .run(move |cx: &mut App| { + #[cfg(not(target_family = "wasm"))] + let app = gpui_platform::application(); + #[cfg(target_family = "wasm")] + let app = gpui_platform::application(); + app.with_assets(Assets { + base: manifest_dir.join("examples"), + }) + .run(move |cx: &mut App| { + #[cfg(not(target_family = "wasm"))] + { let http_client = ReqwestClient::user_agent("gpui example").unwrap(); cx.set_http_client(Arc::new(http_client)); - - cx.activate(true); - cx.on_action(|_: &Quit, cx| cx.quit()); - cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); - cx.set_menus(vec![Menu { - name: "Image".into(), - items: vec![MenuItem::action("Quit", Quit)], - }]); - - let window_options = WindowOptions { - titlebar: Some(TitlebarOptions { - title: Some(SharedString::from("Image Example")), - appears_transparent: false, - ..Default::default() - }), - - window_bounds: Some(WindowBounds::Windowed(Bounds { - size: size(px(1100.), px(600.)), - origin: Point::new(px(200.), px(200.)), - })), - - ..Default::default() + } + #[cfg(target_family = "wasm")] + { + // Safety: the web examples run single-threaded; the client is + // created and used exclusively on the main thread. 
+ let http_client = unsafe { + gpui_web::FetchHttpClient::with_user_agent("gpui example") + .expect("failed to create FetchHttpClient") }; + cx.set_http_client(Arc::new(http_client)); + } - cx.open_window(window_options, |_, cx| { - cx.new(|_| ImageShowcase { - // Relative path to your root project path - local_resource: manifest_dir.join("examples/image/app-icon.png").into(), - remote_resource: "https://picsum.photos/800/400".into(), - asset_resource: "image/color.svg".into(), - }) + cx.activate(true); + cx.on_action(|_: &Quit, cx| cx.quit()); + cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]); + cx.set_menus(vec![Menu { + name: "Image".into(), + items: vec![MenuItem::action("Quit", Quit)], + }]); + + let window_options = WindowOptions { + titlebar: Some(TitlebarOptions { + title: Some(SharedString::from("Image Example")), + appears_transparent: false, + ..Default::default() + }), + + window_bounds: Some(WindowBounds::Windowed(Bounds { + size: size(px(1100.), px(600.)), + origin: Point::new(px(200.), px(200.)), + })), + + ..Default::default() + }; + + cx.open_window(window_options, |_, cx| { + cx.new(|_| ImageShowcase { + // Relative path to your root project path + local_resource: manifest_dir.join("examples/image/app-icon.png").into(), + remote_resource: "https://picsum.photos/800/400".into(), + asset_resource: "image/color.svg".into(), }) - .unwrap(); - }); + }) + .unwrap(); + }); } #[cfg(not(target_family = "wasm"))] diff --git a/crates/gpui/examples/image_gallery.rs b/crates/gpui/examples/image_gallery.rs index eba3fc0b6444c1b02ed8d6d2437505f1d341e605..9d8ac29ff8c9762417ff59acbfc83db6ad9c8346 100644 --- a/crates/gpui/examples/image_gallery.rs +++ b/crates/gpui/examples/image_gallery.rs @@ -7,7 +7,7 @@ use gpui::{ RetainAllImageCache, SharedString, TitlebarOptions, Window, WindowBounds, WindowOptions, actions, div, hash, image_cache, img, prelude::*, px, rgb, size, }; -use gpui_platform::application; +#[cfg(not(target_family = "wasm"))] use 
reqwest_client::ReqwestClient; use std::{collections::HashMap, sync::Arc}; @@ -248,9 +248,27 @@ impl ImageCache for SimpleLruCache { actions!(image, [Quit]); fn run_example() { - application().run(move |cx: &mut App| { - let http_client = ReqwestClient::user_agent("gpui example").unwrap(); - cx.set_http_client(Arc::new(http_client)); + #[cfg(not(target_family = "wasm"))] + let app = gpui_platform::application(); + #[cfg(target_family = "wasm")] + let app = gpui_platform::single_threaded_web(); + + app.run(move |cx: &mut App| { + #[cfg(not(target_family = "wasm"))] + { + let http_client = ReqwestClient::user_agent("gpui example").unwrap(); + cx.set_http_client(Arc::new(http_client)); + } + #[cfg(target_family = "wasm")] + { + // Safety: the web examples run single-threaded; the client is + // created and used exclusively on the main thread. + let http_client = unsafe { + gpui_web::FetchHttpClient::with_user_agent("gpui example") + .expect("failed to create FetchHttpClient") + }; + cx.set_http_client(Arc::new(http_client)); + } cx.activate(true); cx.on_action(|_: &Quit, cx| cx.quit()); diff --git a/crates/gpui_platform/src/gpui_platform.rs b/crates/gpui_platform/src/gpui_platform.rs index 86c0577f75ff4ac61ab7a4d956b7e34718fb26e5..7dac5498a652f7a7fe68b9f6d7ea23dffabdfb22 100644 --- a/crates/gpui_platform/src/gpui_platform.rs +++ b/crates/gpui_platform/src/gpui_platform.rs @@ -18,6 +18,12 @@ pub fn headless() -> gpui::Application { gpui::Application::with_platform(current_platform(true)) } +/// Unlike `application`, this function returns a single-threaded web application. +#[cfg(target_family = "wasm")] +pub fn single_threaded_web() -> gpui::Application { + gpui::Application::with_platform(Rc::new(gpui_web::WebPlatform::new(false))) +} + /// Initializes panic hooks and logging for the web platform. /// Call this before running the application in a wasm_bindgen entrypoint. 
#[cfg(target_family = "wasm")] @@ -49,7 +55,7 @@ pub fn current_platform(headless: bool) -> Rc { #[cfg(target_family = "wasm")] { let _ = headless; - Rc::new(gpui_web::WebPlatform::new()) + Rc::new(gpui_web::WebPlatform::new(true)) } } diff --git a/crates/gpui_web/Cargo.toml b/crates/gpui_web/Cargo.toml index a2bb95a9f4bb3007a2a2feb9f7483d38dff3cf1d..dbb110597c7b850c28cde99ed573eab8264a18f7 100644 --- a/crates/gpui_web/Cargo.toml +++ b/crates/gpui_web/Cargo.toml @@ -9,6 +9,10 @@ autoexamples = false [lints] workspace = true +[features] +default = ["multithreaded"] +multithreaded = ["dep:wasm_thread"] + [lib] path = "src/gpui_web.rs" @@ -16,6 +20,7 @@ path = "src/gpui_web.rs" gpui.workspace = true parking_lot = { workspace = true, features = ["nightly"] } gpui_wgpu.workspace = true +http_client.workspace = true anyhow.workspace = true futures.workspace = true log.workspace = true @@ -27,7 +32,7 @@ web-time.workspace = true console_error_panic_hook = "0.1.7" js-sys = "0.3" raw-window-handle = "0.6" -wasm_thread = { version = "0.3", features = ["es_modules"] } +wasm_thread = { version = "0.3", features = ["es_modules"], optional = true } web-sys = { version = "0.3", features = [ "console", "CssStyleDeclaration", @@ -56,6 +61,11 @@ web-sys = { version = "0.3", features = [ "Screen", "Storage", "VisualViewport", + "Headers", + "Request", + "RequestInit", + "RequestRedirect", + "Response", "WheelEvent", "Window", ] } diff --git a/crates/gpui_web/src/dispatcher.rs b/crates/gpui_web/src/dispatcher.rs index ca0b700a1bf0bc75e1dafd859b59a04540524f63..d9419fb35353cfadd809b0bbc1cb9e7dbf124cda 100644 --- a/crates/gpui_web/src/dispatcher.rs +++ b/crates/gpui_web/src/dispatcher.rs @@ -8,8 +8,10 @@ use std::time::Duration; use wasm_bindgen::prelude::*; use web_time::Instant; +#[cfg(feature = "multithreaded")] const MIN_BACKGROUND_THREADS: usize = 2; +#[cfg(feature = "multithreaded")] fn shared_memory_supported() -> bool { let global = js_sys::global(); let has_shared_array_buffer = 
@@ -126,6 +128,7 @@ pub struct WebDispatcher { background_sender: PriorityQueueSender, main_thread_mailbox: Arc, supports_threads: bool, + #[cfg(feature = "multithreaded")] _background_threads: Vec>, } @@ -135,11 +138,18 @@ unsafe impl Send for WebDispatcher {} unsafe impl Sync for WebDispatcher {} impl WebDispatcher { - pub fn new(browser_window: web_sys::Window) -> Self { + pub fn new(browser_window: web_sys::Window, allow_threads: bool) -> Self { + #[cfg(feature = "multithreaded")] let (background_sender, background_receiver) = PriorityQueueReceiver::new(); + #[cfg(not(feature = "multithreaded"))] + let (background_sender, _) = PriorityQueueReceiver::new(); let main_thread_mailbox = Arc::new(MainThreadMailbox::new()); - let supports_threads = shared_memory_supported(); + + #[cfg(feature = "multithreaded")] + let supports_threads = allow_threads && shared_memory_supported(); + #[cfg(not(feature = "multithreaded"))] + let supports_threads = false; if supports_threads { main_thread_mailbox.run_waker_loop(browser_window.clone()); @@ -149,6 +159,7 @@ impl WebDispatcher { ); } + #[cfg(feature = "multithreaded")] let background_threads = if supports_threads { let thread_count = browser_window .navigator() @@ -193,6 +204,7 @@ impl WebDispatcher { background_sender, main_thread_mailbox, supports_threads, + #[cfg(feature = "multithreaded")] _background_threads: background_threads, } } diff --git a/crates/gpui_web/src/gpui_web.rs b/crates/gpui_web/src/gpui_web.rs index 966ff3b0d7d90219e8cf702a16fce598f813c835..9cd773823bd9b65ef99cb89c12184919a4c45dc2 100644 --- a/crates/gpui_web/src/gpui_web.rs +++ b/crates/gpui_web/src/gpui_web.rs @@ -3,6 +3,7 @@ mod dispatcher; mod display; mod events; +mod http_client; mod keyboard; mod logging; mod platform; @@ -10,6 +11,7 @@ mod window; pub use dispatcher::WebDispatcher; pub use display::WebDisplay; +pub use http_client::FetchHttpClient; pub use keyboard::WebKeyboardLayout; pub use logging::init_logging; pub use platform::WebPlatform; 
diff --git a/crates/gpui_web/src/http_client.rs b/crates/gpui_web/src/http_client.rs new file mode 100644 index 0000000000000000000000000000000000000000..14d58cf45766885af76f49892589f70b89fb8116 --- /dev/null +++ b/crates/gpui_web/src/http_client.rs @@ -0,0 +1,199 @@ +use anyhow::anyhow; +use futures::AsyncReadExt as _; +use http_client::{AsyncBody, HttpClient, RedirectPolicy}; +use std::future::Future; +use std::pin::Pin; +use std::task::Poll; +use wasm_bindgen::JsCast as _; +use wasm_bindgen::prelude::*; + +#[wasm_bindgen] +extern "C" { + #[wasm_bindgen(catch, js_name = "fetch")] + fn global_fetch(input: &web_sys::Request) -> Result; +} + +pub struct FetchHttpClient { + user_agent: Option, +} + +impl Default for FetchHttpClient { + fn default() -> Self { + Self { user_agent: None } + } +} + +#[cfg(feature = "multithreaded")] +impl FetchHttpClient { + /// # Safety + /// + /// The caller must ensure that the created `FetchHttpClient` is only used in a single thread environment. + pub unsafe fn new() -> Self { + Self::default() + } + + /// # Safety + /// + /// The caller must ensure that the created `FetchHttpClient` is only used in a single thread environment. + pub unsafe fn with_user_agent(user_agent: &str) -> anyhow::Result { + Ok(Self { + user_agent: Some(http_client::http::header::HeaderValue::from_str( + user_agent, + )?), + }) + } +} + +#[cfg(not(feature = "multithreaded"))] +impl FetchHttpClient { + pub fn new() -> Self { + Self::default() + } + + pub fn with_user_agent(user_agent: &str) -> anyhow::Result { + Ok(Self { + user_agent: Some(http_client::http::header::HeaderValue::from_str( + user_agent, + )?), + }) + } +} + +/// Wraps a `!Send` future to satisfy the `Send` bound on `BoxFuture`. +/// +/// Safety: only valid in WASM contexts where the `FetchHttpClient` is +/// confined to a single thread (guaranteed by the caller via unsafe +/// constructors when `multithreaded` is enabled, or by the absence of +/// threads when it is not). 
+struct AssertSend(F); + +unsafe impl Send for AssertSend {} + +impl Future for AssertSend { + type Output = F::Output; + + fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll { + // Safety: pin projection for a single-field newtype wrapper. + let inner = unsafe { self.map_unchecked_mut(|this| &mut this.0) }; + inner.poll(cx) + } +} + +impl HttpClient for FetchHttpClient { + fn user_agent(&self) -> Option<&http_client::http::header::HeaderValue> { + self.user_agent.as_ref() + } + + fn proxy(&self) -> Option<&http_client::Url> { + None + } + + fn send( + &self, + req: http_client::http::Request, + ) -> futures::future::BoxFuture<'static, anyhow::Result>> + { + let (parts, body) = req.into_parts(); + + Box::pin(AssertSend(async move { + let body_bytes = read_body_to_bytes(body).await?; + + let init = web_sys::RequestInit::new(); + init.set_method(parts.method.as_str()); + + if let Some(redirect_policy) = parts.extensions.get::() { + match redirect_policy { + RedirectPolicy::NoFollow => { + init.set_redirect(web_sys::RequestRedirect::Manual); + } + RedirectPolicy::FollowLimit(_) | RedirectPolicy::FollowAll => { + init.set_redirect(web_sys::RequestRedirect::Follow); + } + } + } + + if let Some(ref bytes) = body_bytes { + let uint8array = js_sys::Uint8Array::from(bytes.as_slice()); + init.set_body(uint8array.as_ref()); + } + + let url = parts.uri.to_string(); + let request = web_sys::Request::new_with_str_and_init(&url, &init) + .map_err(|error| anyhow!("failed to create fetch Request: {error:?}"))?; + + let request_headers = request.headers(); + for (name, value) in &parts.headers { + let value_str = value + .to_str() + .map_err(|_| anyhow!("non-ASCII header value for {name}"))?; + request_headers + .set(name.as_str(), value_str) + .map_err(|error| anyhow!("failed to set header {name}: {error:?}"))?; + } + + let promise = global_fetch(&request) + .map_err(|error| anyhow!("fetch threw an error: {error:?}"))?; + let response_value = 
wasm_bindgen_futures::JsFuture::from(promise) + .await + .map_err(|error| anyhow!("fetch failed: {error:?}"))?; + + let web_response: web_sys::Response = response_value + .dyn_into() + .map_err(|error| anyhow!("fetch result is not a Response: {error:?}"))?; + + let status = web_response.status(); + let mut builder = http_client::http::Response::builder().status(status); + + // `Headers` is a JS iterable yielding `[name, value]` pairs. + // `js_sys::Array::from` calls `Array.from()` which accepts any iterable. + let header_pairs = js_sys::Array::from(&web_response.headers()); + for index in 0..header_pairs.length() { + match header_pairs.get(index).dyn_into::() { + Ok(pair) => match (pair.get(0).as_string(), pair.get(1).as_string()) { + (Some(name), Some(value)) => { + builder = builder.header(name, value); + } + (name, value) => { + log::warn!( + "skipping response header at index {index}: \ + name={name:?}, value={value:?}" + ); + } + }, + Err(entry) => { + log::warn!("skipping non-array header entry at index {index}: {entry:?}"); + } + } + } + + // The entire response body is eagerly buffered into memory via + // `arrayBuffer()`. The Fetch API does not expose a synchronous + // streaming interface; streaming would require `ReadableStream` + // interop which is significantly more complex. 
+ let body_promise = web_response + .array_buffer() + .map_err(|error| anyhow!("failed to initiate response body read: {error:?}"))?; + let body_value = wasm_bindgen_futures::JsFuture::from(body_promise) + .await + .map_err(|error| anyhow!("failed to read response body: {error:?}"))?; + let array_buffer: js_sys::ArrayBuffer = body_value + .dyn_into() + .map_err(|error| anyhow!("response body is not an ArrayBuffer: {error:?}"))?; + let response_bytes = js_sys::Uint8Array::new(&array_buffer).to_vec(); + + builder + .body(AsyncBody::from(response_bytes)) + .map_err(|error| anyhow!(error)) + })) + } +} + +async fn read_body_to_bytes(mut body: AsyncBody) -> anyhow::Result>> { + let mut buffer = Vec::new(); + body.read_to_end(&mut buffer).await?; + if buffer.is_empty() { + Ok(None) + } else { + Ok(Some(buffer)) + } +} diff --git a/crates/gpui_web/src/platform.rs b/crates/gpui_web/src/platform.rs index 420b7cb3f470c683888aa76bd61236c1f1ff181e..4d78b71aa05b743f779d0e8a1e7ed8a5eac136f9 100644 --- a/crates/gpui_web/src/platform.rs +++ b/crates/gpui_web/src/platform.rs @@ -54,10 +54,13 @@ struct WebPlatformCallbacks { } impl WebPlatform { - pub fn new() -> Self { + pub fn new(allow_multi_threading: bool) -> Self { let browser_window = web_sys::window().expect("must be running in a browser window context"); - let dispatcher = Arc::new(WebDispatcher::new(browser_window.clone())); + let dispatcher = Arc::new(WebDispatcher::new( + browser_window.clone(), + allow_multi_threading, + )); let background_executor = BackgroundExecutor::new(dispatcher.clone()); let foreground_executor = ForegroundExecutor::new(dispatcher); let text_system = Arc::new(gpui_wgpu::CosmicTextSystem::new_without_system_fonts( From 12b786dffc39bbc0eb109498e5a3f5149e6bd5de Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 2 Mar 2026 16:30:03 +0100 Subject: [PATCH 224/548] agent: Remove subagents feature flag (#50493) Release Notes: - agent: Add new `spawn_agent` tool which allows the Zed Agent to utilize 
subagents for doing tasks in parallel and better context management. Co-authored-by: Bennet Bo Fenner --- crates/agent/src/thread.rs | 6 ++---- .../src/agent_configuration/manage_profiles_modal.rs | 8 ++------ crates/feature_flags/src/flags.rs | 10 ---------- 3 files changed, 4 insertions(+), 20 deletions(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 0825910e78af2950b895c17699c8cd623b359727..a63437e9e486872458666d324d6dec50cbd1d149 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -8,9 +8,7 @@ use crate::{ }; use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; -use feature_flags::{ - FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag, SubagentsFeatureFlag, -}; +use feature_flags::{FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag}; use agent_client_protocol as acp; use agent_settings::{ @@ -1387,7 +1385,7 @@ impl Thread { self.add_tool(TerminalTool::new(self.project.clone(), environment.clone())); self.add_tool(WebSearchTool); - if cx.has_flag::() && self.depth() < MAX_SUBAGENT_DEPTH { + if self.depth() < MAX_SUBAGENT_DEPTH { self.add_tool(SpawnAgentTool::new(environment)); } } diff --git a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs index f46588c79033d965cbee0aaeb2624e7ae0756af6..744c92a7f7739c9fda2664de45d536769e802986 100644 --- a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs +++ b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs @@ -2,10 +2,9 @@ mod profile_modal_header; use std::sync::Arc; -use agent::{AgentTool, ContextServerRegistry, SpawnAgentTool}; +use agent::ContextServerRegistry; use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profiles}; use editor::Editor; -use feature_flags::{FeatureFlagAppExt as _, SubagentsFeatureFlag}; use fs::Fs; use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, 
Subscription, prelude::*}; use language_model::{LanguageModel, LanguageModelRegistry}; @@ -363,10 +362,7 @@ impl ManageProfilesModal { let supported_by_provider = provider.as_ref().map_or(true, |provider| { agent::tool_supports_provider(name, provider) }); - let enabled_by_feature_flag = - *name != SpawnAgentTool::NAME || cx.has_flag::(); - - supported_by_provider && enabled_by_feature_flag + supported_by_provider }) .map(Arc::from) .collect(); diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index 087e76c4129254d3b6f488259bc8fa19aa91370d..eab9f8c1036a83451fc3201f97cfb1cc8c885043 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -37,16 +37,6 @@ impl FeatureFlag for AgentSharingFeatureFlag { const NAME: &'static str = "agent-sharing"; } -pub struct SubagentsFeatureFlag; - -impl FeatureFlag for SubagentsFeatureFlag { - const NAME: &'static str = "subagents"; - - fn enabled_for_staff() -> bool { - true - } -} - pub struct DiffReviewFeatureFlag; impl FeatureFlag for DiffReviewFeatureFlag { From 7e1026421978b44424d0c14f11feb892b91cf90d Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 2 Mar 2026 10:57:44 -0500 Subject: [PATCH 225/548] git: Silence verbose log when loading index/committed text (#50496) Updates #50487 Release Notes: - N/A --- crates/git/src/repository.rs | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index aed08e8dc850622cff4dc96631199a039c78ac3f..ba77199d75f624c0dd44ad0b2ba4eec812d9a711 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -1326,16 +1326,14 @@ impl GitRepository for RealGitRepository { if path.is_empty() { bail!("empty path has no index text"); } - let entry = index - .get_path(path.as_std_path(), STAGE_NORMAL) - .with_context(|| format!("looking up {path:?} in index"))?; - let oid = if entry.mode != GIT_MODE_SYMLINK { - entry.id - 
} else { + let Some(entry) = index.get_path(path.as_std_path(), STAGE_NORMAL) else { return Ok(None); }; + if entry.mode == GIT_MODE_SYMLINK { + return Ok(None); + } - let content = repo.find_blob(oid)?.content().to_owned(); + let content = repo.find_blob(entry.id)?.content().to_owned(); Ok(String::from_utf8(content).ok()) } @@ -1353,16 +1351,15 @@ impl GitRepository for RealGitRepository { .spawn(async move { fn logic(repo: &git2::Repository, path: &RepoPath) -> Result> { let head = repo.head()?.peel_to_tree()?; + // git2 unwraps internally on empty paths or `.` if path.is_empty() { return Err(anyhow!("empty path has no committed text")); } - // git2 unwraps internally on empty paths or `.` - let entry = head.get_path(path.as_std_path())?; + let Some(entry) = head.get_path(path.as_std_path()).ok() else { + return Ok(None); + }; if entry.filemode() == i32::from(git2::FileMode::Link) { - bail!( - "symlink has no - committed text" - ); + return Ok(None); } let content = repo.find_blob(entry.id())?.content().to_owned(); Ok(String::from_utf8(content).ok()) From 805e32c363d129f629e9d56b5af429f628cc6cdb Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 2 Mar 2026 13:02:06 -0300 Subject: [PATCH 226/548] agent_ui: Improve loading state when there's only a terminal call (#50501) In the thread view, when the last block is an ongoing terminal tool call, we're not waiting for the LLM to generate content, but rather, we're waiting for the command to run so its output can be sent to the model. With that in mind, it doesn't make sense for the generating loading spinner to be visible. So, this PR, removes it. But I also took the opportunity to refine the terminal card header a bit, as well as make the "command running" spinner, as well as the stop button, visible for external agents, too. 
https://github.com/user-attachments/assets/c7e2865a-af9b-416b-a06a-908dffb916da In the video, you can see that given we only have a running-terminal tool call, there's no generation loading spinner. Release Notes: - Agent: Improved loading state for the thread view when there's only a running terminal command. --- .../src/connection_view/thread_view.rs | 196 ++++++++++-------- 1 file changed, 113 insertions(+), 83 deletions(-) diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index cd8becbded396afa5c8eb4a9136db18f63e4a535..0915911104b50ffe9d464a0552d3e43b0794b34d 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -4307,6 +4307,8 @@ impl ThreadView { }) .flatten(); + let is_blocked_on_terminal_command = + !confirmation && self.is_blocked_on_terminal_command(cx); let is_waiting = confirmation || self.thread.read(cx).has_in_progress_tool_calls(); let turn_tokens_label = elapsed_label @@ -4344,6 +4346,8 @@ impl ThreadView { .color(Color::Muted), ), ) + } else if is_blocked_on_terminal_command { + this } else { this.child(SpinnerLabel::new().size(LabelSize::Small)) } @@ -4632,51 +4636,74 @@ impl ThreadView { if text.is_empty() { None } else { Some(text) } } + fn is_blocked_on_terminal_command(&self, cx: &App) -> bool { + let thread = self.thread.read(cx); + if !matches!(thread.status(), ThreadStatus::Generating) { + return false; + } + + let mut has_running_terminal_call = false; + + for entry in thread.entries().iter().rev() { + match entry { + AgentThreadEntry::UserMessage(_) => break, + AgentThreadEntry::ToolCall(tool_call) + if matches!( + tool_call.status, + ToolCallStatus::InProgress | ToolCallStatus::Pending + ) => + { + if matches!(tool_call.kind, acp::ToolKind::Execute) { + has_running_terminal_call = true; + } else { + return false; + } + } + AgentThreadEntry::ToolCall(_) | AgentThreadEntry::AssistantMessage(_) => {} + } + 
} + + has_running_terminal_call + } + fn render_collapsible_command( &self, + group: SharedString, is_preview: bool, command_source: &str, - tool_call_id: &acp::ToolCallId, cx: &Context, ) -> Div { - let command_group = - SharedString::from(format!("collapsible-command-group-{}", tool_call_id)); - v_flex() - .group(command_group.clone()) + .p_1p5() .bg(self.tool_card_header_bg(cx)) - .child( - v_flex() - .p_1p5() - .when(is_preview, |this| { - this.pt_1().child( - // Wrapping this label on a container with 24px height to avoid - // layout shift when it changes from being a preview label - // to the actual path where the command will run in - h_flex().h_6().child( - Label::new("Run Command") - .buffer_font(cx) - .size(LabelSize::XSmall) - .color(Color::Muted), - ), - ) - }) - .children(command_source.lines().map(|line| { - let text: SharedString = if line.is_empty() { - " ".into() - } else { - line.to_string().into() - }; - - Label::new(text).buffer_font(cx).size(LabelSize::Small) - })) - .child( - div().absolute().top_1().right_1().child( - CopyButton::new("copy-command", command_source.to_string()) - .tooltip_label("Copy Command") - .visible_on_hover(command_group), - ), + .when(is_preview, |this| { + this.pt_1().child( + // Wrapping this label on a container with 24px height to avoid + // layout shift when it changes from being a preview label + // to the actual path where the command will run in + h_flex().h_6().child( + Label::new("Run Command") + .buffer_font(cx) + .size(LabelSize::XSmall) + .color(Color::Muted), ), + ) + }) + .children(command_source.lines().map(|line| { + let text: SharedString = if line.is_empty() { + " ".into() + } else { + line.to_string().into() + }; + + Label::new(text).buffer_font(cx).size(LabelSize::Small) + })) + .child( + div().absolute().top_1().right_1().child( + CopyButton::new("copy-command", command_source.to_string()) + .tooltip_label("Copy Command") + .visible_on_hover(group), + ), ) } @@ -4708,7 +4735,11 @@ impl ThreadView { 
let needs_confirmation = confirmation_options.is_some(); let output = terminal_data.output(); - let command_finished = output.is_some(); + let command_finished = output.is_some() + && !matches!( + tool_call.status, + ToolCallStatus::InProgress | ToolCallStatus::Pending + ); let truncated_output = output.is_some_and(|output| output.original_content_len > output.content.len()); let output_line_count = output.map(|output| output.content_line_count).unwrap_or(0); @@ -4750,14 +4781,15 @@ impl ThreadView { .unwrap_or(&command_source); let command_element = - self.render_collapsible_command(false, command_content, &tool_call.id, cx); + self.render_collapsible_command(header_group.clone(), false, command_content, cx); let is_expanded = self.expanded_tool_calls.contains(&tool_call.id); let header = h_flex() .id(header_id) - .px_1p5() .pt_1() + .pl_1p5() + .pr_1() .flex_none() .gap_1() .justify_between() @@ -4775,19 +4807,54 @@ impl ThreadView { .color(Color::Muted), ), ) + .child( + Disclosure::new( + SharedString::from(format!( + "terminal-tool-disclosure-{}", + terminal.entity_id() + )), + is_expanded, + ) + .opened_icon(IconName::ChevronUp) + .closed_icon(IconName::ChevronDown) + .visible_on_hover(&header_group) + .on_click(cx.listener({ + let id = tool_call.id.clone(); + move |this, _event, _window, cx| { + if is_expanded { + this.expanded_tool_calls.remove(&id); + } else { + this.expanded_tool_calls.insert(id.clone()); + } + cx.notify(); + } + })), + ) + .when(time_elapsed > Duration::from_secs(10), |header| { + header.child( + Label::new(format!("({})", duration_alt_display(time_elapsed))) + .buffer_font(cx) + .color(Color::Muted) + .size(LabelSize::XSmall), + ) + }) .when(!command_finished && !needs_confirmation, |header| { header .gap_1p5() .child( - Button::new( + Icon::new(IconName::ArrowCircle) + .size(IconSize::XSmall) + .color(Color::Muted) + .with_rotate_animation(2) + ) + 
.child(div().h(relative(0.6)).ml_1p5().child(Divider::vertical().color(DividerColor::Border))) + .child( + IconButton::new( SharedString::from(format!("stop-terminal-{}", terminal.entity_id())), - "Stop", + IconName::Stop ) - .icon(IconName::Stop) - .icon_position(IconPosition::Start) .icon_size(IconSize::Small) .icon_color(Color::Error) - .label_size(LabelSize::Small) .tooltip(move |_window, cx| { Tooltip::with_meta( "Stop This Command", @@ -4808,13 +4875,6 @@ impl ThreadView { }) }), ) - .child(Divider::vertical()) - .child( - Icon::new(IconName::ArrowCircle) - .size(IconSize::XSmall) - .color(Color::Info) - .with_rotate_animation(2) - ) }) .when(truncated_output, |header| { let tooltip = if let Some(output) = output { @@ -4850,14 +4910,6 @@ impl ThreadView { .tooltip(Tooltip::text(tooltip)), ) }) - .when(time_elapsed > Duration::from_secs(10), |header| { - header.child( - Label::new(format!("({})", duration_alt_display(time_elapsed))) - .buffer_font(cx) - .color(Color::Muted) - .size(LabelSize::XSmall), - ) - }) .when(tool_failed || command_failed, |header| { header.child( div() @@ -4875,29 +4927,7 @@ impl ThreadView { }), ) }) - .child( - Disclosure::new( - SharedString::from(format!( - "terminal-tool-disclosure-{}", - terminal.entity_id() - )), - is_expanded, - ) - .opened_icon(IconName::ChevronUp) - .closed_icon(IconName::ChevronDown) - .visible_on_hover(&header_group) - .on_click(cx.listener({ - let id = tool_call.id.clone(); - move |this, _event, _window, cx| { - if is_expanded { - this.expanded_tool_calls.remove(&id); - } else { - this.expanded_tool_calls.insert(id.clone()); - } - cx.notify(); - } - })), - ); +; let terminal_view = self .entry_view_state @@ -5294,9 +5324,9 @@ impl ThreadView { if is_terminal_tool { let label_source = tool_call.label.read(cx).source(); this.child(self.render_collapsible_command( + card_header_id.clone(), true, label_source, - &tool_call.id, cx, )) } else { From 0cf551fd6f45a4bfe4d4e6deaade8017c59d9687 Mon Sep 17 00:00:00 
2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 2 Mar 2026 13:02:30 -0300 Subject: [PATCH 227/548] agent_ui: Use main message editor for editing queued messages (#50490) This PR changes the behavior of editing queued messages. Previously, you could type directly in the editor that was used to display a queued message. However, that felt a bit anxiety-inducing because there was a chance the message would get sent before you could've wrapped up your edits. And even if we blocked the message from getting sent while focused on the editor, it could still be a bit unpredictable. So, with this PR, if you type on a queued message, the content of that message goes immediately back to the main message editor. Effectively, you're taking that message off the queue so you can edit it, and then if you hit enter (while there's a generation ongoing), that content goes back into the queue. I believe this allows for a calmer editing experience when wanting to touch up or increment a queued message. Here's a video of the flow: https://github.com/user-attachments/assets/ec60ccad-2b04-4b7c-a8f5-95baa443d9f2 In the video, I'm showcasing a few things: - in case the main message editor already had content, the content from the edited queued message is appended to it. That avoids the user surprisingly losing information. - the fact that mention creases still work well and are not converted back into plain text Release Notes: - Agent: Made editing queued messages better by moving the content back to the main message editor, avoiding the chance of it getting sent mid-editing.
--- assets/icons/queue_message.svg | 7 + crates/agent_ui/src/connection_view.rs | 100 ++++++ .../src/connection_view/thread_view.rs | 156 +++++---- crates/agent_ui/src/entry_view_state.rs | 2 +- crates/agent_ui/src/message_editor.rs | 327 +++++++++++++++++- crates/icons/src/icons.rs | 1 + 6 files changed, 514 insertions(+), 79 deletions(-) create mode 100644 assets/icons/queue_message.svg diff --git a/assets/icons/queue_message.svg b/assets/icons/queue_message.svg new file mode 100644 index 0000000000000000000000000000000000000000..1bdf6738bcf3143fc13a820281cf1cab8531bd36 --- /dev/null +++ b/assets/icons/queue_message.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index a3a62459a2e98680b3910877cc9cd1e6e58ba056..93bf7c98098530b23522c60f987f9e341ebc69ca 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -1165,6 +1165,20 @@ impl ConnectionView { } } + fn move_queued_message_to_main_editor( + &mut self, + index: usize, + inserted_text: Option<&str>, + window: &mut Window, + cx: &mut Context, + ) { + if let Some(active) = self.active_thread() { + active.update(cx, |active, cx| { + active.move_queued_message_to_main_editor(index, inserted_text, window, cx); + }); + } + } + fn handle_thread_event( &mut self, thread: &Entity, @@ -2162,6 +2176,7 @@ impl ConnectionView { for (index, editor) in editors.into_iter().enumerate() { if let Some(content) = queued_messages.get(index) { editor.update(cx, |editor, cx| { + editor.set_read_only(true, cx); editor.set_message(content.clone(), window, cx); }); } @@ -2190,6 +2205,7 @@ impl ConnectionView { window, cx, ); + editor.set_read_only(true, cx); editor.set_message(content, window, cx); editor }); @@ -2198,6 +2214,8 @@ impl ConnectionView { &editor, window, move |this, _editor, event, window, cx| match event { + MessageEditorEvent::InputAttempted(text) => this + .move_queued_message_to_main_editor(index, 
Some(text.as_ref()), window, cx), MessageEditorEvent::LostFocus => { this.save_queued_message_at_index(index, cx); } @@ -6084,4 +6102,86 @@ pub(crate) mod tests { assert_eq!(tool_call_id, acp::ToolCallId::new("tc-b")); }); } + + #[gpui::test] + async fn test_move_queued_message_to_empty_main_editor(cx: &mut TestAppContext) { + init_test(cx); + + let (connection_view, cx) = + setup_thread_view(StubAgentServer::default_response(), cx).await; + + // Add a plain-text message to the queue directly. + active_thread(&connection_view, cx).update_in(cx, |thread, window, cx| { + thread.add_to_queue( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "queued message".to_string(), + ))], + vec![], + cx, + ); + // Main editor must be empty for this path — it is by default, but + // assert to make the precondition explicit. + assert!(thread.message_editor.read(cx).is_empty(cx)); + thread.move_queued_message_to_main_editor(0, None, window, cx); + }); + + cx.run_until_parked(); + + // Queue should now be empty. + let queue_len = active_thread(&connection_view, cx) + .read_with(cx, |thread, _cx| thread.local_queued_messages.len()); + assert_eq!(queue_len, 0, "Queue should be empty after move"); + + // Main editor should contain the queued message text. + let text = message_editor(&connection_view, cx).update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "queued message", + "Main editor should contain the moved queued message" + ); + } + + #[gpui::test] + async fn test_move_queued_message_to_non_empty_main_editor(cx: &mut TestAppContext) { + init_test(cx); + + let (connection_view, cx) = + setup_thread_view(StubAgentServer::default_response(), cx).await; + + // Seed the main editor with existing content. + message_editor(&connection_view, cx).update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "existing content".to_string(), + ))], + window, + cx, + ); + }); + + // Add a plain-text message to the queue. 
+ active_thread(&connection_view, cx).update_in(cx, |thread, window, cx| { + thread.add_to_queue( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "queued message".to_string(), + ))], + vec![], + cx, + ); + thread.move_queued_message_to_main_editor(0, None, window, cx); + }); + + cx.run_until_parked(); + + // Queue should now be empty. + let queue_len = active_thread(&connection_view, cx) + .read_with(cx, |thread, _cx| thread.local_queued_messages.len()); + assert_eq!(queue_len, 0, "Queue should be empty after move"); + + // Main editor should contain existing content + separator + queued content. + let text = message_editor(&connection_view, cx).update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "existing content\n\nqueued message", + "Main editor should have existing content and queued message separated by two newlines" + ); + } } diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 0915911104b50ffe9d464a0552d3e43b0794b34d..b8403f8052e32fbeeceb4594438eecf32aa4e2e7 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -463,6 +463,7 @@ impl ThreadView { self.cancel_editing(&Default::default(), window, cx); } MessageEditorEvent::LostFocus => {} + MessageEditorEvent::InputAttempted(_) => {} } } @@ -577,6 +578,7 @@ impl ThreadView { ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::Cancel) => { self.cancel_editing(&Default::default(), window, cx); } + ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::InputAttempted(_)) => {} ViewEvent::OpenDiffLocation { path, position, @@ -1217,6 +1219,44 @@ impl ThreadView { self.send_content(contents_task, window, cx); } + pub fn move_queued_message_to_main_editor( + &mut self, + index: usize, + inserted_text: Option<&str>, + window: &mut Window, + cx: &mut Context, + ) -> bool { + let Some(queued_message) = self.remove_from_queue(index, cx) else { + return 
false; + }; + let queued_content = queued_message.content; + let message_editor = self.message_editor.clone(); + let inserted_text = inserted_text.map(ToOwned::to_owned); + + window.focus(&message_editor.focus_handle(cx), cx); + + if message_editor.read(cx).is_empty(cx) { + message_editor.update(cx, |editor, cx| { + editor.set_message(queued_content, window, cx); + if let Some(inserted_text) = inserted_text.as_deref() { + editor.insert_text(inserted_text, window, cx); + } + }); + cx.notify(); + return true; + } + + message_editor.update(cx, |editor, cx| { + editor.append_message(queued_content, Some("\n\n"), window, cx); + if let Some(inserted_text) = inserted_text.as_deref() { + editor.insert_text(inserted_text, window, cx); + } + }); + + cx.notify(); + true + } + // editor methods pub fn expand_message_editor( @@ -2663,50 +2703,24 @@ impl ThreadView { .child(if editor_focused { h_flex() .gap_1() - .min_w_40() + .min_w(rems_from_px(150.)) + .justify_end() .child( - IconButton::new(("cancel_edit", index), IconName::Close) - .icon_size(IconSize::Small) - .icon_color(Color::Error) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |_window, cx| { - Tooltip::for_action_in( - "Cancel Edit", - &editor::actions::Cancel, - &focus_handle, - cx, - ) - } - }) - .on_click({ - let main_editor = self.message_editor.clone(); - cx.listener(move |_, _, window, cx| { - window.focus(&main_editor.focus_handle(cx), cx); - }) - }), - ) - .child( - IconButton::new(("save_edit", index), IconName::Check) + IconButton::new(("edit", index), IconName::Pencil) .icon_size(IconSize::Small) - .icon_color(Color::Success) - .tooltip({ - let focus_handle = editor.focus_handle(cx); - move |_window, cx| { - Tooltip::for_action_in( - "Save Edit", - &Chat, - &focus_handle, - cx, - ) - } + .tooltip(|_window, cx| { + Tooltip::with_meta( + "Edit Queued Message", + None, + "Type anything to edit", + cx, + ) }) - .on_click({ - let main_editor = self.message_editor.clone(); - cx.listener(move 
|_, _, window, cx| { - window.focus(&main_editor.focus_handle(cx), cx); - }) - }), + .on_click(cx.listener(move |this, _, window, cx| { + this.move_queued_message_to_main_editor( + index, None, window, cx, + ); + })), ) .child( Button::new(("send_now_focused", index), "Send Now") @@ -2728,62 +2742,64 @@ impl ThreadView { ) } else { h_flex() - .gap_1() .when(!is_next, |this| this.visible_on_hover("queue_entry")) + .gap_1() + .min_w(rems_from_px(150.)) + .justify_end() .child( - IconButton::new(("edit", index), IconName::Pencil) + IconButton::new(("delete", index), IconName::Trash) .icon_size(IconSize::Small) .tooltip({ let focus_handle = focus_handle.clone(); move |_window, cx| { if is_next { Tooltip::for_action_in( - "Edit", - &EditFirstQueuedMessage, + "Remove Message from Queue", + &RemoveFirstQueuedMessage, &focus_handle, cx, ) } else { - Tooltip::simple("Edit", cx) + Tooltip::simple( + "Remove Message from Queue", + cx, + ) } } }) - .on_click({ - let editor = editor.clone(); - cx.listener(move |_, _, window, cx| { - window.focus(&editor.focus_handle(cx), cx); - }) - }), + .on_click(cx.listener(move |this, _, _, cx| { + this.remove_from_queue(index, cx); + cx.notify(); + })), ) .child( - IconButton::new(("delete", index), IconName::Trash) + IconButton::new(("edit", index), IconName::Pencil) .icon_size(IconSize::Small) .tooltip({ let focus_handle = focus_handle.clone(); move |_window, cx| { if is_next { Tooltip::for_action_in( - "Remove Message from Queue", - &RemoveFirstQueuedMessage, + "Edit", + &EditFirstQueuedMessage, &focus_handle, cx, ) } else { - Tooltip::simple( - "Remove Message from Queue", - cx, - ) + Tooltip::simple("Edit", cx) } } }) - .on_click(cx.listener(move |this, _, _, cx| { - this.remove_from_queue(index, cx); - cx.notify(); + .on_click(cx.listener(move |this, _, window, cx| { + this.move_queued_message_to_main_editor( + index, None, window, cx, + ); })), ) .child( Button::new(("send_now", index), "Send Now") .label_size(LabelSize::Small) + 
.when(is_next, |this| this.style(ButtonStyle::Outlined)) .when(is_next && message_editor.is_empty(cx), |this| { let action: Box = if can_fast_track { @@ -2792,7 +2808,7 @@ impl ThreadView { Box::new(SendNextQueuedMessage) }; - this.style(ButtonStyle::Outlined).key_binding( + this.key_binding( KeyBinding::for_action_in( action.as_ref(), &focus_handle.clone(), @@ -2801,9 +2817,6 @@ impl ThreadView { .map(|kb| kb.size(keybinding_size)), ) }) - .when(is_next && !message_editor.is_empty(cx), |this| { - this.style(ButtonStyle::Outlined) - }) .on_click(cx.listener(move |this, _, window, cx| { this.send_queued_message_at_index( index, true, window, cx, @@ -3281,7 +3294,12 @@ impl ThreadView { .on_click(cx.listener(|this, _event, _, cx| this.cancel_generation(cx))) .into_any_element() } else { - IconButton::new("send-message", IconName::Send) + let send_icon = if is_generating { + IconName::QueueMessage + } else { + IconName::Send + }; + IconButton::new("send-message", send_icon) .style(ButtonStyle::Filled) .map(|this| { if is_editor_empty && !is_generating { @@ -7577,9 +7595,7 @@ impl Render for ThreadView { cx.notify(); })) .on_action(cx.listener(|this, _: &EditFirstQueuedMessage, window, cx| { - if let Some(editor) = this.queued_message_editors.first() { - window.focus(&editor.focus_handle(cx), cx); - } + this.move_queued_message_to_main_editor(0, None, window, cx); })) .on_action(cx.listener(|this, _: &ClearMessageQueue, _, cx| { this.local_queued_messages.clear(); diff --git a/crates/agent_ui/src/entry_view_state.rs b/crates/agent_ui/src/entry_view_state.rs index 071142f083bc94b5d057a366d124d11e7822d1fd..aef7f1f335eff7d092f924b9883ab0d64bbf65a8 100644 --- a/crates/agent_ui/src/entry_view_state.rs +++ b/crates/agent_ui/src/entry_view_state.rs @@ -114,7 +114,7 @@ impl EntryViewState { cx.subscribe(&message_editor, move |_, editor, event, cx| { cx.emit(EntryViewEvent { entry_index: index, - view_event: ViewEvent::MessageEditorEvent(editor, *event), + view_event: 
ViewEvent::MessageEditorEvent(editor, event.clone()), }) }) .detach(); diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index a24a5f5f65dae3f8bbce7d0a7b7f4988a1bd5e38..274b076eafbcfab4620c66c027c374025242f821 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -51,13 +51,14 @@ pub struct MessageEditor { _parse_slash_command_task: Task<()>, } -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Debug)] pub enum MessageEditorEvent { Send, SendImmediately, Cancel, Focus, LostFocus, + InputAttempted(Arc), } impl EventEmitter for MessageEditor {} @@ -186,6 +187,18 @@ impl MessageEditor { subscriptions.push(cx.subscribe_in(&editor, window, { move |this, editor, event, window, cx| { + let input_attempted_text = match event { + EditorEvent::InputHandled { text, .. } => Some(text), + EditorEvent::InputIgnored { text } => Some(text), + _ => None, + }; + if let Some(text) = input_attempted_text + && editor.read(cx).read_only(cx) + && !text.is_empty() + { + cx.emit(MessageEditorEvent::InputAttempted(text.clone())); + } + if let EditorEvent::Edited { .. 
} = event && !editor.read(cx).read_only(cx) { @@ -1195,13 +1208,45 @@ impl MessageEditor { message: Vec, window: &mut Window, cx: &mut Context, + ) { + self.clear(window, cx); + self.insert_message_blocks(message, false, window, cx); + } + + pub fn append_message( + &mut self, + message: Vec, + separator: Option<&str>, + window: &mut Window, + cx: &mut Context, + ) { + if message.is_empty() { + return; + } + + if let Some(separator) = separator + && !separator.is_empty() + && !self.is_empty(cx) + { + self.editor.update(cx, |editor, cx| { + editor.insert(separator, window, cx); + }); + } + + self.insert_message_blocks(message, true, window, cx); + } + + fn insert_message_blocks( + &mut self, + message: Vec, + append_to_existing: bool, + window: &mut Window, + cx: &mut Context, ) { let Some(workspace) = self.workspace.upgrade() else { return; }; - self.clear(window, cx); - let path_style = workspace.read(cx).project().read(cx).path_style(cx); let mut text = String::new(); let mut mentions = Vec::new(); @@ -1275,13 +1320,31 @@ impl MessageEditor { } } - let snapshot = self.editor.update(cx, |editor, cx| { - editor.set_text(text, window, cx); - editor.buffer().read(cx).snapshot(cx) - }); + if text.is_empty() && mentions.is_empty() { + return; + } + + let insertion_start = if append_to_existing { + self.editor.read(cx).text(cx).len() + } else { + 0 + }; + + let snapshot = if append_to_existing { + self.editor.update(cx, |editor, cx| { + editor.insert(&text, window, cx); + editor.buffer().read(cx).snapshot(cx) + }) + } else { + self.editor.update(cx, |editor, cx| { + editor.set_text(text, window, cx); + editor.buffer().read(cx).snapshot(cx) + }) + }; for (range, mention_uri, mention) in mentions { - let anchor = snapshot.anchor_before(MultiBufferOffset(range.start)); + let adjusted_start = insertion_start + range.start; + let anchor = snapshot.anchor_before(MultiBufferOffset(adjusted_start)); let Some((crease_id, tx)) = insert_crease_for_mention( anchor.excerpt_id, 
anchor.text_anchor, @@ -1306,6 +1369,7 @@ impl MessageEditor { ) }); } + cx.notify(); } @@ -1313,6 +1377,16 @@ impl MessageEditor { self.editor.read(cx).text(cx) } + pub fn insert_text(&mut self, text: &str, window: &mut Window, cx: &mut Context) { + if text.is_empty() { + return; + } + + self.editor.update(cx, |editor, cx| { + editor.insert(text, window, cx); + }); + } + pub fn set_placeholder_text( &mut self, placeholder: &str, @@ -3461,4 +3535,241 @@ mod tests { text ); } + + // Helper that creates a minimal MessageEditor inside a window, returning both + // the entity and the underlying VisualTestContext so callers can drive updates. + async fn setup_message_editor( + cx: &mut TestAppContext, + ) -> (Entity, &mut VisualTestContext) { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({"file.txt": ""})).await; + let project = Project::test(fs, [Path::new(path!("/project"))], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + + let message_editor = cx.update(|window, cx| { + cx.new(|cx| { + MessageEditor::new( + workspace.downgrade(), + project.downgrade(), + None, + history.downgrade(), + None, + Default::default(), + Default::default(), + "Test Agent".into(), + "Test", + EditorMode::AutoHeight { + min_lines: 1, + max_lines: None, + }, + window, + cx, + ) + }) + }); + + cx.run_until_parked(); + (message_editor, cx) + } + + #[gpui::test] + async fn test_set_message_plain_text(cx: &mut TestAppContext) { + init_test(cx); + let (message_editor, cx) = setup_message_editor(cx).await; + + message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "hello world".to_string(), + ))], + window, + cx, + ); + }); + + let 
text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert_eq!(text, "hello world"); + assert!(!message_editor.update(cx, |editor, cx| editor.is_empty(cx))); + } + + #[gpui::test] + async fn test_set_message_replaces_existing_content(cx: &mut TestAppContext) { + init_test(cx); + let (message_editor, cx) = setup_message_editor(cx).await; + + // Set initial content. + message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "old content".to_string(), + ))], + window, + cx, + ); + }); + + // Replace with new content. + message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "new content".to_string(), + ))], + window, + cx, + ); + }); + + let text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "new content", + "set_message should replace old content" + ); + } + + #[gpui::test] + async fn test_append_message_to_empty_editor(cx: &mut TestAppContext) { + init_test(cx); + let (message_editor, cx) = setup_message_editor(cx).await; + + message_editor.update_in(cx, |editor, window, cx| { + editor.append_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "appended".to_string(), + ))], + Some("\n\n"), + window, + cx, + ); + }); + + let text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "appended", + "No separator should be inserted when the editor is empty" + ); + } + + #[gpui::test] + async fn test_append_message_to_non_empty_editor(cx: &mut TestAppContext) { + init_test(cx); + let (message_editor, cx) = setup_message_editor(cx).await; + + // Seed initial content. + message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "initial".to_string(), + ))], + window, + cx, + ); + }); + + // Append with separator. 
+ message_editor.update_in(cx, |editor, window, cx| { + editor.append_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "appended".to_string(), + ))], + Some("\n\n"), + window, + cx, + ); + }); + + let text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert_eq!( + text, "initial\n\nappended", + "Separator should appear between existing and appended content" + ); + } + + #[gpui::test] + async fn test_append_message_preserves_mention_offset(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({"file.txt": "content"})) + .await; + let project = Project::test(fs, [Path::new(path!("/project"))], cx).await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); + let history = + cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + + let message_editor = cx.update(|window, cx| { + cx.new(|cx| { + MessageEditor::new( + workspace.downgrade(), + project.downgrade(), + None, + history.downgrade(), + None, + Default::default(), + Default::default(), + "Test Agent".into(), + "Test", + EditorMode::AutoHeight { + min_lines: 1, + max_lines: None, + }, + window, + cx, + ) + }) + }); + + cx.run_until_parked(); + + // Seed plain-text prefix so the editor is non-empty before appending. + message_editor.update_in(cx, |editor, window, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "prefix text".to_string(), + ))], + window, + cx, + ); + }); + + // Append a message that contains a ResourceLink mention. 
+ message_editor.update_in(cx, |editor, window, cx| { + editor.append_message( + vec![acp::ContentBlock::ResourceLink(acp::ResourceLink::new( + "file.txt", + "file:///project/file.txt", + ))], + Some("\n\n"), + window, + cx, + ); + }); + + cx.run_until_parked(); + + // The mention should be registered in the mention_set so that contents() + // will emit it as a structured block rather than plain text. + let mention_uris = + message_editor.update(cx, |editor, cx| editor.mention_set.read(cx).mentions()); + assert_eq!( + mention_uris.len(), + 1, + "Expected exactly one mention in the mention_set after append, got: {mention_uris:?}" + ); + + // The editor text should start with the prefix, then the separator, then + // the mention placeholder — confirming the offset was computed correctly. + let text = message_editor.update(cx, |editor, cx| editor.text(cx)); + assert!( + text.starts_with("prefix text\n\n"), + "Expected text to start with 'prefix text\\n\\n', got: {text:?}" + ); + } } diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index a8a4e47cd0046fa995b10bb5e91b8884d70cdd6d..73db39afdc5e9bd15f084043370d27f0494569a6 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -191,6 +191,7 @@ pub enum IconName { Power, Public, PullRequest, + QueueMessage, Quote, Reader, RefreshTitle, From 0214d6e78b6a72577047da2aebb41187eaa5418d Mon Sep 17 00:00:00 2001 From: KyleBarton Date: Mon, 2 Mar 2026 08:09:13 -0800 Subject: [PATCH 228/548] Adjust scroll top by number of sticky headers (#50359) In cases where sticky headers are enabled, count the number of sticky headers that would be present when performing `editor:scroll cursor top`. Take the maximum of that number and `verical_scroll_margin` so that we don't inadvertently bury the cursor behind the sticky headers. 
https://github.com/user-attachments/assets/6d49fe3a-2017-4c76-bd92-c4ec9794f898 Closes #48864 Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed scroll top behavior when there are more sticky headers than vertical_scroll_margin --- crates/editor/src/scroll/actions.rs | 37 ++++++++++++++++++++++------- 1 file changed, 29 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/scroll/actions.rs b/crates/editor/src/scroll/actions.rs index 5a1c849b2438fe987b24481b824375e188468916..3d22db2a4dc3c9339e51b0dae02d6d598400ad64 100644 --- a/crates/editor/src/scroll/actions.rs +++ b/crates/editor/src/scroll/actions.rs @@ -1,10 +1,12 @@ use super::Axis; use crate::{ - Autoscroll, Editor, EditorMode, NextScreen, NextScrollCursorCenterTopBottom, + Autoscroll, Editor, EditorMode, EditorSettings, NextScreen, NextScrollCursorCenterTopBottom, SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT, ScrollCursorBottom, ScrollCursorCenter, ScrollCursorCenterTopBottom, ScrollCursorTop, display_map::DisplayRow, scroll::ScrollOffset, }; use gpui::{Context, Point, Window}; +use settings::Settings; +use text::ToOffset; impl Editor { pub fn next_screen(&mut self, _: &NextScreen, window: &mut Window, cx: &mut Context) { @@ -73,18 +75,37 @@ impl Editor { ) { let display_snapshot = self.display_snapshot(cx); let scroll_margin_rows = self.vertical_scroll_margin() as u32; - let new_screen_top = self - .selections - .newest_display(&display_snapshot) - .head() - .row() - .0; + let selection_head = self.selections.newest_display(&display_snapshot).head(); + + let sticky_headers_len = if EditorSettings::get_global(cx).sticky_scroll.enabled + && let Some((_, _, buffer_snapshot)) = 
display_snapshot.buffer_snapshot().as_singleton() + { + let select_head_point = + rope::Point::new(selection_head.to_point(&display_snapshot).row, 0); + buffer_snapshot + .outline_items_containing(select_head_point..select_head_point, false, None) + .iter() + .filter(|outline| { + outline.range.start.offset + < select_head_point.to_offset(&buffer_snapshot) as u32 + }) + .collect::>() + .len() + } else { + 0 + } as u32; + + let new_screen_top = selection_head.row().0; let header_offset = display_snapshot .buffer_snapshot() .show_headers() .then(|| display_snapshot.buffer_header_height()) .unwrap_or(0); - let new_screen_top = new_screen_top.saturating_sub(scroll_margin_rows + header_offset); + + // If the number of sticky headers exceeds the vertical_scroll_margin, + // we need to adjust the scroll top a bit further + let adjustment = scroll_margin_rows.max(sticky_headers_len) + header_offset; + let new_screen_top = new_screen_top.saturating_sub(adjustment); self.set_scroll_top_row(DisplayRow(new_screen_top), window, cx); } From 3630ac923874c4b0689be15a0070f02bc1de3ece Mon Sep 17 00:00:00 2001 From: Om Chillure Date: Mon, 2 Mar 2026 21:54:39 +0530 Subject: [PATCH 229/548] Fix renaming with `.` in JSX tags (#50373) Fixes #50245 ### Summary : This PR fixes linked tag renaming when typing . in tag names like .... Previously, linked editing treated . as punctuation (unless a language explicitly configured it as a linked-edit character), so renaming could stop syncing at the dot and produce mismatched closing tags. ### What changed Updated linked-edit input handling to preserve linked edits when the typed input is exactly ".", even if the active language does not explicitly include dot in linked_edit_characters. Added a regression test covering dot typing in linked edits without language override. Kept existing punctuation behavior (e.g. >) unchanged. ### Files changed [editor.rs] [editor_tests.rs] ### Why this approach Minimal, targeted fix in shared linked-edit path. 
Works for .svelte and similar markup contexts where dot-separated component names are valid in practice. Avoids requiring every language/extension to add dot config individually. ### Validation Manual repro confirmed: opening tag rename with dot now keeps closing tag synced. Added test: test_linked_edits_on_typing_dot_without_language_override. Existing related test remains relevant: test_linked_edits_on_typing_punctuation. --- crates/editor/src/editor.rs | 4 ++- crates/editor/src/editor_tests.rs | 42 +++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 93d87885babf6265ff4b12c9da2c4c0cc07ec9a9..eb8601d59e1c9970f367177f3f365f4feb30811e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -4898,8 +4898,10 @@ impl Editor { .scope_context(Some(CharScopeContext::LinkedEdit)); classifier.is_word(char) }); + let is_dot = text.as_ref() == "."; + let should_apply_linked_edit = is_word_char || is_dot; - if is_word_char { + if should_apply_linked_edit { let anchor_range = start_anchor.text_anchor..anchor.text_anchor; linked_edits.push(&self, anchor_range, text.clone(), cx); } else { diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 88be32d6d73d967ab34b287534308164b8623679..142668e6555bcb23370387aab655b0d6b82fa5fe 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -26623,6 +26623,48 @@ async fn test_linked_edits_on_typing_punctuation(cx: &mut TestAppContext) { cx.assert_editor_state(""); } +#[gpui::test] +async fn test_linked_edits_on_typing_dot_without_language_override(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let mut cx = EditorTestContext::new(cx).await; + let language = Arc::new(Language::new( + LanguageConfig { + name: "HTML".into(), + matcher: LanguageMatcher { + path_suffixes: vec!["html".to_string()], + ..LanguageMatcher::default() + }, + brackets: 
BracketPairConfig { + pairs: vec![BracketPair { + start: "<".into(), + end: ">".into(), + close: true, + ..Default::default() + }], + ..Default::default() + }, + ..Default::default() + }, + Some(tree_sitter_html::LANGUAGE.into()), + )); + cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx)); + + cx.set_state(""); + cx.update_editor(|editor, _, cx| { + set_linked_edit_ranges( + (Point::new(0, 1), Point::new(0, 6)), + (Point::new(0, 9), Point::new(0, 14)), + editor, + cx, + ); + }); + cx.update_editor(|editor, window, cx| { + editor.handle_input(".", window, cx); + }); + cx.assert_editor_state(""); +} + #[gpui::test] async fn test_invisible_worktree_servers(cx: &mut TestAppContext) { init_test(cx, |_| {}); From bb6c52bd5e902e92827856127e50ce2bd17acbeb Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Mon, 2 Mar 2026 10:27:00 -0600 Subject: [PATCH 230/548] agent: Log thread title generation errors (#50504) --- crates/agent/src/thread.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index a63437e9e486872458666d324d6dec50cbd1d149..9a259ecf6a9debaf4afd68f8271e025ae9f19c4f 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -2390,7 +2390,12 @@ impl Thread { anyhow::Ok(()) }; - if generate.await.context("failed to generate title").is_ok() { + if generate + .await + .context("failed to generate thread title") + .log_err() + .is_some() + { _ = this.update(cx, |this, cx| this.set_title(title.into(), cx)); } _ = this.update(cx, |this, _| this.pending_title_generation = None); From 9ca695d9f69ed12e17128bd12c55559b7f4d9238 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 2 Mar 2026 17:31:20 +0100 Subject: [PATCH 231/548] Remove `dbg!` from tests (#50506) Removes a bunch of `dbg!`'s from strings in test code. 
It's annoying cause these show up in project search, when you are removing `dbg!(...)`'s after debugging something Release Notes: - N/A --- crates/editor/src/editor_tests.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 142668e6555bcb23370387aab655b0d6b82fa5fe..525910ed276cdfe5fb5c3c2b784269d834c70316 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -8074,7 +8074,7 @@ async fn test_copy_trim_line_mode(cx: &mut TestAppContext) { cx.set_state(indoc! {" « fn main() { - dbg!(1) + 1 }ˇ» "}); cx.update_editor(|editor, _window, _cx| editor.selections.set_line_mode(true)); @@ -8082,7 +8082,7 @@ async fn test_copy_trim_line_mode(cx: &mut TestAppContext) { assert_eq!( cx.read_from_clipboard().and_then(|item| item.text()), - Some("fn main() {\n dbg!(1)\n}\n".to_string()) + Some("fn main() {\n 1\n}\n".to_string()) ); let clipboard_selections: Vec = cx @@ -8099,7 +8099,7 @@ async fn test_copy_trim_line_mode(cx: &mut TestAppContext) { cx.set_state(indoc! {" «fn main() { - dbg!(1) + 1 }ˇ» "}); cx.update_editor(|editor, _window, _cx| editor.selections.set_line_mode(true)); @@ -8107,7 +8107,7 @@ async fn test_copy_trim_line_mode(cx: &mut TestAppContext) { assert_eq!( cx.read_from_clipboard().and_then(|item| item.text()), - Some("fn main() {\n dbg!(1)\n}\n".to_string()) + Some("fn main() {\n 1\n}\n".to_string()) ); let clipboard_selections: Vec = cx From 2d3c05ee411e87c9d9264dcbcf2ffb58292e5f00 Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Mon, 2 Mar 2026 11:37:06 -0500 Subject: [PATCH 232/548] Add `libva-devel` as required deps for `dnf` section (#50476) If I follow the steps outlined in https://zed.dev/docs/development/linux then `cargo run` on my Fedora workstation fails because the install script is missing `libva-devel` as a required dependency under `dnf` / `yum` sections. 
Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- script/linux | 1 + 1 file changed, 1 insertion(+) diff --git a/script/linux b/script/linux index c20f154eaf2f7e6b79c7f8539e9e8c13271c3ecd..b3bd03eacc2baf976744ff19b049b7781c330a8e 100755 --- a/script/linux +++ b/script/linux @@ -91,6 +91,7 @@ if [[ -n $dnf ]] || [[ -n $yum ]]; then alsa-lib-devel fontconfig-devel glib2-devel + libva-devel wayland-devel libxcb-devel libxkbcommon-x11-devel From 04fc31fc19d05fb77d94ecbf0ca1191e40b2527b Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 2 Mar 2026 09:53:43 -0700 Subject: [PATCH 233/548] Fix another case where we could apply highlights with wrong indices (#50367) Closes #ISSUE Release Notes: - N/A --- crates/editor/src/element.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 6fc2627533dde920c021b14d5d172cbef40d7a95..7b5530c6fc36828b22f7f78a6482c1d6e04fc166 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -8070,7 +8070,7 @@ fn apply_dirty_filename_style( text_style: &gpui::TextStyle, cx: &App, ) -> Option { - let text = segment.text.replace('\n', "⏎"); + let text = segment.text.replace('\n', " "); let filename_position = std::path::Path::new(&segment.text) .file_name() From 2b774e5cd240b2e3ca3094e7786cd30910fc4592 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 2 Mar 2026 17:54:53 +0100 Subject: [PATCH 234/548] extension_host: Allow extensions to define semantic highlighting rules (#49282) for their given language via a `semantic_token_rules.json` file Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- .../default_semantic_token_rules.json | 4 +- crates/extension_host/src/extension_host.rs | 42 +++++++++++++- crates/languages/src/lib.rs | 4 +- crates/project/src/lsp_store.rs | 4 ++ .../project/src/lsp_store/semantic_tokens.rs | 16 +++++- crates/settings/src/settings.rs | 6 +- crates/settings/src/settings_store.rs | 56 +++++++++++-------- docs/src/extensions/languages.md | 42 +++++++++++++- docs/src/semantic-tokens.md | 4 +- 9 files changed, 142 insertions(+), 36 deletions(-) diff --git a/assets/settings/default_semantic_token_rules.json b/assets/settings/default_semantic_token_rules.json index c5e9d1438cad583e78bc3e109b4bc79c62aa7ac5..65b20a7423aef3c3221f9f80e345fd503627d98d 100644 --- a/assets/settings/default_semantic_token_rules.json +++ b/assets/settings/default_semantic_token_rules.json @@ -2,7 +2,9 @@ // // These rules map LSP semantic token types to syntax theme styles. // To customize, add rules to "semantic_token_rules" in your settings.json. -// User-defined rules are prepended to these defaults and take precedence. +// User-defined rules are prepended and take highest precedence. +// Extension language rules are applied next. +// These built-in defaults are applied last. // // Each rule has the following properties: // - `token_type`: The LSP semantic token type to match. If omitted, matches all types. 
diff --git a/crates/extension_host/src/extension_host.rs b/crates/extension_host/src/extension_host.rs index 6699a9dca099177cfd550ba0f68ef62828356d15..c691296d61183c9bb0fcd41ff6c74eed6cb61149 100644 --- a/crates/extension_host/src/extension_host.rs +++ b/crates/extension_host/src/extension_host.rs @@ -32,8 +32,8 @@ use futures::{ select_biased, }; use gpui::{ - App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, WeakEntity, - actions, + App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, UpdateGlobal as _, + WeakEntity, actions, }; use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; use language::{ @@ -46,7 +46,7 @@ use release_channel::ReleaseChannel; use remote::RemoteClient; use semver::Version; use serde::{Deserialize, Serialize}; -use settings::Settings; +use settings::{SemanticTokenRules, Settings, SettingsStore}; use std::ops::RangeInclusive; use std::str::FromStr; use std::{ @@ -1220,6 +1220,15 @@ impl ExtensionStore { self.proxy .remove_languages(&languages_to_remove, &grammars_to_remove); + // Remove semantic token rules for languages being unloaded. + if !languages_to_remove.is_empty() { + SettingsStore::update_global(cx, |store, cx| { + for language in &languages_to_remove { + store.remove_language_semantic_token_rules(language.as_ref(), cx); + } + }); + } + let mut grammars_to_add = Vec::new(); let mut themes_to_add = Vec::new(); let mut icon_themes_to_add = Vec::new(); @@ -1267,12 +1276,30 @@ impl ExtensionStore { .iter() .filter(|(_, entry)| extensions_to_load.contains(&entry.extension)) .collect::>(); + let mut semantic_token_rules_to_add: Vec<(LanguageName, SemanticTokenRules)> = Vec::new(); for (language_name, language) in languages_to_add { let mut language_path = self.installed_dir.clone(); language_path.extend([ Path::new(language.extension.as_ref()), language.path.as_path(), ]); + + // Load semantic token rules if present in the language directory. 
+ let rules_path = language_path.join("semantic_token_rules.json"); + if let Ok(rules_json) = std::fs::read_to_string(&rules_path) { + match serde_json_lenient::from_str::(&rules_json) { + Ok(rules) => { + semantic_token_rules_to_add.push((language_name.clone(), rules)); + } + Err(err) => { + log::error!( + "Failed to parse semantic token rules from {}: {err:#}", + rules_path.display() + ); + } + } + } + self.proxy.register_language( language_name.clone(), language.grammar.clone(), @@ -1302,6 +1329,15 @@ impl ExtensionStore { ); } + // Register semantic token rules for newly loaded extension languages. + if !semantic_token_rules_to_add.is_empty() { + SettingsStore::update_global(cx, |store, cx| { + for (language_name, rules) in semantic_token_rules_to_add { + store.set_language_semantic_token_rules(language_name.0.clone(), rules, cx); + } + }); + } + let fs = self.fs.clone(); let wasm_host = self.wasm_host.clone(); let root_dir = self.installed_dir.clone(); diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index c5e64aed639374655b0e60bde5dbd0b3da5468c3..c31911f372261db47f689d29de9c60c0f9cad56e 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -368,8 +368,8 @@ fn register_language( ) { let config = load_config(name); if let Some(rules) = &semantic_token_rules { - SettingsStore::update_global(cx, |store, _| { - store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone()); + SettingsStore::update_global(cx, |store, cx| { + store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone(), cx); }); } for adapter in adapters { diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 5a5a13e9cbb4b17f333d29cedd16b1fe6366d204..45111adf9eb45c3a2595ab557e1fbe986d041610 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -5106,6 +5106,10 @@ impl LspStore { .clone(); self.semantic_token_config .update_rules(new_semantic_token_rules); + // 
Always clear cached stylizers so that changes to language-specific + // semantic token rules (e.g. from extension install/uninstall) are + // picked up. Stylizers are recreated lazily, so this is cheap. + self.semantic_token_config.clear_stylizers(); let new_global_semantic_tokens_mode = all_language_settings(None, cx).defaults.semantic_tokens; diff --git a/crates/project/src/lsp_store/semantic_tokens.rs b/crates/project/src/lsp_store/semantic_tokens.rs index e71b05d47b0cf105429bf50648787fb1db2bad87..cfcd74ad7de7baaf60833cd9db1085d60307c20e 100644 --- a/crates/project/src/lsp_store/semantic_tokens.rs +++ b/crates/project/src/lsp_store/semantic_tokens.rs @@ -12,8 +12,11 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, ReadGlobal as _, SharedSt use language::{Buffer, LanguageName, language_settings::all_language_settings}; use lsp::{AdapterServerCapabilities, LanguageServerId}; use rpc::{TypedEnvelope, proto}; -use settings::{SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore}; +use settings::{ + DefaultSemanticTokenRules, SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore, +}; use smol::future::yield_now; + use text::{Anchor, Bias, OffsetUtf16, PointUtf16, Unclipped}; use util::ResultExt as _; @@ -58,6 +61,15 @@ impl SemanticTokenConfig { } } + /// Clears all cached stylizers. + /// + /// This is called when settings change to ensure that any modifications to + /// language-specific semantic token rules (e.g. from extension install/uninstall) + /// are picked up. Stylizers are recreated lazily on next use. 
+ pub(super) fn clear_stylizers(&mut self) { + self.stylizers.clear(); + } + pub(super) fn update_global_mode(&mut self, new_mode: settings::SemanticTokens) -> bool { if new_mode != self.global_mode { self.global_mode = new_mode; @@ -462,6 +474,7 @@ impl SemanticTokenStylizer { let global_rules = &ProjectSettings::get_global(cx) .global_lsp_settings .semantic_token_rules; + let default_rules = cx.global::(); let rules_by_token_type = token_types .iter() @@ -475,6 +488,7 @@ impl SemanticTokenStylizer { .rules .iter() .chain(language_rules.into_iter().flat_map(|lr| &lr.rules)) + .chain(default_rules.0.rules.iter()) .rev() .filter(filter) .cloned() diff --git a/crates/settings/src/settings.rs b/crates/settings/src/settings.rs index 9049c95eb9529b9a490687e1130af273b7496970..62b577c44520a6922798076cf085defea46d8688 100644 --- a/crates/settings/src/settings.rs +++ b/crates/settings/src/settings.rs @@ -44,9 +44,9 @@ pub use keymap_file::{ pub use settings_file::*; pub use settings_json::*; pub use settings_store::{ - InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX, LocalSettingsKind, LocalSettingsPath, - MigrationStatus, Settings, SettingsFile, SettingsJsonSchemaParams, SettingsKey, - SettingsLocation, SettingsParseResult, SettingsStore, + DefaultSemanticTokenRules, InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX, + LocalSettingsKind, LocalSettingsPath, MigrationStatus, Settings, SettingsFile, + SettingsJsonSchemaParams, SettingsKey, SettingsLocation, SettingsParseResult, SettingsStore, }; pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource}; diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 411f57375a2303e5e2c30e182365f526989891a4..8551fc2edd53df66965b18abbe91f7083dd08461 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -241,6 +241,11 @@ impl LocalSettingsPath { impl Global for SettingsStore {} +#[derive(Default)] +pub struct DefaultSemanticTokenRules(pub 
SemanticTokenRules); + +impl gpui::Global for DefaultSemanticTokenRules {} + #[doc(hidden)] #[derive(Debug)] pub struct SettingValue { @@ -275,29 +280,22 @@ pub struct SettingsJsonSchemaParams<'a> { impl SettingsStore { pub fn new(cx: &mut App, default_settings: &str) -> Self { - Self::new_with_semantic_tokens(cx, default_settings, &crate::default_semantic_token_rules()) + Self::new_with_semantic_tokens(cx, default_settings) } - pub fn new_with_semantic_tokens( - cx: &mut App, - default_settings: &str, - default_semantic_tokens: &str, - ) -> Self { + pub fn new_with_semantic_tokens(cx: &mut App, default_settings: &str) -> Self { let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded(); - let mut default_settings: SettingsContent = + let default_settings: SettingsContent = SettingsContent::parse_json_with_comments(default_settings).unwrap(); - if let Ok(semantic_token_rules) = - crate::parse_json_with_comments::(default_semantic_tokens) - { - let global_lsp = default_settings - .global_lsp_settings - .get_or_insert_with(Default::default); - let existing_rules = global_lsp - .semantic_token_rules - .get_or_insert_with(Default::default); - existing_rules.rules.extend(semantic_token_rules.rules); + if !cx.has_global::() { + cx.set_global::( + crate::parse_json_with_comments::( + &crate::default_semantic_token_rules(), + ) + .map(DefaultSemanticTokenRules) + .unwrap_or_default(), + ); } - let default_settings: Rc = default_settings.into(); let mut this = Self { setting_values: Default::default(), @@ -868,18 +866,30 @@ impl SettingsStore { /// Sets language-specific semantic token rules. /// /// These rules are registered by language modules (e.g. the Rust language module) - /// and are stored separately from the global rules. They are only applied to - /// buffers of the matching language by the `SemanticTokenStylizer`. + /// or by third-party extensions (via `semantic_token_rules.json` in their language + /// directories). 
They are stored separately from the global rules and are only + /// applied to buffers of the matching language by the `SemanticTokenStylizer`. /// - /// These should be registered before any `SemanticTokenStylizer` instances are - /// created (typically during `languages::init`), as existing cached stylizers - /// are not automatically invalidated. + /// This triggers a settings recomputation so that observers (e.g. `LspStore`) + /// are notified and can invalidate cached stylizers. pub fn set_language_semantic_token_rules( &mut self, language: SharedString, rules: SemanticTokenRules, + cx: &mut App, ) { self.language_semantic_token_rules.insert(language, rules); + self.recompute_values(None, cx); + } + + /// Removes language-specific semantic token rules for the given language. + /// + /// This should be called when an extension that registered rules for a language + /// is unloaded. Triggers a settings recomputation so that observers (e.g. + /// `LspStore`) are notified and can invalidate cached stylizers. + pub fn remove_language_semantic_token_rules(&mut self, language: &str, cx: &mut App) { + self.language_semantic_token_rules.remove(language); + self.recompute_values(None, cx); } /// Returns the language-specific semantic token rules for the given language, diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index 3e748e4b33e51e2dcd08175b793d97ea0ddda2d8..eee29cc57d1ce5e1a5a7608c70ece98bf4a233ee 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -434,6 +434,40 @@ The `semantic_tokens` setting accepts the following values: - `"combined"`: Use LSP semantic tokens together with tree-sitter highlighting. - `"full"`: Use LSP semantic tokens exclusively, replacing tree-sitter highlighting. +#### Extension-Provided Semantic Token Rules + +Language extensions can ship default semantic token rules for their language server's custom token types. 
To do this, place a `semantic_token_rules.json` file in the language directory alongside `config.toml`: + +``` +my-extension/ + languages/ + my-language/ + config.toml + highlights.scm + semantic_token_rules.json +``` + +The file uses the same format as the `semantic_token_rules` array in user settings — a JSON array of rule objects: + +```json +[ + { + "token_type": "lifetime", + "style": ["lifetime"] + }, + { + "token_type": "builtinType", + "style": ["type"] + }, + { + "token_type": "selfKeyword", + "style": ["variable.special"] + } +] +``` + +This is useful when a language server reports custom (non-standard) semantic token types that aren't covered by Zed's built-in default rules. Extension-provided rules act as sensible defaults for that language — users can always override them via `semantic_token_rules` in their settings file, and built-in default rules are only used when neither user nor extension rules match. + #### Customizing Semantic Token Styles Zed supports customizing the styles used for semantic tokens. You can define rules in your settings file, which customize how semantic tokens get mapped to styles in your theme. @@ -463,7 +497,13 @@ Zed supports customizing the styles used for semantic tokens. You can define rul } ``` -All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted. User-defined rules take priority over the default rules. +All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted. + +Rules are applied in the following priority order (highest to lowest): + +1. **User settings** — rules from `semantic_token_rules` in your settings file. +2. **Extension rules** — rules from `semantic_token_rules.json` in extension language directories. +3. **Default rules** — Zed's built-in rules for standard LSP token types. 
Each rule in the `semantic_token_rules` array is defined as follows: diff --git a/docs/src/semantic-tokens.md b/docs/src/semantic-tokens.md index ab30525c504455fc7f1fa431b212b975c1d75061..d26666ca7e7e60614bd4f1f9f06e771168611de2 100644 --- a/docs/src/semantic-tokens.md +++ b/docs/src/semantic-tokens.md @@ -48,7 +48,7 @@ You can configure this globally or per-language: Semantic tokens are styled using rules that map LSP token types and modifiers to theme styles or custom colors. Zed provides sensible defaults, but you can customize these in your settings.json: add rules under `global_lsp_settings.semantic_token_rules` key. Rules are matched in order, and the first matching rule wins. -User-defined rules take precedence over defaults. +User-defined rules take highest precedence, followed by extension-provided language rules, then Zed defaults. ### Rule Structure @@ -139,7 +139,7 @@ To disable highlighting for a specific token type, add an empty rule that matche } ``` -Since user rules are prepended to defaults and the first match wins, this empty rule prevents any styling from being applied to comment tokens. +Since user rules take highest precedence and the first match wins, this empty rule prevents any styling from being applied to comment tokens. ## Default Rules From f9895c54685db8d5ead46d333b0bf6272828f6ba Mon Sep 17 00:00:00 2001 From: Oliver Azevedo Barnes Date: Mon, 2 Mar 2026 17:10:12 +0000 Subject: [PATCH 235/548] devcontainer: Fix git output (#49230) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #48434 In Dev Containers, failed git operations were surfaced with a generic failure message, while the useful git output (stderr/stdout) was not reliably available to users. This happened because in devcontainers the git operation errors go through an RPC layer and then got wrapped with `anyhow::Context` (e.g. 
“sending pull request”); the toast displayed only that outer context via `to_string()`, masking the underlying git stderr message. This change ensures the full git operation output is preserved and surfaced via Zed’s “See logs” flow in Dev Containers, matching the information you get when running the same git command in a terminal. ### What you should expect in the UI - You will see a generic toast like “git pull failed” / “git push failed”. - When clicking on the toast’s “See logs”, the log tab now contains the full git error output (e.g. non-fast-forward hints, merge conflict details, “local changes would be overwritten”, etc.), which previously could be missing/too generic. --- ## Manual testing Run inside a Dev Container and ensure git auth works (SSH keys/agent or HTTPS credentials). 1. **Dirty-tree pull failure** - Make remote ahead by 1 commit (push from another clone). - Locally modify the same file without committing. - In Zed: **Pull** - **Expect:** toast “git pull failed” + **See logs** shows “local changes would be overwritten…” (or equivalent). 2. **Non-fast-forward push failure** - Ensure remote ahead. - Locally create 1 commit. - In Zed: **Push** - **Expect:** toast “git push failed” + **See logs** shows “rejected (non-fast-forward)” + hint to pull first. 3. **Merge-conflict pull failure** - Create conflicting commits on the same lines (one local commit, one remote commit). - In Zed: **Pull** - **Expect:** toast “git pull failed” + **See logs** shows conflict output (“CONFLICT…”, “Automatic merge failed…”). 
Release Notes: - Fixed devcontainer git failure toasts so they show the actual git error --------- Co-authored-by: KyleBarton --- Cargo.lock | 1 + crates/git_ui/Cargo.toml | 1 + crates/git_ui/src/git_panel.rs | 58 ++++++++++++++++++++++++++++++++- crates/project/src/git_store.rs | 15 +++------ crates/proto/src/error.rs | 6 ++++ 5 files changed, 70 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 97cc166c14e099b57b74585277869052de0cff87..1a192869d6fee631d129e23c275c86e51168fe2f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7310,6 +7310,7 @@ dependencies = [ "pretty_assertions", "project", "prompt_store", + "proto", "rand 0.9.2", "remote", "remote_connection", diff --git a/crates/git_ui/Cargo.toml b/crates/git_ui/Cargo.toml index 28fac0f849a487c6654e2ac5976191cd3e1a733f..a25911d65eb87d176a0a987d996e159e2c43628c 100644 --- a/crates/git_ui/Cargo.toml +++ b/crates/git_ui/Cargo.toml @@ -44,6 +44,7 @@ panel.workspace = true picker.workspace = true project.workspace = true prompt_store.workspace = true +proto.workspace = true remote_connection.workspace = true remote.workspace = true schemars.workspace = true diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 5131e1d144e2cee0cbdbb32a062d3f9c4ea4a08b..1fabc387247e3f0889749463e3aabd89ef0bff42 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -56,6 +56,7 @@ use project::{ project_settings::{GitPathStyle, ProjectSettings}, }; use prompt_store::{BuiltInPrompt, PromptId, PromptStore, RULES_FILE_NAMES}; +use proto::RpcError; use serde::{Deserialize, Serialize}; use settings::{Settings, SettingsStore, StatusStyle}; use smallvec::SmallVec; @@ -6420,7 +6421,7 @@ pub(crate) fn show_error_toast( cx: &mut App, ) { let action = action.into(); - let message = e.to_string().trim().to_string(); + let message = format_git_error_toast_message(&e); if message .matches(git::repository::REMOTE_CANCELLED_BY_USER) .next() @@ -6446,6 +6447,20 @@ pub(crate) fn 
show_error_toast( } } +fn rpc_error_raw_message_from_chain(error: &anyhow::Error) -> Option<&str> { + error + .chain() + .find_map(|cause| cause.downcast_ref::().map(RpcError::raw_message)) +} + +fn format_git_error_toast_message(error: &anyhow::Error) -> String { + if let Some(message) = rpc_error_raw_message_from_chain(error) { + message.trim().to_string() + } else { + error.to_string().trim().to_string() + } +} + #[cfg(test)] mod tests { use git::{ @@ -6477,6 +6492,47 @@ mod tests { }); } + #[test] + fn test_format_git_error_toast_message_prefers_raw_rpc_message() { + let rpc_error = RpcError::from_proto( + &proto::Error { + message: + "Your local changes to the following files would be overwritten by merge\n" + .to_string(), + code: proto::ErrorCode::Internal as i32, + tags: Default::default(), + }, + "Pull", + ); + + let message = format_git_error_toast_message(&rpc_error); + assert_eq!( + message, + "Your local changes to the following files would be overwritten by merge" + ); + } + + #[test] + fn test_format_git_error_toast_message_prefers_raw_rpc_message_when_wrapped() { + let rpc_error = RpcError::from_proto( + &proto::Error { + message: + "Your local changes to the following files would be overwritten by merge\n" + .to_string(), + code: proto::ErrorCode::Internal as i32, + tags: Default::default(), + }, + "Pull", + ); + let wrapped = rpc_error.context("sending pull request"); + + let message = format_git_error_toast_message(&wrapped); + assert_eq!( + message, + "Your local changes to the following files would be overwritten by merge" + ); + } + #[gpui::test] async fn test_entry_worktree_paths(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 45ba6817248929391dcc484b25879cf34e7506b9..ae776966a770ccadcffdbf9b140ed10d4871b317 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -4956,8 +4956,7 @@ impl Repository { .map(|repo_path| 
repo_path.to_proto()) .collect(), }) - .await - .context("sending stash request")?; + .await?; Ok(()) } } @@ -5166,8 +5165,7 @@ impl Repository { }), askpass_id, }) - .await - .context("sending commit request")?; + .await?; Ok(()) } @@ -5206,8 +5204,7 @@ impl Repository { askpass_id, remote: fetch_options.to_proto(), }) - .await - .context("sending fetch request")?; + .await?; Ok(RemoteCommandOutput { stdout: response.stdout, @@ -5308,8 +5305,7 @@ impl Repository { } as i32), }) - .await - .context("sending push request")?; + .await?; Ok(RemoteCommandOutput { stdout: response.stdout, @@ -5375,8 +5371,7 @@ impl Repository { branch_name: branch.as_ref().map(|b| b.to_string()), remote_name: remote.to_string(), }) - .await - .context("sending pull request")?; + .await?; Ok(RemoteCommandOutput { stdout: response.stdout, diff --git a/crates/proto/src/error.rs b/crates/proto/src/error.rs index d83b0fc499ba9dddb1d6417307fea9eaed9fdfd7..f551e8c3fc4d7023f5d9d43c3dc6eb51ffe2bb46 100644 --- a/crates/proto/src/error.rs +++ b/crates/proto/src/error.rs @@ -159,6 +159,12 @@ pub struct RpcError { /// in the app; however it is useful for chaining .message() and .with_tag() on /// ErrorCode. impl RpcError { + /// Returns the raw server-provided error message without any RPC framing + /// (e.g. without the "RPC request X failed: " prefix that `Display` adds). + pub fn raw_message(&self) -> &str { + &self.msg + } + /// from_proto converts a crate::Error into an anyhow::Error containing /// an RpcError. pub fn from_proto(error: &crate::Error, request: &str) -> anyhow::Error { From c6319d3e02fcef04e1ac6706fbfe723fde0a1b92 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 2 Mar 2026 18:10:36 +0100 Subject: [PATCH 236/548] agent: Propagate model settings to running subagents (#50510) When the model, summarization model, thinking settings, speed, or profile are updated on a thread, apply the same settings to any currently running subagents. 
Release Notes: - N/A Co-authored-by: Bennet Bo Fenner --- crates/agent/src/thread.rs | 226 ++++++++++++++++++++++++++++++++++++- 1 file changed, 222 insertions(+), 4 deletions(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 9a259ecf6a9debaf4afd68f8271e025ae9f19c4f..4560671cc8ad84fb43f07ee711aa72f053e4a2a9 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1277,13 +1277,20 @@ impl Thread { pub fn set_model(&mut self, model: Arc, cx: &mut Context) { let old_usage = self.latest_token_usage(); - self.model = Some(model); + self.model = Some(model.clone()); let new_caps = Self::prompt_capabilities(self.model.as_deref()); let new_usage = self.latest_token_usage(); if old_usage != new_usage { cx.emit(TokenUsageUpdated(new_usage)); } self.prompt_capabilities_tx.send(new_caps).log_err(); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_model(model.clone(), cx)) + .ok(); + } + cx.notify() } @@ -1296,7 +1303,15 @@ impl Thread { model: Option>, cx: &mut Context, ) { - self.summarization_model = model; + self.summarization_model = model.clone(); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| { + thread.set_summarization_model(model.clone(), cx) + }) + .ok(); + } cx.notify() } @@ -1306,6 +1321,12 @@ impl Thread { pub fn set_thinking_enabled(&mut self, enabled: bool, cx: &mut Context) { self.thinking_enabled = enabled; + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_thinking_enabled(enabled, cx)) + .ok(); + } cx.notify(); } @@ -1314,7 +1335,15 @@ impl Thread { } pub fn set_thinking_effort(&mut self, effort: Option, cx: &mut Context) { - self.thinking_effort = effort; + self.thinking_effort = effort.clone(); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| { + thread.set_thinking_effort(effort.clone(), cx) + }) + .ok(); + } cx.notify(); } @@ -1324,6 +1353,12 @@ 
impl Thread { pub fn set_speed(&mut self, speed: Speed, cx: &mut Context) { self.speed = Some(speed); + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_speed(speed, cx)) + .ok(); + } cx.notify(); } @@ -1399,6 +1434,7 @@ impl Thread { self.tools.insert(T::NAME.into(), tool.erase()); } + #[cfg(any(test, feature = "test-support"))] pub fn remove_tool(&mut self, name: &str) -> bool { self.tools.remove(name).is_some() } @@ -1412,12 +1448,18 @@ impl Thread { return; } - self.profile_id = profile_id; + self.profile_id = profile_id.clone(); // Swap to the profile's preferred model when available. if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) { self.set_model(model, cx); } + + for subagent in &self.running_subagents { + subagent + .update(cx, |thread, cx| thread.set_profile(profile_id.clone(), cx)) + .ok(); + } } pub fn cancel(&mut self, cx: &mut Context) -> Task<()> { @@ -3776,6 +3818,7 @@ mod tests { use super::*; use gpui::TestAppContext; use language_model::LanguageModelToolUseId; + use language_model::fake_provider::FakeLanguageModel; use serde_json::json; use std::sync::Arc; @@ -3813,6 +3856,181 @@ mod tests { }) } + fn setup_parent_with_subagents( + cx: &mut TestAppContext, + parent: &Entity, + count: usize, + ) -> Vec> { + cx.update(|cx| { + let mut subagents = Vec::new(); + for _ in 0..count { + let subagent = cx.new(|cx| Thread::new_subagent(parent, cx)); + parent.update(cx, |thread, _cx| { + thread.register_running_subagent(subagent.downgrade()); + }); + subagents.push(subagent); + } + subagents + }) + } + + #[gpui::test] + async fn test_set_model_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + let new_model: Arc = Arc::new(FakeLanguageModel::with_id_and_thinking( + "test-provider", + "new-model", + "New Model", + false, + )); + + cx.update(|cx| { + 
parent.update(cx, |thread, cx| { + thread.set_model(new_model, cx); + }); + + for subagent in &subagents { + let subagent_model_id = subagent.read(cx).model().unwrap().id(); + assert_eq!( + subagent_model_id.0.as_ref(), + "new-model", + "Subagent model should match parent model after set_model" + ); + } + }); + } + + #[gpui::test] + async fn test_set_summarization_model_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + let summary_model: Arc = + Arc::new(FakeLanguageModel::with_id_and_thinking( + "test-provider", + "summary-model", + "Summary Model", + false, + )); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_summarization_model(Some(summary_model), cx); + }); + + for subagent in &subagents { + let subagent_summary_id = subagent.read(cx).summarization_model().unwrap().id(); + assert_eq!( + subagent_summary_id.0.as_ref(), + "summary-model", + "Subagent summarization model should match parent after set_summarization_model" + ); + } + }); + } + + #[gpui::test] + async fn test_set_thinking_enabled_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_thinking_enabled(true, cx); + }); + + for subagent in &subagents { + assert!( + subagent.read(cx).thinking_enabled(), + "Subagent thinking should be enabled after parent enables it" + ); + } + + parent.update(cx, |thread, cx| { + thread.set_thinking_enabled(false, cx); + }); + + for subagent in &subagents { + assert!( + !subagent.read(cx).thinking_enabled(), + "Subagent thinking should be disabled after parent disables it" + ); + } + }); + } + + #[gpui::test] + async fn test_set_thinking_effort_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = 
setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_thinking_effort(Some("high".to_string()), cx); + }); + + for subagent in &subagents { + assert_eq!( + subagent.read(cx).thinking_effort().map(|s| s.as_str()), + Some("high"), + "Subagent thinking effort should match parent" + ); + } + + parent.update(cx, |thread, cx| { + thread.set_thinking_effort(None, cx); + }); + + for subagent in &subagents { + assert_eq!( + subagent.read(cx).thinking_effort(), + None, + "Subagent thinking effort should be None after parent clears it" + ); + } + }); + } + + #[gpui::test] + async fn test_set_speed_propagates_to_subagents(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 2); + + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_speed(Speed::Fast, cx); + }); + + for subagent in &subagents { + assert_eq!( + subagent.read(cx).speed(), + Some(Speed::Fast), + "Subagent speed should match parent after set_speed" + ); + } + }); + } + + #[gpui::test] + async fn test_dropped_subagent_does_not_panic(cx: &mut TestAppContext) { + let (parent, _event_stream) = setup_thread_for_test(cx).await; + let subagents = setup_parent_with_subagents(cx, &parent, 1); + + // Drop the subagent so the WeakEntity can no longer be upgraded + drop(subagents); + + // Should not panic even though the subagent was dropped + cx.update(|cx| { + parent.update(cx, |thread, cx| { + thread.set_thinking_enabled(true, cx); + thread.set_speed(Speed::Fast, cx); + thread.set_thinking_effort(Some("high".to_string()), cx); + }); + }); + } + #[gpui::test] async fn test_handle_tool_use_json_parse_error_adds_tool_use_to_content( cx: &mut TestAppContext, From a6106d2bfbbf5bb1a1d07506a83e1d0f1c1fbee9 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 2 Mar 2026 18:29:22 +0100 Subject: [PATCH 237/548] 
agent: Add back in discouragement of useless subagents (#50512) Release Notes: - N/A --- crates/agent/src/tools/spawn_agent_tool.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index a54e19d6a6ebaa3422c43152ba91b03c12b16ce8..b75c41775258db49577024dca3eb1770937e52e8 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -16,6 +16,8 @@ use crate::{AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput}; /// - Run multiple tasks in parallel. /// - Delegate a self-contained task where you only need the final outcome. /// +/// Do NOT use this tool for tasks you could accomplish directly with one or two tool calls (e.g. reading a file, running a single command). +/// /// You will receive only the agent's final message as output. /// /// **New session** (no session_id): Creates a new agent that does NOT see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message. 
From db8f64935a8d0163fd423da84524c328eceb51be Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Mon, 2 Mar 2026 12:59:14 -0500 Subject: [PATCH 238/548] remote_server: Don't panic when forwarding stderr (#50505) Closes ZED-5B7 Release Notes: - N/A --- crates/remote/src/transport.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/crates/remote/src/transport.rs b/crates/remote/src/transport.rs index 09bb22ddbe2b303b767255fd7ab02b54d9b17b2f..8d0f212cfc4f9544d0a827a41aefc3a8af07ee72 100644 --- a/crates/remote/src/transport.rs +++ b/crates/remote/src/transport.rs @@ -1,3 +1,5 @@ +use std::io::Write; + use crate::{ RemoteArch, RemoteOs, RemotePlatform, json_log::LogRecord, @@ -137,7 +139,12 @@ fn handle_rpc_messages_over_child_process_stdio( if let Ok(record) = serde_json::from_slice::(content) { record.log(log::logger()) } else { - eprintln!("(remote) {}", String::from_utf8_lossy(content)); + std::io::stderr() + .write_fmt(format_args!( + "(remote) {}\n", + String::from_utf8_lossy(content) + )) + .ok(); } } stderr_buffer.drain(0..start_ix); From 41a0c63c2317a97fd0acbb00c6ad6c9ccad38050 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 2 Mar 2026 19:00:12 +0100 Subject: [PATCH 239/548] agent: Add linked action log support for subagent threads (#50500) Subagents now forward buffer reads/writes/edits to a parent action log, allowing the parent's review experience to track all file changes made by subagents alongside its own. 
Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner --- crates/action_log/src/action_log.rs | 364 +++++++++++++++++++++++++++- crates/agent/src/thread.rs | 27 ++- 2 files changed, 378 insertions(+), 13 deletions(-) diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 1157d8d6f881ecb33df8104dd4be04bd9d846b5e..5f8a639c0559c10546fc5640dc240aeba9dde487 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -48,6 +48,10 @@ pub struct ActionLog { tracked_buffers: BTreeMap, TrackedBuffer>, /// The project this action log is associated with project: Entity, + /// An action log to forward all public methods to + /// Useful in cases like subagents, where we want to track individual diffs for this subagent, + /// but also want to associate the reads/writes with a parent review experience + linked_action_log: Option>, /// Stores undo information for the most recent reject operation last_reject_undo: Option, } @@ -58,10 +62,16 @@ impl ActionLog { Self { tracked_buffers: BTreeMap::default(), project, + linked_action_log: None, last_reject_undo: None, } } + pub fn with_linked_action_log(mut self, linked_action_log: Entity) -> Self { + self.linked_action_log = Some(linked_action_log); + self + } + pub fn project(&self) -> &Entity { &self.project } @@ -496,16 +506,25 @@ impl ActionLog { /// Track a buffer as read by agent, so we can notify the model about user edits. 
pub fn buffer_read(&mut self, buffer: Entity, cx: &mut Context) { + if let Some(linked_action_log) = &mut self.linked_action_log { + linked_action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + } self.track_buffer_internal(buffer, false, cx); } /// Mark a buffer as created by agent, so we can refresh it in the context pub fn buffer_created(&mut self, buffer: Entity, cx: &mut Context) { + if let Some(linked_action_log) = &mut self.linked_action_log { + linked_action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + } self.track_buffer_internal(buffer, true, cx); } /// Mark a buffer as edited by agent, so we can refresh it in the context pub fn buffer_edited(&mut self, buffer: Entity, cx: &mut Context) { + if let Some(linked_action_log) = &mut self.linked_action_log { + linked_action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + } let new_version = buffer.read(cx).version(); let tracked_buffer = self.track_buffer_internal(buffer, false, cx); if let TrackedBufferStatus::Deleted = tracked_buffer.status { @@ -517,6 +536,7 @@ impl ActionLog { } pub fn will_delete_buffer(&mut self, buffer: Entity, cx: &mut Context) { + let has_linked_action_log = self.linked_action_log.is_some(); let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx); match tracked_buffer.status { TrackedBufferStatus::Created { .. 
} => { @@ -524,12 +544,24 @@ impl ActionLog { cx.notify(); } TrackedBufferStatus::Modified => { - buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); tracked_buffer.status = TrackedBufferStatus::Deleted; - tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + if !has_linked_action_log { + buffer.update(cx, |buffer, cx| buffer.set_text("", cx)); + tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + } } + TrackedBufferStatus::Deleted => {} } + + if let Some(linked_action_log) = &mut self.linked_action_log { + linked_action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + } + + if has_linked_action_log && let Some(tracked_buffer) = self.tracked_buffers.get(&buffer) { + tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx); + } + cx.notify(); } @@ -914,15 +946,6 @@ impl ActionLog { .collect() } - /// Returns all tracked buffers for debugging purposes - #[cfg(any(test, feature = "test-support"))] - pub fn tracked_buffers_for_debug( - &self, - _cx: &App, - ) -> impl Iterator, &TrackedBuffer)> { - self.tracked_buffers.iter() - } - /// Iterate over buffers changed since last read or edited by the model pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator> { self.tracked_buffers @@ -2634,6 +2657,325 @@ mod tests { assert!(!action_log.read_with(cx, |log, _| log.has_pending_undo())); } + #[gpui::test] + async fn test_linked_action_log_buffer_read(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, 
cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + + // Neither log considers the buffer stale immediately after reading it. + let child_stale = cx.read(|cx| { + child_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + let parent_stale = cx.read(|cx| { + parent_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + assert!(child_stale.is_empty()); + assert!(parent_stale.is_empty()); + + // Simulate a user edit after the agent read the file. + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..5, "goodbye")], None, cx).unwrap(); + }); + }); + cx.run_until_parked(); + + // Both child and parent should see the buffer as stale because both tracked + // it at the pre-edit version via buffer_read forwarding. + let child_stale = cx.read(|cx| { + child_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + let parent_stale = cx.read(|cx| { + parent_log + .read(cx) + .stale_buffers(cx) + .cloned() + .collect::>() + }); + assert_eq!(child_stale, vec![buffer.clone()]); + assert_eq!(parent_stale, vec![buffer]); + } + + #[gpui::test] + async fn test_linked_action_log_buffer_edited(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), 
cx)); + buffer.update(cx, |buffer, cx| { + buffer + .edit([(Point::new(1, 0)..Point::new(1, 3), "DEF")], None, cx) + .unwrap(); + }); + child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + cx.run_until_parked(); + + let expected_hunks = vec![( + buffer, + vec![HunkStatus { + range: Point::new(1, 0)..Point::new(2, 0), + diff_status: DiffHunkStatusKind::Modified, + old_text: "def\n".into(), + }], + )]; + assert_eq!( + unreviewed_hunks(&child_log, cx), + expected_hunks, + "child should track the agent edit" + ); + assert_eq!( + unreviewed_hunks(&parent_log, cx), + expected_hunks, + "parent should also track the agent edit via linked log forwarding" + ); + } + + #[gpui::test] + async fn test_linked_action_log_buffer_created(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({})).await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/new_file", cx) + }) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + buffer.update(cx, |buffer, cx| buffer.set_text("hello", cx)); + child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + project + .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx)) + .await + .unwrap(); + cx.run_until_parked(); + + let expected_hunks = vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 5), + diff_status: DiffHunkStatusKind::Added, + old_text: "".into(), + }], + )]; + assert_eq!( + unreviewed_hunks(&child_log, cx), + 
expected_hunks, + "child should track the created file" + ); + assert_eq!( + unreviewed_hunks(&parent_log, cx), + expected_hunks, + "parent should also track the created file via linked log forwarding" + ); + } + + #[gpui::test] + async fn test_linked_action_log_will_delete_buffer(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello\n"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path.clone(), cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + }); + project + .update(cx, |project, cx| project.delete_file(file_path, false, cx)) + .unwrap() + .await + .unwrap(); + cx.run_until_parked(); + + let expected_hunks = vec![( + buffer.clone(), + vec![HunkStatus { + range: Point::new(0, 0)..Point::new(0, 0), + diff_status: DiffHunkStatusKind::Deleted, + old_text: "hello\n".into(), + }], + )]; + assert_eq!( + unreviewed_hunks(&child_log, cx), + expected_hunks, + "child should track the deleted file" + ); + assert_eq!( + unreviewed_hunks(&parent_log, cx), + expected_hunks, + "parent should also track the deleted file via linked log forwarding" + ); + } + + /// Simulates the subagent scenario: two child logs linked to the same parent, each + /// editing a different file. The parent accumulates all edits while each child + /// only sees its own. 
+ #[gpui::test] + async fn test_linked_action_log_independent_tracking(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/dir"), + json!({ + "file_a": "content of a", + "file_b": "content of b", + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log_1 = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + let child_log_2 = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_a_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/file_a", cx) + }) + .unwrap(); + let file_b_path = project + .read_with(cx, |project, cx| { + project.find_project_path("dir/file_b", cx) + }) + .unwrap(); + let buffer_a = project + .update(cx, |project, cx| project.open_buffer(file_a_path, cx)) + .await + .unwrap(); + let buffer_b = project + .update(cx, |project, cx| project.open_buffer(file_b_path, cx)) + .await + .unwrap(); + + cx.update(|cx| { + child_log_1.update(cx, |log, cx| log.buffer_read(buffer_a.clone(), cx)); + buffer_a.update(cx, |buffer, cx| { + buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap(); + }); + child_log_1.update(cx, |log, cx| log.buffer_edited(buffer_a.clone(), cx)); + + child_log_2.update(cx, |log, cx| log.buffer_read(buffer_b.clone(), cx)); + buffer_b.update(cx, |buffer, cx| { + buffer.edit([(0..0, "MODIFIED: ")], None, cx).unwrap(); + }); + child_log_2.update(cx, |log, cx| log.buffer_edited(buffer_b.clone(), cx)); + }); + cx.run_until_parked(); + + let child_1_changed: Vec<_> = cx.read(|cx| { + child_log_1 + .read(cx) + .changed_buffers(cx) + .into_keys() + .collect() + }); + let child_2_changed: Vec<_> = cx.read(|cx| { + child_log_2 + .read(cx) + .changed_buffers(cx) + .into_keys() + .collect() + }); + let parent_changed: Vec<_> = cx.read(|cx| { + parent_log 
+ .read(cx) + .changed_buffers(cx) + .into_keys() + .collect() + }); + + assert_eq!( + child_1_changed, + vec![buffer_a.clone()], + "child 1 should only track file_a" + ); + assert_eq!( + child_2_changed, + vec![buffer_b.clone()], + "child 2 should only track file_b" + ); + assert_eq!(parent_changed.len(), 2, "parent should track both files"); + assert!( + parent_changed.contains(&buffer_a) && parent_changed.contains(&buffer_b), + "parent should contain both buffer_a and buffer_b" + ); + } + #[derive(Debug, PartialEq)] struct HunkStatus { range: Range, diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 4560671cc8ad84fb43f07ee711aa72f053e4a2a9..2a9e8d3270cc6ae6b95e28dbb2c06370980bf028 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -917,12 +917,16 @@ impl Thread { let context_server_registry = parent_thread.read(cx).context_server_registry.clone(); let templates = parent_thread.read(cx).templates.clone(); let model = parent_thread.read(cx).model().cloned(); - let mut thread = Self::new( + let parent_action_log = parent_thread.read(cx).action_log().clone(); + let action_log = + cx.new(|_cx| ActionLog::new(project.clone()).with_linked_action_log(parent_action_log)); + let mut thread = Self::new_internal( project, project_context, context_server_registry, templates, model, + action_log, cx, ); thread.subagent_context = Some(SubagentContext { @@ -939,6 +943,26 @@ impl Thread { templates: Arc, model: Option>, cx: &mut Context, + ) -> Self { + Self::new_internal( + project.clone(), + project_context, + context_server_registry, + templates, + model, + cx.new(|_cx| ActionLog::new(project)), + cx, + ) + } + + fn new_internal( + project: Entity, + project_context: Entity, + context_server_registry: Entity, + templates: Arc, + model: Option>, + action_log: Entity, + cx: &mut Context, ) -> Self { let settings = AgentSettings::get_global(cx); let profile_id = settings.default_profile.clone(); @@ -950,7 +974,6 @@ impl Thread { 
.default_model .as_ref() .and_then(|model| model.effort.clone()); - let action_log = cx.new(|_cx| ActionLog::new(project.clone())); let (prompt_capabilities_tx, prompt_capabilities_rx) = watch::channel(Self::prompt_capabilities(model.as_deref())); Self { From 56ae09502a4baaceae6dcd497655b95d17574839 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Mon, 2 Mar 2026 19:27:32 +0100 Subject: [PATCH 240/548] agent: Use correct subagent thread entity to get the entries list (#50515) Was grabbing off the Thread not the AcpThead :facepalm: Release Notes: - N/A --- crates/agent/src/agent.rs | 2 +- crates/agent/src/thread.rs | 4 ---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index f20c7a8f1d5c780ffd3214c8736e7c921f32d134..0bb0f2c8790a5e07b97976ba391105554ad03307 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -1749,7 +1749,7 @@ impl SubagentHandle for NativeSubagentHandle { } fn num_entries(&self, cx: &App) -> usize { - self.subagent_thread.read(cx).num_messages() + self.acp_thread.read(cx).entries().len() } fn send(&self, message: String, cx: &AsyncApp) -> Task> { diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 2a9e8d3270cc6ae6b95e28dbb2c06370980bf028..4c43a66fe5bb67c11fe5f0438d54cc86a498c55c 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1389,10 +1389,6 @@ impl Thread { self.messages.last() } - pub fn num_messages(&self) -> usize { - self.messages.len() - } - #[cfg(any(test, feature = "test-support"))] pub fn last_received_or_pending_message(&self) -> Option { if let Some(message) = self.pending_message.clone() { From 2879349b1e6c4c7bc146676f30009ae0d3c4f505 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Mon, 2 Mar 2026 19:46:25 +0100 Subject: [PATCH 241/548] agent: Fix review experience for `StreamingEditFileTool` (#50518) Release Notes: - N/A --- .../src/tools/streaming_edit_file_tool.rs | 237 +++++++++++++++--- 
1 file changed, 197 insertions(+), 40 deletions(-) diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index a0d6d3a374e3b64c6652e089efe8de31b645b052..7e023d7d7e00c2eb13ea78467776816b13151796 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -10,6 +10,7 @@ use crate::{ }, }; use acp_thread::Diff; +use action_log::ActionLog; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; use anyhow::{Context as _, Result}; use collections::HashSet; @@ -22,9 +23,11 @@ use project::lsp_store::{FormatTrigger, LspFormatTarget}; use project::{AgentLocation, Project, ProjectPath}; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; +use std::ops::Range; use std::path::PathBuf; use std::sync::Arc; use streaming_diff::{CharOperation, StreamingDiff}; +use text::ToOffset; use ui::SharedString; use util::rel_path::RelPath; use util::{Deferred, ResultExt}; @@ -720,20 +723,30 @@ impl EditSession { event_stream: &ToolCallEventStream, cx: &mut AsyncApp, ) -> Result<(), StreamingEditFileToolOutput> { + let action_log = tool + .thread + .read_with(cx, |thread, _cx| thread.action_log().clone()) + .ok(); + for event in events { match event { ToolEditEvent::ContentChunk { chunk } => { + let (buffer_id, insert_at) = buffer.read_with(cx, |buffer, _cx| { + let insert_at = if !pipeline.content_written && buffer.len() > 0 { + 0..buffer.len() + } else { + let len = buffer.len(); + len..len + }; + (buffer.remote_id(), insert_at) + }); + agent_edit_buffer( + buffer, + [(insert_at, chunk.as_str())], + action_log.as_ref(), + cx, + ); cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - let insert_at = if !pipeline.content_written && buffer.len() > 0 { - 0..buffer.len() - } else { - let len = buffer.len(); - len..len - }; - buffer.edit([(insert_at, chunk.as_str())], None, cx); - }); - let buffer_id = buffer.read(cx).remote_id(); 
tool.set_agent_location( buffer.downgrade(), text::Anchor::max_for_buffer(buffer_id), @@ -881,6 +894,7 @@ impl EditSession { buffer, original_snapshot, edit_cursor, + action_log.as_ref(), cx, ); @@ -888,16 +902,6 @@ impl EditSession { cx.update(|cx| { tool.set_agent_location(buffer.downgrade(), position, cx); }); - - let action_log = tool - .thread - .read_with(cx, |thread, _cx| thread.action_log().clone()) - .ok(); - if let Some(action_log) = action_log { - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - } } ToolEditEvent::NewTextChunk { @@ -933,6 +937,7 @@ impl EditSession { buffer, &original_snapshot, &mut edit_cursor, + action_log.as_ref(), cx, ); } @@ -943,6 +948,7 @@ impl EditSession { buffer, &original_snapshot, &mut edit_cursor, + action_log.as_ref(), cx, ); @@ -950,16 +956,6 @@ impl EditSession { cx.update(|cx| { tool.set_agent_location(buffer.downgrade(), position, cx); }); - - let action_log = tool - .thread - .read_with(cx, |thread, _cx| thread.action_log().clone()) - .ok(); - if let Some(action_log) = action_log { - action_log.update(cx, |log, cx| { - log.buffer_edited(buffer.clone(), cx); - }); - } } } } @@ -971,26 +967,19 @@ impl EditSession { buffer: &Entity, snapshot: &text::BufferSnapshot, edit_cursor: &mut usize, + action_log: Option<&Entity>, cx: &mut AsyncApp, ) { for op in ops { match op { CharOperation::Insert { text } => { let anchor = snapshot.anchor_after(*edit_cursor); - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit([(anchor..anchor, text.as_str())], None, cx); - }); - }); + agent_edit_buffer(&buffer, [(anchor..anchor, text.as_str())], action_log, cx); } CharOperation::Delete { bytes } => { let delete_end = *edit_cursor + bytes; let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end); - cx.update(|cx| { - buffer.update(cx, |buffer, cx| { - buffer.edit([(anchor_range, "")], None, cx); - }); - }); + agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx); 
*edit_cursor = delete_end; } CharOperation::Keep { bytes } => { @@ -1001,6 +990,30 @@ impl EditSession { } } +/// Edits a buffer and reports the edit to the action log in the same effect +/// cycle. This ensures the action log's subscription handler sees the version +/// already updated by `buffer_edited`, so it does not misattribute the agent's +/// edit as a user edit. +fn agent_edit_buffer( + buffer: &Entity, + edits: I, + action_log: Option<&Entity>, + cx: &mut AsyncApp, +) where + I: IntoIterator, T)>, + S: ToOffset, + T: Into>, +{ + cx.update(|cx| { + buffer.update(cx, |buffer, cx| { + buffer.edit(edits, None, cx); + }); + if let Some(action_log) = action_log { + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + } + }); +} + fn ensure_buffer_saved( buffer: &Entity, abs_path: &PathBuf, @@ -4756,6 +4769,150 @@ mod tests { assert_eq!(new_text, "HELLO\nWORLD\nfoo\n"); } + // Verifies that after streaming_edit_file_tool edits a file, the action log + // reports changed buffers so that the Accept All / Reject All review UI appears. 
+ #[gpui::test] + async fn test_streaming_edit_file_tool_registers_changed_buffers(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "file.txt": "line 1\nline 2\nline 3\n" + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let thread = cx.new(|cx| { + crate::Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + None, + cx, + ) + }); + let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); + + let tool = Arc::new(StreamingEditFileTool::new( + project.clone(), + thread.downgrade(), + language_registry, + )); + let (event_stream, _rx) = ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Edit lines".to_string(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Edit, + content: None, + edits: Some(vec![Edit { + old_text: "line 2".into(), + new_text: "modified line 2".into(), + }]), + }), + event_stream, + cx, + ) + }); + + let result = task.await; + assert!(result.is_ok(), "edit should succeed: {:?}", result.err()); + + cx.run_until_parked(); + + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log.changed_buffers() should be non-empty after streaming edit, \ + but no changed buffers were found \u{2014} Accept 
All / Reject All will not appear" + ); + } + + // Same test but for Write mode (overwrite entire file). + #[gpui::test] + async fn test_streaming_edit_file_tool_write_mode_registers_changed_buffers( + cx: &mut TestAppContext, + ) { + init_test(cx); + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + let fs = project::FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "file.txt": "original content" + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _cx| project.languages().clone()); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); + let thread = cx.new(|cx| { + crate::Thread::new( + project.clone(), + cx.new(|_cx| ProjectContext::default()), + context_server_registry, + Templates::new(), + None, + cx, + ) + }); + let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); + + let tool = Arc::new(StreamingEditFileTool::new( + project.clone(), + thread.downgrade(), + language_registry, + )); + let (event_stream, _rx) = ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Overwrite file".to_string(), + path: "root/file.txt".into(), + mode: StreamingEditFileMode::Write, + content: Some("completely new content".into()), + edits: None, + }), + event_stream, + cx, + ) + }); + + let result = task.await; + assert!(result.is_ok(), "write should succeed: {:?}", result.err()); + + cx.run_until_parked(); + + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log.changed_buffers() should be non-empty after 
streaming write, \ + but no changed buffers were found \u{2014} Accept All / Reject All will not appear" + ); + } + fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); From b47b353e372e1b5a471883e9117c811153434606 Mon Sep 17 00:00:00 2001 From: Lucas White Date: Mon, 2 Mar 2026 11:21:07 -0800 Subject: [PATCH 242/548] Docs/privacy documentation refresh (#50522) Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Updated Privacy and Telemetry docs for improved clarity --------- Co-authored-by: Claude Sonnet 4.6 Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- docs/src/ai/ai-improvement.md | 107 +++++++++++++++++----------- docs/src/ai/privacy-and-security.md | 19 ++--- docs/src/telemetry.md | 41 ++++++----- 3 files changed, 97 insertions(+), 70 deletions(-) diff --git a/docs/src/ai/ai-improvement.md b/docs/src/ai/ai-improvement.md index 94085058f237b942f29d43f8d82b2f0afa97a782..26085bc3971eca633fa481469e26719161fbf7e0 100644 --- a/docs/src/ai/ai-improvement.md +++ b/docs/src/ai/ai-improvement.md @@ -3,73 +3,99 @@ title: AI Improvement and Data Collection - Zed description: Zed's opt-in approach to AI data collection for improving the agent panel and edit predictions. 
--- -# Zed AI Improvement +# Zed AI Features and Privacy -## Agent Panel +## Overview -### Opt-In +AI features in Zed include: -When you use the Agent Panel through any of these means: +- [Agent Panel](./agent-panel.md) +- [Edit Predictions](./edit-prediction.md) +- [Inline Assist](./inline-assistant.md) +- [Text Threads](./text-threads.md) +- Auto Git Commit Message Generation -- [Zed's hosted models](./subscription.md) -- [connecting a non-Zed AI service via API key](./llm-providers.md) -- using an [external agent](./external-agents.md) +By default, Zed does not store your prompts or code context. This data is sent to your selected AI provider (e.g., Anthropic, OpenAI, Google, or xAI) to generate responses, then discarded. Zed will not use your data to evaluate or improve AI features unless you explicitly share it (see [AI Feedback with Ratings](#ai-feedback-with-ratings)) or you opt in to edit prediction training data collection (see [Edit Predictions](#edit-predictions)). + +Zed is model-agnostic by design, and none of this changes based on which provider you choose. You can use your own API keys or Zed's hosted models without any data being retained. + +### Data Retention and Training -Zed does not persistently store user content or use user content to evaluate and/or improve our AI features, unless it is explicitly shared with Zed. Each share is opt-in, and sharing once will not cause future content or data to be shared again. +Zed's Agent Panel can be used via: -> Note that rating responses will send your data related to that response to Zed's servers. -> **_If you don't want data persisted on Zed's servers, don't rate_**. We will not collect data for improving our Agentic offering without you explicitly rating responses. 
+- [Zed's hosted models](./subscription.md) +- [connecting a non-Zed AI service via API key](./llm-providers.md) +- using an [external agent](./external-agents.md) via ACP -When using upstream services through Zed's hosted models, we require assurances from our service providers that your user content won't be used for training models. +When using Zed's hosted models, we require assurances from our service providers that your user content won't be used for training models. | Provider | No Training Guarantee | Zero-Data Retention (ZDR) | | --------- | ------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | | Anthropic | [Yes](https://www.anthropic.com/legal/commercial-terms) | [Yes](https://privacy.anthropic.com/en/articles/8956058-i-have-a-zero-data-retention-agreement-with-anthropic-what-products-does-it-apply-to) | | Google | [Yes](https://cloud.google.com/terms/service-terms) | [Yes](https://cloud.google.com/terms/service-terms), see Service Terms sections 17 and 19h | | OpenAI | [Yes](https://openai.com/enterprise-privacy/) | [Yes](https://platform.openai.com/docs/guides/your-data) | +| xAI | [Yes](https://x.ai/legal/faq-enterprise) | [Yes](https://x.ai/legal/faq-enterprise) | When you use your own API keys or external agents, **Zed does not have control over how your data is used by that service provider.** You should reference your agreement with each service provider to understand what terms and conditions apply. -### Data we collect +### AI Feedback with Ratings + +You can provide feedback on Zed's AI features by rating specific AI responses in Zed and sharing details related to those conversations with Zed. Each share is opt-in, and sharing once will not cause future content or data to be shared again. + +> **Rating = Data Sharing:** When you rate a response, your entire conversation thread is sent to Zed. 
This includes messages, AI responses, and thread metadata. +> **_If you don't want data persisted on Zed's servers, don't rate_**. We will not collect data for improving our AI features without you explicitly rating responses. -For prompts you have explicitly shared with us, Zed may store copies of those prompts and other data about the specific use of the Agent Panel. +### Data Collected (AI Feedback) -This data includes: +For conversations you have explicitly shared with us via rating, Zed may store: -- The prompt given to the Agent -- Any commentary you include -- Product telemetry about the agentic thread +- All messages in the thread (your prompts and AI responses) +- Any commentary you include with your rating +- Thread metadata (model used, token counts, timestamps) - Metadata about your Zed installation -### Data Handling +If you do not rate responses, Zed will not store Customer Data (code, conversations, responses) related to your usage of the AI features. + +Telemetry related to Zed's AI features is collected. This includes metadata such as the AI feature being used and high-level interactions with the feature to understand performance (e.g., Agent response time, edit acceptance/rejection in the Agent panel or edit completions). You can read more in Zed's [telemetry](../telemetry.md) documentation. Collected data is stored in Snowflake, a private database. We periodically review this data to refine the agent's system prompt and tool use. All data is anonymized and stripped of sensitive information (access tokens, user IDs, email addresses). ## Edit Predictions -By default, when using Zed Edit Predictions, Zed does not persistently store user content or use user content for training of its models. +Edit predictions can be powered by **Zed's Zeta model** or by **third-party providers** like GitHub Copilot. 
+ +### Zed's Zeta Model (Default) + +Zed sends a limited context window to the model to generate predictions: + +- A code excerpt around your cursor (not the full file) +- Recent edits as diffs +- Relevant excerpts from related open files -### Opt-in +This data is processed transiently to generate predictions and is not retained afterward. -Users who are working on open source licensed projects may optionally opt-in to providing model improvement feedback. This opt-in occurs on a per-project basis. If you work on multiple open source projects and wish to provide model improvement feedback you will have to opt-in for each individual project. +### Third-Party Providers -When working on other projects where you haven't opted-in, Zed will not persistently store user content or use user content for training of its models. +When using third-party providers like GitHub Copilot, **Zed does not control how your data is handled** by that provider. You should consult their Terms and Conditions directly. -You can see exactly how Zed detects open source licenses in: [license_detection.rs](https://github.com/zed-industries/zed/blob/main/crates/edit_prediction/src/license_detection.rs). +Note: Zed's `disabled_globs` settings will prevent predictions from being requested, but third-party providers may receive file content when files are opened. -### Exclusions +### Training Data: Opt-In for Open Source Projects -Zed will intentionally exclude certain files from Predictive Edits entirely, even when you have opted-in to model improvement feedback. +Zed does not collect training data for our edit prediction model unless the following conditions are met: -You can inspect this exclusion list by opening `zed: open default settings` from the command palette: +1. **You opt in** – Toggle "Training Data Collection" under the **Privacy** section of the edit prediction status bar menu (click the edit prediction icon in the status bar). +2. 
**The project is open source** — detected via LICENSE file ([see detection logic](https://github.com/zed-industries/zed/blob/main/crates/edit_prediction/src/license_detection.rs)) +3. **The file isn't excluded** — via `disabled_globs` + +### File Exclusions + +Certain files are always excluded from edit predictions—regardless of opt-in status: ```json [settings] { "edit_predictions": { - // A list of globs representing files that edit predictions should be disabled for. - // There's a sensible default list of globs already included. - // Any addition to this list will be merged with the default list. "disabled_globs": [ "**/.env*", "**/*.pem", @@ -92,22 +118,17 @@ Users may explicitly exclude additional paths and/or file extensions by adding t } ``` -### Data we collect - -For open source projects where you have opted-in, Zed may store copies of requests and responses to the Zed AI Prediction service. - -This data includes: +### Data Collected (Edit Prediction Training Data) -- sampled edit prediction examples (cursor context + recent diffs/edits) for offline evaluation -- the edit prediction -- a portion of the buffer content around the cursor -- a few recent edits -- the current buffer outline -- diagnostics (errors, warnings, etc) from language servers +For open source projects where you've opted in, Zed may collect: -### Data Handling +- Code excerpt around your cursor +- Recent edit diffs +- The generated prediction +- Repository URL and git revision +- Buffer outline and diagnostics -Collected data is stored in Snowflake, a private database. We periodically select training samples from this data. All data is anonymized and stripped of sensitive information (access tokens, user IDs, email addresses). The training dataset is publicly available at [huggingface.co/datasets/zed-industries/zeta](https://huggingface.co/datasets/zed-industries/zeta). +Collected data is stored in Snowflake. 
We periodically review this data to select training samples for inclusion in our model training dataset. We ensure any included data is anonymized and contains no sensitive information (access tokens, user IDs, email addresses, etc). This training dataset is publicly available at [huggingface.co/datasets/zed-industries/zeta](https://huggingface.co/datasets/zed-industries/zeta). ### Model Output @@ -115,4 +136,4 @@ We then use this training dataset to fine-tune [Qwen2.5-Coder-7B](https://huggin ## Applicable terms -Please see the [Zed Terms of Service](https://zed.dev/terms-of-service) for more. +Please see the [Zed Terms of Service](https://zed.dev/terms) for more. diff --git a/docs/src/ai/privacy-and-security.md b/docs/src/ai/privacy-and-security.md index 5eac8a43268865920825557aa8f5a20ec9e04839..4aada3dff47ba8d0eca8f1056e326d6060451306 100644 --- a/docs/src/ai/privacy-and-security.md +++ b/docs/src/ai/privacy-and-security.md @@ -7,15 +7,17 @@ description: Zed's approach to AI privacy: opt-in data sharing by default, zero- ## Philosophy -Zed aims to collect only the minimum data necessary to serve and improve our product. +Zed collects minimal data necessary to serve and improve our product. Features that could share data, like AI and telemetry, are either opt-in or can be disabled. -Data sharing is opt-in by default. Privacy is not a setting to toggle—it's the baseline. +- **Telemetry**: Zed collects only the data necessary to understand usage and fix issues. Client-side telemetry can be disabled in settings. -As an open-source product, we believe in maximal transparency, and invite you to examine our codebase. If you find issues, we encourage you to share them with us. +- **AI**: Data sharing for AI improvement is opt-in, and each share is a one-time action; it does not grant permission for future data collection. You can use Zed's AI features without sharing any data with Zed and without authenticating. 
-Zed, including AI features, works without sharing data with us and without authentication. +- **Open-Source**: Zed's codebase is public. You can inspect exactly what data is collected and how it's handled. If you find issues, we encourage you to report them. -## Documentation +- **Secure-by-default**: Designing Zed and our Service with "secure-by-default" as an objective is of utmost importance to us. We take your security and ours very seriously and strive to follow industry best-practice in order to uphold that principle. + +## Related Documentation - [Tool Permissions](./tool-permissions.md): Configure granular rules to control which agent actions are auto-approved, blocked, or require confirmation. @@ -23,16 +25,15 @@ Zed, including AI features, works without sharing data with us and without authe - [Telemetry](../telemetry.md): How Zed collects general telemetry data. -- [AI Improvement](./ai-improvement.md): Zed's opt-in-only approach to data collection for AI improvement, whether our Agentic offering or Edit Predictions. +- [Zed AI Features and Privacy](./ai-improvement.md): An overview of Zed's AI features, your data when using AI in Zed, and how to opt-in and help Zed improve these features. - [Accounts](../authentication.md): When and why you'd need to authenticate into Zed, how to do so, and what scope we need from you. -- [Collab](https://zed.dev/faq#data-and-privacy): How Zed's live collaboration works, and how data flows to provide the experience (we don't store your code). +- [Collab](https://zed.dev/faq#data-and-privacy): How Zed's live collaboration works and how data flows. Zed does not store your code. 
## Legal Links -- [Terms of Service](https://zed.dev/terms-of-service) -- [Terms of Use](https://zed.dev/terms) +- [Terms of Service](https://zed.dev/terms) - [Privacy Policy](https://zed.dev/privacy-policy) - [Zed's Contributor License and Feedback Agreement](https://zed.dev/cla) - [Subprocessors](https://zed.dev/subprocessors) diff --git a/docs/src/telemetry.md b/docs/src/telemetry.md index 44f3abd7e651bc55f6c1891d3f18ff63b3d7206c..a8ca9f3e03ce9c5399af38ab443a043a813b6c8f 100644 --- a/docs/src/telemetry.md +++ b/docs/src/telemetry.md @@ -5,7 +5,12 @@ description: "What data Zed collects and how to control telemetry settings." # Telemetry in Zed -Zed collects anonymous telemetry data to help the team understand how people are using the application and to see what sort of issues they are experiencing. +Zed collects anonymous telemetry to understand usage patterns and diagnose issues. + +Telemetry falls into two categories: + +- **Client-side**: Usage metrics and crash reports. You can disable these in settings. +- **Server-side**: Collected when using hosted services like AI or Collaboration. Required for these features to function. ## Configuring Telemetry Settings @@ -21,7 +26,7 @@ To enable or disable some or all telemetry types, open Settings ({#kb zed::OpenS ## Dataflow -Telemetry is sent from the application to our servers. Data is proxied through our servers to enable us to easily switch analytics services. We currently use: +Telemetry is sent from the application to our servers every 5 minutes (or when 50 events accumulate), then routed to the appropriate service. We currently use: - [Sentry](https://sentry.io): Crash-monitoring service - stores diagnostic events - [Snowflake](https://snowflake.com): Data warehouse - stores both diagnostic and metric events @@ -32,33 +37,33 @@ Telemetry is sent from the application to our servers. 
Data is proxied through o ### Diagnostics -Crash reports consist of a [minidump](https://learn.microsoft.com/en-us/windows/win32/debug/minidump-files) and some extra debug information. Reports are sent on the first application launch after the crash occurred. We've built dashboards that allow us to visualize the frequency and severity of issues experienced by users. Having these reports sent automatically allows us to begin implementing fixes without the user needing to file a report in our issue tracker. The plots in the dashboards also give us an informal measurement of the stability of Zed. +Crash reports consist of a [minidump](https://learn.microsoft.com/en-us/windows/win32/debug/minidump-files) and debug metadata. Reports are sent on the next launch after a crash, allowing Zed to identify and fix issues without requiring you to file a bug report. -You can see what extra data is sent alongside the minidump in the `Panic` struct in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs) in the Zed repo. You can find additional information in the [Debugging Crashes](./development/debugging-crashes.md) documentation. +You can inspect what data is sent in the `Panic` struct in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs). See also: [Debugging Crashes](./development/debugging-crashes.md). 
-### Client-Side Usage Data {#client-metrics} +### Client-Side Metrics -To improve Zed and understand how it is being used in the wild, Zed optionally collects usage data like the following: +Client-side telemetry includes: -- (a) file extensions of opened files; -- (b) features and tools You use within the Editor; -- (c) project statistics (e.g., number of files); and -- (d) frameworks detected in Your projects +- File extensions of opened files +- Features and tools used within the editor +- Project statistics (e.g., number of files) +- Frameworks detected in your projects -Usage Data does not include any of Your software code or sensitive project details. Metric events are reported over HTTPS, and requests are rate-limited to avoid using significant network bandwidth. +This data does not include your code or sensitive project details. Events are sent over HTTPS and rate-limited. -Usage Data is associated with a secure random telemetry ID which may be linked to Your email address. This linkage currently serves two purposes: (1) it allows Zed to analyze usage patterns over time while maintaining Your privacy; and (2) it enables Zed to reach out to specific user groups for feedback and improvement suggestions. +Usage data is tied to a random telemetry ID. If you've authenticated, this ID may be linked to your email so Zed can analyze patterns over time and reach out for feedback. -You can audit the metrics data that Zed has reported by running the command {#action zed::OpenTelemetryLog} from the command palette, or clicking `Help > View Telemetry Log` in the application menu. +To audit what Zed has reported, run {#action zed::OpenTelemetryLog} from the command palette or click `Help > View Telemetry Log`. 
-You can see the full list of the event types and exactly the data sent for each by inspecting the `Event` enum and the associated structs in [crates/telemetry_events/src/telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs) in the Zed repository. +For the full list of event types, see the `Event` enum in [telemetry_events.rs](https://github.com/zed-industries/zed/blob/main/crates/telemetry_events/src/telemetry_events.rs). -### Server-Side Usage Data {#metrics} +### Server-Side Metrics -When using Zed's hosted services, we may collect, generate, and Process data to allow us to support users and improve our hosted offering. Examples include metadata around rate limiting and billing metrics/token usage. Zed does not persistently store user content or use user content to evaluate and/or improve our AI features, unless it is explicitly shared with Zed, and we have a zero-data retention agreement with Anthropic. +When using Zed's hosted services, we collect metadata for rate limiting and billing (e.g., token usage). Zed does not store your prompts or code unless you explicitly share them via feedback ratings. -You can see more about our stance on data collection (and that any prompt data shared with Zed is explicitly opt-in) at [AI Improvement](./ai/ai-improvement.md). +For details on AI data handling, see [Zed AI Features and Privacy](./ai/ai-improvement.md). ## Concerns and Questions -If you have concerns about telemetry, please feel free to [open an issue](https://github.com/zed-industries/zed/issues/new/choose). +If you have concerns about telemetry, you can [open an issue](https://github.com/zed-industries/zed/issues/new/choose) or email hi@zed.dev. From 4be8544777deea7e5ab7a3cf19fb90a13dc80d67 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 2 Mar 2026 13:30:40 -0700 Subject: [PATCH 243/548] wGPU: Select more specifically (#50528) This uses the compositor hints if available to pick the best GPU. 
If none is available, it tries each GPU in turn, and the first that actually works is chosen Release Notes: - Linux: Select a more appropriate GPU --------- Co-authored-by: John Tur --- crates/gpui_linux/Cargo.toml | 1 + crates/gpui_linux/src/linux/platform.rs | 40 +++ crates/gpui_linux/src/linux/wayland/client.rs | 74 +++++- crates/gpui_linux/src/linux/wayland/window.rs | 7 +- crates/gpui_linux/src/linux/x11/client.rs | 36 ++- crates/gpui_linux/src/linux/x11/window.rs | 7 +- crates/gpui_wgpu/src/wgpu_context.rs | 237 ++++++++++++------ crates/gpui_wgpu/src/wgpu_renderer.rs | 21 +- crates/zed/src/main.rs | 2 +- 9 files changed, 339 insertions(+), 86 deletions(-) diff --git a/crates/gpui_linux/Cargo.toml b/crates/gpui_linux/Cargo.toml index 08c759125a7600f94867cff95035d0318f26305a..9078fa82c2884421c6cd11c6d3384645621b7e6f 100644 --- a/crates/gpui_linux/Cargo.toml +++ b/crates/gpui_linux/Cargo.toml @@ -121,6 +121,7 @@ x11rb = { version = "0.13.1", features = [ "cursor", "resource_manager", "sync", + "dri3", ], optional = true } # WARNING: If you change this, you must also publish a new version of zed-xim to crates.io xim = { git = "https://github.com/zed-industries/xim-rs.git", rev = "16f35a2c881b815a2b6cdfd6687988e84f8447d8", features = [ diff --git a/crates/gpui_linux/src/linux/platform.rs b/crates/gpui_linux/src/linux/platform.rs index ff79aa64b2f7cd61c3ab6a8b54e2e11b72614d0f..924303cc84b5c662847bdde96979239073adbe19 100644 --- a/crates/gpui_linux/src/linux/platform.rs +++ b/crates/gpui_linux/src/linux/platform.rs @@ -1038,6 +1038,46 @@ pub(super) fn capslock_from_xkb(keymap_state: &State) -> gpui::Capslock { gpui::Capslock { on } } +/// Resolve a Linux `dev_t` to PCI vendor/device IDs via sysfs, returning a +/// [`CompositorGpuHint`] that the GPU adapter selection code can use to +/// prioritize the compositor's rendering device. 
+#[cfg(any(feature = "wayland", feature = "x11"))] +pub(super) fn compositor_gpu_hint_from_dev_t(dev: u64) -> Option { + fn dev_major(dev: u64) -> u32 { + ((dev >> 8) & 0xfff) as u32 | (((dev >> 32) & !0xfff) as u32) + } + + fn dev_minor(dev: u64) -> u32 { + (dev & 0xff) as u32 | (((dev >> 12) & !0xff) as u32) + } + + fn read_sysfs_hex_id(path: &str) -> Option { + let content = std::fs::read_to_string(path).ok()?; + let trimmed = content.trim().strip_prefix("0x").unwrap_or(content.trim()); + u32::from_str_radix(trimmed, 16).ok() + } + + let major = dev_major(dev); + let minor = dev_minor(dev); + + let vendor_path = format!("/sys/dev/char/{major}:{minor}/device/vendor"); + let device_path = format!("/sys/dev/char/{major}:{minor}/device/device"); + + let vendor_id = read_sysfs_hex_id(&vendor_path)?; + let device_id = read_sysfs_hex_id(&device_path)?; + + log::info!( + "Compositor GPU hint: vendor={:#06x}, device={:#06x} (from dev {major}:{minor})", + vendor_id, + device_id, + ); + + Some(gpui_wgpu::CompositorGpuHint { + vendor_id, + device_id, + }) +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/gpui_linux/src/linux/wayland/client.rs b/crates/gpui_linux/src/linux/wayland/client.rs index a810a00af642c3a252a9a144b884837f82eac7e7..b49e269a72459d52c13c21b8d1a474ab310dbffd 100644 --- a/crates/gpui_linux/src/linux/wayland/client.rs +++ b/crates/gpui_linux/src/linux/wayland/client.rs @@ -95,7 +95,10 @@ use gpui::{ ScrollDelta, ScrollWheelEvent, SharedString, Size, TaskTiming, TouchPhase, WindowParams, point, profiler, px, size, }; -use gpui_wgpu::WgpuContext; +use gpui_wgpu::{CompositorGpuHint, WgpuContext}; +use wayland_protocols::wp::linux_dmabuf::zv1::client::{ + zwp_linux_dmabuf_feedback_v1, zwp_linux_dmabuf_v1, +}; /// Used to convert evdev scancode to xkb scancode const MIN_KEYCODE: u32 = 8; @@ -202,6 +205,7 @@ pub(crate) struct WaylandClientState { serial_tracker: SerialTracker, globals: Globals, pub gpu_context: Option, + pub compositor_gpu: Option, 
wl_seat: wl_seat::WlSeat, // TODO: Multi seat support wl_pointer: Option, wl_keyboard: Option, @@ -515,6 +519,7 @@ impl WaylandClient { }) .unwrap(); + let compositor_gpu = detect_compositor_gpu(); let gpu_context = None; let seat = seat.unwrap(); @@ -571,6 +576,7 @@ impl WaylandClient { serial_tracker: SerialTracker::new(), globals, gpu_context, + compositor_gpu, wl_seat: seat, wl_pointer: None, wl_keyboard: None, @@ -715,10 +721,12 @@ impl LinuxClient for WaylandClient { let parent = state.keyboard_focused_window.clone(); let appearance = state.common.appearance; + let compositor_gpu = state.compositor_gpu.take(); let (window, surface_id) = WaylandWindow::new( handle, state.globals.clone(), &mut state.gpu_context, + compositor_gpu, WaylandClientStatePtr(Rc::downgrade(&self.0)), params, appearance, @@ -904,6 +912,70 @@ impl LinuxClient for WaylandClient { } } +struct DmabufProbeState { + device: Option, +} + +impl Dispatch for DmabufProbeState { + fn event( + _: &mut Self, + _: &wl_registry::WlRegistry, + _: wl_registry::Event, + _: &GlobalListContents, + _: &Connection, + _: &QueueHandle, + ) { + } +} + +impl Dispatch for DmabufProbeState { + fn event( + _: &mut Self, + _: &zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1, + _: zwp_linux_dmabuf_v1::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + } +} + +impl Dispatch for DmabufProbeState { + fn event( + state: &mut Self, + _: &zwp_linux_dmabuf_feedback_v1::ZwpLinuxDmabufFeedbackV1, + event: zwp_linux_dmabuf_feedback_v1::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + if let zwp_linux_dmabuf_feedback_v1::Event::MainDevice { device } = event { + if let Ok(bytes) = <[u8; 8]>::try_from(device.as_slice()) { + state.device = Some(u64::from_ne_bytes(bytes)); + } + } + } +} + +fn detect_compositor_gpu() -> Option { + let connection = Connection::connect_to_env().ok()?; + let (globals, mut event_queue) = registry_queue_init::(&connection).ok()?; + let queue_handle = event_queue.handle(); + + let dmabuf: 
zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1 = + globals.bind(&queue_handle, 4..=4, ()).ok()?; + let feedback = dmabuf.get_default_feedback(&queue_handle, ()); + + let mut state = DmabufProbeState { device: None }; + + event_queue.roundtrip(&mut state).ok()?; + + feedback.destroy(); + dmabuf.destroy(); + + crate::linux::compositor_gpu_hint_from_dev_t(state.device?) +} + impl Dispatch for WaylandClientStatePtr { fn event( this: &mut Self, diff --git a/crates/gpui_linux/src/linux/wayland/window.rs b/crates/gpui_linux/src/linux/wayland/window.rs index dd8e0b27c32ca9d15152028e686b065165a9e0c1..4c0dbae530ee254f5232eaead187b93d10b0b8e3 100644 --- a/crates/gpui_linux/src/linux/wayland/window.rs +++ b/crates/gpui_linux/src/linux/wayland/window.rs @@ -34,7 +34,7 @@ use gpui::{ WindowDecorations, WindowKind, WindowParams, layer_shell::LayerShellNotSupportedError, px, size, }; -use gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; +use gpui_wgpu::{CompositorGpuHint, WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; #[derive(Default)] pub(crate) struct Callbacks { @@ -318,6 +318,7 @@ impl WaylandWindowState { client: WaylandClientStatePtr, globals: Globals, gpu_context: &mut Option, + compositor_gpu: Option, options: WindowParams, parent: Option, ) -> anyhow::Result { @@ -338,7 +339,7 @@ impl WaylandWindowState { }, transparent: true, }; - WgpuRenderer::new(gpu_context, &raw_window, config)? + WgpuRenderer::new(gpu_context, &raw_window, config, compositor_gpu)? 
}; if let WaylandSurfaceState::Xdg(ref xdg_state) = surface_state { @@ -488,6 +489,7 @@ impl WaylandWindow { handle: AnyWindowHandle, globals: Globals, gpu_context: &mut Option, + compositor_gpu: Option, client: WaylandClientStatePtr, params: WindowParams, appearance: WindowAppearance, @@ -515,6 +517,7 @@ impl WaylandWindow { client, globals, gpu_context, + compositor_gpu, params, parent, )?)), diff --git a/crates/gpui_linux/src/linux/x11/client.rs b/crates/gpui_linux/src/linux/x11/client.rs index 7e3f67c9bf5fe3176f3badd9b33375ffdeb9dc19..3a970d9f72e1dc82215fc0d11297d222835df431 100644 --- a/crates/gpui_linux/src/linux/x11/client.rs +++ b/crates/gpui_linux/src/linux/x11/client.rs @@ -31,7 +31,7 @@ use x11rb::{ AtomEnum, ChangeWindowAttributesAux, ClientMessageData, ClientMessageEvent, ConnectionExt as _, EventMask, ModMask, Visibility, }, - protocol::{Event, randr, render, xinput, xkb, xproto}, + protocol::{Event, dri3, randr, render, xinput, xkb, xproto}, resource_manager::Database, wrapper::ConnectionExt as _, xcb_ffi::XCBConnection, @@ -64,7 +64,7 @@ use gpui::{ PlatformKeyboardLayout, PlatformWindow, Point, RequestFrameOptions, ScrollDelta, Size, TouchPhase, WindowParams, point, px, }; -use gpui_wgpu::WgpuContext; +use gpui_wgpu::{CompositorGpuHint, WgpuContext}; /// Value for DeviceId parameters which selects all devices. 
pub(crate) const XINPUT_ALL_DEVICES: xinput::DeviceId = 0; @@ -178,6 +178,7 @@ pub struct X11ClientState { pub(crate) current_count: usize, pub(crate) gpu_context: Option, + pub(crate) compositor_gpu: Option, pub(crate) scale_factor: f32, @@ -430,6 +431,9 @@ impl X11Client { let clipboard = Clipboard::new().context("Failed to initialize clipboard")?; + let screen = &xcb_connection.setup().roots[x_root_index]; + let compositor_gpu = detect_compositor_gpu(&xcb_connection, screen); + let xcb_connection = Rc::new(xcb_connection); let ximc = X11rbClient::init(Rc::clone(&xcb_connection), x_root_index, None).ok(); @@ -490,6 +494,7 @@ impl X11Client { last_location: Point::new(px(0.0), px(0.0)), current_count: 0, gpu_context: None, + compositor_gpu, scale_factor, xkb_context, @@ -1514,11 +1519,13 @@ impl LinuxClient for X11Client { let atoms = state.atoms; let scale_factor = state.scale_factor; let appearance = state.common.appearance; + let compositor_gpu = state.compositor_gpu.take(); let window = X11Window::new( handle, X11ClientStatePtr(Rc::downgrade(&self.0)), state.common.foreground_executor.clone(), &mut state.gpu_context, + compositor_gpu, params, &xcb_connection, client_side_decorations_supported, @@ -1976,7 +1983,30 @@ fn fp3232_to_f32(value: xinput::Fp3232) -> f32 { value.integral as f32 + value.frac as f32 / u32::MAX as f32 } -fn check_compositor_present(xcb_connection: &XCBConnection, root: u32) -> bool { +fn detect_compositor_gpu( + xcb_connection: &XCBConnection, + screen: &xproto::Screen, +) -> Option { + use std::os::fd::AsRawFd; + use std::os::unix::fs::MetadataExt; + + xcb_connection + .extension_information(dri3::X11_EXTENSION_NAME) + .ok()??; + + let reply = dri3::open(xcb_connection, screen.root, 0) + .ok()? 
+ .reply() + .ok()?; + let fd = reply.device_fd; + + let path = format!("/proc/self/fd/{}", fd.as_raw_fd()); + let metadata = std::fs::metadata(&path).ok()?; + + crate::linux::compositor_gpu_hint_from_dev_t(metadata.rdev()) +} + +fn check_compositor_present(xcb_connection: &XCBConnection, root: xproto::Window) -> bool { // Method 1: Check for _NET_WM_CM_S{root} let atom_name = format!("_NET_WM_CM_S{}", root); let atom1 = get_reply( diff --git a/crates/gpui_linux/src/linux/x11/window.rs b/crates/gpui_linux/src/linux/x11/window.rs index 55da1d89947eb9a39937b9e70b05ab71aceb6525..f2199ac65e425a8daa04755115264231dd869837 100644 --- a/crates/gpui_linux/src/linux/x11/window.rs +++ b/crates/gpui_linux/src/linux/x11/window.rs @@ -9,7 +9,7 @@ use gpui::{ Tiling, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowDecorations, WindowKind, WindowParams, px, }; -use gpui_wgpu::{WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; +use gpui_wgpu::{CompositorGpuHint, WgpuContext, WgpuRenderer, WgpuSurfaceConfig}; use collections::FxHashSet; use raw_window_handle as rwh; @@ -392,6 +392,7 @@ impl X11WindowState { client: X11ClientStatePtr, executor: ForegroundExecutor, gpu_context: &mut Option, + compositor_gpu: Option, params: WindowParams, xcb: &Rc, client_side_decorations_supported: bool, @@ -679,7 +680,7 @@ impl X11WindowState { // too transparent: false, }; - WgpuRenderer::new(gpu_context, &raw_window, config)? + WgpuRenderer::new(gpu_context, &raw_window, config, compositor_gpu)? }; // Set max window size hints based on the GPU's maximum texture dimension. 
@@ -803,6 +804,7 @@ impl X11Window { client: X11ClientStatePtr, executor: ForegroundExecutor, gpu_context: &mut Option, + compositor_gpu: Option, params: WindowParams, xcb: &Rc, client_side_decorations_supported: bool, @@ -819,6 +821,7 @@ impl X11Window { client, executor, gpu_context, + compositor_gpu, params, xcb, client_side_decorations_supported, diff --git a/crates/gpui_wgpu/src/wgpu_context.rs b/crates/gpui_wgpu/src/wgpu_context.rs index 84b7166f6e6b97a9dc7f16c76069872bae473161..b7883a6910261da8dc3f1df6414c5e38e1c46cd2 100644 --- a/crates/gpui_wgpu/src/wgpu_context.rs +++ b/crates/gpui_wgpu/src/wgpu_context.rs @@ -12,9 +12,19 @@ pub struct WgpuContext { dual_source_blending: bool, } +#[cfg(not(target_family = "wasm"))] +pub struct CompositorGpuHint { + pub vendor_id: u32, + pub device_id: u32, +} + impl WgpuContext { #[cfg(not(target_family = "wasm"))] - pub fn new(instance: wgpu::Instance, surface: &wgpu::Surface<'_>) -> anyhow::Result { + pub fn new( + instance: wgpu::Instance, + surface: &wgpu::Surface<'_>, + compositor_gpu: Option, + ) -> anyhow::Result { let device_id_filter = match std::env::var("ZED_DEVICE_ID") { Ok(val) => parse_pci_id(&val) .context("Failed to parse device ID from `ZED_DEVICE_ID` environment variable") @@ -27,24 +37,15 @@ impl WgpuContext { } }; - let adapter = pollster::block_on(Self::select_adapter( - &instance, - device_id_filter, - Some(surface), - ))?; - - let caps = surface.get_capabilities(&adapter); - if caps.formats.is_empty() { - let info = adapter.get_info(); - anyhow::bail!( - "No adapter compatible with the display surface could be found. \ - Best candidate {:?} (backend={:?}, device={:#06x}) reports no \ - supported surface formats.", - info.name, - info.backend, - info.device, - ); - } + // Select an adapter by actually testing surface configuration with the real device. + // This is the only reliable way to determine compatibility on hybrid GPU systems. 
+ let (adapter, device, queue, dual_source_blending) = + pollster::block_on(Self::select_adapter_and_device( + &instance, + device_id_filter, + surface, + compositor_gpu.as_ref(), + ))?; log::info!( "Selected GPU adapter: {:?} ({:?})", @@ -52,9 +53,6 @@ impl WgpuContext { adapter.get_info().backend ); - let (device, queue, dual_source_blending) = - pollster::block_on(Self::create_device(&adapter))?; - Ok(Self { instance, adapter, @@ -158,70 +156,165 @@ impl WgpuContext { Ok(()) } + /// Select an adapter and create a device, testing that the surface can actually be configured. + /// This is the only reliable way to determine compatibility on hybrid GPU systems, where + /// adapters may report surface compatibility via get_capabilities() but fail when actually + /// configuring (e.g., NVIDIA reporting Vulkan Wayland support but failing because the + /// Wayland compositor runs on the Intel GPU). #[cfg(not(target_family = "wasm"))] - async fn select_adapter( + async fn select_adapter_and_device( instance: &wgpu::Instance, device_id_filter: Option, - compatible_surface: Option<&wgpu::Surface<'_>>, - ) -> anyhow::Result { + surface: &wgpu::Surface<'_>, + compositor_gpu: Option<&CompositorGpuHint>, + ) -> anyhow::Result<(wgpu::Adapter, wgpu::Device, wgpu::Queue, bool)> { + let mut adapters: Vec<_> = instance.enumerate_adapters(wgpu::Backends::all()).await; + + if adapters.is_empty() { + anyhow::bail!("No GPU adapters found"); + } + if let Some(device_id) = device_id_filter { - let adapters: Vec<_> = instance.enumerate_adapters(wgpu::Backends::all()).await; + log::info!("ZED_DEVICE_ID filter: {:#06x}", device_id); + } - if adapters.is_empty() { - anyhow::bail!("No GPU adapters found"); - } + // Sort adapters into a single priority order. Tiers (from highest to lowest): + // + // 1. ZED_DEVICE_ID match — explicit user override + // 2. Compositor GPU match — the GPU the display server is rendering on + // 3. 
Device type — WGPU HighPerformance order (Discrete > Integrated > + // Other > Virtual > Cpu). "Other" ranks above "Virtual" because + // backends like OpenGL may report real hardware as "Other". + // 4. Backend — prefer Vulkan/Metal/Dx12 over GL/etc. + adapters.sort_by_key(|adapter| { + let info = adapter.get_info(); + + // Backends like OpenGL report device=0 for all adapters, so + // device-based matching is only meaningful when non-zero. + let device_known = info.device != 0; + + let user_override: u8 = match device_id_filter { + Some(id) if device_known && info.device == id => 0, + _ => 1, + }; + + let compositor_match: u8 = match compositor_gpu { + Some(hint) + if device_known + && info.vendor == hint.vendor_id + && info.device == hint.device_id => + { + 0 + } + _ => 1, + }; + + let type_priority: u8 = match info.device_type { + wgpu::DeviceType::DiscreteGpu => 0, + wgpu::DeviceType::IntegratedGpu => 1, + wgpu::DeviceType::Other => 2, + wgpu::DeviceType::VirtualGpu => 3, + wgpu::DeviceType::Cpu => 4, + }; + + let backend_priority: u8 = match info.backend { + wgpu::Backend::Vulkan => 0, + wgpu::Backend::Metal => 0, + wgpu::Backend::Dx12 => 0, + _ => 1, + }; + + ( + user_override, + compositor_match, + type_priority, + backend_priority, + ) + }); - let mut non_matching_adapter_infos: Vec = Vec::new(); - - for adapter in adapters.into_iter() { - let info = adapter.get_info(); - if info.device == device_id { - if let Some(surface) = compatible_surface { - let caps = surface.get_capabilities(&adapter); - if caps.formats.is_empty() { - log::warn!( - "GPU matching ZED_DEVICE_ID={:#06x} ({}) is not compatible \ - with the display surface. 
Falling back to auto-selection.", - device_id, - info.name, - ); - break; - } - } + // Log all available adapters (in sorted order) + log::info!("Found {} GPU adapter(s):", adapters.len()); + for adapter in &adapters { + let info = adapter.get_info(); + log::info!( + " - {} (vendor={:#06x}, device={:#06x}, backend={:?}, type={:?})", + info.name, + info.vendor, + info.device, + info.backend, + info.device_type, + ); + } + + // Test each adapter by creating a device and configuring the surface + for adapter in adapters { + let info = adapter.get_info(); + log::info!("Testing adapter: {} ({:?})...", info.name, info.backend); + + match Self::try_adapter_with_surface(&adapter, surface).await { + Ok((device, queue, dual_source_blending)) => { log::info!( - "Found GPU matching ZED_DEVICE_ID={:#06x}: {}", - device_id, - info.name + "Selected GPU (passed configuration test): {} ({:?})", + info.name, + info.backend + ); + return Ok((adapter, device, queue, dual_source_blending)); + } + Err(e) => { + log::info!( + " Adapter {} ({:?}) failed: {}, trying next...", + info.name, + info.backend, + e ); - return Ok(adapter); - } else { - non_matching_adapter_infos.push(info); } } + } - log::warn!( - "No compatible GPU found matching ZED_DEVICE_ID={:#06x}. Available devices:", - device_id - ); + anyhow::bail!("No GPU adapter found that can configure the display surface") + } - for info in &non_matching_adapter_infos { - log::warn!( - " - {} (device_id={:#06x}, backend={})", - info.name, - info.device, - info.backend - ); - } + /// Try to use an adapter with a surface by creating a device and testing configuration. + /// Returns the device and queue if successful, allowing them to be reused. 
+ #[cfg(not(target_family = "wasm"))] + async fn try_adapter_with_surface( + adapter: &wgpu::Adapter, + surface: &wgpu::Surface<'_>, + ) -> anyhow::Result<(wgpu::Device, wgpu::Queue, bool)> { + let caps = surface.get_capabilities(adapter); + if caps.formats.is_empty() { + anyhow::bail!("no compatible surface formats"); + } + if caps.alpha_modes.is_empty() { + anyhow::bail!("no compatible alpha modes"); } - instance - .request_adapter(&wgpu::RequestAdapterOptions { - power_preference: wgpu::PowerPreference::HighPerformance, - compatible_surface, - force_fallback_adapter: false, - }) - .await - .map_err(|e| anyhow::anyhow!("Failed to request GPU adapter: {e}")) + // Create the real device with full features + let (device, queue, dual_source_blending) = Self::create_device(adapter).await?; + + // Use an error scope to capture any validation errors during configure + let error_scope = device.push_error_scope(wgpu::ErrorFilter::Validation); + + let test_config = wgpu::SurfaceConfiguration { + usage: wgpu::TextureUsages::RENDER_ATTACHMENT, + format: caps.formats[0], + width: 64, + height: 64, + present_mode: wgpu::PresentMode::Fifo, + desired_maximum_frame_latency: 2, + alpha_mode: caps.alpha_modes[0], + view_formats: vec![], + }; + + surface.configure(&device, &test_config); + + // Check if there was a validation error + let error = error_scope.pop().await; + if let Some(e) = error { + anyhow::bail!("surface configuration failed: {e}"); + } + + Ok((device, queue, dual_source_blending)) } pub fn supports_dual_source_blending(&self) -> bool { diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 6e4169e34b4706dbdcdfc88238c170ec484180be..bbecca198eb3ae46b739ab4c42267e7f04b0f7a9 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -1,3 +1,5 @@ +#[cfg(not(target_family = "wasm"))] +use crate::CompositorGpuHint; use crate::{WgpuAtlas, WgpuContext}; use bytemuck::{Pod, Zeroable}; use 
gpui::{ @@ -96,6 +98,7 @@ pub struct WgpuRenderer { queue: Arc, surface: wgpu::Surface<'static>, surface_config: wgpu::SurfaceConfiguration, + surface_configured: bool, pipelines: WgpuPipelines, bind_group_layouts: WgpuBindGroupLayouts, atlas: Arc, @@ -132,6 +135,7 @@ impl WgpuRenderer { gpu_context: &mut Option, window: &W, config: WgpuSurfaceConfig, + compositor_gpu: Option, ) -> anyhow::Result { let window_handle = window .window_handle() @@ -167,7 +171,7 @@ impl WgpuRenderer { context.check_compatible_with_surface(&surface)?; context } - None => gpu_context.insert(WgpuContext::new(instance, &surface)?), + None => gpu_context.insert(WgpuContext::new(instance, &surface, compositor_gpu)?), }; Self::new_with_surface(context, surface, config) @@ -186,7 +190,7 @@ impl WgpuRenderer { Self::new_with_surface(context, surface, config) } - pub fn new_with_surface( + fn new_with_surface( context: &WgpuContext, surface: wgpu::Surface<'static>, config: WgpuSurfaceConfig, @@ -266,6 +270,8 @@ impl WgpuRenderer { alpha_mode, view_formats: vec![], }; + // Configure the surface immediately. The adapter selection process already validated + // that this adapter can successfully configure this surface. surface.configure(&context.device, &surface_config); let queue = Arc::clone(&context.queue); @@ -366,6 +372,7 @@ impl WgpuRenderer { queue, surface, surface_config, + surface_configured: true, pipelines, bind_group_layouts, atlas, @@ -857,7 +864,9 @@ impl WgpuRenderer { self.surface_config.width = clamped_width.max(1); self.surface_config.height = clamped_height.max(1); - self.surface.configure(&self.device, &self.surface_config); + if self.surface_configured { + self.surface.configure(&self.device, &self.surface_config); + } // Invalidate intermediate textures - they will be lazily recreated // in draw() after we confirm the surface is healthy. 
This avoids @@ -908,7 +917,9 @@ impl WgpuRenderer { if new_alpha_mode != self.surface_config.alpha_mode { self.surface_config.alpha_mode = new_alpha_mode; - self.surface.configure(&self.device, &self.surface_config); + if self.surface_configured { + self.surface.configure(&self.device, &self.surface_config); + } self.pipelines = Self::create_pipelines( &self.device, &self.bind_group_layouts, @@ -955,7 +966,7 @@ impl WgpuRenderer { let frame = match self.surface.get_current_texture() { Ok(frame) => frame, Err(wgpu::SurfaceError::Lost | wgpu::SurfaceError::Outdated) => { - self.surface.configure(&self.device, &self.surface_config); + self.surface_configured = false; return; } Err(e) => { diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index cfa339afc08faeac8b050ef3d3abbe627b19dadf..0921c12c2f06cea32ccba0e0bc58553d2fa91ab2 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -276,7 +276,7 @@ fn main() { zlog::init(); - if stdout_is_a_pty() { + if stdout_is_a_pty() { zlog::init_output_stdout(); } else { let result = zlog::init_output_file(paths::log_file(), Some(paths::old_log_file())); From 0d79f44ec982e53a40f2b8fee19835f532796bd2 Mon Sep 17 00:00:00 2001 From: Lucas White Date: Mon, 2 Mar 2026 12:36:31 -0800 Subject: [PATCH 244/548] Update legal docs to reflect new terms of service (#50530) Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Update legal docs to reflect new Terms, Privacy Policy, Subprocessors, and Acceptable Use Policies (fka Third Party Terms) --- legal/privacy-policy.md | 274 +++++++++++---------------------- legal/subprocessors.md | 118 +++++++++++--- legal/terms.md | 261 +++++++++++++++++++------------ 
legal/third-party-terms.md | 46 ++---- script/terms/terms.rtf | 308 ++++++++++++++++++++++--------------- 5 files changed, 552 insertions(+), 455 deletions(-) diff --git a/legal/privacy-policy.md b/legal/privacy-policy.md index eaf8ece78160e7f643b1fd646e42a71432aafbd0..1eacc5273c93f5035579a72fe5241beca7e3718f 100644 --- a/legal/privacy-policy.md +++ b/legal/privacy-policy.md @@ -3,266 +3,178 @@ title: Privacy Policy slug: privacy-policy --- -At Zed Industries, Inc. ("Zed", "Company" or "we"), we take privacy and the security of data seriously. This Privacy Policy is established to help advise you about how we treat your personal data. By using or accessing our website located at zed.dev, or the Solution or services available pursuant the Zed End User Terms located at [https://zed.dev/terms](https://zed.dev/terms) (collectively, the "Services"), you acknowledge awareness of the practices and policies outlined below, and hereby consent that we will collect, use, and share your personal data as described in this Privacy Policy. +**Last Updated**: March 2, 2026 -As we grow and expand our Services, we may modify this Privacy Policy from time to time. When material modifications are made, we will alert you to any such changes by placing a notice on the Company website, by sending you an email and/or by some other means. Please note that if you've opted not to receive legal notice emails from us (or haven't provided us with a valid email address), those legal notices will still govern your use of the Services. If you use the Services after any changes to the Privacy Policy have been published on our website, you consent and agree to all of the changes. +## Summary -## What this Privacy Policy Covers +Zed collects user information (name, email, username, IP address, etc.), device and usage data, and web analytics to improve our products and services, as well as payment details if you subscribe. 
We do not store your source code and we process but do not store audio/video/screenshare from collaboration sessions. -Our Privacy Policy covers how we treat Personal Data that we gather when you access or use our Services. "Personal Data" means information that identifies or relates to a particular individual and includes information referred to as "personally identifiable information" or "personal information" under applicable data privacy laws, rules or regulations. Our Privacy Policy does not cover the practices of companies we don't own or control or people we don't manage. +- **We don't sell your data and we don't allow AI providers to train on it** +- **You control Zed editor telemetry** — disable it in Settings or learn more at [https://zed.dev/docs/telemetry](/telemetry) +- **We honor Global Privacy Control** (GPC) signals from your browser +- To access, correct, or delete your data, email privacy@zed.dev with subject "Privacy Request" -## Personal Data +Questions? Contact us at privacy@zed.dev. The full policy below contains complete details. -### Categories of Personal Data We Collect +## Introduction -This chart details the categories of Personal Data that we collect and have collected over the past 12 months: +This Privacy Policy explains how Zed Industries, Inc. ("**Zed**," "**we**," "**our**," or "**us**") collects, uses, and shares your personal data when you use our websites (including [https://zed.dev](https://zed.dev)), our downloadable software ("**Software**"), our subscription service ("**Service**"), or otherwise interact with us. -| Category of personal data | Examples of data we collect | Categories of third parties with whom we share this data | -| ----------------------------- | ------------------------------------------------------------- | -------------------------------------------------------- | -| Profile or contact data | First and last name
Email address
GitHub username | Cloud infrastructure providers
Analytics providers | -| IP data | IP address & derived geolocation data | Cloud infrastructure providers
Analytics providers | -| Web analytics | Interactions
Referrer
Request IDs
Statistics | Cloud infrastructure providers
Analytics providers | -| Photos, videos and recordings | Screenshots
Videos and video recordings you share with us | Cloud infrastructure providers | -| Audio, screenshare data | Audio and screen sharing during collaboration calls | Cloud infrastructure providers | +As used in this Privacy Policy, “personal data” means any information relating to an identified or identifiable individual and includes any information that constitutes "personally identifiable information," “personal data,” or "personal information" under applicable privacy or data protection laws or regulations. -Note that "collection" does not necessarily imply long-term storage. +You acknowledge the collection, use, disclosure, procedures, and other processing described in this Privacy Policy. Beyond the Privacy Policy, your use of our products and services is also subject to our Terms of Service included alongside. This Privacy Policy does not apply to the extent we process personal data in the role of a processor or service provider on behalf of our Zed Business customers. Such processing is governed by our Data Processing Agreement, available upon request. For information about the sub-processors we engage in that capacity, see [https://zed.dev/subprocessors](/subprocessors). -### Categories of Sources of Personal Data - -We collect Personal Data about you from the following categories of sources: - -#### You - -- When you provide such information directly to us. Examples include: - - When you create an account - - When you voluntarily provide information through our Services or through responses to surveys or questionnaires. - - When you send us an email or otherwise contact us. - - When you sign up to our mailing list. -- When you use our hosted Services and such information is collected automatically. Examples include: - - Cookies (defined in the "Tracking Tools and Opt-Out" section below). -- When you use the client software we provide on your machine. Examples include: - - Authentication information when you sign in. 
- - Version and system metadata when the software checks for updates. - - Usage data, unless you opt out. - - Crash reports, unless you opt out. - - When you make requests to language models we host for you. - - Zed does not store or train on your requests without consent. - - Other relevant data necessary to provide you with our Services. - -#### Third Parties +## Personal Data We Collect -- When you login to the service using a third-party service like GitHub. -- Information collected by content delivery networks or similar service providers -- We may use analytics providers to analyze how you interact and engage with the Services, or third parties may help us provide you with customer support. +We may collect personal data from or about you and your devices from the sources and in the manner described below. If you do not provide requested data, some features may not work - either because we need that data to deliver them, or because we're legally required to collect it. -## Our Business Purposes for Collecting or Disclosing Personal Data +For authorized users on Zed Business plans, certain data described in this section may be processed by Zed as a data processor on behalf of the Zed Business customer. In those cases, the Zed Business customer determines the purposes and lawful basis for that processing, as set forth in our Data Processing Agreement. -- Providing, Customizing and Improving the Services - - Creating and managing your account or other user profiles. - - Processing orders or other fee-based transactions; billing. - - Providing you with the products, services or information you request. - - Meeting or fulfilling the reason you provided the information to us. - - Providing support and assistance for the Services. - - Improving the Services, including testing, research, internal analytics and product development. - - Doing fraud protection, security and debugging. 
- - Carrying out other business purposes stated when collecting your Personal Data or as otherwise set forth in applicable data privacy laws. -- Marketing the Services - - Marketing and selling the Services. -- Corresponding with You - - Responding to correspondence that we receive from you, contacting you when necessary or requested, and sending you information about Zed or our Services. - - Sending emails and other communications according to your preferences or that display content that we think will interest you. -- Meeting Legal Requirements and Enforcing Legal Terms - - Fulfilling our legal obligations under applicable law, regulation, court order or other legal process, such as preventing, detecting and investigating security incidents and potentially illegal or prohibited activities. - - Protecting the rights, property or safety of you, Zed or another party. - - Enforcing any agreements with you. - - Responding to claims that any posting or other content violates third-party rights. - - Resolving disputes. +### Personal Data You Provide to Us -We will not collect additional categories of Personal Data or use the Personal Data we collected for materially different, unrelated or incompatible purposes without providing you notice as is described above. +- **Contact Information** - We may collect your personal data when you inquire about Zed, our products and services, or when you otherwise interact with us, including when you sign up for, attend, or take part in our demos, events, or webinars. This data may include your full name, work email, company name, company size, and any other data you share with us. -## How We Disclose Your Personal Data +- **Communications** - When you contact us directly, we may receive personal data about you, such as your name, email address, message contents and attachments, and - if you join a live collaboration session - we process, but do not store, your audio and shared screen. 
When you sign up for news and updates, we will collect your email address and any other data you share. When you communicate with us online, our third-party vendors may receive and store these communications on our behalf. Our emails may include tracking pixels to track information about how you interact with our emails, such as whether you open them and whether you access any included links, your approximate Location Information (described below) based on your IP address, and Device Information (described below), to improve our website, products, and services. -We disclose your Personal Data to categories of service providers and other parties listed in this section. Some of these disclosures may constitute a "sale" of your Personal Data as defined under applicable laws. For more information, please refer to the state-specific sections below. +- **Account Information** - When you create an Account with Zed, we collect the data you provide to create, update, or manage your Service account. Examples include: your name, username, and email address. -- Service Providers. These parties help us provide the Services or perform business functions on our behalf. They include: - - Hosting, technology and communication providers. - - Providers of artificial intelligence or machine learning models - - Payment processors. - - If you are using our Services on a fee-basis, our payment processing partner Stripe, Inc. ("Stripe") collects your voluntarily-provided payment card information necessary to process your payment. - - Please see Stripe Terms of Service and Stripe Privacy Policy for information on its use and storage of your Personal Data. -- Analytics Partners. These parties provide analytics on web traffic or usage of the Services. They include: - - Companies that track how users found or were referred to the Services. - - Companies that track how users interact with the Services. -- Authorized authentication providers (e.g. 
GitHub OAuth) +- **Careers** - If you apply for a job with us, you may submit your contact information and your resume online. We will collect any information you choose to provide on your resume, such as your contact information, education, and employment experience. -### Fulfilling Legal Obligations +- **Payment Information** - If you make a payment, your payment details, such as credit card, address, phone number, or other financial information, are collected by our third-party payment processor on our behalf. Zed does not collect, process, or store your payment information directly. -We may share any Personal Data that we collect with third parties in relation to the activities set forth under "Meeting Legal Requirements and Enforcing Legal Terms" in the "Our Business Purposes for Collecting Personal Data" section above. +- **Regarding Third-Party Services** - If you use or integrate third-party tools or link third-party services with the Software or Service, we may receive personal data about you, such as your [GitHub username and other related information](https://docs.github.com/en/apps/oauth-apps/using-oauth-apps/connecting-with-third-party-applications) that permits us to authenticate your user identity and keep your account secure. You can learn more about Zed Third Parties here: [https://zed.dev/](https://zed.dev/acceptable-use-policies)[acceptable-use-policies](/acceptable-use-policies) -### Business Transfers +### Personal Data We Collect When You Use Our Websites, Software, or Service -Personal Data collected may be transferred to a third party if we undergo a merger, acquisition, bankruptcy or other transaction in which such third party assumes control of our business (in whole or in part). In such an event, we will make reasonable efforts to notify you before your information becomes subject to different privacy and security policies and practices as authorized or mandated by applicable law. 
+- **Website, Software, and Service Telemetry** - We automatically collect telemetry - technical logs, metrics, and usage data - to improve and support Zed’s websites, Software, and Service. You may opt out of local telemetry collection in the Software settings. However, when you sign into or use the websites or Service (including via the Software) we collect telemetry on our servers related to use of the websites and Service. -## Data that is Not Personal Data +Learn more about telemetry and your choices and how to opt out of Software telemetry collection at [https://zed.dev/docs/telemetry](/telemetry) -We may create aggregated, de-identified or anonymized data from the Personal Data we collect, including by removing information that makes the data personally identifiable to a particular user. We may use such aggregated, de-identified or anonymized data and share it with third parties for our lawful business purposes, including to analyze, build and improve the Services and promote our business, provided that we will not share such data in a manner that could identify you. +- **Device and Location Information** - When you use the website, Software, or Service we may collect information about your device and software, including IP address (and inferred approximate location), device type, device identifiers, browser (type, version, user-agent, and language), and operating system or mobile device type. We do so to support improving and securing the Software and Service. Zed does not collect precise location information. -## Tracking Tools and Opt-Out +- **Usage Information** - We automatically collect information about how you use our website and Service, like the pages or other content you view and the dates and times of your visits. We do so to support improving and securing the websites, Software, and Service. 
-The Services use cookies and similar technologies such as pixel tags, web beacons, clear GIFs and JavaScript (collectively, "Cookies") to enable our servers to recognize your web browser, tell us how and when you visit and use our Services, analyze trends, learn about our user base and operate and improve our Services. Cookies are small pieces of data– usually text files – placed on your computer, tablet, phone or similar device when you use that device to access our Services. We may also supplement the information we collect from you with information received from third parties, including third parties that have placed their own Cookies on your device(s). +- **Information from Cookies and Similar Technologies** - We and our third-party partners may collect information using cookies, beacons, and similar technologies (collectively “**Cookies**”) to provide functionality and to recognize you across visits. See our [Cookie Policy](/cookie-policy), which includes information on how to control or opt out of these Cookies. -### We use the following types of Cookies: +## How We Use the Personal Data We Collect -- Essential Cookies. Essential Cookies are required for providing you with features or services that you have requested. For example, certain Cookies enable you to log into secure areas of our Services. Disabling these Cookies may make certain features and services unavailable. -- Functional Cookies. Functional Cookies are used to record your choices and settings regarding our Services, maintain your preferences over time and recognize you when you return to our Services. These Cookies help us to personalize our content for you, greet you by name and remember your preferences (for example, your choice of language or region). -- Performance/Analytical Cookies. Performance/Analytical Cookies allow us to understand how visitors use our Services. 
They do this by collecting information about the number of visitors to the Services, what pages visitors view on our Services and how long visitors are viewing pages on the Services. Performance/Analytical Cookies also help us measure the performance of our advertising campaigns to help us improve our campaigns and Services' content for those who engage with our advertising. +We use the personal data we collect: -You can decide whether or not to accept Cookies through your internet browser's settings. Most browsers have an option for turning off the Cookie feature, which will prevent your browser from accepting new Cookies, as well as (depending on the sophistication of your browser software) allow you to decide on acceptance of each new Cookie in a variety of ways. You can also delete all Cookies that are already on your device. If you do this, however, you may have to manually adjust some preferences every time you visit our website and some of the Services and functionalities may not work. +- To deliver and improve our products: Providing the Software and Service functionality you request, debugging issues, and developing new features based on usage patterns; -To find out more information about Cookies generally, including information about how to manage and delete Cookies, please visit [https://allaboutcookies.org/](https://allaboutcookies.org/) or [https://ico.org.uk/for-the-public/online/cookies/](https://ico.org.uk/for-the-public/online/cookies/) if you are located in the European Union. 
+- To communicate with you: Responding to support requests, sending service announcements, and (with your consent) marketing communications; -## Data Security +- To secure our services: Detecting and preventing fraud, abuse, and security threats; -We endeavor to protect your Personal Data from unauthorized access, use and disclosure using appropriate physical, technical, organizational and administrative security measures based on our Services,the type of Personal Data being collected and how we are processing that data. You should also help protect your data by selecting and protecting your password and/or other sign-on mechanism(s) with care; limiting access to your computer or device and browser; and signing off after you have finished accessing your account. Although we work to protect the security of your account and other data that we hold in our records, be aware that no method of transmitting data over the internet or storing data is completely secure. +- To meet legal obligations: Complying with tax, accounting, and regulatory requirements; -## Data Retention +- To process payments: Completing transactions through our payment processor; and -We retain Personal Data about you for as long as reasonably necessary to provide you with our Services or otherwise in support of our business or commercial purposes for utilization of your Personal Data, as expressed. When establishing a retention period for particular categories of data, we consider who we collected the data from, our need for the Personal Data, why we collected the Personal Data, and the sensitivity of the Personal Data. In some cases we retain Personal Data for a longer period, if doing so is necessary to comply with our legal obligations, resolve disputes or collect fees owed, or as is otherwise permitted or required by applicable law, rule or regulation. We may further retain information in an anonymous or aggregated form where such information would not identify you personally. 
+- To understand aggregate usage: Generating anonymized statistics to guide product decisions. -For example: +We do not use your personal data for purposes materially different from those described above without providing you notice and, where required by law, obtaining your consent. -- We retain your profile information and credentials for as long as you have an account with us. -- We retain your payment data for as long as we need to process your purchase or subscription. -- We retain your device/IP data for as long as we need it to ensure that our systems are working appropriately, effectively and efficiently. +## Legal Bases for Processing European Personal Data -It's worth noting that we avoid retaining data unless necessary to provide our Service. For example: +If you are located in the European Economic Area (“**EEA**”) or the United Kingdom (“**UK**”), we only process your personal data when we have a valid “legal basis,” including as set forth below. -- We do not currently store source code that we proxy during collaboration sessions. -- We do not currently store audio or video recordings of Collaboration calls handled by LiveKit. +- **Consent** - We may process your personal data where you have consented to certain processing of your personal data. For example, we may process your personal data to send you marketing communications or to use Cookies where you have consented to such use. -## Personal Data of Children +- **Contractual Necessity** - We may process your personal data where required to provide you with our products and services. For example, we may need to process your personal data to respond to your inquiries or requests. -We do not knowingly collect or solicit Personal Data from children under 13 years of age; if you are a child under the age of 13, please do not attempt to register for or otherwise use the Services or send us any Personal Data. 
If we learn we have collected Personal Data from a child under 13 years of age, we will delete that information as quickly as possible. If you believe that a child under 13 years of age may have provided Personal Data to us, please contact us at hi@zed.dev. +- **Compliance with a Legal Obligation** - We may process your personal data where we have a legal obligation to do so. For example, we may process your personal data to comply with tax, labor and accounting obligations. -## California Resident Rights +- **Legitimate Interests** - We may process your personal data where we or a third party have a legitimate interest in processing your personal data. Specifically, we have a legitimate interest in using your personal data for product development and internal analytics purposes, and otherwise to improve the safety, security, and performance of our products and services. We only rely on our or a third party’s legitimate interests to process your personal data when these interests are not overridden by your rights and interests. -If you are a California resident, you have the rights set forth in this section. Please see the "Exercising Your Rights" section below for instructions regarding how to exercise these rights. Please note that we may process Personal Data of our customers' end users or employees in connection with our provision of certain services to our customers. If we are processing your Personal Data as a service provider, you may contact the entity that collected your Personal Data in the first instance to address your rights with respect to such data as desired. +## How We Disclose the Personal Data We Collect -If there are any conflicts between this section and any other provision of this Privacy Policy and you are a California resident, the portion that is more protective of Personal Data shall control to the extent of such conflict. 
If you have any questions about this section or whether any of the following rights apply to you, please contact us at hi@zed.dev. +The disclosures described below relate to Zed’s processing as a data controller. When we process data on behalf of Zed Business customers as a data processor, some of the third-parties described below may act as sub-processors under our Data Processing Agreement. -### Access +- **Zed does not sell your personal data to third-parties**.  We also do not share your data with third-parties for the purposes of cross-context advertising. -You have the right to request certain information about our collection and use of your Personal Data over the past 12 months. In response, we will provide you with the following information: +- **Partners and Affiliates** - We may share information we receive to our current or future affiliates (companies under common ownership with Zed) for any of the lawful business purposes described in this Privacy Policy above. -- The categories of Personal Data that we have collected about you. -- The categories of sources from which that Personal Data was collected. -- The business or commercial purpose for collecting or selling your Personal Data. -- The categories of third parties with whom we have shared your Personal Data. -- The specific pieces of Personal Data that we have collected about you. -- If we have disclosed your Personal Data to any third parties for a business purpose over the past 12 months, we will identify the categories of Personal Data shared with each category of third party recipient. If we have sold your Personal Data over the past 12 months, we will identify the categories of Personal Data sold to each category of third party recipient. 
+- **Vendors and Service Providers** - We may disclose information we receive to vendors and service providers retained in connection with operating, maintaining, or monitoring our websites, products, and services for any of the lawful business purposes described in this Privacy Policy above. -### Deletion +- **AI Service Providers** - We may disclose information we receive to vendors that provide artificial intelligence services in connection with our websites, software, or services for legitimate business purposes only, including website performance monitoring and sales and marketing of our products and services. Zed does not utilize third-party services which use this information for AI training purposes. -You have the right to request that we delete the Personal Data that we have collected about you. Under the CCPA, this right is subject to certain exceptions: for e.g., we may need to retain your Personal Data to provide you with the Services or complete a transaction or other action you may have requested, or if deletion of your Personal Data involves disproportionate effort to achieve. If your deletion request is subject to one of these exceptions, we may deny your deletion request to such data. +- **Web Analytics** - We use analytics services such as Amplitude to collect and process certain analytics data related to your use of our websites. These services utilize first-party cookies to collect information about your use of our websites, apps, and online resources via HTTP referrer and/or depending on your choices regarding cookies. Zed does not use third-party tracking cookies that collect your activity for other websites. -### Correction +- **As Required By Law and Similar Disclosures** - We may access, preserve, and disclose your information if we believe doing so is required or appropriate to: -You have the right to request that we correct any inaccurate Personal Data we have collected about you. 
Under the CCPA, this right is subject to certain exceptions: for example, if we reasonably decide, based on the totality of circumstances related to your Personal Data, that such data is correct. If your correction request is subject to one of these CCPA exceptions, we may deny your request to correct such data. + - Comply with law enforcement requests and legal process, such as a court order or subpoena; + - Respond to your requests; + - Protect your, our, or others’ rights, property, security, or safety; + - Protect against legal liability; or + - Investigate fraud or other unlawful activity. -### Processing of Sensitive Personal Information Opt-Out + For the avoidance of doubt, the disclosure of your information may occur if you post any objectionable, harmful, or illegal content on or through our websites or products and services. -Consumers have certain rights over the processing of their sensitive information. However, we do not intentionally collect sensitive categories of personal information, but it is possible to share sensitive information with us through your use of the Services. It is your responsibility not to share any such sensitive information when you use the Services. +- **Merger, Sale, or Other Asset Transfers** - We may transfer your personal data to service providers, advisors, potential transactional partners, or other third parties in connection with the consideration, negotiation, or completion of a corporate transaction in which we are acquired by or merged with another company or we sell, liquidate, or transfer all or a portion of our assets. -### Personal Data Sales Opt-Out and Opt-In +- **With Your Consent** - We may also disclose your information for other purposes with your permission. -We will not sell your Personal Data, and have not done so over the last 12 months. To our knowledge, we do not sell the Personal Data of minors under 16 years of age. 
Under the CCPA, California residents have certain rights when a business "shares" Personal Data with third parties for purposes of cross-contextual behavioral advertising. We have shared the foregoing categories of Personal Data for the purposes of cross-contextual behavioral advertising, as applicable. +## Your Choices -Under California Civil Code Sections 1798.83-1798.84, California residents are entitled to contact us to prevent disclosure of Personal Data to third parties for such third parties' direct marketing purposes; in order to submit such a request, please contact us at hi@zed.dev. +- **Marketing Communications** - You can unsubscribe from our promotional emails via the link provided in the emails. Even if you opt out of receiving promotional messages from us, you will continue to receive administrative and security-related messages from us as long as you maintain a Service account. -Your browser may offer you a "Do Not Track" option, which allows you to signal to operators of websites and web applications and services that you do not wish such operators to track certain of your online activities over time and across different websites. Our Services do not support Do Not Track requests at this time. To find out more about "Do Not Track," you can visit [www.allaboutdnt.com](https://www.allaboutdnt.com). +- **Do Not Track** - Because there is no widely-accepted standard on how to respond to “Do Not Track” signals, we instead utilize and honor [Global Privacy Control (GPC)](https://globalprivacycontrol.org/#gpc-spec) as an alternative where and when feasible. 
-### Exercising Your Rights under CCPA +- **Opting-out of Software Telemetry** - Learn more about telemetry and how to opt out of Software telemetry collection at [https://zed.dev/docs/telemetry](/telemetry) -To exercise the rights described in this Privacy Policy, you or, if you are a California resident, your Authorized Agent (as defined below) can send us a request that (1) provides sufficient information to allow us to adequately verify that you are the person about whom we have collected Personal Data, and (2) describes your request in sufficient detail to allow us to understand, evaluate and respond ( a "Valid Request"). We are not obligated to respond to requests that do not meet these criteria. We will only use Personal Data provided in a Valid Request to verify your identity and complete your request. +- **Disabling Image Loading for Email** - In order to prevent the use of tracking pixels, you may disable image loading in your own email client. -We are committed to respond to Valid Requests within the time frame required by applicable law. We will not charge you a fee for making a Valid Request unless your Valid Request(s) is excessive, repetitive or manifestly unfounded. If we determine that your Valid Request warrants a fee, we will notify you of the fee and explain that decision before completing your request. +## Your Privacy Rights -You may submit a Valid Request using the following methods: +Depending on where you are located, applicable data protection laws may provide you with specific rights regarding your personal data. These may include the right to: -- Email us at: hi@zed.dev +- Request access to the personal data we maintain about you, update, and correct inaccuracies in your personal data, restrict or object to the processing of your personal data, have your personal data anonymized or deleted, as appropriate, or exercise your right to data portability to easily transfer your personal data to another company. 
-If you are a California resident, you may also authorize an agent (an "Authorized Agent") to exercise your rights on your behalf. +- Withdraw any consent you previously provided to us regarding the processing of your personal data at any time and free of charge. We will apply your preferences going forward and this will not affect the lawfulness of the processing before you withdrew your consent. -### We Will Not Discriminate Against You for Exercising Your Rights +- **Your European Privacy Rights** - If you are located in the European Economic Area (EEA) or the United Kingdom (UK), you may exercise any of the rights described above under GDPR or applicable local data protection law. You also have the right to lodge a complaint with a supervisory authority, including in your country of residence, place of work, or where an incident took place. -We will not discriminate against you for exercising your rights under applicable data protection laws. We will not deny you our goods or services, charge you different prices or rates, or provide you a lower quality of goods and services if you exercise your rights under applicable law. However, we may offer different tiers of our Services, as allowed by applicable law, with varying prices, rates or levels of quality of the goods or services you receive related to the value of Personal Data that we receive from you. +### How to Exercise Your Privacy Rights -# European Union and United Kingdom Data Subject Rights +Regardless of where you are located, you may exercise these rights by contacting us at [privacy@zed.dev](mailto:privacy@zed.dev) or by using the contact details at the end of this Privacy Policy. Please include the subject line "Privacy Request" and include: (1) the specific right you wish to exercise, (2) your account email address, and (3) any details that help us locate your data. -## EU and UK Residents +Before fulfilling your request, we may ask you to provide reasonable information to verify your identity. 
Zed will respond to these requests without undue delay and in any event, within one month and will execute the request within one month of responding. Complex requests may require an additional 60 days with notice provided to you. -If you are a resident of the European Union ("EU"), United Kingdom ("UK"), Lichtenstein, Norway or Iceland, you may have additional rights under the EU or UK General Data Protection Regulation (the "GDPR") with respect to your Personal Data, as outlined below. -We use the terms "Personal Data" and "processing" as they are defined in the GDPR in this section, but "Personal Data" generally means information that can be used to individually identify a person, and "processing" generally covers actions that can be performed in connection with data such as collection, use, storage and disclosure. Company will be the controller of your Personal Data processed in connection with the Services. -If there are any conflicts between this section and any other provision of this Privacy Policy, the policy or portion that is more protective of Personal Data shall control to the extent of such conflict. If you have any questions about this section or whether any of the following applies to you, please contact us at hi@zed.dev. Note that we may also process Personal Data of our customers' end users or employees in connection with our provision of certain services to you, in which case we are the processor of Personal Data. If we are the processor of your Personal Data, please contact the controller party in the first instance to address your rights with respect to such data. 
+Please note that there are exceptions and limitations to each of these rights, and that while any changes you make will be reflected in active user databases instantly or within a reasonable period of time, we may retain personal data for backups, archiving, prevention of fraud and abuse, satisfaction of legal obligations, or where we otherwise reasonably believe that we have a legitimate and lawful reason to do so. -## Personal Data We Collect +## Third Parties -The "Categories of Personal Data We Collect" section above details the Personal Data that we collect from you. +Our websites, products, and services may contain links to other websites, products, or services that we do not own or operate or permit you to integrate with third-party services. We are not responsible for the privacy or security practices of these third parties. Please be aware that this Privacy Policy does not apply to your activities on these third-party services or any data you disclose to these third parties. We encourage you to read their privacy policies before providing any data to them. -## Personal Data Use and Processing Grounds +## Retention -The "Our Commercial or Business Purposes for Collecting Personal Data" section above explains how we use your Personal Data. +We keep personal data as long as necessary to provide, maintain, and secure our websites, products, and services. We take measures to avoid retaining data we don't need - for example, we don't store source code proxied during collaboration sessions, or audio, video, and screen contents from calls. -We will only process your Personal Data if we have a lawful basis for doing so. Lawful bases for processing include consent, contractual necessity and our "legitimate interests" or the legitimate interest of others, as further described below. +When you request deletion, we take measures to delete your personal data or anonymize it, unless we're legally required to retain it. 
We determine retention periods based on the type of service, our relationship with you, legal requirements, and applicable statutes of limitations. -- Contractual Necessity: We process the following categories of Personal Data as a matter of "contractual necessity", meaning that we need to process the data to perform under our End User Terms with you, which enables us to provide you with the Services. When we process data due to contractual necessity, failure to provide such Personal Data will result in your inability to use some or all portions of the Services that require such data. - - Profile or Contact Data - - Payment Data -- Legitimate Interest: We process the following categories of Personal Data when we believe it furthers the legitimate interest of us or third parties: - - Device/IP Data - - Web Analytics - - We may also de-identify or anonymize Personal Data to further our legitimate interests. -- Examples of these legitimate interests include (as described in more detail above): - - Providing, customizing and improving the Services. - - Marketing the Services. - - Corresponding with you. - - Meeting legal requirements and enforcing legal terms. - - Completing corporate transactions. -- Consent: In some cases, we process Personal Data based on the consent you expressly grant to us at the time we collect such data. - - Other Processing Grounds: From time to time we may also need to process Personal Data to comply with a legal obligation, if it is necessary to protect the interests of you or other data subjects, or if it is necessary in the public interest. +## Security -## Sharing Personal Data +Designing Zed and our Service with “secure-by-default” as an objective is of utmost importance to us. We take your security and ours very seriously and strive to follow industry best-practice in order to uphold that principle.  
To learn more about Zed’s security program, please visit [https://zed.dev/docs/ai/privacy-and-security](https://zed.dev/docs/ai/privacy-and-security). -The "How We Share Your Personal Data" section above details how we share your Personal Data with third parties. +Zed will notify users as soon as possible should an incident affect their security or privacy. However, because no electronic transmission or storage of data can be proven entirely secure, we can make no guarantees as to the security or privacy of your data. -## EU Data Subject Rights +## Children’s Privacy -For more information about these EU or UK personal data terms and your rights related thereto, or to submit a request for information, please email us at hi@zed.dev. Please note that in some circumstances, we may not be able to fully comply with your request, such as if it is frivolous or impractical, if it jeopardizes the rights of others, or if it is not required by law, but, in those circumstances, we are committed to respond to notify you of such a decision regardless. In some cases, we may also need you to provide us with additional information, which may include Personal Data, if necessary to verify your identity and the nature of your request. +We do not knowingly collect, maintain, or use personal data from children under 18 years of age, and no part of our websites, products, or services is directed to children. If you learn that a child has provided us with personal data in violation of this Privacy Policy, alert us at [privacy@zed.dev](mailto:privacy@zed.dev). -- Access: You can request more information about the Personal Data we hold about you and request a copy of such Personal Data. You can also access certain of your Personal Data by logging on to your account. -- Rectification: If you believe that any Personal Data we are holding about you is incorrect or incomplete, you can request that we correct or supplement such data. 
You can also correct some of this information directly by logging on to your account. -- Erasure: You can request that we erase some or all of your Personal Data from our systems. -- Withdrawal of Consent: If we are processing your Personal Data based on your consent, you have the right to withdraw your consent at any time. Please note, however, that if you exercise this right, you may have to then provide express consent on a case-by-case basis for the use or disclosure of certain of your Personal Data, if such use or disclosure is necessary to enable you to utilize some or all of our Services. -- Portability: You can ask for a copy of your Personal Data in a machine-readable format. You can also request that we transmit the data to another controller where technically feasible. -- Objection: You can contact us to let us know that you object to the further use or disclosure of your Personal Data for certain purposes, such as for direct marketing purposes. -- Restriction of Processing: You can ask us to restrict further processing of your Personal Data. -- Right to File Complaint: You have the right to lodge a complaint about Company's practices with respect to your Personal Data with the supervisory authority of your country or EU Member State. A list of Supervisory Authorities is available here: [https://edpb.europa.eu/about-edpb/board/members_en](https://edpb.europa.eu/about-edpb/board/members_en) +## International Visitors -## Transfers of Personal Data +Our websites, products, and services are hosted in the United States (“**U.S.**”). If you choose to use our websites or products and services from the EEA, the UK or other regions of the world with laws governing data collection and use that may differ from U.S. law, then please note that you are transferring your personal data outside of those regions to the U.S. for storage and processing. We may transfer personal data from the EEA or the UK to the U.S. 
and other third countries based on European Commission-approved or UK Government-approved Standard Contractual Clauses, or otherwise in accordance with applicable data protection laws. We may also transfer your data from the U.S. to other countries or regions in connection with storage and processing of data, fulfilling your requests, and operating our websites, products, and services. By providing any data, including personal data, on or to the websites, products, or services, you consent to such transfer, storage, and processing. For more information about the tools that we use to transfer personal data, or to obtain a copy of the contractual safeguards we use for such transfers (if applicable), you may contact us as described below. -The Services are hosted and operated in the United States ("U.S.") through Company and its service providers. By using the Services, you acknowledge that any Personal Data about you is being provided to Company in the U.S. and will be hosted on U.S. servers, and you authorize Company to transfer, store and process your information to and in the U.S., and possibly other countries. In some circumstances, your Personal Data may be transferred to the U.S. pursuant to a data processing agreement incorporating legally required data protection clauses. +## Changes to this Privacy Policy -# Contact Information: +We will post any adjustments to the Privacy Policy on this page, and the revised version will be effective when it is posted. Registered customers will be notified of material privacy policy changes via the email on file with Zed. -If you have additional questions about this Privacy Policy, the methods in which we collect and use your Personal Data or your choices and rights regarding such collection and use, please do not hesitate to contact us at: +## Contact Information -- Website: zed.dev -- Email Address: hi@zed.dev -- Corporate Address: - Zed Industries, Inc. 
- 2590 Welton St - Suite 200 - PO Box 1916 - Denver CO 80205 +When data is used as outlined in this Privacy Policy, Zed is the data controller and responsible for the processing of your personal data. When Zed processes personal data on behalf of Zed Business customers as a data processor, the terms of our Data Processing Agreement apply. If you have any questions, comments, or concerns about our processing activities, please email us at [privacy@zed.dev](mailto:privacy@zed.dev) or write to us at: -**DATE: May 6, 2025** +Zed Industries, Inc. +2590 Welton St +Suite 200, PO Box 1916 +Denver, CO 80205 diff --git a/legal/subprocessors.md b/legal/subprocessors.md index df3a5f7c9fd1ff5d3fb309a58d58700f8a08681a..7bd95e888473e66e0f9eb232bef1d3e7d67fb802 100644 --- a/legal/subprocessors.md +++ b/legal/subprocessors.md @@ -3,24 +3,100 @@ title: Subprocessor List slug: subprocessors --- -This page provides information about the Subprocessors Zed has engaged to provide processing activities on Customer Data as defined in the [Zed End User Terms](https://zed.dev/terms). 
- -| Subprocessor | Purpose | Location | -| ------------------- | ------------------------ | ------------- | -| Cloudflare | Cloud Infrastructure | Worldwide | -| Amazon Web Services | Cloud Infrastructure | United States | -| DigitalOcean | Cloud Infrastructure | United States | -| Vercel | Cloud Infrastructure | United States | -| ConvertKit | Email Marketing | United States | -| Axiom | Analytics | United States | -| Hex Technologies | Analytics | United States | -| Snowflake | Analytics | United States | -| LiveKit | Audio/Video Conferencing | United States | -| GitHub | Authentication | United States | -| Anthropic | AI Services | United States | -| BaseTen | AI Services | United States | -| Exa Labs | AI Services | United States | -| Google | AI Services | United States | -| OpenAI | AI Services | United States | - -**DATE: May 6th, 2025** +Zed uses select third-party subprocessors to deliver core product functionality. Each subprocessor processes customer personal data only as necessary to provide its service, and all are subject to appropriate data protection agreements. + +### How Zed Uses Subprocessors + +To provide fast, reliable, and secure functionality, Zed relies on a small number of carefully vetted third-party subprocessors. These vendors help us deliver essential capabilities such as hosting, billing, analytics, real-time collaboration, and hosted AI features. + +Each subprocessor only processes customer personal data as needed to provide its service. + +Zed maintains contracts and data protection agreements with all subprocessors, including GDPR-compliant terms where applicable. We do not sell customer data, and we do not share customer personal data with vendors for advertising or marketing purposes. + +### AI Subprocessors + +Zed offers three modes for AI: + +1. **Bring your own API key** — data goes directly from the customer to the model provider; Zed does not process or store it. +2. 
[**External Agents**](https://zed.dev/docs/ai/external-agents) — Zed uses ACP to provide an enhanced experience with terminal-based AI code agents like Claude Code or OpenAI Codex. Data is not processed or stored by Zed when using external agents. +3. **Zed-hosted models** — Zed sends customer prompts to one of its AI providers (listed below). These vendors act as subprocessors only for customers who choose this mode. + +### Ongoing Updates + +**Last Updated**: March 2, 2026 + +This subprocessor list is reviewed regularly. Zed will notify customers of material changes in accordance with our [Terms](https://zed.dev/terms) and [Privacy Policy](https://zed.dev/privacy-policy). + +--- + +## Infrastructure & Hosting + +| Subprocessor | Purpose | Data Location | +| ----------------------- | ---------------------------------------- | ------------- | +| **Cloudflare** | Network services, Cloudflare Workers | Global | +| **Amazon Web Services** | Telemetry ingestion pipeline, S3 buckets | United States | +| **DigitalOcean** | Application database hosting | United States | +| **Vercel** | Website and edge infrastructure hosting | United States | + +--- + +## Billing & Payments + +| Subprocessor | Purpose | Data Location | +| ------------ | ------------------------------------------------------------ | ------------- | +| **Stripe** | Payment processing | United States | +| **Orb** | Usage tracking, subscription management, and metered billing | United States | + +--- + +## Operational Tools + +| Subprocessor | Purpose | Data Location | +| ------------ | ------------------------------------- | ------------- | +| **Day.ai** | Customer relationship management | United States | +| **Linear** | Issue tracking and project management | United States | + +--- + +## Email & Communication + +| Subprocessor | Purpose | Data Location | +| -------------- | ---------------------------------------------------------- | ------------- | +| **ConvertKit** | Product update and feature 
announcement emails | United States | +| **Loops** | Email marketing and product communications | United States | +| **Plain** | Consolidated platform for end-user support across channels | United States | + +--- + +## Analytics & Data Processing + +| Subprocessor | Purpose | Data Location | +| -------------------- | ---------------------------------------------------------------------------------------- | ------------- | +| **Amplitude** | Product analytics | United States | +| **Axiom** | Application telemetry, observability, and logs | United States | +| **Fivetran** | Automates data pipeline integration (extract, transformation, and load services) for Zed | United States | +| **Hex Technologies** | Analytics and debugging | United States | +| **Snowflake** | Data warehouse | United States | + +--- + +## Collaboration Services + +| Subprocessor | Purpose | Data Location | +| ------------ | -------------------------------------------------------------- | ------------- | +| **LiveKit** | Real-time audio/video and collaborative session infrastructure | United States | + +--- + +## AI Services (Zed-Hosted Models) + +_These subprocessors apply only when customers opt to use Zed's hosted AI models. When users supply their own API keys, or use external agents, data is sent directly to the provider and does not pass through Zed's infrastructure._ + +| Subprocessor | Purpose | Data Location | +| ------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------- | +| **Anthropic** | Requests may be sent to Anthropic even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with Anthropic. 
| United States | +| **Baseten** | Inference infrastructure for Edit Predictions | United States | +| **Exa Labs** | AI-powered contextual search and retrieval | United States | +| **Google (Vertex)** | Requests may be sent to Google even if you have another provider's model selected in chat (e.g. for summarization). We have a zero data retention agreement with Google. | United States | +| **OpenAI** | Requests may be sent to OpenAI even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with OpenAI. | United States | +| **xAI** | Requests may be sent to xAI even if you have another provider's model selected in chat (e.g. for summarization or generating git commit messages). We have a zero data retention agreement with xAI. | United States | diff --git a/legal/terms.md b/legal/terms.md index 88afa36aa9cb17c55b1b2fe50a26893c4e5a3389..ed90fd36c835ddcc0949a3ad0d49e35fb7e79c8a 100644 --- a/legal/terms.md +++ b/legal/terms.md @@ -1,197 +1,254 @@ --- -title: Zed End User Terms +title: Terms of Service slug: terms --- -PLEASE READ THESE TERMS AND CONDITIONS CAREFULLY BEFORE USING THE SERVICE OR SOFTWARE OFFERED BY ZED INDUSTRIES, INC. ("ZED", OR "WE"). BY ACCESSING OR USING THE SOLUTION (AS DEFINED BELOW) IN ANY MANNER, YOU ("YOU" OR "CUSTOMER") AGREE TO BE BOUND BY THESE TERMS (THE "AGREEMENT") TO THE EXCLUSION OF ALL OTHER TERMS. YOU REPRESENT AND WARRANT THAT YOU HAVE THE AUTHORITY TO ENTER INTO THIS AGREEMENT; IF YOU ARE ENTERING INTO THIS AGREEMENT ON BEHALF OF AN ORGANIZATION OR ENTITY, REFERENCES TO "CUSTOMER" AND "YOU" IN THIS AGREEMENT, REFER TO THAT ORGANIZATION OR ENTITY. IF YOU DO NOT AGREE TO ALL OF THE FOLLOWING, YOU MAY NOT USE OR ACCESS THE SOLUTION IN ANY MANNER. IF THE TERMS OF THIS AGREEMENT ARE CONSIDERED AN OFFER, ACCEPTANCE IS EXPRESSLY LIMITED TO SUCH TERMS. +**Last Updated**: March 2, 2026 -## 1. 
ACCESS TO AND USE OF THE SOLUTION +Welcome, and thank you for your interest in Zed Industries, Inc. (“**Zed**,” “**we**,” or “**us**”) and our website at [www.zed.dev](https://www.zed.dev), along with our downloadable Zed software (the “**Software**”) and related subscription service (the “**Service**”). These Terms of Service are a legally binding contract between you and Zed regarding your use of the Service. -Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2. +Please read the following Terms carefully. -## 2. TERMS APPLICABLE TO THE EDITOR +**By accessing or using the Service, you (“You” or “Customer”) agree to these Terms of Service, the Data Processing Addendum (“DPA”), available upon request, and Zed’s [Privacy Policy](/privacy-policy) (collectively, the “Terms”).** -### 2.1. License Grant +If you are not eligible, or do not agree to the Terms, you may not access or use the Service. -Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2. +By using the Service, you confirm that you have read and understand these Terms and that they form a binding agreement between you and Zed. -### 2.2. License Limitations +**ARBITRATION NOTICE**. 
Except for certain kinds of disputes described in Section 15.2 (Dispute Resolution and Arbitration), you agree that disputes arising under these Terms will be resolved by binding, individual arbitration, and BY ACCEPTING THESE TERMS, YOU AND ZED ARE EACH WAIVING THE RIGHT TO A TRIAL BY JURY OR TO PARTICIPATE IN ANY CLASS ACTION OR REPRESENTATIVE PROCEEDING. ALTERNATIVELY, CUSTOMER MAY OPT OUT OF ARBITRATION PER SECTION 17.2(a). -You agree that You shall not: (a) exceed the scope of the licenses granted in Section 2.1; (b) make copies of the Editor; (c) distribute, sublicense, assign, delegate, rent, lease, sell, time-share or otherwise transfer the benefits of, use under, or rights to, the license granted in Section 2.1; (d) reverse engineer, decompile, disassemble or otherwise attempt to learn the source code, structure or algorithms underlying the Editor, except to the extent required to be permitted under applicable law; (e) modify, translate or create derivative works of the Editor; or (f) remove any copyright, trademark, patent or other proprietary notice that appears on the Editor or copies thereof. +## 1. Overview -### 2.3. Open Source Software +Subject to these Terms, Zed will permit Customer to access and use Zed’s AI-enabled software-as-a-service offering (the “**Service**”), which enables certain additional features and functionality (including artificial intelligence and collaboration features) in Zed’s open source code editing software (“**Software**”). -Zed makes certain versions of the Editor and related software available at the Zed GitHub Repository: [https://github.com/zed-industries/zed](https://github.com/zed-industries/zed) (the "Repo"). Your use of such software is subject to the open source software licenses declared in the Repo. +## 2. Service -## 3. TERMS APPLICABLE TO THE ZED SERVICE +### 2.1. Eligibility -### 3.1. Access to and Scope of Zed Service +Customer must be at least 18 years old to use the Service. 
By agreeing to these Terms, Customer represents and warrants to Zed that: (a) Customer is at least 18 years old; (b) Customer has not previously been suspended or removed from the Service; and (c) Customer’s registration and use of the Service is in compliance with any and all applicable laws and regulations. If Customer is an entity, organization, or company, the individual accepting these Terms on Customer’s behalf represents and warrants that they have authority to bind Customer to these Terms and Customer agrees to be bound by these Terms. -If you have elected to use the Zed Service by enabling or activating the Zed Service, Zed will use commercially reasonable efforts to make the Zed Service available to You as set forth in this Agreement. Once you elected to use the Zed Service, You may access and use the Zed Service during the Term, subject to Your compliance with the terms and conditions of the Agreement. +### 2.2. Access Grant -### 3.2. Restrictions +During the Term, subject to Customer’s compliance with the terms of the Terms, Customer may access and use the Service only for Customer’s internal business purposes or for individuals, for personal non-commercial purposes, in accordance with the then-current version of Zed’s usage guidelines and standard technical documentation for the Service that Zed makes generally available to its customers (“**Documentation**”), the Terms, and any terms set forth in the applicable Subscription Service (as defined in Section 3.4 below). Customer agrees to access the Service only through the mechanisms designated by Zed. Without limiting the foregoing, to access the Service, Customer may be required to associate an existing third-party account with the Service to enable authentication (e.g., via OAuth). Customer will be responsible for the acts and omissions of all persons who access the Service through Customer’s account as though such acts and omissions were Customer’s own. 
Customer will promptly notify Zed if it becomes aware of any compromise to its Zed account. -You will use the Zed Service only in accordance with all applicable laws, including, but not limited to, laws related to data (whether applicable within the United States, the European Union, or otherwise). You agree not to (and will not allow any third party to): (i) remove or otherwise alter any proprietary notices or labels from the Zed Service or any portion thereof; (ii) reverse engineer, decompile, disassemble, or otherwise attempt to discover the underlying structure, ideas, or algorithms of the Zed Service or any software used to provide or make the Zed Service available; or (iii) rent, resell or otherwise allow any third party access to or use of the Zed Service. Zed may suspend Your access to or use of the Zed Service as follows: (a) immediately if Zed reasonably believes Your use of the Zed Service may pose a security risk to or may adversely impact the Zed Service; or (b) if You are in breach of this Agreement. +### 2.3. Acceptable Use -### 3.3. Customer Data +The Service uses technology provided by multiple third party AI subprocessors (the “AI Providers”) including but not limited to: Anthropic, PBC (“Anthropic”), Google LLC (“Google”), LiveKit Incorporated, OpenAI, LLC (“OpenAI”) etc., as may be updated from time to time. 
Customer may not use the Service in a manner that violates any applicable AI Provider policies, which are listed on [https://zed.dev/acceptable-use-policies](https://zed.dev/acceptable-use-policies),
Customer Data Made Available to Zed +Customer will not (and will not permit anyone else to), directly or indirectly, do any of the following: (a) provide access to, distribute, sell, or sublicense the Service to a third party; (b) seek to access non-public APIs associated with the Service; (c) copy any element of the Service; (d) interfere with the operation of the Service, circumvent any access restrictions, or conduct any security or vulnerability test of the Service; (e) transmit any viruses or other harmful materials to the Service or others; (f) take any action that risks harm to others or to the security, availability, or integrity of the Service except for the purposes of legitimate security or malware research; or (g) access or use the Service or Output in a manner that violates any applicable relevant local, state, federal or international laws, regulations, or conventions, including those related to data privacy or data transfer, international communications, or export of data (collectively, “**Laws**”), or the Terms. The Service incorporates functionality provided by third-party services, the use of which is subject to additional terms. Customer agrees that if Customer accesses or uses services, features or functionality in the Software or Service that are provided by a third party, Customer will comply with any applicable terms promulgated by that third party, including as set forth at [https://zed.dev/acceptable-use-policies](/acceptable-use-policies) (as may be updated from time to time). Customer further acknowledges that certain components of the Software or Service may be covered by open source licenses ("**Open Source Component**"), including but not limited to Apache License, Version 2.0, GNU General Public License v3.0, and the GNU Affero General Public License v3.0. 
To the extent required by such open source license for the applicable Open Source Component, the terms of such license will apply to such Open Source Component in lieu of the relevant provisions of these Terms. If such open source license prohibits any of the restrictions in these Terms, such restrictions will not apply to such Open Source Component. Zed shall provide Customer with a list of Open Source Components upon Customer's request. -To the extent You elect to make Customer Data available to Zed, the same may only be used by Zed according to the Customer Data type and the use rights regarding the same as described herein: +## 3. General Payment Terms -#### 3.3.2. Usage Data +Accessing certain features and tiers of the Service requires Customer to pay fees. Before Customer pays any fees, Customer will have an opportunity to review and accept the fees that Customer will be charged. Unless otherwise specifically provided for in these Terms, all fees are in U.S. Dollars and are non-refundable, except as required by law. -To improve the Editor and understand how You use it, Zed optionally collects the following usage data: +### 3.1. Price -- (a) file extensions of opened files; -- (b) features and tools You use within the Editor; -- (c) project statistics (e.g., number of files); and -- (d) frameworks detected in Your projects +Zed reserves the right to determine pricing for the Service. Zed will make reasonable efforts to keep pricing information published on our pricing page at [https://zed.dev/pricing](https://zed.dev/pricing) up to date. Zed encourages Customer to check Zed’s pricing page periodically for current pricing information. Zed may change the fees for any feature of the Service, including by adding fees or charges, if Zed gives Customer advance notice of changes before they apply. -(a-d collectively, "Usage Data"). Usage Data does not include any of Your software code or sensitive project details. 
You may change Your preferences disabling the collection of Usage Data and You can audit Usage Data collected by the Editor at any time. See [https://zed.dev/docs/telemetry](https://zed.dev/docs/telemetry) for more. +### 3.2. Taxes -Usage Data is associated with a secure random telemetry ID which may be linked to Your email address. This linkage currently serves two purposes: (1) it allows Zed to analyze usage patterns over time while maintaining Your privacy; and (2) it enables Zed to reach out to specific user groups for feedback and improvement suggestions. Zed may contact You based on Your usage patterns to better understand your needs and improve the Solution. If You delete Your account, the link between Your telemetry ID and Your email address will be permanently removed. By continuing to use Editor or Solution with this feature enabled You agree to this Usage Data collection. +Customer is responsible for any sales, use, GST, value-added, withholding, or similar taxes or levies that apply to Orders, whether domestic or foreign, other than Zed’s income tax (“**Taxes**”). Fees are exclusive of all Taxes. If Customer is compelled to make a deduction or set-off for any such Taxes, Customer will pay Zed such additional amounts as necessary to ensure receipt by Zed of the full amount Zed would have received but for the deduction. -#### 3.3.3. Crash Reports +### 3.3. Authorization -Customer Data consisting of data related to the behavior of the Solution prior to a crash or failure, such as stack traces are collected and classified as "Crash Reports". Zed will use commercially reasonable efforts to exclude any personally identifiable information from Crash Reports, but due to the nature of a crash, Zed does not ensure that information such as paths will be excluded from Crash Reports. Crash Reports will be used solely for Zed's internal purposes in connection with diagnosing defects in the Solution that led to the crash. 
You may grant us permission to capture Crash Reports when installing or activating the Solution, and You may change Your preferences at any time in the settings feature of the Solution. Once You grant us this permission, Zed will retain the Crash Reports indefinitely. +Customer authorizes Zed to charge all sums for the orders that Customer makes, the level of Service that Customer selects, and Customer’s submission of prompts or other Customer Data (defined below) to the Service to generate Output (defined below) as described in these Terms or published by Zed, including all applicable taxes, to the payment method specified in Customer’s account. If Customer pays any fees with a credit card, then Zed may seek pre-authorization of Customer’s credit card account prior to Customer’s purchase to verify that the credit card is valid and has the necessary funds or credit available to cover Customer’s purchase. -#### 3.3.4. User Content +### 3.4. Subscription Service -• You may access, modify or create certain data or information in connection with your access or use of the Zed Editor or the Solution. Such data and information may include, but is not limited to any of the following: +The Service may include certain subscription-based plans with automatically recurring payments for periodic charges ("**Subscription Service**"). The "**Subscription Billing Date**" is the date when Customer purchases its first subscription to the Service. The Subscription Service will begin on the Subscription Billing Date and continue for the subscription period that Customer selects on its account (such period, the "**Initial Subscription Period**"), and will automatically renew for successive periods of the same duration as the Initial Subscription Period (the Initial Subscription Period and each such renewal period, each a "**Subscription Period**") unless Customer cancels the Subscription Service or Zed terminates it. 
If Customer activates a Subscription Service, then Customer authorizes Zed or its third-party payment processors to periodically charge, on a going-forward basis and until cancellation of the Subscription Service, all accrued sums on or before the payment due date. For information on the "Subscription Fee", please see Zed’s pricing page at [https://zed.dev/pricing](https://zed.dev/pricing). Customer’s account will be charged automatically on the Subscription Billing Date and thereafter on the renewal date of its Subscription Service for all applicable fees and taxes for the next Subscription Period. Customer must cancel its Subscription Service before it renews in order to avoid billing of the next periodic Subscription Fee to Customer’s account. Zed or its third-party payment processor will bill the periodic Subscription Fee to the payment method associated with Customer’s account or that Customer otherwise provides to Zed. Customer may cancel the Subscription Service from the account page at https://zed.dev/account or by contacting us at [billing-support@zed.dev](mailto:billing-support@zed.dev). **YOUR CANCELLATION MUST BE RECEIVED BEFORE THE RENEWAL DATE IN ORDER TO AVOID BEING CHARGED FOR THE NEXT SUBSCRIPTION PERIOD.** -- (a) file contents and associated metadata (e.g., filename, paths, size, timestamps); -- (b) source control history, comments and metadata (e.g., git history, commit messages); -- (c) configuration data (e.g., settings, keymaps); -- (d) anything typed, pasted and/or displayed on screen while using the Editor; -- (e) derivative works of the above generated by the Editor (e.g., format conversions, summaries, indexes, caches); -- (f) metadata, code and other derivative works of the above returned by language servers and other local tooling; and -- (g) metadata, code and other derivative works of the above returned by services integrated with the Zed Editor +### 3.5. Consumption Fees -(a-g collectively, "User Content"). 
+Customer’s subscription to the Service may permit Customer to submit prompts or other Customer Data for the purpose of generating Output, at no additional charge for a certain number of times each month. If Customer elects to submit a volume of prompts in excess of the quantity included in its Subscription Fee, then Customer authorizes Zed to charge, and Customer will be charged, a fee for each additional prompt at the rates set forth at [https://zed.dev/docs/ai/models](https://zed.dev/docs/ai/models). -#### 3.3.5. Handling of User Content +### 3.6. Delinquent Accounts -Zed will make use of or transfer User Content only as specified in this Agreement, or as necessary to comply with applicable law. +Zed may suspend or terminate access to the Service, including fee-based portions of the Service, for any account for which any amount is due but unpaid. In addition to the amount due for the Service, a delinquent account will be charged with fees or charges that are incidental to any chargeback or collection of any unpaid amount, including collection fees. If your payment method is no longer valid at the time a renewal Subscription Fee is due, then Zed reserves the right to delete your account and any information or Customer Data associated with your account without any liability to Customer. -#### 3.3.5.1. Zed Collaboration Services +## 4. Data -When using Zed Collaboration Services, User Content is transmitted from Your environment only if You collaborate with other Zed users by electing to share a project in the Editor. Once You share a project, Zed may transmit User Content consisting of file paths, file contents, and metadata regarding the code returned by language servers. Currently, Zed does not persist any User Content beyond the Your collaboration session. If You unshare a project or disconnect from the Solution, all information associated with such project will be deleted from Zed servers. 
In the future, Zed may save User Content regarding projects beyond the scope of a single collaboration session. We may share such User Content with those users You elected to grant access to. Zed's access to such User Content is limited to debugging and making improvements to the Solution. +### 4.1. Zed's Use of Customer Data -#### 3.3.5.2. Other Services +Customer hereby grants Zed a non-exclusive, worldwide, royalty-free, fully paid-up, non-sublicensable (except to service providers and Customer’s designees), non-transferable (except as set forth in Section 15.1) right to use, copy, store, disclose, transmit, transfer, display, modify, create derivative works from, collect, access, store, host, or otherwise process (“**Process**”) any materials that Customer inputs into or otherwise makes available to the Service (including prompts and other written content) (collectively, “**Customer Data**”) solely: (a) to perform its obligations set forth in the Terms, including its Support obligations as applicable; (b) to derive and generate Telemetry (see Section 4.4); and (c) as necessary to comply with applicable Laws. Except as required by applicable Laws, Zed will not provide Customer Data to any person or entity other than Customer’s designees (including pursuant to Section 7) or service providers. In the event that autocomplete suggestions are turned on, Customer understands and agrees that the Service will periodically send Customer Data in the background to an AI Provider for the purpose of generating autocomplete input suggestions in the Services. Autocomplete features can be turned off at any time, in which case Customer Data will not be sent. -The Zed Editor supports integration with API-based services maintained and not operated by Zed (the "Other Services"). By way of example, Other Services includes those made available by GitHub, Anthropic, OpenAI, and similar providers, or those You host or manage directly. 
You may configure the Zed Editor to interoperate, communicate with, and exchange data (including User Content) directly with the Other Services. Zed is not responsible or otherwise liable with respect to Your use of any Other Service, including but not limited to the exchange of data between the Other Service and the Zed Editor. The terms and conditions, including the applicable privacy policy, with respect to the Other Service are those made available by the applicable Other Service, not these Terms. +### 4.2. Customer's Ownership of Output -#### 3.3.5.3. Zed AI Services +The Service may generate specifically for, and make available to, Customer text and written content based on or in response to Customer Data input into the Service (collectively, “**Output**”), including through the use of technologies that incorporate or rely upon artificial intelligence, machine learning techniques, and other similar technology and features. As between the Parties, to the greatest extent permitted by applicable Laws, Customer owns all Output and Zed hereby irrevocably assigns to Customer all right, title, and interest in and to the Output that Zed may possess. **For the avoidance of doubt, Zed and its AI Providers will not retain or use Customer Data for the purpose of improving or training the Service or any AI Provider products, except to the extent Customer explicitly opts-in on Zed’s specific feature to allow training and/or such improvement (such as fine-tuning) and is solely for the benefit of Customer.** -The Zed Editor supports integration with API-based services maintained and operated by Zed (the "Zed AI Services"). You may elect to use Zed AI Services as the provider for various Zed Editor features (e.g., Agent Panel, Inline Assistant, Edit Predictions, and similar features). In connection with Your use of these features, the Zed Editor and Zed AI Services may make use of User Content to generate contextually relevant responses (the “Output”). 
Other than as specified in Section 3.3.5.4 of these Terms, Zed will not use User Content for training of its models, or disclose User Content. +### 4.3. Zed's Collection of Output Rating -Output is provided "as is" without any warranties or guarantees of functionality, security, or fitness for a particular purpose. While efforts are made to ensure the accuracy and reliability, Output may include errors, vulnerabilities, and defects. You are responsible for reviewing, testing, and validating Output before use in any production or critical environment. Zed assumes no liability for any damages, losses, or liability arising from the use, modification, reliance on, or deployment of Output. Any such use is at Your own risk. +The Service may enable Customer, at its option, to rate or otherwise provide feedback with respect to Output generated through the Service. If Customer opts in to provide feedback concerning Output using the features of the Software or Service (e.g., by clicking an Output rating button), then Customer agrees that Zed may Process that Output and associated Customer Data for the purpose of product development and improvement (“Output Rating”). For clarity, Customer’s decision to opt in to provide Output Rating is specific to the corresponding Output. Your decision to provide Output Rating with respect to one instance of Output does not give Zed the right to use any other Output for Output Rating purposes. -#### 3.3.5.4. Improvement Feedback +### 4.4. Telemetry -When using Zed AI Services to provide Edit Predictions in connection with certain open source software projects, You may elect to share requests, responses and feedback comments (collectively "Model Improvement Feedback") with Zed, and Zed may use the same to improve Zed Edit Predictions models. You may opt-out of sharing Model Improvement Feedback at any time. 
+Zed may collect, generate, and Process information, including technical logs, metrics, and data and learnings, related to the Software and Service (“**Telemetry**”) to improve and support the Services and for other lawful business purposes. Customer may configure the Software to opt out of the collection of certain Telemetry Processed locally by the Software itself, but Zed may still collect, generate, and Process Telemetry on Zed’s servers. Zed may not disclose Telemetry to any third-party other than Zed’s Representatives unless it is de-identified so that it does not identify Customer as the source thereof and is aggregated with data across other customers. **For avoidance of doubt, Telemetry expressly does not include Customer Data.** -For more information on Zed Edit Predictions please see: [https://zed.dev/docs/ai/ai-improvement](https://zed.dev/docs/ai/ai-improvement) +## 5. Customer Obligations -When using Zed AI Services in connection with the Agent Panel, You may elect to share with Zed requests, responses and feedback regarding the Agent Panel and related Output (the “Agent Improvement Feedback”) with Zed, and Zed may use the same to improve the Agent Panel and related Output. Zed will only collect Agent Improvement Feedback when You elect to share the same. +Customer is responsible for its Customer Data and will comply with applicable Laws when using the Service. Customer represents and warrants that it has obtained all rights, consents, and permissions necessary for Zed to Process Customer Data and exercise the rights granted to it in the Terms without violating or infringing Laws or third-party rights. Customer Data shall not contain: (a) any “protected health information” or “PHI” as defined under HIPAA (including 45 C.F.R. Parts 160 and 164); or (b) any payment card or cardholder data subject to PCI DSS (including primary account numbers, full track or chip data, CVV/CVC codes, PINs, or similar payment card security data). 
Customer is solely responsible for ensuring compliance with this restriction and shall be liable for, and shall indemnify Zed against, any claims, fines, or penalties arising from Customer’s breach of this Section. Zed disclaims any and all liability in connection with Customer Data. -For more information regarding the Agent Panel please see: [https://zed.dev/docs/ai/ai-improvement](https://zed.dev/docs/ai/ai-improvement) +## 6. Suspension of Service -#### 3.4. Privacy Policy +Zed may immediately suspend Customer’s access to any or all of the Service if: (a) Customer breaches Section 2.2 - 2.4 or Section 5; (b) any payments required under the Terms are overdue by 30 days or more; (c) changes to Laws or new Laws require that Zed suspend the Service or otherwise may impose additional liability on Zed in connection with its provision of the Service to Customer; or (d) Customer’s breach of the Terms risks harm to any of Zed’s other customers or the security, availability, or integrity of the Service or other services and entities. Where practicable, Zed will use reasonable efforts to provide Customer with prior notice of the suspension (email sufficing). If the issue that led to the suspension is resolved, Zed will restore Customer’s access to the Service. -You and Zed are bound by the terms and conditions contained in the Zed Privacy Policy which is incorporated by reference hereto. The Zed Privacy Policy is available at the following URL: [https://zed.dev/privacy-policy](https://zed.dev/privacy-policy). +## 7. Data Sharing and Third-Party Integrations -## 4. FEE BASED SERVICES, FEES AND PAYMENT TERMS +### 7.1. Collaboration Services -### 4.1. Fee Based Services +Certain features of the Service may allow Customer to share data between accounts on the Service, including accounts controlled by persons and entities not associated with Customer (“**Collaboration Features**”). 
If Customer elects to use Collaboration Features, Customer acknowledges and agrees that Zed will, and authorizes Zed to, make available Customer Data consisting of file paths, file contents, and metadata regarding the code returned by language servers to the third parties designated by Customer, and that Zed exercises no control over, and has no liability for, the acts or omissions of such third parties (including in connection with the Customer Data). Currently, with the exception of the Channel notes feature, Zed does not persist any shared Customer Data beyond the designated Collaboration Feature session. -The Zed AI Services is made available with additional usage benefits (the “Enhanced Use ”) as described in the table published at [zed.dev/pricing](https://zed.dev/pricing) (the “Pricing Table”), subject to the requirements and limitations set forth in the Pricing Table and these Terms. In order to make use of the Enhanced Use, Customer must access the Zed AI Services through a Zed registered account. +### 7.2. Third-Party Integrations -### 4.2. Fees +The Service may support integration with third-party platforms, add-ons, services, or products not provided by Zed (“**Third-Party Platforms**”). Use of any Third-Party Platforms integrated with or made available through the Service is subject to Customer’s agreement with the relevant provider and not these Terms. Zed does not control and has no liability for Third-Party Platforms, including their security, functionality, operation, availability, or interoperability with the Service. By enabling a Third-Party Platform to interact with the Service, Customer authorizes Zed to access and exchange Customer Data with such Third-Party Platform on Customer’s behalf. -Customer shall pay to Zed the applicable fees set forth in Pricing Table, together with any applicable taxes and shipping and handling (collectively, the “Fees”). Customer shall have no right of return, and all Fees shall be non-refundable. +## 8. 
Disclaimers; No Warranties by Zed -### 4.3. Payment Terms +THE SOFTWARE, SERVICE, OUTPUT, AND ALL OTHER ZED SERVICES ARE PROVIDED “AS IS” AND “AS AVAILABLE”. ZED, ON ITS OWN BEHALF AND ON BEHALF OF ITS SUPPLIERS AND LICENSORS, MAKES NO OTHER WARRANTIES, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, INCLUDING WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE, OR NONINFRINGEMENT. ZED DOES NOT WARRANT THAT CUSTOMER’S USE OF THE SOFTWARE OR SERVICE WILL BE UNINTERRUPTED OR ERROR-FREE OR THAT IT WILL MAINTAIN CUSTOMER DATA WITHOUT LOSS. ZED IS NOT LIABLE FOR DELAYS, FAILURES, OR PROBLEMS INHERENT IN USE OF THE INTERNET AND ELECTRONIC COMMUNICATIONS OR OTHER SYSTEMS OUTSIDE OF ZED’S CONTROL. ZED IS NOT RESPONSIBLE FOR ANY DAMAGE THAT MAY RESULT FROM THE SOFTWARE OR SERVICE OR OUTPUT OR CUSTOMER’S DEALING WITH ANY OTHER SERVICE USER. Without limiting the foregoing, Customer acknowledges and agrees that: (a) the Service may produce inaccurate or erroneous Output; (b) Customer is responsible for independently evaluating the Output and any other information Customer receives from the Service; and (c) due to the nature of the Service and artificial intelligence technologies generally, Output may not be unique and other users of the Service may receive output from the Service that is similar or identical to the Output (and, notwithstanding anything to the contrary, such similar or identical output will not be understood to be Output). -All amounts payable to Zed under this Agreement shall be paid in United States dollars and paid Zed according to the method of payment, frequency and calculated as set forth in the Pricing Table. +THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS IN THIS SECTION 8 (DISCLAIMERS; NO WARRANTIES BY ZED) APPLY TO THE FULLEST EXTENT PERMITTED BY LAW. Zed does not disclaim any warranty or other right that Zed is prohibited from disclaiming under applicable law. -### 4.4. Taxes; Set-offs +## 9. 
Term, Termination, and Modification of the Service -Any and all payments made by Customer in accordance with this Agreement are exclusive of any taxes that might be assessed by any jurisdiction. Customer shall pay or reimburse Zed for all sales, use, property and similar taxes; all customs duties, import fees, stamp duties, license fees and similar charges; and all other mandatory payments to government agencies of whatever kind, except taxes imposed on the net or gross income of Zed. All amounts payable to Zed under this Agreement shall be without set-off and without deduction of any taxes, levies, imposts, charges, withholdings and/or duties of any nature which may be levied or imposed, including without limitation, value added tax, customs duty and withholding tax. +### 9.1. Term -## 5. TERM AND TERMINATION +These Terms are effective beginning when Customer accepts the Terms or first downloads, installs, accesses, or uses the Service, and ending when terminated as described in Section 9.2 (Termination). -### 5.1. Term +### 9.2. Termination -The term of this Agreement shall commence on the date You first download the Editor or use the Zed Service (the "Effective Date"), and unless terminated earlier according to this Section 3, will end pursuant to this Section 5 (the "Term"). +If Customer violates any provision of these Terms, then Customer is not authorized to access the Service and these Terms automatically terminate. In addition, Zed may, at its sole discretion, terminate these Terms or Customer’s account on the Service, or suspend or terminate Customer’s access to the Service, at any time for any reason or no reason, with or without notice, and without any liability to Customer arising from such termination. Customer may terminate its account and these Terms at any time by contacting Zed at [hi@zed.dev](mailto:hi@zed.dev). -### 5.2. Termination +### 9.3. 
Effect of Termination -This Agreement may be terminated: (a) by either party if the other has materially breached this Agreement; or (b) by Zed at any time and for any reason upon notice to Customer. You acknowledge that Zed is under no obligation to continue to operate the Zed Service or make the Editor available, and We may end any programs in connection with the same at any time. +Upon termination of these Terms: a) Customer’s license to access and use the Service will terminate and Customer must immediately cease all use of the Service; b) Customer will no longer be authorized to access its account or the Service; c) Customer must pay Zed any unpaid amount that was due prior to termination; and d) all payment obligations accrued prior to termination and Section(s) 2.4 (Restrictions), 3 (General Payment Terms) with the exception of 3.4 (Subscription Service), 4.2 (Customer’s Ownership of Output), 4.4 (Telemetry), 8 (Disclaimers; No Warranties by Zed), 9.3 (Effect of Termination), 10 (Ownership; Feedback), 11 (Limitations of Liability), 12 (Indemnity), 15 (Governing Law, Dispute Resolution and Arbitration); and 16 (General Terms), will survive. If Customer’s account has been terminated for a breach of these Terms, then Customer is prohibited from creating a new account on the Service. -### 5.3. Effect of Termination and Survival +### 9.4. Modification of the Service -Upon any expiration or termination of this Agreement, Customer shall (i) immediately cease use of the Zed Service, and (ii) return all Zed Confidential Information and other materials provided by Zed. The following provisions will survive termination of this Agreement: Sections 3.3 (Customer Data), Section 3.4 (Privacy Policy), Section 5.3 (Effect of Termination and Survival), Section 6 (Ownership), Section 7 (Indemnification), Section 9 (Limitation of Liability), Section 10 (Third Party Services), and Section 11 (Miscellaneous). 
+Zed reserves the right to modify or discontinue all or any portion of the Service at any time (including by limiting or discontinuing certain features of the Service), temporarily or permanently, without notice to Customer. Zed will have no liability to Customer for any change to the Service. -## 6. OWNERSHIP +## 10. Ownership; Feedback -Zed retains all right, title, and interest in and to the Zed Service, Editor, and any software, products, works or other intellectual property created, used, provided, or made available by Zed under or in connection with the Zed Service or Editor. Customer may from time to time provide suggestions, comments, or other feedback to Zed with respect to the Zed Service or Editor ("Feedback"). Customer shall, and hereby does, grant to Zed a nonexclusive, worldwide, perpetual, irrevocable, transferable, sublicensable, royalty-free, fully paid-up license to use and exploit the Feedback for any purpose. You retain all right, title and interest in and to the Customer Data, including all intellectual property rights therein. No intellectual property rights with respect to any software code you develop or modify with the Editor or Zed Service (collectively, the “Output”) are transferred or assigned to Zed hereunder. +Neither Party grants the other Party any rights or licenses not expressly set out in the Terms. Except as expressly provided in the Terms, as between the Parties, Customer retains all intellectual property rights and other rights in and to the Customer Data and Output. Except for the rights and licenses granted in the Terms, Zed and its licensors retain all intellectual property rights in and to the Service and Software. 
To the extent Customer provides Zed with feedback (including suggestions and comments for enhancements or new functionality) regarding the Service or Software, Output, or Zed’s products, services, or other technology (“**Feedback**”), Zed has the full and unrestricted right (but no obligation) to use or incorporate Feedback in any manner, including to improve and develop any of its products, services, technology, or other materials without attribution to Customer. -## 7. INDEMNIFICATION +## 11. Limitations of Liability -Customer will defend, indemnify, and hold Zed, its affiliates, suppliers and licensors harmless and each of their respective officers, directors, employees and representatives from and against any claims, damages, losses, liabilities, costs, and expenses (including reasonable attorneys' fees) arising out of or relating to any third party claim with respect to: (a) Customer Data; (b) breach of this Agreement or violation of applicable law by Customer; or (c) alleged infringement or misappropriation of third-party's intellectual property rights resulting from Customer Data. +### 11.1. -## 8. WARRANTY +TO THE FULLEST EXTENT PERMITTED BY LAW, IN NO EVENT WILL THE ZED ENTITIES BE LIABLE TO CUSTOMER FOR ANY INDIRECT, INCIDENTAL, SPECIAL, CONSEQUENTIAL, OR PUNITIVE DAMAGES (INCLUDING DAMAGES FOR LOSS OF PROFITS, GOODWILL, OR ANY OTHER INTANGIBLE LOSS) ARISING OUT OF OR RELATING TO YOUR ACCESS TO OR USE OF, OR YOUR INABILITY TO ACCESS OR USE, THE SERVICE OR ANY MATERIALS OR CONTENT ON THE SERVICE, WHETHER BASED ON WARRANTY, CONTRACT, TORT (INCLUDING NEGLIGENCE), STATUTE, OR ANY OTHER LEGAL THEORY, AND WHETHER OR NOT ANY ZED ENTITY HAS BEEN INFORMED OF THE POSSIBILITY OF DAMAGE. 
-Zed does not represent or warrant that the operation of the Zed Service or Editor (or any portion thereof) will be uninterrupted or error free, or that the Zed Service or Editor (or any portion thereof) will operate in combination with other hardware, software, systems or data not provided by Zed. CUSTOMER ACKNOWLEDGES THAT, ZED MAKES NO EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES OF ANY KIND WITH RESPECT TO THE SERVICE OR SOFTWARE, OR THEIR CONDITION. ZED HEREBY EXPRESSLY EXCLUDES, ANY AND ALL OTHER EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES, WHETHER UNDER COMMON LAW, STATUTE OR OTHERWISE, INCLUDING WITHOUT LIMITATION ANY AND ALL WARRANTIES AS TO MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, SATISFACTORY QUALITY OR NON-INFRINGEMENT OF THIRD-PARTY RIGHTS. +### 11.2. -## 9. LIMITATIONS OF LIABILITY +TO THE FULLEST EXTENT PERMITTED BY LAW, THE AGGREGATE LIABILITY OF THE ZED ENTITIES TO CUSTOMER FOR ALL CLAIMS ARISING OUT OF OR RELATING TO THE USE OF OR ANY INABILITY TO USE ANY PORTION OF THE SERVICE, OR OTHERWISE ARISING UNDER THESE TERMS, WHETHER IN CONTRACT, TORT, OR OTHERWISE, IS LIMITED TO THE GREATER OF:  THE AMOUNT CUSTOMER HAS PAID TO ZED FOR ACCESS TO AND USE OF THE SERVICE IN THE 12 MONTHS PRIOR TO THE EVENT OR CIRCUMSTANCE GIVING RISE TO THE CLAIM OR US$100. -IN NO EVENT SHALL ZED BE LIABLE FOR ANY LOST DATA, LOST PROFITS, BUSINESS INTERRUPTION, REPLACEMENT SERVICE OR OTHER SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR INDIRECT DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THEORY OF LIABILITY. ZED'S LIABILITY FOR ALL CLAIMS ARISING UNDER THIS AGREEMENT, WHETHER IN CONTRACT, TORT OR OTHERWISE, SHALL NOT EXCEED THE GREATER OF: THE FEES PAID TO ZED BY CUSTOMER DURING THE TWELVE (12) MONTH PERIOD PRECEDING THE DATE OF THE CLAIM, OR ONE THOUSAND US DOLLARS ($1,000). +### 11.3. -## 10. 
Third Party Services +EACH PROVISION OF THESE TERMS THAT PROVIDES FOR A LIMITATION OF LIABILITY, DISCLAIMER OF WARRANTIES, OR EXCLUSION OF DAMAGES IS INTENDED TO AND DOES ALLOCATE THE RISKS BETWEEN THE PARTIES UNDER THESE TERMS. THIS ALLOCATION IS AN ESSENTIAL ELEMENT OF THE BASIS OF THE BARGAIN BETWEEN THE PARTIES. EACH OF THESE PROVISIONS IS SEVERABLE AND INDEPENDENT OF ALL OTHER PROVISIONS OF THESE TERMS. THE LIMITATIONS IN THIS SECTION 11 (LIMITATION OF LIABILITY) WILL APPLY EVEN IF ANY LIMITED REMEDY FAILS OF ITS ESSENTIAL PURPOSE. -Zed may make certain third party services available to You within the Editor or the Zed Service (each a "Third Party Service"). You acknowledge and agree that (a) use of each Third Party Service is subject to the corresponding terms and conditions available at the following URL: [https://zed.dev/third-party-terms](https://zed.dev/third-party-terms) and/or presented in connection with Your use of such Third Party Service; (b) the terms and conditions of this Agreement do not apply with respect to Your use of any Third Party Service; and (c) Zed is not liable in any way regarding Your use of any Third Party Service. +## 12. Indemnity -## 11. 
MISCELLANEOUS +To the fullest extent permitted by law, Customer is responsible for its use of the Service, and Customer will defend and indemnify Zed, its affiliates, and their respective shareholders, directors, managers, members, officers, employees, consultants, and agents (together, the "Zed Entities") from and against every claim brought by a third party, and any related liability, damage, loss, and expense, including attorneys' fees and costs, arising out of or connected with: (1) Customer’s unauthorized use of, or misuse of, the Service; (2) the Customer Data; (3) Customer’s use of Output; (4) Customer’s violation or alleged violation of any portion of these Terms, any representation, warranty, or agreement referenced in these Terms, or any applicable law or regulation; (5) Customer’s violation or alleged violation of any third-party right, including any intellectual property right or publicity, confidentiality, other property, or privacy right; or (6) any dispute or issue between Customer and any third party. Zed reserves the right, at Zed’s own expense, to assume the exclusive defense and control of any matter otherwise subject to indemnification by Customer (without limiting Customer’s indemnification obligations with respect to that matter), and in that case, Customer agrees to cooperate with our defense of those claims. -### 11.1. Export Control +## 13. Confidentiality -You hereby certify that You will comply with all current US Export Control laws. You agree to defend, indemnify and hold Zed harmless from any liability for Your violation of U.S. Export Control laws. +### 13.1. Definition -### 11.2. 
Compliance with Laws +“**Confidential Information**” means information disclosed to the receiving Party (“**Recipient**”) under the Terms that is designated by the disclosing Party (“**Discloser**”) as proprietary or confidential or that should be reasonably understood to be proprietary or confidential due to its nature and the circumstances of its disclosure. Zed’s Confidential Information includes the terms and conditions of the Terms and the Service (including any technical or performance information about the Service). -You shall comply with all applicable laws and regulations in its use of the Solution, including without limitation the unlawful gathering or collecting, or assisting in the gathering or collecting of information in violation of any privacy laws or regulations. You shall, at its own expense, defend, indemnify and hold harmless Zed from and against any and all claims, losses, liabilities, damages, judgments, government or federal sanctions, costs and expenses (including attorneys' fees) incurred by Zed arising from any claim or assertion by any third party of violation of privacy laws or regulations by You or any of its agents, officers, directors or employees. +### 13.2. Obligations -### 11.3. Assignment +As Recipient, each Party will: (a) hold Confidential Information in confidence and not disclose it to third parties except as permitted in the Terms, including Section 4.1; and (b) only use Confidential Information to fulfill its obligations and exercise its rights under the Terms. 
Recipient may disclose Confidential Information to its employees, agents, contractors, and other representatives having a legitimate need to know (including, for Zed, the subcontractors referenced in Section 16.5) (“**Representatives**”), provided Recipient remains responsible for its respective Representatives’ compliance with this Section 13 and such Representatives are bound by written agreements (or, in the case of professional advisers like attorneys and accountants, ethical duties) imposing confidentiality and non-use obligations no less protective than this Section 13. -Neither party may transfer and assign its rights and obligations under this Agreement without the prior written consent of the other party. Notwithstanding the foregoing, Zed may transfer and assign its rights under this Agreement without consent from the other party in connection with a change in control, acquisition or sale of all or substantially all of its assets. +### 13.3. Exclusions -### 11.4. Force Majeure +These confidentiality obligations do not apply to information that Recipient can document: (a) is or becomes public knowledge through no fault of Recipient or its Representatives; (b) it rightfully knew or possessed prior to receipt under the Terms; (c) it rightfully received from a third party without breach of confidentiality obligations; or (d) it independently developed without using Confidential Information. -Neither party shall be responsible for failure or delay in performance by events out of their reasonable control, including but not limited to, acts of God, Internet outage, terrorism, war, fires, earthquakes and other disasters (each a "Force Majeure"). Notwithstanding the foregoing: if a Force Majeure continues for more than thirty (30) days, either party may terminate this agreement by written notice to the other party. +### 13.4. Remedies -### 11.5. 
Notice +Unauthorized use or disclosure of Confidential Information may cause substantial harm for which damages alone are an insufficient remedy. Discloser may seek appropriate equitable relief, in addition to other available remedies, for breach or threatened breach of this Section 13, without the necessity of posting a bond or proving actual damages. -All notices between the parties shall be in writing and shall be deemed to have been given if personally delivered or sent by registered or certified mail (return receipt), or by recognized courier service. +### 13.5. Required Disclosures -### 11.6. No Agency +Nothing in the Terms prohibits Recipient from making disclosures, including of Customer Data and other Confidential Information, if required by Laws, subpoena, or court order, provided (if permitted by Laws) it notifies Discloser in advance and cooperates in any effort to obtain confidential treatment. -Both parties agree that no agency, partnership, joint venture, or employment is created as a result of this Agreement. You do not have any authority of any kind to bind Zed. +## 14. Publicity -### 11.7. Governing Law +Neither Party may publicly announce that the Parties have entered into the Terms, except with the other Party’s prior consent or as required by Laws. However, Zed may use the name, brand, or logo of Customer (or Customer’s parent company) for the purpose of identifying Customer as a licensee or customer on Zed’s website or in other promotional materials. Zed will cease further use at Customer’s written request. -This Agreement shall be governed exclusively by, and construed exclusively in accordance with, the laws of the United States and the State of California, without regard to its conflict of laws provisions. The federal courts of the United States in the Northern District of California and the state courts of the State of California shall have exclusive jurisdiction to adjudicate any dispute arising out of or relating to this Agreement. 
Each party hereby consents to the jurisdiction of such courts and waives any right it may otherwise have to challenge the appropriateness of such forums, whether on the basis of the doctrine of forum non conveniens or otherwise. The United Nations Convention on Contracts for the International Sale of Goods shall not apply to this Agreement or any Purchase Order issued under this Agreement. +## 15. Governing Law, Dispute Resolution and Arbitration -### 11.8. Updated Agreement +### 15.1. Governing Law, Jurisdiction and Venue -Zed reserves the right to update this Agreement at any time. The terms and conditions of the updated version of the Agreement shall apply to the Zed Service and Editor downloaded, or accessed following the date of publication of the updated version. If You do not agree with any terms of the updated Agreement, You may not use or access the Zed Service or Editor in any manner. Zed may from time-to-time provide release notes applicable to the Editor or Zed Service, and such release notes may contain additional use restrictions or terms applicable to Customer Data. Your use of the Editor or Zed Service after the applicable release notes are made available shall be subject to the additional use restrictions or terms applicable to Customer Data. +The Terms are governed by the laws of the State of Delaware and the United States without regard to conflicts of laws provisions that would result in the application of the laws of another jurisdiction and without regard to the United Nations Convention on the International Sale of Goods. The parties further agree that except as stated below in the Arbitration provision, and for any claims under Section 15.2 (b), each party irrevocably consents to the exclusive jurisdiction and venue of the state and federal courts located in New Castle County, Delaware, for any action arising out of or relating to these Terms, and waive any objection based on venue or forum non conveniens. 
ANY CAUSE OF ACTION OR CLAIM CUSTOMER MAY HAVE ARISING OUT OF OR RELATING TO THESE TERMS MUST BE COMMENCED WITHIN ONE (1) YEAR AFTER THE CAUSE OF ACTION OR CLAIM ACCRUES, OTHERWISE, SUCH CAUSE OF ACTION OR CLAIM IS PERMANENTLY BARRED. -### 11.9. Entire Agreement +### 15.2. Dispute Resolution and Arbitration -This Agreement is the complete and exclusive statement of the mutual understanding of the parties and supersedes and cancels all previous written and oral agreements, communications, and other understandings relating to the subject matter of this Agreement, and all waivers and modifications must be in a writing signed by both parties, except as otherwise provided herein. Any term or provision of this Agreement held to be illegal or unenforceable shall be, to the fullest extent possible, interpreted so as to be construed as valid, but in any event the validity or enforceability of the remainder hereof shall not be affected. +ANY CONTROVERSY OR CLAIM ARISING OUT OF OR RELATING TO THESE TERMS, OR THE BREACH THEREOF, SHALL BE SETTLED BY ARBITRATION AND JUDGMENT ON THE AWARD RENDERED BY THE ARBITRATOR MAY BE ENTERED IN ANY COURT HAVING JURISDICTION THEREOF. IF THERE IS A DISPUTE ABOUT WHETHER THIS ARBITRATION AGREEMENT CAN BE ENFORCED OR APPLIES TO THE DISPUTE, CUSTOMER AND ZED AGREE THAT THE ARBITRATOR WILL DECIDE THAT ISSUE. -**DATE: May 6, 2025** +**a. 
Opt-Out.** If Customer does not wish to resolve disputes by binding arbitration, Customer may opt out of the provisions of this Section 15.2 (Dispute Resolution and Arbitration) within 30 days after the date that Customer agrees to these Terms by sending an email to [arbitration-opt-out@zed.dev](mailto:arbitration-opt-out@zed.dev) or a letter to Zed Industries, Inc., Attention: Legal Department – Arbitration Opt-Out, 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 that specifies: Customer’s full legal name, the email address associated with Customer’s account on the Service, and a statement that Customer wishes to opt out of arbitration (“**Opt-Out Notice**”). Once Zed receives Customer’s Opt-Out Notice, this Section 15.2 (Dispute Resolution and Arbitration) will be void and any action arising out of these Terms will be resolved as set forth in Section 15.1 (Governing Law). The remaining provisions of these Terms will not be affected by Customer’s Opt-Out Notice. + +**b. Pre-Arbitration Dispute Resolution and Notification.** Prior to initiating an arbitration, Customer and Zed each agree to notify the other party of the dispute and attempt to negotiate an informal resolution to it first. Zed will contact Customer at the email address Customer has provided to Zed; Customer can contact Zed by email at [legal@zed.dev](mailto:legal@zed.dev). If after a good faith effort to negotiate, one party feels the dispute has not and cannot be resolved informally, the party intending to pursue arbitration agrees to notify the other party via email prior to initiating the arbitration. + +**c. 
Exceptions to Arbitration.** Customer and Zed each agree that the following claims are exceptions to arbitration and will be brought in a judicial proceeding in a court of competent jurisdiction: (i) Any claim related to actual or threatened infringement, misappropriation or violation of a party’s copyrights, trademarks, trade secrets, patents, or other intellectual property rights; or (ii) Any claim seeking emergency injunctive relief based on exigent circumstances (e.g., imminent danger or commission of a crime, hacking, cyber-attack). + +**d. Arbitration Rules.** (1) If Customer is domiciled in the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be settled by arbitration administered by the American Arbitration Association in accordance with its Commercial Arbitration Rules, and judgment on the award rendered by the arbitrator may be entered in any court having jurisdiction thereof. (2) If Customer is domiciled internationally outside the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be determined by arbitration administered by the International Centre for Dispute Resolution in accordance with its International Arbitration Rules. + +**e. Modification to AAA Rules - Arbitration Hearing/Location.** Customer agrees that any required arbitration hearing will be conducted in the English language by one (1) mutually agreed upon arbitrator, (a) in city/county and state of Customer’s headquarters unless both parties agree otherwise; and appearances may be made via telephonic or video hearing; and (b) for any claim or counterclaim under $25,000, by solely the submission of documents to the arbitrator. + +### 15.3. Waiver of Jury Trial and Class Action Waiver + +EACH PARTY HEREBY IRREVOCABLY WAIVES ALL RIGHT TO TRIAL BY JURY IN ANY ACTION, SUIT, PROCEEDING, CLAIM, OR COUNTERCLAIM ARISING OUT OF OR RELATING TO THESE TERMS. 
CUSTOMER AND ZED EACH AGREE THAT ANY SUIT, PROCEEDING, OR OTHER ACTION ARISING OUT OF OR RELATED TO THESE TERMS WILL BE CONDUCTED ONLY ON AN INDIVIDUAL BASIS AND NOT IN A CLASS, CONSOLIDATED OR REPRESENTATIVE ACTION. + +## 16. General Terms + +### 16.1. + +These Terms, including the Privacy Policy and any other agreements expressly incorporated by reference into these Terms, are the entire and exclusive understanding and agreement between Customer and Zed regarding your use of the Service. Customer may not assign or transfer these Terms or its rights under these Terms, in whole or in part, by operation of law or otherwise, without Zed’s prior written consent. Zed may assign these Terms and all rights granted under these Terms at any time without notice or consent. The failure to require performance of any provision will not affect Zed’s right to require performance at any other time after that, nor will a waiver by Zed of any breach or default of these Terms, or any provision of these Terms, be a waiver of any subsequent breach or default or a waiver of the provision itself. Use of Section headers in these Terms is for convenience only and will not have any impact on the interpretation of any provision. Throughout these Terms the use of the word “including” means “including but not limited to.” If any part of these Terms is held to be invalid or unenforceable, then the unenforceable part will be given effect to the greatest extent possible, and the remaining parts will remain in full force and effect. + +### 16.2. Notices + +Except as set out in the Terms, any notice or consent under the Terms must be in writing to the Customer email address on the Order and Customer shall send all notices to Zed at Zed Industries, Inc., 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to [legal@zed.dev](mailto:legal@zed.dev) and will be deemed given: (a) upon receipt if by personal delivery; (b) upon receipt if by certified or registered U.S. 
mail (return receipt requested); or (c) one day after dispatch if by a commercial overnight delivery service. Either Party may update its address with notice to the other Party pursuant to this Section. Zed may also send operational notices to Customer by email or through the Service. + +### 16.3. DPA + +The terms of the Data Processing Agreement (“**DPA**”), available upon request, are incorporated into these Terms by reference. + +### 16.4. Modification of Terms + +Zed may, from time to time, change these Terms. Please check these Terms periodically for changes. Revisions will be effective immediately except that, for existing users, material revisions will be effective 30 days after posting or notice to Customer of the revisions unless otherwise stated. Zed may require that Customer accept modified Terms in order to continue to use the Service. If Customer does not agree to the modified Terms, then Customer should discontinue its use of the Service and notify Zed at hi@zed.dev, in which case Zed will provide a pro-rated refund of any prepaid Subscription Fee. The terms in any Customer purchase order or business form will not amend or modify the Terms and are expressly rejected by Zed; any of these Customer documents are for administrative purposes only and have no legal effect with respect to the Terms. + +### 16.5. Subcontractors + +Zed may use subcontractors and permit them to exercise Zed’s rights, but Zed remains responsible for their compliance with the Terms and for its overall performance under the Terms. + +### 16.6. Independent Contractors + +The Parties are independent contractors, not agents, partners, or joint venturers. + +### 16.7. Export + +Customer will comply with all relevant U.S. and foreign export and import Laws in using the Service. Customer: (a) represents and warrants that it is not listed on any U.S. government list of prohibited or restricted parties or located in (or a national of) a country that is subject to a U.S. 
government embargo or that has been designated by the U.S. government as a “terrorist supporting” country; (b) agrees not to access or use the Service in violation of any U.S. export embargo, prohibition, or restriction; and (c) will not submit to the Service any information controlled under the U.S. International Traffic in Arms Regulations. + +### 16.8. Government End-Users + +Elements of the Service may include commercial computer software. If the user or licensee of the Service is an agency, department, or other entity of the United States Government, the use, duplication, reproduction, release, modification, disclosure, or transfer of the Service or any related documentation of any kind, including technical data and manuals, is restricted by the terms of the Terms in accordance with Federal Acquisition Regulation 12.212 for civilian purposes and Defense Federal Acquisition Regulation Supplement 227.7202 for military purposes. The Service was developed fully at private expense. All other use is prohibited. + +### 16.9. Privacy Policy + +Please read the [Zed Privacy Policy](/privacy-policy) (the “**Privacy Policy**”) carefully for information relating to our collection, use, storage, and disclosure of your personal information. The Zed Privacy Policy is incorporated by this reference into, and made a part of, these Terms. + +### 16.10. Additional Terms + +Customer’s use of the Service is subject to all additional terms, policies, rules, or guidelines applicable to the Service or certain features of the Service that we may post on or link to from the Service (the “**Additional Terms**”). All Additional Terms are incorporated by this reference into, and made a part of, these Terms. + +### 16.11. Consent to Electronic Communications + +By using the Service, Customer consents to receiving certain electronic communications from Zed as further described in the Privacy Policy. Please read the Privacy Policy to learn more about Zed’s electronic communications practices. 
Customer agrees that any notices, agreements, disclosures, or other communications that Zed sends to Customer electronically will satisfy any legal communication requirements, including that those communications be in writing. Zed may send Customer emails concerning Zed products and services, as well as those of third parties. Customer may opt out of promotional emails by following the unsubscribe instructions in the promotional email itself. + +### 16.12. Contact Information + +The Service is offered by Zed Industries, Inc. Customer may contact Zed by sending correspondence to 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to [legal@zed.dev](mailto:legal@zed.dev). + +### 16.13. Notice to California Residents + +If Customer is a California resident, then under California Civil Code Section 1789.3, Customer may contact the Complaint Assistance Unit of the Division of Consumer Services of the California Department of Consumer Affairs in writing at 1625 N. Market Blvd., Suite N 112, Sacramento, California 95834, or by telephone at +1-800-952-5210 in order to resolve a complaint regarding the Service or to receive further information regarding use of the Service. diff --git a/legal/third-party-terms.md b/legal/third-party-terms.md index 4c4a0f6cce319369283c42d68f150699f9c1565c..6d4153d0b4771a5ccb9cca924caae682eece145c 100644 --- a/legal/third-party-terms.md +++ b/legal/third-party-terms.md @@ -1,53 +1,39 @@ --- -title: 3rd Party Terms -slug: third-party-terms +title: Acceptable Use Policies +slug: acceptable-use-policies --- -In addition to the [Zed End User Terms](/terms) and [Zed Privacy Policy](/privacy-policy) usage of certain Zed features may also subject you to additional 3rd party terms and conditions. 
These terms and conditions may include, but are not limited to, the following: +**Last Updated:** March 2, 2026 -## Anthropic - -- [Anthropic Usage Policy](https://www.anthropic.com/legal/aup) -- [Anthropic Privacy Policy](https://www.anthropic.com/legal/privacy) -- [Anthropic Commercial Terms of Service](https://www.anthropic.com/legal/commercial-terms) +Some third-party services accessible through Zed have their own acceptable use policies. These apply whether Zed hosts the service on your behalf or you connect your own account. The applicable policies are listed below and apply alongside the [Zed End User Terms](/terms) and [Zed Privacy Policy](/privacy-policy). -## Baseten +These policies may be updated from time to time by the applicable provider. -- [BaseTen Terms and Conditions](https://www.baseten.co/terms-and-conditions/) - -### Exa.ai +## Anthropic -- [Exa Labs Terms and Conditions](https://exa.ai/assets/Exa_Labs_Terms_of_Service.pdf) -- [Exa Labs Privacy Policy](https://exa.ai/privacy-policy) +- [Anthropic Usage Policy](https://www.anthropic.com/legal/aup) ## GitHub -- [GitHub Terms of Service](https://docs.github.com/en/site-policy/github-terms/github-terms-of-service) -- [GitHub Privacy Statement](https://docs.github.com/en/site-policy/privacy-policies/github-general-privacy-statement) - [GitHub Acceptable Use Policies](https://docs.github.com/en/site-policy/acceptable-use-policies/github-acceptable-use-policies) - [GitHub Copilot Product Specific Terms](https://github.com/customer-terms/github-copilot-product-specific-terms) ## Google -- [Google APIs Terms of Service](https://developers.google.com/terms) -- [Google Gemini API Additional Terms of Service](https://ai.google.dev/gemini-api/terms) - [Google Generative AI Prohibited Use Policy](https://policies.google.com/terms/generative-ai/use-policy) -## LiveKit +## OpenAI + +- [OpenAI Usage Policies](https://openai.com/policies/usage-policies/) -- [LiveKit Terms of 
Service](https://livekit.io/legal/terms-of-service) -- [LiveKit Privacy Policy](https://livekit.io/legal/privacy-policy) +## OpenRouter -## OpenAI +- [OpenRouter Terms of Service](https://openrouter.ai/terms) -- [OpenAI Terms of Use](https://openai.com/policies/terms-of-use/) -- [OpenAI Privacy Policy](https://openai.com/policies/privacy-policy/) -- [OpenAI Business terms](https://openai.com/policies/business-terms/) -- [OpenAI Service terms](https://openai.com/policies/service-terms/) +## Vercel -## SuperMaven +- [Vercel Acceptable Use Policy](https://vercel.com/legal/acceptable-use-policy) -- [SuperMaven Terms of Service](https://supermaven.com/terms-of-service) -- [SuperMaven Privacy Policy](https://supermaven.com/privacy-policy) +## xAI -**DATE: May 6, 2025** +- [xAI Acceptable Use Policy](https://x.ai/legal/acceptable-use-policy) diff --git a/script/terms/terms.rtf b/script/terms/terms.rtf index f5fab23f4551fd0b3f8605209c3315eb470af224..cd01004c11ed10e58d3c17b1274cd499a1046c66 100644 --- a/script/terms/terms.rtf +++ b/script/terms/terms.rtf @@ -2,128 +2,194 @@ {\colortbl;\red255\green0\blue0;\red0\green0\blue255;} \widowctrl\hyphauto -{\pard \qc \f0 \sa180 \li0 \fi0 \b \fs36 Zed End User Terms\par} +{\pard \qc \f0 \sa180 \li0 \fi0 \b \fs36 Terms of Service\par} {\pard \ql \f0 \sa180 \li0 \fi0 \par} -{\pard \ql \f0 \sa180 \li0 \fi0 PLEASE READ THESE TERMS AND CONDITIONS CAREFULLY BEFORE USING THE SERVICE OR SOFTWARE OFFERED BY ZED INDUSTRIES, INC. ("ZED", OR "WE"). BY ACCESSING OR USING THE SOLUTION (AS DEFINED BELOW) IN ANY MANNER, YOU ("YOU" OR "CUSTOMER") AGREE TO BE BOUND BY THESE TERMS (THE "AGREEMENT") TO THE EXCLUSION OF ALL OTHER TERMS. YOU REPRESENT AND WARRANT THAT YOU HAVE THE AUTHORITY TO ENTER INTO THIS AGREEMENT; IF YOU ARE ENTERING INTO THIS AGREEMENT ON BEHALF OF AN ORGANIZATION OR ENTITY, REFERENCES TO "CUSTOMER" AND "YOU" IN THIS AGREEMENT, REFER TO THAT ORGANIZATION OR ENTITY. 
IF YOU DO NOT AGREE TO ALL OF THE FOLLOWING, YOU MAY NOT USE OR ACCESS THE SOLUTION IN ANY MANNER. IF THE TERMS OF THIS AGREEMENT ARE CONSIDERED AN OFFER, ACCEPTANCE IS EXPRESSLY LIMITED TO SUCH TERMS.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 1. ACCESS TO AND USE OF THE SOLUTION\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 2. TERMS APPLICABLE TO THE EDITOR\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.1. License Grant\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Subject to the terms and conditions of this Agreement, Zed hereby grants to You, and You hereby accept from Zed, a term-limited, non-exclusive, non-transferable, non-assignable and non-sublicensable license to make use of the Editor for Your internal use only, and subject to the use limitations in Section 2.2.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.2. 
License Limitations\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You agree that You shall not: (a) exceed the scope of the licenses granted in Section 2.1; (b) make copies of the Editor; (c) distribute, sublicense, assign, delegate, rent, lease, sell, time-share or otherwise transfer the benefits of, use under, or rights to, the license granted in Section 2.1; (d) reverse engineer, decompile, disassemble or otherwise attempt to learn the source code, structure or algorithms underlying the Editor, except to the extent required to be permitted under applicable law; (e) modify, translate or create derivative works of the Editor; or (f) remove any copyright, trademark, patent or other proprietary notice that appears on the Editor or copies thereof.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.3. Open Source Software\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed makes certain versions of the Editor and related software available at the Zed GitHub Repository: {\field{\*\fldinst{HYPERLINK "https://github.com/zed-industries/zed"}}{\fldrslt{\ul -https://github.com/zed-industries/zed -}}} - (the "Repo"). Your use of such software is subject to the open source software licenses declared in the Repo.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 3. TERMS APPLICABLE TO THE ZED SERVICE\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.1. Access to and Scope of Zed Service\par} -{\pard \ql \f0 \sa180 \li0 \fi0 If you have elected to use the Zed Service by enabling or activating the Zed Service, Zed will use commercially reasonable efforts to make the Zed Service available to You as set forth in this Agreement. Once you elected to use the Zed Service, You may access and use the Zed Service during the Term, subject to Your compliance with the terms and conditions of the Agreement.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.2. 
Restrictions\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You will use the Zed Service only in accordance with all applicable laws, including, but not limited to, laws related to data (whether applicable within the United States, the European Union, or otherwise). You agree not to (and will not allow any third party to): (i) remove or otherwise alter any proprietary notices or labels from the Zed Service or any portion thereof; (ii) reverse engineer, decompile, disassemble, or otherwise attempt to discover the underlying structure, ideas, or algorithms of the Zed Service or any software used to provide or make the Zed Service available; or (iii) rent, resell or otherwise allow any third party access to or use of the Zed Service. Zed may suspend Your access to or use of the Zed Service as follows: (a) immediately if Zed reasonably believes Your use of the Zed Service may pose a security risk to or may adversely impact the Zed Service; or (b) if You are in breach of this Agreement.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.3. Customer Data\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You are solely responsible for Customer Data including, but not limited to: (a) compliance with all applicable laws and this Agreement; (b) any claims relating to Customer Data; and (c) any claims that Customer Data infringes, misappropriates, or otherwise violates the rights of any third party. You agree and acknowledge that Customer Data may be irretrievably deleted if Your account is terminated. For purposes of this Agreement, "Customer Data" shall mean any data, information or other material provided, uploaded, or submitted by You to the Zed Service in the course of using the Zed Service. 
Notwithstanding anything to the contrary, You represent and warrant that You will not transfer or make available to Zed any personally identifiable information or related information subject to applicable data privacy laws or regulations, unless otherwise agreed to in writing by Zed.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.1. Customer Data Made Available to Zed\par} -{\pard \ql \f0 \sa180 \li0 \fi0 To the extent You elect to make Customer Data available to Zed, the same may only be used by Zed according to the Customer Data type and the use rights regarding the same as described herein:\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.2. Usage Data\par} -{\pard \ql \f0 \sa180 \li0 \fi0 To improve the Editor and understand how You use it, Zed optionally collects the following usage data:\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (a)\tx360\tab file extensions of opened files;\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (b)\tx360\tab features and tools You use within the Editor;\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (c)\tx360\tab project statistics (e.g., number of files); and\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (d)\tx360\tab frameworks detected in Your projects\sa180\sa180\par} -{\pard \ql \f0 \sa180 \li0 \fi0 (a-d collectively, "Usage Data"). Usage Data does not include any of Your software code or sensitive project details. You may change Your preferences disabling the collection of Usage Data and You can audit Usage Data collected by the Editor at any time. See {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/telemetry"}}{\fldrslt{\ul -https://zed.dev/docs/telemetry -}}} - for more.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Usage Data is associated with a secure random telemetry ID which may be linked to Your email address. 
This linkage currently serves two purposes: (1) it allows Zed to analyze usage patterns over time while maintaining Your privacy; and (2) it enables Zed to reach out to specific user groups for feedback and improvement suggestions. Zed may contact You based on Your usage patterns to better understand your needs and improve the Solution. If You delete Your account, the link between Your telemetry ID and Your email address will be permanently removed. By continuing to use Editor or Solution with this feature enabled You agree to this Usage Data collection.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.3. Crash Reports\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Customer Data consisting of data related to the behavior of the Solution prior to a crash or failure, such as stack traces are collected and classified as "Crash Reports". Zed will use commercially reasonable efforts to exclude any personally identifiable information from Crash Reports, but due to the nature of a crash, Zed does not ensure that information such as paths will be excluded from Crash Reports. Crash Reports will be used solely for Zed's internal purposes in connection with diagnosing defects in the Solution that led to the crash. You may grant us permission to capture Crash Reports when installing or activating the Solution, and You may change Your preferences at any time in the settings feature of the Solution. Once You grant us this permission, Zed will retain the Crash Reports indefinitely.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.4. User Content\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \u8226 ? You may access, modify or create certain data or information in connection with your access or use of the Zed Editor or the Solution. 
Such data and information may include, but is not limited to any of the following:\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (a)\tx360\tab file contents and associated metadata (e.g., filename, paths, size, timestamps);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (b)\tx360\tab source control history, comments and metadata (e.g., git history, commit messages);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (c)\tx360\tab configuration data (e.g., settings, keymaps);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (d)\tx360\tab anything typed, pasted and/or displayed on screen while using the Editor;\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (e)\tx360\tab derivative works of the above generated by the Editor (e.g., format conversions, summaries, indexes, caches);\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (f)\tx360\tab metadata, code and other derivative works of the above returned by language servers and other local tooling; and\sa180\par} -{\pard \ql \f0 \sa0 \li720 \fi-360 \bullet \tx360\tab (g)\tx360\tab metadata, code and other derivative works of the above returned by services integrated with the Zed Editor\sa180\sa180\par} -{\pard \ql \f0 \sa180 \li0 \fi0 (a-g collectively, "User Content").\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5. Handling of User Content\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed will make use of or transfer User Content only as specified in this Agreement, or as necessary to comply with applicable law.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.1. Zed Collaboration Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed Collaboration Services, User Content is transmitted from Your environment only if You collaborate with other Zed users by electing to share a project in the Editor. 
Once You share a project, Zed may transmit User Content consisting of file paths, file contents, and metadata regarding the code returned by language servers. Currently, Zed does not persist any User Content beyond the Your collaboration session. If You unshare a project or disconnect from the Solution, all information associated with such project will be deleted from Zed servers. In the future, Zed may save User Content regarding projects beyond the scope of a single collaboration session. We may share such User Content with those users You elected to grant access to. Zed's access to such User Content is limited to debugging and making improvements to the Solution.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.2. Other Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The Zed Editor supports integration with API-based services maintained and not operated by Zed (the "Other Services"). By way of example, Other Services includes those made available by GitHub, Anthropic, OpenAI, and similar providers, or those You host or manage directly. You may configure the Zed Editor to interoperate, communicate with, and exchange data (including User Content) directly with the Other Services. Zed is not responsible or otherwise liable with respect to Your use of any Other Service, including but not limited to the exchange of data between the Other Service and the Zed Editor. The terms and conditions, including the applicable privacy policy, with respect to the Other Service are those made available by the applicable Other Service, not these Terms.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.3. Zed AI Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The Zed Editor supports integration with API-based services maintained and operated by Zed (the "Zed AI Services"). You may elect to use Zed AI Services as the provider for various Zed Editor features (e.g., Agent Panel, Inline Assistant, Edit Predictions, and similar features). 
In connection with Your use of these features, the Zed Editor and Zed AI Services may make use of User Content to generate contextually relevant responses (the \u8220"Output\u8221"). Other than as specified in Section 3.3.5.4 of these Terms, Zed will not use User Content for training of its models, or disclose User Content.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Output is provided "as is" without any warranties or guarantees of functionality, security, or fitness for a particular purpose. While efforts are made to ensure the accuracy and reliability, Output may include errors, vulnerabilities, and defects. You are responsible for reviewing, testing, and validating Output before use in any production or critical environment. Zed assumes no liability for any damages, losses, or liability arising from the use, modification, reliance on, or deployment of Output. Any such use is at Your own risk.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.3.5.4. Improvement Feedback\par} -{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed AI Services to provide Edit Predictions in connection with certain open source software projects, You may elect to share requests, responses and feedback comments (collectively "Model Improvement Feedback") with Zed, and Zed may use the same to improve Zed Edit Predictions models. You may opt-out of sharing Model Improvement Feedback at any time.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 For more information on Zed Edit Predictions please see: {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/ai-improvement"}}{\fldrslt{\ul -https://zed.dev/docs/ai/ai-improvement -}}} -\par} -{\pard \ql \f0 \sa180 \li0 \fi0 When using Zed AI Services in connection with the Agent Panel, You may elect to share with Zed requests, responses and feedback regarding the Agent Panel and related Output (the \u8220"Agent Improvement Feedback\u8221") with Zed, and Zed may use the same to improve the Agent Panel and related Output. 
Zed will only collect Agent Improvement Feedback when You elect to share the same.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 For more information regarding the Agent Panel please see: {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/ai-improvement"}}{\fldrslt{\ul -https://zed.dev/docs/ai/ai-improvement -}}} -\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel3 \b \fs24 3.4. Privacy Policy\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You and Zed are bound by the terms and conditions contained in the Zed Privacy Policy which is incorporated by reference hereto. The Zed Privacy Policy is available at the following URL: {\field{\*\fldinst{HYPERLINK "https://zed.dev/privacy-policy"}}{\fldrslt{\ul -https://zed.dev/privacy-policy +{\pard \ql \f0 \sa180 \li0 \fi0 {\b Last Updated}: March 2, 2026\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Welcome, and thank you for your interest in Zed Industries, Inc. (\u8220"{\b Zed},\u8221" \u8220"{\b we},\u8221" or \u8220"{\b us}\u8221") and our website at {\field{\*\fldinst{HYPERLINK "https://www.zed.dev"}}{\fldrslt{\ul +www.zed.dev +}}} +, along with our downloadable Zed software (the \u8220"{\b Software}\u8221") and related subscription service (the \u8220"{\b Service}\u8221"). 
These Terms of Service are a legally binding contract between you and Zed regarding your use of the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Please read the following Terms carefully.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b By accessing or using the Service, you (\u8220"You\u8221" or \u8220"Customer\u8221")\u160 ?agree to these Terms of Service, the Data Processing Addendum (\u8220"DPA\u8221"), available upon request,\u160 ?and Zed\u8217's {\field{\*\fldinst{HYPERLINK "/privacy-policy"}}{\fldrslt{\ul +Privacy Policy +}}} + (collectively, the \u8220"Terms\u8221").}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 If you are not eligible, or do not agree to the Terms, you may not access or use the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 By using the Service, you confirm that you have read and understand these Terms and that they form a binding agreement between you and Zed.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b ARBITRATION\u160 ?NOTICE}. Except for certain kinds of disputes described in Section\u160 ?15.2 (Dispute Resolution and Arbitration), you agree that disputes arising under these Terms will be resolved by binding, individual arbitration, and BY ACCEPTING THESE TERMS, YOU AND ZED ARE EACH WAIVING THE RIGHT TO A TRIAL BY JURY OR TO PARTICIPATE IN ANY CLASS ACTION OR REPRESENTATIVE PROCEEDING.\u160 ?ALTERNATIVELY, CUSTOMER MAY OPT OUT OF ARBITRATION PER SECTION 17.2(a).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 1. Overview\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Subject to these Terms, Zed will permit Customer to access and use Zed\u8217's AI-enabled software-as-a-service offering (the \u8220"{\b Service}\u8221"), which enables certain additional features and functionality (including artificial intelligence and collaboration features) in Zed\u8217's open source code editing software (\u8220"{\b Software}\u8221").\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 2. 
Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.1. Eligibility\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u160 ?must be at least 18 years old\u160 ?to use the Service. By agreeing to these Terms, Customer represents and warrants to Zed that: (a) Customer is at least 18 years old; (b) Customer has not previously been suspended or removed from the Service; and (c) Customer\u8217's registration and use of the Service is in compliance with any and all applicable laws and regulations. If Customer is an entity, organization, or company, the individual accepting these Terms on Customer\u8217's behalf represents and warrants that they have authority to bind Customer to these Terms and Customer agrees to be bound by these Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.2. Access Grant\par} +{\pard \ql \f0 \sa180 \li0 \fi0 During the Term, subject to Customer\u8217's compliance with the terms of the Terms, Customer may access and use the Service only for Customer\u8217's internal business purposes\u160 ?or for individuals, for personal non-commercial purposes, in accordance with the then-current version of Zed\u8217's usage guidelines and standard technical documentation for the Service that Zed makes generally available to its customers (\u8220"{\b Documentation}\u8221"), the Terms, and any terms set forth in the applicable Subscription Service (as defined in Section 3.4 below). Customer\u160 ?agrees\u160 ?to access the Service only through the mechanisms designated by Zed. Without limiting the foregoing, to access the Service, Customer may be required to associate an existing third-party account with the Service to enable authentication (e.g., via OAuth). Customer will be responsible for the acts and omissions of all persons who access the Service through Customer\u8217's account as though such acts and omissions were Customer\u8217's own. 
Customer\u160 ?will promptly notify Zed if it becomes aware of any compromise to its Zed account.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.3. Acceptable Use\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service uses technology provided by multiple third party AI subprocessors (the \u8220"AI Providers\u8221") including but not limited to: Anthropic, PBC (\u8220"Anthropic\u8221"), Google LLC (\u8220"Google\u8221"), LiveKit Incorporated, OpenAI, LLC (\u8220"OpenAI\u8221") etc., as may be updated from time to time. Customer may not use the Service in a manner that violates any applicable AI Provider policy which are listed on {\field{\*\fldinst{HYPERLINK "https://zed.dev/acceptable-use-policies"}}{\fldrslt{\ul +https://zed.dev/acceptable-use-policies +}}} +, including Anthropic\u8217's {\field{\*\fldinst{HYPERLINK "https://www.anthropic.com/legal/aup"}}{\fldrslt{\ul +Usage Policy +}}} +, Google Gemini\u8217's {\field{\*\fldinst{HYPERLINK "https://policies.google.com/terms/generative-ai/use-policy"}}{\fldrslt{\ul +Generative AI Prohibited Use Policy +}}} +, GitHub's {\field{\*\fldinst{HYPERLINK "https://docs.github.com/en/site-policy/acceptable-use-policies/github-acceptable-use-policies"}}{\fldrslt{\ul +Acceptable Use Policy +}}} +, LiveKit\u8217's {\field{\*\fldinst{HYPERLINK "https://livekit.io/legal/acceptable-use-policy"}}{\fldrslt{\ul +Acceptable Use Policy +}}} +; OpenAI\u8217's {\field{\*\fldinst{HYPERLINK "https://openai.com/policies/usage-policies/"}}{\fldrslt{\ul +Usage Policies +}}} +\u160 ?or {\field{\*\fldinst{HYPERLINK "https://openai.com/api/policies/sharing-publication/"}}{\fldrslt{\ul +Sharing and Publication Policy +}}} +; and {\field{\*\fldinst{HYPERLINK "https://openai.com/api/policies/community-guidelines/"}}{\fldrslt{\ul +Community Guidelines +}}} +; each of which may be updated from time to time and are expressly incorporated by reference. 
Customer\u160 ?is solely responsible to check for updates to the applicable AI Provider policy from time to time.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 2.4. Restrictions\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer will not (and will not permit anyone else to), directly or indirectly, do any of the following: (a) provide access to, distribute, sell, or sublicense the Service to a third party; (b)\u160 ?seek to access non-public APIs associated with the Service; (c) copy any element of the Service; (d) interfere with the operation of the Service, circumvent any access restrictions, or conduct any security or vulnerability test of the Service; (e) transmit any viruses or other harmful materials to the Service or others;\u160 ?(f) take any action that risks harm to others or to the security, availability, or integrity of the Service except for the purposes of legitimate security or malware research; or (g) access or use the Service or Output in a manner that violates any applicable relevant local, state, federal or international laws, regulations, or conventions, including those related to data privacy or data transfer, international communications, or export of data (collectively, \u8220"{\b Laws}\u8221"), or the Terms.\u160 ?The Service incorporates functionality provided by third-party services, the use of which is subject to additional terms. Customer agrees that if Customer accesses or uses services, features or functionality in the Software or Service that are provided by a third party, Customer will comply with any applicable terms promulgated by that third party, including as set forth at {\field{\*\fldinst{HYPERLINK "/acceptable-use-policies"}}{\fldrslt{\ul +https://zed.dev/acceptable-use-policies +}}} +\u160 ?(as may be updated from time to time). 
Customer further acknowledges that certain components of the Software or Service may be covered by open source licenses ("{\b Open Source Component}"), including but not limited to Apache License, Version 2.0, GNU General Public License v3.0, and the GNU Affero General Public License v3.0.\u160 ?To the extent required by such open source license for the applicable Open Source Component, the terms of such license will apply to such Open Source Component in lieu of the relevant provisions of these Terms. If such open\u160 ?source license prohibits any of the restrictions in these Terms, such restrictions will not apply to such Open Source Component. Zed shall provide Customer with a list of Open Source Components upon Customer's request.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 3. General Payment Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Accessing certain features and tiers of the Service requires Customer\u160 ?to pay fees. Before Customer pays any fees, Customer will have an opportunity to review and accept the fees that Customer will be charged. Unless otherwise specifically provided for in these Terms, all fees are in U.S. Dollars and are non-refundable, except as required by law.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.1. Price\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed reserves the right to determine pricing for the Service. Zed will make reasonable efforts to keep pricing information published on our pricing page at {\field{\*\fldinst{HYPERLINK "https://zed.dev/pricing"}}{\fldrslt{\ul +https://zed.dev/pricing +}}} +\u160 ?up to date. Zed encourages Customer\u160 ?to check Zed\u8217's pricing page periodically for current pricing information. Zed may change the fees for any feature of the Service, including by adding fees or charges, if Zed gives Customer advance notice of changes before they apply.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.2. 
Taxes\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer is responsible for any sales, use, GST, value-added, withholding, or similar taxes or levies that apply to Orders, whether domestic or foreign, other than Zed\u8217's income tax (\u8220"{\b Taxes}\u8221"). Fees are exclusive of all Taxes. If Customer is compelled to make a deduction or set-off for any such Taxes, Customer will pay Zed such additional amounts as necessary to ensure receipt by Zed of the full amount Zed would have received but for the deduction.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.3. Authorization\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer authorizes Zed to charge all sums for the orders that Customer makes, the level of Service that Customer selects, and Customer\u8217's submission of prompts or other Customer Data (defined below) to the Service to generate Output (defined below) as described in these Terms or published by Zed, including all applicable taxes, to the payment method specified in Customer\u8217's account. If Customer pays any fees with a credit card, then Zed may seek pre-authorization of Customer\u8217's credit card account prior to Customer\u8217's purchase to verify that the credit card is valid and has the necessary funds or credit available to cover Customer\u8217's purchase.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.4. Subscription Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may include certain subscription-based plans with automatically recurring payments for periodic charges ("{\b Subscription Service}"). The "{\b Subscription Billing Date}" is the date when Customer purchases its first subscription to the Service. 
The Subscription Service will begin on the Subscription Billing Date and continue for the subscription period that Customer selects on its account (such period, the "{\b Initial Subscription Period}"), and will automatically renew for successive periods of the same duration as the Initial Subscription Period (the Initial Subscription Period and each such renewal period, each a "{\b Subscription Period}") unless Customer cancels the Subscription Service or Zed terminates it. If Customer activates a Subscription Service, then Customer authorizes Zed or its third-party payment processors to periodically charge, on a going-forward basis and until cancellation of the Subscription Service, all accrued sums on or before the payment due date. For information on the "Subscription Fee", please see Zed\u8217's pricing page at {\field{\*\fldinst{HYPERLINK "https://zed.dev/pricing"}}{\fldrslt{\ul +https://zed.dev/pricing +}}} +. Customer\u8217's\u160 ?account will be charged automatically on the Subscription Billing Date and thereafter on the renewal date of its Subscription Service for all applicable fees and taxes for the next Subscription Period. Customer must cancel its Subscription Service before it renews in order to avoid billing of the next periodic Subscription Fee to Customer\u8217's account. Zed or its third-party payment processor will bill the periodic Subscription Fee to the payment method associated with Customer\u8217's account or that Customer otherwise provides to Zed. Customer\u160 ?may cancel the Subscription Service from the account page at https://zed.dev/account or by contacting us at {\field{\*\fldinst{HYPERLINK "mailto:billing-support@zed.dev"}}{\fldrslt{\ul +billing-support@zed.dev +}}} +. {\b YOUR CANCELLATION MUST BE RECEIVED BEFORE THE RENEWAL DATE IN ORDER TO AVOID BEING CHARGED FOR THE NEXT SUBSCRIPTION PERIOD.}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.5. 
Consumption Fees\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u8217's subscription to the Service may permit Customer to submit prompts or other Customer Data for the purpose of generating Output, at no additional charge for a certain number of times each month. If Customer elects to submit a volume of prompts in excess of the quantity included in its Subscription Fee, then Customer authorizes Zed to charge, and Customer will be charged, a fee for each additional prompt at the rates set forth at {\field{\*\fldinst{HYPERLINK "https://zed.dev/docs/ai/models"}}{\fldrslt{\ul +https://zed.dev/docs/ai/models +}}} +.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 3.6. Delinquent Accounts\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may suspend or terminate access to the Service, including fee-based portions of the Service, for any account for which any amount is due but unpaid. In addition to the amount due for the Service, a delinquent account will be charged with fees or charges that are incidental to any chargeback or collection of any unpaid amount, including collection fees. If your payment method is no longer valid at the time a renewal Subscription Fee is due, then Zed reserves the right to delete your account and any information or Customer Data associated with your account without any liability to Customer.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 4. Data\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.1. 
Zed's Use of Customer Data\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer hereby grants Zed a non-exclusive, worldwide, royalty-free, fully paid-up, non-sublicensable (except to service providers and Customer\u8217's designees), non-transferable (except as set forth in Section 15.1) right to use, copy, store, disclose, transmit, transfer, display, modify, create derivative works from, collect, access, store, host, or otherwise process (\u8220"{\b Process}\u8221") any materials that Customer inputs into or otherwise makes available to the Service (including prompts and other written content) (collectively, \u8220"{\b Customer Data}\u8221") solely: (a) to perform its obligations set forth in the Terms, including its Support obligations as applicable; (b) to derive and generate Telemetry (see Section 4.4); and (c) as necessary to comply with applicable Laws. Except as required by applicable Laws, Zed will not provide Customer Data to any person or entity other than Customer\u8217's designees (including pursuant to Section 7) or service providers. In the event that autocomplete suggestions are turned on, Customer understands and agrees that the Service will periodically send Customer Data in the background to an AI Provider for the purpose of generating autocomplete input suggestions in the Services. Autocomplete features can be turned off at any time, in which case Customer Data will not be sent.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.2. Customer's Ownership of Output\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may generate specifically for, and make available to, Customer text and written content based on or in response to Customer Data input into the Service (collectively, \u8220"{\b Output}\u8221"), including through the use of technologies that incorporate or rely upon artificial intelligence, machine learning techniques, and other similar technology and features. 
As between the Parties, to the greatest extent permitted by applicable Laws, Customer owns all Output and Zed hereby irrevocably assigns to Customer all right, title, and interest in and to the Output that Zed may possess. {\b For the avoidance of doubt, Zed and its AI Providers will not retain or use Customer Data for the purpose of improving or training the Service or any AI Provider products, except to the extent Customer explicitly opts-in on Zed\u8217's specific feature to allow training and/or such improvement (such as fine-tuning) and is solely for the benefit of Customer.}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.3. Zed's Collection of Output Rating\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may enable Customer, at its option, to rate or otherwise provide feedback with respect to Output generated through the Service. If Customer opts in to provide feedback concerning Output using the features of the Software or Service (e.g., by clicking an Output rating button), then Customer agrees that Zed may Process that Output and associated Customer Data for the purpose of product development and improvement (\u8220"Output Rating\u8221"). For clarity, Customer\u8217's decision to opt in to provide Output Rating is specific to the corresponding Output. Your decision to provide Output Rating with respect to one instance of Output does not give Zed the right to use any other Output for Output Rating purposes.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.4. Telemetry\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may collect, generate, and Process\u160 ?information, including technical logs, metrics, and data and learnings, related to the Software and Service (\u8220"{\b Telemetry}\u8221") to improve and support the Services and for other lawful business purposes. 
Customer\u160 ?may configure the Software to opt out of the collection of certain Telemetry Processed\u160 ?locally by the Software itself, but Zed may still collect, generate, and Process Telemetry on Zed\u8217's servers. Zed may not disclose Telemetry to any third-party other than Zed\u8217's Representatives unless it is de-identified so that it does not identify Customer as the source thereof and is aggregated with data across other customers. {\b For avoidance of doubt, Telemetry expressly does not include Customer Data.}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 5. Customer Obligations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer is responsible for its Customer Data and will comply with applicable Laws when using the Service. Customer represents and warrants that it has obtained all rights, consents, and permissions necessary for Zed to Process Customer Data and exercise the rights granted to it in the Terms without violating or infringing Laws or third-party rights. Customer Data shall not contain: (a) any \u8220"protected health information\u8221" or \u8220"PHI\u8221" as defined under HIPAA (including 45 C.F.R. Parts 160 and 164); or (b) any payment card or cardholder data subject to PCI DSS (including primary account numbers, full track or chip data, CVV/CVC codes, PINs, or similar payment card security data). Customer is solely responsible for ensuring compliance with this restriction and shall be liable for, and shall indemnify Zed against, any claims, fines, or penalties arising from Customer\u8217's breach of this Section. Zed disclaims any and all liability in connection with Customer Data.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 6. 
Suspension of Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may immediately suspend Customer\u8217's access to any or all of the Service if: (a) Customer breaches Section 2.2 - 2.4 or Section 5; (b)\u160 ?any payments required under the Terms are overdue by 30 days or more; (c)\u160 ?changes to Laws or new Laws require that Zed suspend the Service or otherwise may impose additional liability on Zed in connection with its provision of the Service to Customer; or (d) Customer\u8217's breach of the Terms risks harm to any of Zed\u8217's other customers or the security, availability, or integrity of the Service or other services and entities. Where practicable, Zed will use reasonable efforts to provide Customer with prior notice of the suspension (email sufficing). If the issue that led to the suspension is resolved, Zed will restore Customer\u8217's access to the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 7. Data Sharing and Third-Party Integrations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 7.1. Collaboration Services\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Certain features of the Service may allow Customer to share data between accounts on the Service, including accounts controlled by persons and entities not associated with Customer (\u8220"{\b Collaboration Features}\u8221"). 
If Customer elects to use Collaboration Features, Customer acknowledges and agrees that Zed will, and authorizes Zed to, make available Customer Data consisting of file paths, file contents, and metadata regarding the code returned by language servers to the third parties designated by Customer, and that Zed exercises no control over, and has no liability for, the acts or omissions of such third parties (including in connection with the Customer Data).\u160 ?Currently, with the exception of the Channel notes feature, Zed does not persist any shared Customer Data beyond the designated Collaboration Feature session.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 7.2. Third-Party Integrations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service may support integration with third-party platforms, add-ons, services, or products not provided by Zed (\u8220"{\b Third-Party Platforms}\u8221"). Use of any Third-Party Platforms integrated with or made available through the Service is subject to Customer\u8217's agreement with the relevant provider and not these Terms. Zed does not control and has no liability for Third-Party Platforms, including their security, functionality, operation, availability, or interoperability with the Service. By enabling a Third-Party Platform to interact with the Service, Customer authorizes Zed to access and exchange Customer Data with such Third-Party Platform on Customer\u8217's behalf.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 8. Disclaimers; No Warranties by Zed\par} +{\pard \ql \f0 \sa180 \li0 \fi0 THE SOFTWARE, SERVICE, OUTPUT, AND ALL OTHER ZED SERVICES ARE PROVIDED \u8220"AS IS\u8221" AND \u8220"AS AVAILABLE\u8221". ZED, ON ITS OWN BEHALF AND ON BEHALF OF ITS SUPPLIERS AND LICENSORS, MAKES NO OTHER WARRANTIES, WHETHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, INCLUDING WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE, OR NONINFRINGEMENT. 
ZED DOES NOT WARRANT THAT CUSTOMER\u8217'S\u160 ?USE OF THE SOFTWARE OR SERVICE WILL BE UNINTERRUPTED OR ERROR-FREE OR THAT IT WILL MAINTAIN CUSTOMER DATA WITHOUT LOSS. ZED IS NOT LIABLE FOR DELAYS, FAILURES, OR PROBLEMS INHERENT IN USE OF THE INTERNET AND ELECTRONIC COMMUNICATIONS OR OTHER SYSTEMS OUTSIDE OF ZED\u8217'S CONTROL.\u160 ?ZED IS\u160 ?NOT RESPONSIBLE FOR ANY DAMAGE THAT MAY RESULT FROM THE SOFTWARE OR SERVICE OR OUTPUT OR CUSTOMER\u8217'S\u160 ?DEALING WITH ANY OTHER SERVICE USER. Without limiting the foregoing, Customer\u160 ?acknowledges\u160 ?and agrees\u160 ?that: (a) the Service may produce inaccurate or erroneous Output; (b) Customer is\u160 ?responsible for independently evaluating the Output and any other information Customer\u160 ?receives\u160 ?from the Service; and (c) due to the nature of the Service and artificial intelligence technologies generally, Output may not be unique and other users of the Service may receive output from the Service that is similar or identical to the Output (and, notwithstanding anything to the contrary, such similar or identical output will not be understood to be Output).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 THE LIMITATIONS, EXCLUSIONS AND DISCLAIMERS IN THIS SECTION\u160 ?8 (DISCLAIMERS; NO WARRANTIES BY\u160 ?ZED)\u160 ?APPLY TO THE FULLEST EXTENT PERMITTED BY LAW. Zed does not disclaim any warranty or other right that Zed is prohibited from disclaiming under applicable law.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 9. Term, Termination, and Modification of the Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.1. Term\par} +{\pard \ql \f0 \sa180 \li0 \fi0 These Terms are effective beginning when Customer\u160 ?accepts\u160 ?the Terms or first downloads, installs, accesses, or uses\u160 ?the Service, and ending when terminated as described in Section 9.2 (Termination).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.2. 
Termination\par} +{\pard \ql \f0 \sa180 \li0 \fi0 If Customer\u160 ?violates\u160 ?any provision of these Terms, then Customer is\u160 ?not authorized to access the Service and these Terms automatically terminate. In addition, Zed may, at its sole discretion, terminate these Terms or Customer\u8217's\u160 ?account on the Service, or suspend or terminate Customer\u8217's\u160 ?access to the Service, at any time for any reason or no reason, with or without notice, and without any liability to Customer\u160 ?arising from such termination. Customer\u160 ?may terminate its\u160 ?account and these Terms at any time by contacting Zed\u160 ?at {\field{\*\fldinst{HYPERLINK "mailto:hi@zed.dev"}}{\fldrslt{\ul +hi@zed.dev +}}} +.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.3. Effect of Termination\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Upon termination of these Terms: a) Customer\u8217's\u160 ?license to access and use the Service will terminate and Customer\u160 ?must immediately cease all use of the Service; b) Customer\u160 ?will no longer be authorized to access its\u160 ?account or the Service; c) Customer\u160 ?must pay Zed any unpaid amount that was due prior to termination; and d) all payment obligations accrued prior to termination and Section(s)\u160 ?2.4 (Restrictions), 3 (General Payment Terms) with the exception of 3.4 (Subscription Service), 4.2\u160 ?(Customer\u8217's Ownership of Output), 4.4 (Telemetry), 8 (Disclaimers; No Warranties by Zed), 9.3 (Effect of Termination), 10 (Ownership; Feedback), 11 (Limitations of Liability), 12 (Indemnity), 15\u160 ?(Governing Law, Dispute Resolution and Arbitration); and 16 (General Terms), will survive. If Customer\u8217's account has been terminated for a breach of these Terms, then Customer is\u160 ?prohibited from creating a new account on the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 9.4. 
Modification of the Service\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed reserves the right to modify or discontinue all or any portion of the Service at any time (including by limiting or discontinuing certain features of the Service), temporarily or permanently, without notice to Customer. Zed will have no liability to Customer\u160 ?for any change to the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 10. Ownership; Feedback\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Neither Party grants the other Party any rights or licenses not expressly set out in the Terms. Except as expressly provided in the Terms, as between the Parties, Customer retains all intellectual property rights and other rights in and to the Customer Data and Output. Except for the rights and licenses granted in the Terms, Zed and its licensors retain all intellectual property rights in and to the Service and Software. To the extent Customer provides Zed with feedback (including suggestions and comments for enhancements or new functionality) regarding the Service or Software, Output, or Zed\u8217's products, services, or other technology (\u8220"{\b Feedback}\u8221"), Zed has the full and unrestricted right (but no obligation) to use or incorporate Feedback in any manner, including to improve and develop any of its products, services, technology, or other materials without attribution to Customer.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 11. 
Limitations of Liability\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.1.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 TO THE FULLEST EXTENT PERMITTED BY LAW, IN NO EVENT WILL THE ZED ENTITIES BE LIABLE TO CUSTOMER FOR ANY INDIRECT, INCIDENTAL, SPECIAL, CONSEQUENTIAL, OR PUNITIVE DAMAGES (INCLUDING DAMAGES FOR LOSS OF PROFITS, GOODWILL, OR ANY OTHER INTANGIBLE LOSS) ARISING OUT OF OR RELATING TO YOUR ACCESS TO OR USE OF, OR YOUR INABILITY TO ACCESS OR USE, THE SERVICE OR ANY MATERIALS OR CONTENT ON THE SERVICE, WHETHER BASED ON WARRANTY, CONTRACT, TORT (INCLUDING NEGLIGENCE), STATUTE, OR ANY OTHER LEGAL THEORY, AND WHETHER OR NOT ANY ZED ENTITY HAS BEEN INFORMED OF THE POSSIBILITY OF DAMAGE.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.2.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 TO THE FULLEST EXTENT PERMITTED BY LAW, THE AGGREGATE LIABILITY OF THE ZED ENTITIES TO CUSTOMER FOR ALL CLAIMS ARISING OUT OF OR RELATING TO THE USE OF OR ANY INABILITY TO USE ANY PORTION OF THE SERVICE, OR OTHERWISE ARISING UNDER THESE TERMS, WHETHER IN CONTRACT, TORT, OR OTHERWISE, IS LIMITED TO THE GREATER OF: \u160 ?THE AMOUNT CUSTOMER HAS PAID TO ZED FOR ACCESS TO AND USE OF THE SERVICE IN THE 12 MONTHS PRIOR TO THE EVENT OR CIRCUMSTANCE GIVING RISE TO THE CLAIM OR US$100.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.3.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 EACH PROVISION OF THESE TERMS THAT PROVIDES FOR A LIMITATION OF LIABILITY, DISCLAIMER OF WARRANTIES, OR EXCLUSION OF DAMAGES IS INTENDED TO AND DOES ALLOCATE THE RISKS BETWEEN THE PARTIES UNDER THESE TERMS. THIS ALLOCATION IS AN ESSENTIAL ELEMENT OF THE BASIS OF THE BARGAIN BETWEEN THE PARTIES. EACH OF THESE PROVISIONS IS SEVERABLE AND INDEPENDENT OF ALL OTHER PROVISIONS OF THESE TERMS. 
THE LIMITATIONS IN THIS SECTION\u160 ?11 (LIMITATION OF LIABILITY) WILL APPLY EVEN IF ANY LIMITED REMEDY FAILS OF ITS ESSENTIAL PURPOSE.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 12. Indemnity\par} +{\pard \ql \f0 \sa180 \li0 \fi0 To the fullest extent permitted by law, Customer is responsible for its use of the Service, and Customer will defend and indemnify Zed, its affiliates, and their respective shareholders, directors, managers, members, officers, employees, consultants, and agents (together, the "Zed Entities") from and against every claim brought by a third party, and any related liability, damage, loss, and expense, including attorneys' fees and costs, arising out of or connected with: (1) Customer\u8217's unauthorized use of, or misuse of, the Service; (2) the Customer Data; (3) Customer\u8217's use of Output; (4) Customer\u8217's violation or alleged violation of any portion of these Terms, any representation, warranty, or agreement referenced in these Terms, or any applicable law or regulation; (5) Customer\u8217's violation or alleged violation of any third-party right, including any intellectual property right or publicity, confidentiality, other property, or privacy right; or (6) any dispute or issue between Customer and any third party. Zed reserves the right, at Zed\u8217's own expense, to assume the exclusive defense and control of any matter otherwise subject to indemnification by Customer (without limiting Customer\u8217's indemnification obligations with respect to that matter), and in that case, Customer agrees to cooperate with our defense of those claims.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 13. Confidentiality\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.1. 
Definition\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \u8220"{\b Confidential Information}\u8221" means information disclosed to the receiving Party (\u8220"{\b Recipient}\u8221") under the Terms that is designated by the disclosing Party (\u8220"{\b Discloser}\u8221") as proprietary or confidential or that should be reasonably understood to be proprietary or confidential due to its nature and the circumstances of its disclosure. Zed\u8217's Confidential\u160 ?Information\u160 ?includes the terms and conditions of the Terms and the Service (including any technical or performance information about the Service).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.2. Obligations\par} +{\pard \ql \f0 \sa180 \li0 \fi0 As Recipient, each Party will: (a) hold Confidential Information in confidence and not disclose it to third parties except as permitted in the Terms, including Section 4.1; and (b) only use Confidential Information to fulfill its obligations and exercise its rights under the Terms. Recipient may disclose Confidential Information to its employees, agents, contractors, and other representatives having a legitimate need to know (including, for Zed, the subcontractors referenced in Section 16.5) (\u8220"{\b Representatives}\u8221"), provided Recipient remains responsible for its respective Representatives\u8217' compliance with this Section\u160 ?13 and such Representatives are bound by written agreements (or, in the case of professional advisers like attorneys and accountants, ethical duties) imposing confidentiality and non-use obligations no less protective than this Section 13.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.3. 
Exclusions\par} +{\pard \ql \f0 \sa180 \li0 \fi0 These confidentiality obligations do not apply to information that Recipient can document: (a) is or becomes public knowledge through no fault of Recipient or its Representatives; (b) it rightfully knew or possessed prior to receipt under the Terms; (c) it rightfully received from a third party without breach of confidentiality obligations; or (d) it independently developed without using Confidential Information.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.4. Remedies\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Unauthorized use or disclosure of Confidential\u160 ?Information\u160 ?may cause substantial harm for which damages alone are an insufficient remedy. Discloser\u160 ?may seek appropriate equitable relief, in addition to other available remedies, for breach or threatened breach of this Section 13, without the \u160 ?necessity\u160 ?of posting a bond or proving actual damages.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 13.5. Required Disclosures\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Nothing in the Terms prohibits Recipient from making disclosures, including of Customer Data and other Confidential Information, if required by Laws, subpoena, or court order, provided (if permitted by Laws) it notifies Discloser in advance and cooperates in any effort to obtain confidential treatment.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 14. Publicity\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Neither Party may publicly announce that the Parties have entered into the Terms, except with the other Party\u8217's prior consent or as required by Laws. However, Zed may use the name, brand, or logo of Customer (or Customer\u8217's parent company) for the purpose of identifying Customer as a licensee or customer on Zed\u8217's website or in other promotional materials. 
Zed will cease further use at Customer\u8217's written request.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 15. Governing Law, Dispute Resolution and Arbitration\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 15.1. Governing Law, Jurisdiction and Venue\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Terms are governed by the laws of the State of Delaware and the United States without regard to conflicts of laws provisions that would result in the application of the laws of another jurisdiction and without regard to the United Nations Convention on the International Sale of Goods. The parties further agree that except as stated below in the Arbitration provision, and for any claims under Section 15.2 (b), each party irrevocably consents to the exclusive jurisdiction and venue of the state and federal courts located in New Castle County, Delaware, for any action arising out of or relating to these Terms, and waive any objection based on venue or forum non conveniens. ANY CAUSE OF ACTION OR CLAIM CUSTOMER MAY HAVE ARISING OUT OF OR RELATING TO THESE TERMS MUST BE COMMENCED WITHIN ONE (1) YEAR AFTER THE CAUSE OF ACTION OR CLAIM ACCRUES, OTHERWISE, SUCH CAUSE OF ACTION OR CLAIM IS PERMANENTLY BARRED.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 15.2. Dispute Resolution and Arbitration\par} +{\pard \ql \f0 \sa180 \li0 \fi0 ANY CONTROVERSY OR CLAIM ARISING OUT OF OR RELATING TO THESE TERMS, OR THE BREACH THEREOF, SHALL BE SETTLED BY ARBITRATION AND JUDGMENT ON THE AWARD RENDERED BY THE ARBITRATOR MAY BE ENTERED IN ANY COURT HAVING JURISDICTION THEREOF. IF THERE IS A DISPUTE ABOUT WHETHER THIS ARBITRATION AGREEMENT CAN BE ENFORCED OR APPLIES TO THE DISPUTE, CUSTOMER AND ZED AGREE THAT THE ARBITRATOR WILL DECIDE THAT ISSUE.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b a. 
Opt-Out.} If Customer does not wish to resolve disputes by binding arbitration, Customer may opt out of the provisions of this Section 15.2 (Dispute Resolution and Arbitration) within 30 days after the date that Customer agrees to these Terms by sending an email to {\field{\*\fldinst{HYPERLINK "mailto:arbitration-opt-out@zed.dev"}}{\fldrslt{\ul +arbitration-opt-out@zed.dev +}}} +\u160 ?or a letter to Zed Industries, Inc., Attention: Legal Department \u8211- Arbitration Opt-Out, 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 that specifies: Customer\u8217's full legal name, the email address associated with Customer\u8217's account on the Service, and a statement that Customer wishes to opt out of arbitration (\u8220"{\b Opt-Out Notice}\u8221"). Once Zed receives Customer\u8217's Opt-Out Notice, this Section 15.2 (Dispute Resolution and Arbitration) will be void and any action arising out of these Terms will be resolved as set forth in Section 15.1 (Governing Law). The remaining provisions of these Terms will not be affected by Customer\u8217's Opt-Out Notice.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b b. Pre-Arbitration Dispute Resolution and Notification.} Prior to initiating an arbitration, Customer and Zed each agree to notify the other party of the dispute and attempt to negotiate an informal resolution to it first. Zed will contact Customer at the email address Customer has provided to Zed; Customer can contact Zed by email at\u160 ?{\field{\*\fldinst{HYPERLINK "mailto:legal@zed.dev"}}{\fldrslt{\ul +legal@zed.dev +}}} +. If after a good faith effort to negotiate, one party feels the dispute has not and cannot be resolved informally, the party intending to pursue arbitration agrees to notify the other party via email prior to initiating the arbitration.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b c. 
Exceptions to Arbitration.} Customer and Zed each agree that the following claims are exceptions to arbitration and will be brought in a judicial proceeding in a court of competent jurisdiction: (i) Any claim related to actual or threatened infringement, misappropriation or violation of a party\u8217's copyrights, trademarks, trade secrets, patents, or other intellectual property rights; or (ii) Any claim seeking emergency injunctive relief based on exigent circumstances (e.g., imminent danger or commission of a crime, hacking, cyber-attack).\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b d. Arbitration Rules.} (1) If Customer is domiciled in the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be settled by arbitration administered by the American Arbitration Association in accordance with its Commercial Arbitration Rules, and judgment on the award rendered by the arbitrator may be entered in any court having jurisdiction thereof.\u160 ?(2)\u160 ?If Customer is domiciled internationally outside the U.S. - Any controversy or claim arising out of or relating to this contract, or the breach thereof, shall be determined by arbitration administered by the International Centre for Dispute Resolution in accordance with its International Arbitration Rules.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 {\b e. Modification to AAA Rules - Arbitration Hearing/Location.} Customer agrees that any required arbitration hearing will be conducted in the English language by one (1) mutually agreed upon arbitrator, (a) in city/county and state of Customer\u8217's headquarters unless both parties agree otherwise; and appearances may be made via telephonic or video hearing; and (b) for any claim or counterclaim under $25,000, by solely the submission of documents to the arbitrator.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 15.3. 
Waiver of Jury Trial and Class Action Waiver\par} +{\pard \ql \f0 \sa180 \li0 \fi0 EACH PARTY HEREBY IRREVOCABLY WAIVES ALL RIGHT TO TRIAL BY JURY IN ANY ACTION, SUIT, PROCEEDING, CLAIM, OR COUNTERCLAIM ARISING OUT OF OR RELATING TO THESE TERMS. CUSTOMER AND ZED EACH AGREE THAT ANY SUIT, PROCEEDING, OR OTHER ACTION ARISING OUT OF OR RELATED TO THESE TERMS WILL BE CONDUCTED ONLY ON AN INDIVIDUAL BASIS AND NOT IN A CLASS, CONSOLIDATED OR REPRESENTATIVE ACTION.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 16. General Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.1.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 These Terms, including the Privacy Policy and any other agreements expressly incorporated by reference into these Terms, are the entire and exclusive understanding and agreement between Customer and Zed regarding your use of the Service. Customer\u160 ?may not assign or transfer these Terms or its rights under these Terms, in whole or in part, by operation of law or otherwise, without Zed\u8217's prior written consent. Zed may assign these Terms and all rights granted under these Terms at any time without notice or consent. The failure to require performance of any provision will not affect Zed\u8217's right to require performance at any other time after that, nor will a waiver by Zed of any breach or default of these Terms, or any provision of these Terms, be a waiver of any subsequent breach or default or a waiver of the provision itself. Use of Section\u160 ?headers in these Terms are for convenience only and will not have any impact on the interpretation of any provision. 
Throughout these Terms the use of the word \u8220"including\u8221" means \u8220"including but not limited to.\u8221" If any part of these Terms are held to be invalid or unenforceable, then the unenforceable part will be given effect to the greatest extent possible, and the remaining parts will remain in full force and effect.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.2. Notices\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Except as set out in the Terms, any notice or consent under the Terms must be in writing to the Customer email address on the Order and Customer shall send all notices to Zed at Zed Industries, Inc., 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to {\field{\*\fldinst{HYPERLINK "mailto:legal@zed.dev"}}{\fldrslt{\ul +legal@zed.dev +}}} +\u160 ?and will be deemed given: (a) upon receipt if by personal delivery; (b) upon receipt if by certified or registered U.S. mail (return receipt requested); or (c) one day after dispatch if by a commercial overnight delivery service. Either Party may update its address with notice to the other Party pursuant to this Section. Zed may also send operational notices to Customer\u160 ?by email or through the Service.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.3. DPA\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The terms of the Data Processing Agreement (\u8220"{\b DPA}\u8221"), available upon request, are incorporated into these Terms by reference.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.4. Modification of Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may, from time to time, change these Terms. Please check these Terms periodically for changes. Revisions will be effective immediately except that, for existing users, material revisions will be effective 30 days after posting or notice to Customer of the revisions unless otherwise stated. Zed may require that Customer\u160 ?accept modified Terms in order to continue to use the Service. 
If Customer does not agree to the modified Terms, then Customer should discontinue its use of the Service and notify Zed at hi@zed.dev, in which case Zed will provide a pro-rated refund of any prepaid Subscription Fee. The terms in any Customer purchase order or business form will not amend or modify the Terms and are expressly rejected by Zed; any of these Customer documents are for administrative purposes only and have no legal effect with respect to the Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.5. Subcontractors\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Zed may use subcontractors and permit them to exercise Zed\u8217's rights, but Zed remains responsible for their compliance with the Terms and for its overall performance under the Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.6. Independent Contractors\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Parties are independent contractors, not agents, partners, or joint venturers.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.7. Export\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u160 ?will comply with all relevant U.S. and foreign export and import Laws in using the Service. Customer: (a) represents and warrants that it is not listed on any U.S. government list of prohibited or restricted parties or located in (or a national of) a country that is subject to a U.S. government embargo or that has been designated by the U.S. government as a \u8220"terrorist supporting\u8221" country; (b) agrees not to access or use the Service in violation of any U.S. export embargo, prohibition, or restriction; and (c) will not submit to the Service any information controlled under the U.S. International Traffic in Arms Regulations.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.8. Government End-Users\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Elements of the Service may include commercial computer software. 
If the user or licensee of the Service is an agency, department, or other entity of the United States Government, the use, duplication, reproduction, release, modification, disclosure, or transfer of the Service or any related documentation of any kind, including technical data and manuals, is restricted by the terms of the Terms in accordance with Federal Acquisition Regulation 12.212 for civilian purposes and Defense Federal Acquisition Regulation Supplement 227.7202 for military purposes. The Service was developed fully at private expense. All other use is prohibited.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.9. Privacy Policy\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Please read the {\field{\*\fldinst{HYPERLINK "/privacy-policy"}}{\fldrslt{\ul +Zed Privacy Policy +}}} + (the \u8220"{\b Privacy Policy}\u8221") carefully for information relating to our collection, use, storage, and disclosure of your personal information. The Zed Privacy Policy is incorporated by this reference into, and made a part of, these Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.10. Additional Terms\par} +{\pard \ql \f0 \sa180 \li0 \fi0 Customer\u8217's use of the Service is subject to all additional terms, policies, rules, or guidelines applicable to the Service or certain features of the Service that we may post on or link to from the Service (the \u8220"{\b Additional Terms}\u8221"). All Additional Terms are incorporated by this reference into, and made a part of, these Terms.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.11. Consent to Electronic Communications\par} +{\pard \ql \f0 \sa180 \li0 \fi0 By using the Service, Customer consents to receiving certain electronic communications from Zed as further described in the Privacy Policy. Please read the Privacy Policy to learn more about Zed\u8217's electronic communications practices. 
Customer agrees that any notices, agreements, disclosures, or other communications that Zed sends to Customer electronically will satisfy any legal communication requirements, including that those communications be in writing. Zed may send Customer emails concerning Zed products and services, as well as those of third parties. Customer\u160 ?may opt out of promotional emails by following the unsubscribe instructions in the promotional email itself.\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.12. Contact Information\par} +{\pard \ql \f0 \sa180 \li0 \fi0 The Service is offered by Zed Industries, Inc. Customer\u160 ?may contact Zed by sending correspondence to 2590 Welton Street, Suite 200, PMB 1916, Denver, CO 80205 with cc: to {\field{\*\fldinst{HYPERLINK "mailto:legal@zed.dev"}}{\fldrslt{\ul +legal@zed.dev }}} .\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 4. FEE BASED SERVICES, FEES AND PAYMENT TERMS\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.1. Fee Based Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The Zed AI Services is made available with additional usage benefits (the \u8220"Enhanced Use \u8221") as described in the table published at {\field{\*\fldinst{HYPERLINK "https://zed.dev/pricing"}}{\fldrslt{\ul -zed.dev/pricing -}}} - (the \u8220"Pricing Table\u8221"), subject to the requirements and limitations set forth in the Pricing Table and these Terms. In order to make use of the Enhanced Use, Customer must access the Zed AI Services through a Zed registered account.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.2. Fees\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Customer shall pay to Zed the applicable fees set forth in Pricing Table, together with any applicable taxes and shipping and handling (collectively, the \u8220"Fees\u8221"). Customer shall have no right of return, and all Fees shall be non-refundable.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.3. 
Payment Terms\par} -{\pard \ql \f0 \sa180 \li0 \fi0 All amounts payable to Zed under this Agreement shall be paid in United States dollars and paid Zed according to the method of payment, frequency and calculated as set forth in the Pricing Table.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 4.4. Taxes; Set-offs\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Any and all payments made by Customer in accordance with this Agreement are exclusive of any taxes that might be assessed by any jurisdiction. Customer shall pay or reimburse Zed for all sales, use, property and similar taxes; all customs duties, import fees, stamp duties, license fees and similar charges; and all other mandatory payments to government agencies of whatever kind, except taxes imposed on the net or gross income of Zed. All amounts payable to Zed under this Agreement shall be without set-off and without deduction of any taxes, levies, imposts, charges, withholdings and/or duties of any nature which may be levied or imposed, including without limitation, value added tax, customs duty and withholding tax.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 5. TERM AND TERMINATION\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 5.1. Term\par} -{\pard \ql \f0 \sa180 \li0 \fi0 The term of this Agreement shall commence on the date You first download the Editor or use the Zed Service (the "Effective Date"), and unless terminated earlier according to this Section 3, will end pursuant to this Section 5 (the "Term").\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 5.2. Termination\par} -{\pard \ql \f0 \sa180 \li0 \fi0 This Agreement may be terminated: (a) by either party if the other has materially breached this Agreement; or (b) by Zed at any time and for any reason upon notice to Customer. 
You acknowledge that Zed is under no obligation to continue to operate the Zed Service or make the Editor available, and We may end any programs in connection with the same at any time.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 5.3. Effect of Termination and Survival\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Upon any expiration or termination of this Agreement, Customer shall (i) immediately cease use of the Zed Service, and (ii) return all Zed Confidential Information and other materials provided by Zed. The following provisions will survive termination of this Agreement: Sections 3.3 (Customer Data), Section 3.4 (Privacy Policy), Section 5.3 (Effect of Termination and Survival), Section 6 (Ownership), Section 7 (Indemnification), Section 9 (Limitation of Liability), Section 10 (Third Party Services), and Section 11 (Miscellaneous).\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 6. OWNERSHIP\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed retains all right, title, and interest in and to the Zed Service, Editor, and any software, products, works or other intellectual property created, used, provided, or made available by Zed under or in connection with the Zed Service or Editor. Customer may from time to time provide suggestions, comments, or other feedback to Zed with respect to the Zed Service or Editor ("Feedback"). Customer shall, and hereby does, grant to Zed a nonexclusive, worldwide, perpetual, irrevocable, transferable, sublicensable, royalty-free, fully paid-up license to use and exploit the Feedback for any purpose. You retain all right, title and interest in and to the Customer Data, including all intellectual property rights therein. No intellectual property rights with respect to any software code you develop or modify with the Editor or Zed Service (collectively, the \u8220"Output\u8221") are transferred or assigned to Zed hereunder.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 7. 
INDEMNIFICATION\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Customer will defend, indemnify, and hold Zed, its affiliates, suppliers and licensors harmless and each of their respective officers, directors, employees and representatives from and against any claims, damages, losses, liabilities, costs, and expenses (including reasonable attorneys' fees) arising out of or relating to any third party claim with respect to: (a) Customer Data; (b) breach of this Agreement or violation of applicable law by Customer; or (c) alleged infringement or misappropriation of third-party's intellectual property rights resulting from Customer Data.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 8. WARRANTY\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed does not represent or warrant that the operation of the Zed Service or Editor (or any portion thereof) will be uninterrupted or error free, or that the Zed Service or Editor (or any portion thereof) will operate in combination with other hardware, software, systems or data not provided by Zed. CUSTOMER ACKNOWLEDGES THAT, ZED MAKES NO EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES OF ANY KIND WITH RESPECT TO THE SERVICE OR SOFTWARE, OR THEIR CONDITION. ZED HEREBY EXPRESSLY EXCLUDES, ANY AND ALL OTHER EXPRESS OR IMPLIED REPRESENTATIONS OR WARRANTIES, WHETHER UNDER COMMON LAW, STATUTE OR OTHERWISE, INCLUDING WITHOUT LIMITATION ANY AND ALL WARRANTIES AS TO MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, SATISFACTORY QUALITY OR NON-INFRINGEMENT OF THIRD-PARTY RIGHTS.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 9. LIMITATIONS OF LIABILITY\par} -{\pard \ql \f0 \sa180 \li0 \fi0 IN NO EVENT SHALL ZED BE LIABLE FOR ANY LOST DATA, LOST PROFITS, BUSINESS INTERRUPTION, REPLACEMENT SERVICE OR OTHER SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR INDIRECT DAMAGES, HOWEVER CAUSED AND REGARDLESS OF THEORY OF LIABILITY. 
ZED'S LIABILITY FOR ALL CLAIMS ARISING UNDER THIS AGREEMENT, WHETHER IN CONTRACT, TORT OR OTHERWISE, SHALL NOT EXCEED THE GREATER OF: THE FEES PAID TO ZED BY CUSTOMER DURING THE TWELVE (12) MONTH PERIOD PRECEDING THE DATE OF THE CLAIM, OR ONE THOUSAND US DOLLARS ($1,000).\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 10. Third Party Services\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed may make certain third party services available to You within the Editor or the Zed Service (each a "Third Party Service"). You acknowledge and agree that (a) use of each Third Party Service is subject to the corresponding terms and conditions available at the following URL: {\field{\*\fldinst{HYPERLINK "https://zed.dev/third-party-terms"}}{\fldrslt{\ul -https://zed.dev/third-party-terms -}}} - and/or presented in connection with Your use of such Third Party Service; (b) the terms and conditions of this Agreement do not apply with respect to Your use of any Third Party Service; and (c) Zed is not liable in any way regarding Your use of any Third Party Service.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel1 \b \fs32 11. MISCELLANEOUS\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.1. Export Control\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You hereby certify that You will comply with all current US Export Control laws. You agree to defend, indemnify and hold Zed harmless from any liability for Your violation of U.S. Export Control laws.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.2. Compliance with Laws\par} -{\pard \ql \f0 \sa180 \li0 \fi0 You shall comply with all applicable laws and regulations in its use of the Solution, including without limitation the unlawful gathering or collecting, or assisting in the gathering or collecting of information in violation of any privacy laws or regulations. 
You shall, at its own expense, defend, indemnify and hold harmless Zed from and against any and all claims, losses, liabilities, damages, judgments, government or federal sanctions, costs and expenses (including attorneys' fees) incurred by Zed arising from any claim or assertion by any third party of violation of privacy laws or regulations by You or any of its agents, officers, directors or employees.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.3. Assignment\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Neither party may transfer and assign its rights and obligations under this Agreement without the prior written consent of the other party. Notwithstanding the foregoing, Zed may transfer and assign its rights under this Agreement without consent from the other party in connection with a change in control, acquisition or sale of all or substantially all of its assets.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.4. Force Majeure\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Neither party shall be responsible for failure or delay in performance by events out of their reasonable control, including but not limited to, acts of God, Internet outage, terrorism, war, fires, earthquakes and other disasters (each a "Force Majeure"). Notwithstanding the foregoing: if a Force Majeure continues for more than thirty (30) days, either party may to terminate this agreement by written notice to the other party.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.5. Notice\par} -{\pard \ql \f0 \sa180 \li0 \fi0 All notices between the parties shall be in writing and shall be deemed to have been given if personally delivered or sent by registered or certified mail (return receipt), or by recognized courier service.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.6. No Agency\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Both parties agree that no agency, partnership, joint venture, or employment is created as a result of this Agreement. 
You do not have any authority of any kind to bind Zed.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.7. Governing Law\par} -{\pard \ql \f0 \sa180 \li0 \fi0 This Agreement shall be governed exclusively by, and construed exclusively in accordance with, the laws of the United States and the State of California, without regard to its conflict of laws provisions. The federal courts of the United States in the Northern District of California and the state courts of the State of California shall have exclusive jurisdiction to adjudicate any dispute arising out of or relating to this Agreement. Each party hereby consents to the jurisdiction of such courts and waives any right it may otherwise have to challenge the appropriateness of such forums, whether on the basis of the doctrine of forum non conveniens or otherwise. The United Nations Convention on Contracts for the International Sale of Goods shall not apply to this Agreement or any Purchase Order issued under this Agreement.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.8. Updated Agreement\par} -{\pard \ql \f0 \sa180 \li0 \fi0 Zed reserves the right to update this Agreement at any time. The terms and conditions of the updated version of the Agreement shall apply to the Zed Service and Editor downloaded, or accessed following the date of publication of the updated version. If You do not agree with any terms of the updated Agreement, You may not use or access the Zed Service or Editor in any manner. Zed may from time-to-time provide release notes applicable to the Editor or Zed Service, and such release notes may contain additional use restrictions or terms applicable to Customer Data. Your use of the Editor or Zed Service after the applicable release notes are made available shall be subject to the additional use restrictions or terms applicable to Customer Data.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 11.9. 
Entire Agreement\par} -{\pard \ql \f0 \sa180 \li0 \fi0 This Agreement is the complete and exclusive statement of the mutual understanding of the parties and supersedes and cancels all previous written and oral agreements, communications, and other understandings relating to the subject matter of this Agreement, and all waivers and modifications must be in a writing signed by both parties, except as otherwise provided herein. Any term or provision of this Agreement held to be illegal or unenforceable shall be, to the fullest extent possible, interpreted so as to be construed as valid, but in any event the validity or enforceability of the remainder hereof shall not be affected.\par} -{\pard \ql \f0 \sa180 \li0 \fi0 {\b DATE: May 6, 2025}\par} +{\pard \ql \f0 \sa180 \li0 \fi0 \outlinelevel2 \b \fs28 16.13. Notice to California Residents\par} +{\pard \ql \f0 \sa180 \li0 \fi0 If Customer is a California resident, then under California Civil Code Section\u160 ?1789.3, Customer may contact the Complaint Assistance Unit of the Division of Consumer Services of the California Department of Consumer Affairs in writing at 1625 N. Market Blvd., Suite N 112, Sacramento, California 95834, or by telephone at +1-800-952-5210 in order to resolve a complaint regarding the Service or to receive further information regarding use of the Service.\par} } From f0620c6f8f3c7bc9369143583f26fa6e07719303 Mon Sep 17 00:00:00 2001 From: Marco Mihai Condrache <52580954+marcocondrache@users.noreply.github.com> Date: Mon, 2 Mar 2026 22:33:05 +0100 Subject: [PATCH 245/548] editor: Preserve compound emojis (#50082) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #45466 Emojis like 🧑‍✈️ are grapheme clusters formed using zero-width joiners and variation selectors. Iterating over the string by individual chars in `highlight_invisibles` breaks these clusters, even though they represent a single visible character. 
Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Zed now supports compound emojis --- crates/editor/src/display_map.rs | 47 +++++++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 10c17871709e7f6ac237cb3ecb000724b0095c01..610e30f4e1538fa1eb91768a91bd816b3cbd00dd 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1498,7 +1498,7 @@ impl<'a> HighlightedChunk<'a> { self, editor_style: &'a EditorStyle, ) -> impl Iterator + 'a { - let mut chars = self.text.chars().peekable(); + let mut chunks = self.text.graphemes(true).peekable(); let mut text = self.text; let style = self.style; let is_tab = self.is_tab; @@ -1506,10 +1506,12 @@ impl<'a> HighlightedChunk<'a> { let is_inlay = self.is_inlay; iter::from_fn(move || { let mut prefix_len = 0; - while let Some(&ch) = chars.peek() { - if !is_invisible(ch) { - prefix_len += ch.len_utf8(); - chars.next(); + while let Some(&chunk) = chunks.peek() { + let mut chars = chunk.chars(); + let Some(ch) = chars.next() else { break }; + if chunk.len() != ch.len_utf8() || !is_invisible(ch) { + prefix_len += chunk.len(); + chunks.next(); continue; } if prefix_len > 0 { @@ -1523,8 +1525,8 @@ impl<'a> HighlightedChunk<'a> { replacement: renderer.clone(), }); } - chars.next(); - let (prefix, suffix) = text.split_at(ch.len_utf8()); + chunks.next(); + let (prefix, suffix) = text.split_at(chunk.len()); text = suffix; if let Some(replacement) = replacement(ch) { let invisible_highlight = HighlightStyle { @@ -4124,4 +4126,35 @@ pub mod tests { assert_eq!(ranges[0].start, 
DisplayPoint::new(DisplayRow(0), 10)); assert_eq!(ranges[0].end, DisplayPoint::new(DisplayRow(0), 14)); } + + #[test] + fn test_highlight_invisibles_preserves_compound_emojis() { + let editor_style = EditorStyle::default(); + + let pilot_emoji = "🧑\u{200d}✈\u{fe0f}"; + let chunk = HighlightedChunk { + text: pilot_emoji, + style: None, + is_tab: false, + is_inlay: false, + replacement: None, + }; + + let chunks: Vec<_> = chunk + .highlight_invisibles(&editor_style) + .map(|chunk| chunk.text.to_string()) + .collect(); + + assert_eq!( + chunks.concat(), + pilot_emoji, + "all text bytes must be preserved" + ); + assert_eq!( + chunks.len(), + 1, + "compound emoji should not be split into multiple chunks, got: {:?}", + chunks, + ); + } } From 1f11592d93fd401520df9f603a5c61aefe967841 Mon Sep 17 00:00:00 2001 From: Dino Date: Mon, 2 Mar 2026 22:13:09 +0000 Subject: [PATCH 246/548] editor: Remove folded buffer ID on all excerpts removed (#50525) Fix a bug in the editor's handling of the `multi_buffer::Event::ExcerptsRemoved` event, where the display map's `folded_buffers` set was not updated according to the list of removed buffer ids. Since the `ProjectSearchView` now relies on the `Editor.has_any_buffer_folded` method in order to decide the state of the expand/collapse all excerpts button this could lead to a bug where, after an initial project search, all excerpts would be collapsed, and performing a new search would leave the button in a wrong state, as all excerpts from the new search result would be expanded, but the button would still consider that there were folded excerpts for the buffers. 
Closes #50521 Release Notes: - Fixed bug in project search where collapsing a single buffer then performing a new search in the same view, would break the expand/collapse all button behavior --- crates/editor/src/display_map.rs | 4 ++ crates/editor/src/editor.rs | 1 + crates/editor/src/editor_tests.rs | 71 +++++++++++++++++++++++++ crates/multi_buffer/src/multi_buffer.rs | 2 + 4 files changed, 78 insertions(+) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 610e30f4e1538fa1eb91768a91bd816b3cbd00dd..57b8eb8ef6c1b29cb99da3e2a4e731d0c828038e 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1003,6 +1003,10 @@ impl DisplayMap { &self.block_map.folded_buffers } + pub(super) fn clear_folded_buffer(&mut self, buffer_id: language::BufferId) { + self.block_map.folded_buffers.remove(&buffer_id); + } + #[instrument(skip_all)] pub fn insert_creases( &mut self, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index eb8601d59e1c9970f367177f3f365f4feb30811e..6f1961a97880e0f5f55577c406b77e3796568a8e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -24145,6 +24145,7 @@ impl Editor { self.display_map.update(cx, |display_map, cx| { display_map.invalidate_semantic_highlights(*buffer_id); display_map.clear_lsp_folding_ranges(*buffer_id, cx); + display_map.clear_folded_buffer(*buffer_id); }); } jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 525910ed276cdfe5fb5c3c2b784269d834c70316..2898954b75a97c7d7d0a922eae8e71c8b598a7d5 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -24308,6 +24308,77 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_folded_buffers_cleared_on_excerpts_removed(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let fs = 
FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "file_a.txt": "File A\nFile A\nFile A", + "file_b.txt": "File B\nFile B\nFile B", + }), + ) + .await; + + let project = Project::test(fs, [path!("/root").as_ref()], cx).await; + let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let cx = &mut VisualTestContext::from_window(*window, cx); + let worktree = project.update(cx, |project, cx| { + let mut worktrees = project.worktrees(cx).collect::>(); + assert_eq!(worktrees.len(), 1); + worktrees.pop().unwrap() + }); + let worktree_id = worktree.update(cx, |worktree, _| worktree.id()); + + let buffer_a = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, rel_path("file_a.txt")), cx) + }) + .await + .unwrap(); + let buffer_b = project + .update(cx, |project, cx| { + project.open_buffer((worktree_id, rel_path("file_b.txt")), cx) + }) + .await + .unwrap(); + + let multi_buffer = cx.new(|cx| { + let mut multi_buffer = MultiBuffer::new(ReadWrite); + let range_a = Point::new(0, 0)..Point::new(2, 4); + let range_b = Point::new(0, 0)..Point::new(2, 4); + + multi_buffer.set_excerpts_for_path(PathKey::sorted(0), buffer_a.clone(), [range_a], 0, cx); + multi_buffer.set_excerpts_for_path(PathKey::sorted(1), buffer_b.clone(), [range_b], 0, cx); + multi_buffer + }); + + let editor = cx.new_window_entity(|window, cx| { + Editor::new( + EditorMode::full(), + multi_buffer.clone(), + Some(project.clone()), + window, + cx, + ) + }); + + editor.update(cx, |editor, cx| { + editor.fold_buffer(buffer_a.read(cx).remote_id(), cx); + }); + assert!(editor.update(cx, |editor, cx| editor.has_any_buffer_folded(cx))); + + // When the excerpts for `buffer_a` are removed, a + // `multi_buffer::Event::ExcerptsRemoved` event is emitted, which should be + // picked up by the editor and update the display map accordingly. 
+ multi_buffer.update(cx, |multi_buffer, cx| { + multi_buffer.remove_excerpts_for_path(PathKey::sorted(0), cx) + }); + assert!(!editor.update(cx, |editor, cx| editor.has_any_buffer_folded(cx))); +} + #[gpui::test] async fn test_folding_buffers_with_one_excerpt(cx: &mut TestAppContext) { init_test(cx, |_| {}); diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs index a593280d245fd01d623051953e48128c9935df45..c991fd9a5cbfe451b3f86ff016f8467395373564 100644 --- a/crates/multi_buffer/src/multi_buffer.rs +++ b/crates/multi_buffer/src/multi_buffer.rs @@ -105,6 +105,8 @@ pub enum Event { }, ExcerptsRemoved { ids: Vec, + /// Contains only buffer IDs for which all excerpts have been removed. + /// Buffers that still have remaining excerpts are never included. removed_buffer_ids: Vec, }, ExcerptsExpanded { From 7ad524661d4e0afd55441bd64a4e50e5cd8d8e07 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Mon, 2 Mar 2026 23:18:49 +0100 Subject: [PATCH 247/548] Remove Supermaven-related code from Zed (#50537) Follow-up of https://github.com/zed-industries/zed/pull/49317 See also https://supermaven.com/blog/sunsetting-supermaven - N/A --- .github/CODEOWNERS.hold | 2 - Cargo.lock | 45 -- Cargo.toml | 5 - crates/agent_ui/src/agent_ui.rs | 10 - crates/edit_prediction/src/edit_prediction.rs | 2 - crates/edit_prediction_ui/Cargo.toml | 1 - .../src/edit_prediction_button.rs | 153 ------ crates/editor/src/editor.rs | 2 +- crates/icons/src/icons.rs | 4 - crates/language/src/language_settings.rs | 3 +- crates/paths/src/paths.rs | 6 - crates/settings_content/src/language.rs | 5 - crates/supermaven/Cargo.toml | 44 -- crates/supermaven/LICENSE-GPL | 1 - crates/supermaven/src/messages.rs | 146 ------ crates/supermaven/src/supermaven.rs | 485 ------------------ .../supermaven_edit_prediction_delegate.rs | 303 ----------- crates/supermaven_api/Cargo.toml | 23 - crates/supermaven_api/LICENSE-GPL | 1 - crates/supermaven_api/src/supermaven_api.rs | 125 
----- crates/zed/Cargo.toml | 1 - crates/zed/src/main.rs | 1 - crates/zed/src/zed.rs | 1 - .../zed/src/zed/edit_prediction_registry.rs | 10 - docs/src/ai/overview.md | 2 +- docs/src/completions.md | 2 +- docs/src/reference/all-settings.md | 12 +- 27 files changed, 5 insertions(+), 1390 deletions(-) delete mode 100644 crates/supermaven/Cargo.toml delete mode 120000 crates/supermaven/LICENSE-GPL delete mode 100644 crates/supermaven/src/messages.rs delete mode 100644 crates/supermaven/src/supermaven.rs delete mode 100644 crates/supermaven/src/supermaven_edit_prediction_delegate.rs delete mode 100644 crates/supermaven_api/Cargo.toml delete mode 120000 crates/supermaven_api/LICENSE-GPL delete mode 100644 crates/supermaven_api/src/supermaven_api.rs diff --git a/.github/CODEOWNERS.hold b/.github/CODEOWNERS.hold index 449a5fd07315845787c9f2a73f0a0a22608e92c3..3d315b36401b2e27e29a2377aeabab8c09c75d39 100644 --- a/.github/CODEOWNERS.hold +++ b/.github/CODEOWNERS.hold @@ -62,8 +62,6 @@ /crates/rules_library/ @zed-industries/ai-team # SUGGESTED: Review needed - based on Richard Feldman (2 commits) /crates/shell_command_parser/ @zed-industries/ai-team -/crates/supermaven/ @zed-industries/ai-team -/crates/supermaven_api/ @zed-industries/ai-team /crates/vercel/ @zed-industries/ai-team /crates/x_ai/ @zed-industries/ai-team /crates/zeta_prompt/ @zed-industries/ai-team diff --git a/Cargo.lock b/Cargo.lock index 1a192869d6fee631d129e23c275c86e51168fe2f..9fce90755106d9159fe2bd206058a5a86761fdf1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5403,7 +5403,6 @@ dependencies = [ "semver", "serde_json", "settings", - "supermaven", "telemetry", "text", "theme", @@ -16524,49 +16523,6 @@ dependencies = [ "ztracing", ] -[[package]] -name = "supermaven" -version = "0.1.0" -dependencies = [ - "anyhow", - "client", - "collections", - "edit_prediction_types", - "editor", - "env_logger 0.11.8", - "futures 0.3.31", - "gpui", - "http_client", - "language", - "log", - "postage", - "project", - "serde", 
- "serde_json", - "settings", - "smol", - "supermaven_api", - "text", - "theme", - "ui", - "unicode-segmentation", - "util", -] - -[[package]] -name = "supermaven_api" -version = "0.1.0" -dependencies = [ - "anyhow", - "futures 0.3.31", - "http_client", - "paths", - "serde", - "serde_json", - "smol", - "util", -] - [[package]] name = "sval" version = "2.15.0" @@ -21861,7 +21817,6 @@ dependencies = [ "smol", "snippet_provider", "snippets_ui", - "supermaven", "svg_preview", "sysinfo 0.37.2", "system_specs", diff --git a/Cargo.toml b/Cargo.toml index d505a5ee14b9587c874c33c36fc4b154d900680f..c50b329772669105a7ae3a5f19562fbd186d23ea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -182,8 +182,6 @@ members = [ "crates/storybook", "crates/streaming_diff", "crates/sum_tree", - "crates/supermaven", - "crates/supermaven_api", "crates/svg_preview", "crates/system_specs", "crates/tab_switcher", @@ -427,8 +425,6 @@ sqlez_macros = { path = "crates/sqlez_macros" } story = { path = "crates/story" } streaming_diff = { path = "crates/streaming_diff" } sum_tree = { path = "crates/sum_tree" } -supermaven = { path = "crates/supermaven" } -supermaven_api = { path = "crates/supermaven_api" } codestral = { path = "crates/codestral" } system_specs = { path = "crates/system_specs" } tab_switcher = { path = "crates/tab_switcher" } @@ -900,7 +896,6 @@ sidebar = { codegen-units = 1 } snippet = { codegen-units = 1 } snippets_ui = { codegen-units = 1 } story = { codegen-units = 1 } -supermaven_api = { codegen-units = 1 } telemetry_events = { codegen-units = 1 } theme_selector = { codegen-units = 1 } time_format = { codegen-units = 1 } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index ba188ccb592871c62c6f010f026a8948c8cf89fa..ad778ca496f7815d0155f98187c8fad3e81365eb 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -371,7 +371,6 @@ fn update_command_palette_filter(cx: &mut App) { filter.hide_namespace("agents"); 
filter.hide_namespace("assistant"); filter.hide_namespace("copilot"); - filter.hide_namespace("supermaven"); filter.hide_namespace("zed_predict_onboarding"); filter.hide_namespace("edit_prediction"); @@ -392,19 +391,11 @@ fn update_command_palette_filter(cx: &mut App) { EditPredictionProvider::None => { filter.hide_namespace("edit_prediction"); filter.hide_namespace("copilot"); - filter.hide_namespace("supermaven"); filter.hide_action_types(&edit_prediction_actions); } EditPredictionProvider::Copilot => { filter.show_namespace("edit_prediction"); filter.show_namespace("copilot"); - filter.hide_namespace("supermaven"); - filter.show_action_types(edit_prediction_actions.iter()); - } - EditPredictionProvider::Supermaven => { - filter.show_namespace("edit_prediction"); - filter.hide_namespace("copilot"); - filter.show_namespace("supermaven"); filter.show_action_types(edit_prediction_actions.iter()); } EditPredictionProvider::Zed @@ -416,7 +407,6 @@ fn update_command_palette_filter(cx: &mut App) { | EditPredictionProvider::Experimental(_) => { filter.show_namespace("edit_prediction"); filter.hide_namespace("copilot"); - filter.hide_namespace("supermaven"); filter.show_action_types(edit_prediction_actions.iter()); } } diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 02ffcbe065e8b0334ab7c200c0e43b817cdad416..a29779d30de007043141b3958c0c449b230cc384 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -1836,7 +1836,6 @@ fn is_ep_store_provider(provider: EditPredictionProvider) -> bool { | EditPredictionProvider::Experimental(_) => true, EditPredictionProvider::None | EditPredictionProvider::Copilot - | EditPredictionProvider::Supermaven | EditPredictionProvider::Codestral => false, } } @@ -1877,7 +1876,6 @@ impl EditPredictionStore { EditPredictionProvider::OpenAiCompatibleApi => (false, 2), EditPredictionProvider::None | 
EditPredictionProvider::Copilot - | EditPredictionProvider::Supermaven | EditPredictionProvider::Codestral => { log::error!("queue_prediction_refresh called with non-store provider"); return; diff --git a/crates/edit_prediction_ui/Cargo.toml b/crates/edit_prediction_ui/Cargo.toml index d4a7c5d3ab800f54476a8e88914dcaaba3a26547..05afbabd2045e9bca591b6c2edba846e95953a4f 100644 --- a/crates/edit_prediction_ui/Cargo.toml +++ b/crates/edit_prediction_ui/Cargo.toml @@ -40,7 +40,6 @@ paths.workspace = true project.workspace = true regex.workspace = true settings.workspace = true -supermaven.workspace = true telemetry.workspace = true text.workspace = true theme.workspace = true diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index 729b901be1556f011c101258d34af9b98b45f272..c1fcd78f3f0cee24e6e8d936bf6af56f8d1ebda0 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -30,7 +30,6 @@ use std::{ sync::{Arc, LazyLock}, time::Duration, }; -use supermaven::{AccountStatus, Supermaven}; use ui::{ Clickable, ContextMenu, ContextMenuEntry, DocumentationSide, IconButton, IconButtonShape, Indicator, PopoverMenu, PopoverMenuHandle, ProgressBar, Tooltip, prelude::*, @@ -75,13 +74,6 @@ pub struct EditPredictionButton { project: WeakEntity, } -enum SupermavenButtonStatus { - Ready, - Errored(String), - NeedsActivation(String), - Initializing, -} - impl Render for EditPredictionButton { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { // Return empty div if AI is disabled @@ -188,101 +180,6 @@ impl Render for EditPredictionButton { .with_handle(self.popover_menu_handle.clone()), ) } - EditPredictionProvider::Supermaven => { - let Some(supermaven) = Supermaven::global(cx) else { - return div(); - }; - - let supermaven = supermaven.read(cx); - - let status = match supermaven { - Supermaven::Starting => 
SupermavenButtonStatus::Initializing, - Supermaven::FailedDownload { error } => { - SupermavenButtonStatus::Errored(error.to_string()) - } - Supermaven::Spawned(agent) => { - let account_status = agent.account_status.clone(); - match account_status { - AccountStatus::NeedsActivation { activate_url } => { - SupermavenButtonStatus::NeedsActivation(activate_url) - } - AccountStatus::Unknown => SupermavenButtonStatus::Initializing, - AccountStatus::Ready => SupermavenButtonStatus::Ready, - } - } - Supermaven::Error { error } => { - SupermavenButtonStatus::Errored(error.to_string()) - } - }; - - let icon = status.to_icon(); - let tooltip_text = status.to_tooltip(); - let has_menu = status.has_menu(); - let this = cx.weak_entity(); - let fs = self.fs.clone(); - let file = self.file.clone(); - let language = self.language.clone(); - let project = self.project.clone(); - - div().child( - PopoverMenu::new("supermaven") - .on_open({ - let file = file.clone(); - let language = language; - let project = project; - Rc::new(move |_window, cx| { - emit_edit_prediction_menu_opened( - "supermaven", - &file, - &language, - &project, - cx, - ); - }) - }) - .menu(move |window, cx| match &status { - SupermavenButtonStatus::NeedsActivation(activate_url) => { - Some(ContextMenu::build(window, cx, |menu, _, _| { - let fs = fs.clone(); - let activate_url = activate_url.clone(); - - menu.entry("Sign In", None, move |_, cx| { - cx.open_url(activate_url.as_str()) - }) - .entry( - "Use Zed AI", - None, - move |_, cx| { - set_completion_provider( - fs.clone(), - cx, - EditPredictionProvider::Zed, - ) - }, - ) - })) - } - SupermavenButtonStatus::Ready => this - .update(cx, |this, cx| { - this.build_supermaven_context_menu(window, cx) - }) - .ok(), - _ => None, - }) - .anchor(Corner::BottomRight) - .trigger_with_tooltip( - IconButton::new("supermaven-icon", icon), - move |window, cx| { - if has_menu { - Tooltip::for_action(tooltip_text.clone(), &ToggleMenu, cx) - } else { - 
Tooltip::text(tooltip_text.clone())(window, cx) - } - }, - ) - .with_handle(self.popover_menu_handle.clone()), - ) - } EditPredictionProvider::Codestral => { let enabled = self.editor_enabled.unwrap_or(true); let has_api_key = codestral::codestral_api_key(cx).is_some(); @@ -1120,21 +1017,6 @@ impl EditPredictionButton { }) } - fn build_supermaven_context_menu( - &self, - window: &mut Window, - cx: &mut Context, - ) -> Entity { - ContextMenu::build(window, cx, |menu, window, cx| { - let menu = self.build_language_settings_menu(menu, window, cx); - let menu = - self.add_provider_switching_section(menu, EditPredictionProvider::Supermaven, cx); - - menu.separator() - .action("Sign Out", supermaven::SignOut.boxed_clone()) - }) - } - fn build_codestral_context_menu( &self, window: &mut Window, @@ -1384,33 +1266,6 @@ impl StatusItemView for EditPredictionButton { } } -impl SupermavenButtonStatus { - fn to_icon(&self) -> IconName { - match self { - SupermavenButtonStatus::Ready => IconName::Supermaven, - SupermavenButtonStatus::Errored(_) => IconName::SupermavenError, - SupermavenButtonStatus::NeedsActivation(_) => IconName::SupermavenInit, - SupermavenButtonStatus::Initializing => IconName::SupermavenInit, - } - } - - fn to_tooltip(&self) -> String { - match self { - SupermavenButtonStatus::Ready => "Supermaven is ready".to_string(), - SupermavenButtonStatus::Errored(error) => format!("Supermaven error: {}", error), - SupermavenButtonStatus::NeedsActivation(_) => "Supermaven needs activation".to_string(), - SupermavenButtonStatus::Initializing => "Supermaven initializing".to_string(), - } - } - - fn has_menu(&self) -> bool { - match self { - SupermavenButtonStatus::Ready | SupermavenButtonStatus::NeedsActivation(_) => true, - SupermavenButtonStatus::Errored(_) | SupermavenButtonStatus::Initializing => false, - } - } -} - async fn open_disabled_globs_setting_in_editor( workspace: WeakEntity, cx: &mut AsyncWindowContext, @@ -1507,14 +1362,6 @@ pub fn 
get_available_providers(cx: &mut App) -> Vec { providers.push(EditPredictionProvider::Copilot); }; - if let Some(supermaven) = Supermaven::global(cx) { - if let Supermaven::Spawned(agent) = supermaven.read(cx) { - if matches!(agent.account_status, AccountStatus::Ready) { - providers.push(EditPredictionProvider::Supermaven); - } - } - } - if codestral::codestral_api_key(cx).is_some() { providers.push(EditPredictionProvider::Codestral); } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 6f1961a97880e0f5f55577c406b77e3796568a8e..0c2699304830482ba5a9ac23d561d9ea9d8c5b61 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -28579,7 +28579,7 @@ fn edit_prediction_edit_text( } fn edit_prediction_fallback_text(edits: &[(Range, Arc)], cx: &App) -> HighlightedText { - // Fallback for providers that don't provide edit_preview (like Copilot/Supermaven) + // Fallback for providers that don't provide edit_preview (like Copilot) // Just show the raw edit text with basic styling let mut text = String::new(); let mut highlights = Vec::new(); diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 73db39afdc5e9bd15f084043370d27f0494569a6..07204548ff5f2884bb4a5429267a02981ab3e78f 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -225,10 +225,6 @@ pub enum IconName { Star, StarFilled, Stop, - Supermaven, - SupermavenDisabled, - SupermavenError, - SupermavenInit, SwatchBook, SweepAi, SweepAiDisabled, diff --git a/crates/language/src/language_settings.rs b/crates/language/src/language_settings.rs index 40e3da789d4785cc5fd56589b09735ba8592ebc7..9a379697e8bddf9dc71d3d340d5e2a92d8b4405e 100644 --- a/crates/language/src/language_settings.rs +++ b/crates/language/src/language_settings.rs @@ -396,8 +396,7 @@ impl InlayHintSettings { } } -/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot) -/// or [Supermaven](https://supermaven.com). 
+/// The settings for edit predictions, such as [GitHub Copilot](https://github.com/features/copilot). #[derive(Clone, Debug, Default)] pub struct EditPredictionSettings { /// The provider that supplies edit predictions. diff --git a/crates/paths/src/paths.rs b/crates/paths/src/paths.rs index 656188e249fc864e1328c8f458bdc46aa7eaea3a..40e10fb3badaf2e00c6dbcc75af06e7b758faa81 100644 --- a/crates/paths/src/paths.rs +++ b/crates/paths/src/paths.rs @@ -419,12 +419,6 @@ pub fn copilot_dir() -> &'static PathBuf { COPILOT_DIR.get_or_init(|| data_dir().join("copilot")) } -/// Returns the path to the Supermaven directory. -pub fn supermaven_dir() -> &'static PathBuf { - static SUPERMAVEN_DIR: OnceLock = OnceLock::new(); - SUPERMAVEN_DIR.get_or_init(|| data_dir().join("supermaven")) -} - /// Returns the path to the default Prettier directory. pub fn default_prettier_dir() -> &'static PathBuf { static DEFAULT_PRETTIER_DIR: OnceLock = OnceLock::new(); diff --git a/crates/settings_content/src/language.rs b/crates/settings_content/src/language.rs index ab526c405a4b34962c298d68365cb828975628b1..db22f3a9e1448dbc529c133fb0195c422f02bc40 100644 --- a/crates/settings_content/src/language.rs +++ b/crates/settings_content/src/language.rs @@ -81,7 +81,6 @@ pub enum EditPredictionProvider { None, #[default] Copilot, - Supermaven, Zed, Codestral, Ollama, @@ -103,7 +102,6 @@ impl<'de> Deserialize<'de> for EditPredictionProvider { pub enum Content { None, Copilot, - Supermaven, Zed, Codestral, Ollama, @@ -116,7 +114,6 @@ impl<'de> Deserialize<'de> for EditPredictionProvider { Ok(match Content::deserialize(deserializer)? 
{ Content::None => EditPredictionProvider::None, Content::Copilot => EditPredictionProvider::Copilot, - Content::Supermaven => EditPredictionProvider::Supermaven, Content::Zed => EditPredictionProvider::Zed, Content::Codestral => EditPredictionProvider::Codestral, Content::Ollama => EditPredictionProvider::Ollama, @@ -146,7 +143,6 @@ impl EditPredictionProvider { EditPredictionProvider::Zed => true, EditPredictionProvider::None | EditPredictionProvider::Copilot - | EditPredictionProvider::Supermaven | EditPredictionProvider::Codestral | EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi @@ -160,7 +156,6 @@ impl EditPredictionProvider { match self { EditPredictionProvider::Zed => Some("Zed AI"), EditPredictionProvider::Copilot => Some("GitHub Copilot"), - EditPredictionProvider::Supermaven => Some("Supermaven"), EditPredictionProvider::Codestral => Some("Codestral"), EditPredictionProvider::Sweep => Some("Sweep"), EditPredictionProvider::Mercury => Some("Mercury"), diff --git a/crates/supermaven/Cargo.toml b/crates/supermaven/Cargo.toml deleted file mode 100644 index c2d0c48a9e7733402eae32886c0863326882c134..0000000000000000000000000000000000000000 --- a/crates/supermaven/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -[package] -name = "supermaven" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/supermaven.rs" -doctest = false - -[dependencies] -anyhow.workspace = true -client.workspace = true -collections.workspace = true -edit_prediction_types.workspace = true -futures.workspace = true -gpui.workspace = true -language.workspace = true -log.workspace = true -postage.workspace = true -serde.workspace = true -serde_json.workspace = true -settings.workspace = true -smol.workspace = true -supermaven_api.workspace = true -text.workspace = true -ui.workspace = true -unicode-segmentation.workspace = true -util.workspace = true - -[dev-dependencies] 
-editor = { workspace = true, features = ["test-support"] } -env_logger.workspace = true -gpui = { workspace = true, features = ["test-support"] } -language = { workspace = true, features = ["test-support"] } -project = { workspace = true, features = ["test-support"] } -settings = { workspace = true, features = ["test-support"] } -theme = { workspace = true, features = ["test-support"] } -util = { workspace = true, features = ["test-support"] } -http_client = { workspace = true, features = ["test-support"] } diff --git a/crates/supermaven/LICENSE-GPL b/crates/supermaven/LICENSE-GPL deleted file mode 120000 index 89e542f750cd3860a0598eff0dc34b56d7336dc4..0000000000000000000000000000000000000000 --- a/crates/supermaven/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/supermaven/src/messages.rs b/crates/supermaven/src/messages.rs deleted file mode 100644 index 9210343587bbb2cbf172a62a2eff73bbbb7cfb72..0000000000000000000000000000000000000000 --- a/crates/supermaven/src/messages.rs +++ /dev/null @@ -1,146 +0,0 @@ -use serde::{Deserialize, Serialize}; - -// Outbound messages -#[derive(Debug, Serialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum OutboundMessage { - StateUpdate(StateUpdateMessage), - #[allow(dead_code)] - UseFreeVersion, - Logout, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct StateUpdateMessage { - pub new_id: String, - pub updates: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum StateUpdate { - FileUpdate(FileUpdateMessage), - CursorUpdate(CursorPositionUpdateMessage), -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub struct FileUpdateMessage { - pub path: String, - pub content: String, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub struct CursorPositionUpdateMessage { - pub path: 
String, - pub offset: usize, -} - -// Inbound messages coming in on stdout - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum ResponseItem { - // A completion - Text { text: String }, - // Vestigial message type from old versions -- safe to ignore - Del { text: String }, - // Be able to delete whitespace prior to the cursor, likely for the rest of the completion - Dedent { text: String }, - // When the completion is over - End, - // Got the closing parentheses and shouldn't show any more after - Barrier, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SupermavenResponse { - pub state_id: String, - pub items: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct SupermavenMetadataMessage { - pub dust_strings: Option>, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct SupermavenTaskUpdateMessage { - pub task: String, - pub status: TaskStatus, - pub percent_complete: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum TaskStatus { - InProgress, - Complete, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct SupermavenActiveRepoMessage { - pub repo_simple_name: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum SupermavenPopupAction { - OpenUrl { label: String, url: String }, - NoOp { label: String }, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub struct SupermavenPopupMessage { - pub message: String, - pub actions: Vec, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "camelCase")] -pub struct ActivationRequest { - pub activate_url: Option, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SupermavenSetMessage { - pub key: String, - pub value: serde_json::Value, -} - -#[derive(Clone, Debug, Serialize, 
Deserialize)] -pub enum ServiceTier { - FreeNoLicense, - #[serde(other)] - Unknown, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "kind", rename_all = "snake_case")] -pub enum SupermavenMessage { - Response(SupermavenResponse), - Metadata(SupermavenMetadataMessage), - Apology { - message: Option, - }, - ActivationRequest(ActivationRequest), - ActivationSuccess, - Passthrough { - passthrough: Box, - }, - Popup(SupermavenPopupMessage), - TaskStatus(SupermavenTaskUpdateMessage), - ActiveRepo(SupermavenActiveRepoMessage), - ServiceTier { - service_tier: ServiceTier, - }, - - Set(SupermavenSetMessage), - #[serde(other)] - Unknown, -} diff --git a/crates/supermaven/src/supermaven.rs b/crates/supermaven/src/supermaven.rs deleted file mode 100644 index 96f9b9c58bf934ae3991375ee8ef15cbf990dcc4..0000000000000000000000000000000000000000 --- a/crates/supermaven/src/supermaven.rs +++ /dev/null @@ -1,485 +0,0 @@ -mod messages; -mod supermaven_edit_prediction_delegate; - -pub use supermaven_edit_prediction_delegate::*; - -use anyhow::{Context as _, Result}; -#[allow(unused_imports)] -use client::{Client, proto}; -use collections::BTreeMap; - -use futures::{AsyncBufReadExt, StreamExt, channel::mpsc, io::BufReader}; -use gpui::{App, AsyncApp, Context, Entity, EntityId, Global, Task, WeakEntity, actions}; -use language::{ - Anchor, Buffer, BufferSnapshot, ToOffset, language_settings::all_language_settings, -}; -use messages::*; -use postage::watch; -use serde::{Deserialize, Serialize}; -use settings::SettingsStore; -use smol::io::AsyncWriteExt; -use std::{path::PathBuf, sync::Arc}; -use ui::prelude::*; -use util::ResultExt; -use util::command::Child; -use util::command::Stdio; - -actions!( - supermaven, - [ - /// Signs out of Supermaven. 
- SignOut - ] -); - -pub fn init(client: Arc, cx: &mut App) { - let supermaven = cx.new(|_| Supermaven::Starting); - Supermaven::set_global(supermaven.clone(), cx); - - let mut provider = all_language_settings(None, cx).edit_predictions.provider; - if provider == language::language_settings::EditPredictionProvider::Supermaven { - supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx)); - } - - cx.observe_global::(move |cx| { - let new_provider = all_language_settings(None, cx).edit_predictions.provider; - if new_provider != provider { - provider = new_provider; - if provider == language::language_settings::EditPredictionProvider::Supermaven { - supermaven.update(cx, |supermaven, cx| supermaven.start(client.clone(), cx)); - } else { - supermaven.update(cx, |supermaven, _cx| supermaven.stop()); - } - } - }) - .detach(); - - cx.on_action(|_: &SignOut, cx| { - if let Some(supermaven) = Supermaven::global(cx) { - supermaven.update(cx, |supermaven, _cx| supermaven.sign_out()); - } - }); -} - -pub enum Supermaven { - Starting, - FailedDownload { error: anyhow::Error }, - Spawned(SupermavenAgent), - Error { error: anyhow::Error }, -} - -#[derive(Clone)] -pub enum AccountStatus { - Unknown, - NeedsActivation { activate_url: String }, - Ready, -} - -#[derive(Clone)] -struct SupermavenGlobal(Entity); - -impl Global for SupermavenGlobal {} - -impl Supermaven { - pub fn global(cx: &App) -> Option> { - cx.try_global::() - .map(|model| model.0.clone()) - } - - pub fn set_global(supermaven: Entity, cx: &mut App) { - cx.set_global(SupermavenGlobal(supermaven)); - } - - pub fn start(&mut self, client: Arc, cx: &mut Context) { - if let Self::Starting = self { - cx.spawn(async move |this, cx| { - let binary_path = - supermaven_api::get_supermaven_agent_path(client.http_client()).await?; - - this.update(cx, |this, cx| { - if let Self::Starting = this { - *this = - Self::Spawned(SupermavenAgent::new(binary_path, client.clone(), cx)?); - } - anyhow::Ok(()) - }) - }) 
- .detach_and_log_err(cx) - } - } - - pub fn stop(&mut self) { - *self = Self::Starting; - } - - pub fn is_enabled(&self) -> bool { - matches!(self, Self::Spawned { .. }) - } - - pub fn complete( - &mut self, - buffer: &Entity, - cursor_position: Anchor, - cx: &App, - ) -> Option { - if let Self::Spawned(agent) = self { - let buffer_id = buffer.entity_id(); - let buffer = buffer.read(cx); - let path = buffer - .file() - .and_then(|file| Some(file.as_local()?.abs_path(cx))) - .unwrap_or_else(|| PathBuf::from("untitled")) - .to_string_lossy() - .to_string(); - let content = buffer.text(); - let offset = cursor_position.to_offset(buffer); - let state_id = agent.next_state_id; - agent.next_state_id.0 += 1; - - let (updates_tx, mut updates_rx) = watch::channel(); - postage::stream::Stream::try_recv(&mut updates_rx).unwrap(); - - agent.states.insert( - state_id, - SupermavenCompletionState { - buffer_id, - prefix_anchor: cursor_position, - prefix_offset: offset, - text: String::new(), - dedent: String::new(), - updates_tx, - }, - ); - // ensure the states map is max 1000 elements - if agent.states.len() > 1000 { - // state id is monotonic so it's sufficient to remove the first element - agent - .states - .remove(&agent.states.keys().next().unwrap().clone()); - } - - let _ = agent - .outgoing_tx - .unbounded_send(OutboundMessage::StateUpdate(StateUpdateMessage { - new_id: state_id.0.to_string(), - updates: vec![ - StateUpdate::FileUpdate(FileUpdateMessage { - path: path.clone(), - content, - }), - StateUpdate::CursorUpdate(CursorPositionUpdateMessage { path, offset }), - ], - })); - - Some(SupermavenCompletion { - id: state_id, - updates: updates_rx, - }) - } else { - None - } - } - - pub fn completion( - &self, - buffer: &Entity, - cursor_position: Anchor, - cx: &App, - ) -> Option<&str> { - if let Self::Spawned(agent) = self { - find_relevant_completion( - &agent.states, - buffer.entity_id(), - &buffer.read(cx).snapshot(), - cursor_position, - ) - } else { - None - } - 
} - - pub fn sign_out(&mut self) { - if let Self::Spawned(agent) = self { - agent - .outgoing_tx - .unbounded_send(OutboundMessage::Logout) - .ok(); - // The account status will get set to RequiresActivation or Ready when the next - // message from the agent comes in. Until that happens, set the status to Unknown - // to disable the button. - agent.account_status = AccountStatus::Unknown; - } - } -} - -fn find_relevant_completion<'a>( - states: &'a BTreeMap, - buffer_id: EntityId, - buffer: &BufferSnapshot, - cursor_position: Anchor, -) -> Option<&'a str> { - let mut best_completion: Option<&str> = None; - 'completions: for state in states.values() { - if state.buffer_id != buffer_id { - continue; - } - let Some(state_completion) = state.text.strip_prefix(&state.dedent) else { - continue; - }; - - let current_cursor_offset = cursor_position.to_offset(buffer); - if current_cursor_offset < state.prefix_offset { - continue; - } - - let original_cursor_offset = buffer.clip_offset(state.prefix_offset, text::Bias::Left); - let text_inserted_since_completion_request: String = buffer - .text_for_range(original_cursor_offset..current_cursor_offset) - .collect(); - let trimmed_completion = - match state_completion.strip_prefix(&text_inserted_since_completion_request) { - Some(suffix) => suffix, - None => continue 'completions, - }; - - if best_completion.is_some_and(|best| best.len() > trimmed_completion.len()) { - continue; - } - - best_completion = Some(trimmed_completion); - } - best_completion -} - -pub struct SupermavenAgent { - _process: Child, - next_state_id: SupermavenCompletionStateId, - states: BTreeMap, - outgoing_tx: mpsc::UnboundedSender, - _handle_outgoing_messages: Task>, - _handle_incoming_messages: Task>, - pub account_status: AccountStatus, - service_tier: Option, - #[allow(dead_code)] - client: Arc, -} - -impl SupermavenAgent { - fn new( - binary_path: PathBuf, - client: Arc, - cx: &mut Context, - ) -> Result { - let mut process = 
util::command::new_command(&binary_path) - .arg("stdio") - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .kill_on_drop(true) - .spawn() - .context("failed to start the binary")?; - - let stdin = process - .stdin - .take() - .context("failed to get stdin for process")?; - let stdout = process - .stdout - .take() - .context("failed to get stdout for process")?; - - let (outgoing_tx, outgoing_rx) = mpsc::unbounded(); - - Ok(Self { - _process: process, - next_state_id: SupermavenCompletionStateId::default(), - states: BTreeMap::default(), - outgoing_tx, - _handle_outgoing_messages: cx.spawn(async move |_, _cx| { - Self::handle_outgoing_messages(outgoing_rx, stdin).await - }), - _handle_incoming_messages: cx.spawn(async move |this, cx| { - Self::handle_incoming_messages(this, stdout, cx).await - }), - account_status: AccountStatus::Unknown, - service_tier: None, - client, - }) - } - - async fn handle_outgoing_messages( - mut outgoing: mpsc::UnboundedReceiver, - mut stdin: W, - ) -> Result<()> { - while let Some(message) = outgoing.next().await { - let bytes = serde_json::to_vec(&message)?; - stdin.write_all(&bytes).await?; - stdin.write_all(&[b'\n']).await?; - } - Ok(()) - } - - async fn handle_incoming_messages( - this: WeakEntity, - stdout: R, - cx: &mut AsyncApp, - ) -> Result<()> { - const MESSAGE_PREFIX: &str = "SM-MESSAGE "; - - let stdout = BufReader::new(stdout); - let mut lines = stdout.lines(); - while let Some(line) = lines.next().await { - let Some(line) = line.context("failed to read line from stdout").log_err() else { - continue; - }; - let Some(line) = line.strip_prefix(MESSAGE_PREFIX) else { - continue; - }; - let Some(message) = serde_json::from_str::(line) - .with_context(|| format!("failed to deserialize line from stdout: {:?}", line)) - .log_err() - else { - continue; - }; - - this.update(cx, |this, _cx| { - if let Supermaven::Spawned(this) = this { - this.handle_message(message); - } - Task::ready(anyhow::Ok(())) - })? 
- .await?; - } - - Ok(()) - } - - fn handle_message(&mut self, message: SupermavenMessage) { - match message { - SupermavenMessage::ActivationRequest(request) => { - self.account_status = match request.activate_url { - Some(activate_url) => AccountStatus::NeedsActivation { activate_url }, - None => AccountStatus::Ready, - }; - } - SupermavenMessage::ActivationSuccess => { - self.account_status = AccountStatus::Ready; - } - SupermavenMessage::ServiceTier { service_tier } => { - self.account_status = AccountStatus::Ready; - self.service_tier = Some(service_tier); - } - SupermavenMessage::Response(response) => { - let state_id = SupermavenCompletionStateId(response.state_id.parse().unwrap()); - if let Some(state) = self.states.get_mut(&state_id) { - for item in &response.items { - match item { - ResponseItem::Text { text } => state.text.push_str(text), - ResponseItem::Dedent { text } => state.dedent.push_str(text), - _ => {} - } - } - *state.updates_tx.borrow_mut() = (); - } - } - SupermavenMessage::Passthrough { passthrough } => self.handle_message(*passthrough), - _ => { - log::warn!("unhandled message: {:?}", message); - } - } - } -} - -#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize)] -pub struct SupermavenCompletionStateId(usize); - -#[allow(dead_code)] -pub struct SupermavenCompletionState { - buffer_id: EntityId, - prefix_anchor: Anchor, - // prefix_offset is tracked independently because the anchor biases left which - // doesn't allow us to determine if the prior text has been deleted. 
- prefix_offset: usize, - text: String, - dedent: String, - updates_tx: watch::Sender<()>, -} - -pub struct SupermavenCompletion { - pub id: SupermavenCompletionStateId, - pub updates: watch::Receiver<()>, -} - -#[cfg(test)] -mod tests { - use super::*; - use collections::BTreeMap; - use gpui::TestAppContext; - use language::Buffer; - - #[gpui::test] - async fn test_find_relevant_completion_no_first_letter_skip(cx: &mut TestAppContext) { - let buffer = cx.new(|cx| Buffer::local("hello world", cx)); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let mut states = BTreeMap::new(); - let state_id = SupermavenCompletionStateId(1); - let (updates_tx, _) = watch::channel(); - - states.insert( - state_id, - SupermavenCompletionState { - buffer_id: buffer.entity_id(), - prefix_anchor: buffer_snapshot.anchor_before(0), // Start of buffer - prefix_offset: 0, - text: "hello".to_string(), - dedent: String::new(), - updates_tx, - }, - ); - - let cursor_position = buffer_snapshot.anchor_after(1); - - let result = find_relevant_completion( - &states, - buffer.entity_id(), - &buffer_snapshot, - cursor_position, - ); - - assert_eq!(result, Some("ello")); - } - - #[gpui::test] - async fn test_find_relevant_completion_with_multiple_chars(cx: &mut TestAppContext) { - let buffer = cx.new(|cx| Buffer::local("hello world", cx)); - let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); - - let mut states = BTreeMap::new(); - let state_id = SupermavenCompletionStateId(1); - let (updates_tx, _) = watch::channel(); - - states.insert( - state_id, - SupermavenCompletionState { - buffer_id: buffer.entity_id(), - prefix_anchor: buffer_snapshot.anchor_before(0), // Start of buffer - prefix_offset: 0, - text: "hello".to_string(), - dedent: String::new(), - updates_tx, - }, - ); - - let cursor_position = buffer_snapshot.anchor_after(3); - - let result = find_relevant_completion( - &states, - buffer.entity_id(), - &buffer_snapshot, - 
cursor_position, - ); - - assert_eq!(result, Some("lo")); - } -} diff --git a/crates/supermaven/src/supermaven_edit_prediction_delegate.rs b/crates/supermaven/src/supermaven_edit_prediction_delegate.rs deleted file mode 100644 index f9eb4a210cff705d609cad3de13924a86253655a..0000000000000000000000000000000000000000 --- a/crates/supermaven/src/supermaven_edit_prediction_delegate.rs +++ /dev/null @@ -1,303 +0,0 @@ -use crate::{Supermaven, SupermavenCompletionStateId}; -use anyhow::Result; -use edit_prediction_types::{ - EditPrediction, EditPredictionDelegate, EditPredictionDiscardReason, EditPredictionIconSet, -}; -use futures::StreamExt as _; -use gpui::{App, Context, Entity, EntityId, Task}; -use language::{Anchor, Buffer, BufferSnapshot}; -use std::{ - ops::{AddAssign, Range}, - path::Path, - sync::Arc, - time::Duration, -}; -use text::{ToOffset, ToPoint}; -use ui::prelude::*; -use unicode_segmentation::UnicodeSegmentation; - -pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(75); - -pub struct SupermavenEditPredictionDelegate { - supermaven: Entity, - buffer_id: Option, - completion_id: Option, - completion_text: Option, - file_extension: Option, - pending_refresh: Option>>, - completion_position: Option, -} - -impl SupermavenEditPredictionDelegate { - pub fn new(supermaven: Entity) -> Self { - Self { - supermaven, - buffer_id: None, - completion_id: None, - completion_text: None, - file_extension: None, - pending_refresh: None, - completion_position: None, - } - } -} - -// Computes the edit prediction from the difference between the completion text. -// This is defined by greedily matching the buffer text against the completion text. -// Inlays are inserted for parts of the completion text that are not present in the buffer text. -// For example, given the completion text "axbyc" and the buffer text "xy", the rendered output in the editor would be "[a]x[b]y[c]". -// The parts in brackets are the inlays. 
-fn completion_from_diff( - snapshot: BufferSnapshot, - completion_text: &str, - position: Anchor, - delete_range: Range, -) -> EditPrediction { - let buffer_text = snapshot.text_for_range(delete_range).collect::(); - - let mut edits: Vec<(Range, Arc)> = Vec::new(); - - let completion_graphemes: Vec<&str> = completion_text.graphemes(true).collect(); - let buffer_graphemes: Vec<&str> = buffer_text.graphemes(true).collect(); - - let mut offset = position.to_offset(&snapshot); - - let mut i = 0; - let mut j = 0; - while i < completion_graphemes.len() && j < buffer_graphemes.len() { - // find the next instance of the buffer text in the completion text. - let k = completion_graphemes[i..] - .iter() - .position(|c| *c == buffer_graphemes[j]); - match k { - Some(k) => { - if k != 0 { - let offset = snapshot.anchor_after(offset); - // the range from the current position to item is an inlay. - let edit = ( - offset..offset, - completion_graphemes[i..i + k].join("").into(), - ); - edits.push(edit); - } - i += k + 1; - j += 1; - offset.add_assign(buffer_graphemes[j - 1].len()); - } - None => { - // there are no more matching completions, so drop the remaining - // completion text as an inlay. - break; - } - } - } - - if j == buffer_graphemes.len() && i < completion_graphemes.len() { - let offset = snapshot.anchor_after(offset); - // there is leftover completion text, so drop it as an inlay. 
- let edit_range = offset..offset; - let edit_text = completion_graphemes[i..].join(""); - edits.push((edit_range, edit_text.into())); - } - - EditPrediction::Local { - id: None, - edits, - cursor_position: None, - edit_preview: None, - } -} - -impl EditPredictionDelegate for SupermavenEditPredictionDelegate { - fn name() -> &'static str { - "supermaven" - } - - fn display_name() -> &'static str { - "Supermaven" - } - - fn show_predictions_in_menu() -> bool { - true - } - - fn show_tab_accept_marker() -> bool { - true - } - - fn supports_jump_to_edit() -> bool { - false - } - - fn icons(&self, _cx: &App) -> EditPredictionIconSet { - EditPredictionIconSet::new(IconName::Supermaven) - .with_disabled(IconName::SupermavenDisabled) - .with_error(IconName::SupermavenError) - } - - fn is_enabled(&self, _buffer: &Entity, _cursor_position: Anchor, cx: &App) -> bool { - self.supermaven.read(cx).is_enabled() - } - - fn is_refreshing(&self, _cx: &App) -> bool { - self.pending_refresh.is_some() && self.completion_id.is_none() - } - - fn refresh( - &mut self, - buffer_handle: Entity, - cursor_position: Anchor, - debounce: bool, - cx: &mut Context, - ) { - // Only make new completion requests when debounce is true (i.e., when text is typed) - // When debounce is false (i.e., cursor movement), we should not make new requests - if !debounce { - return; - } - - reset_completion_cache(self, cx); - - let Some(mut completion) = self.supermaven.update(cx, |supermaven, cx| { - supermaven.complete(&buffer_handle, cursor_position, cx) - }) else { - return; - }; - - self.pending_refresh = Some(cx.spawn(async move |this, cx| { - if debounce { - cx.background_executor().timer(DEBOUNCE_TIMEOUT).await; - } - - while let Some(()) = completion.updates.next().await { - this.update(cx, |this, cx| { - // Get the completion text and cache it - if let Some(text) = - this.supermaven - .read(cx) - .completion(&buffer_handle, cursor_position, cx) - { - this.completion_text = Some(text.to_string()); - - 
this.completion_position = Some(cursor_position); - } - - this.completion_id = Some(completion.id); - this.buffer_id = Some(buffer_handle.entity_id()); - this.file_extension = buffer_handle.read(cx).file().and_then(|file| { - Some( - Path::new(file.file_name(cx)) - .extension()? - .to_str()? - .to_string(), - ) - }); - cx.notify(); - })?; - } - Ok(()) - })); - } - - fn accept(&mut self, _cx: &mut Context) { - reset_completion_cache(self, _cx); - } - - fn discard(&mut self, _reason: EditPredictionDiscardReason, _cx: &mut Context) { - reset_completion_cache(self, _cx); - } - - fn suggest( - &mut self, - buffer: &Entity, - cursor_position: Anchor, - cx: &mut Context, - ) -> Option { - if self.buffer_id != Some(buffer.entity_id()) { - return None; - } - - if self.completion_id.is_none() { - return None; - } - - let completion_text = if let Some(cached_text) = &self.completion_text { - cached_text.as_str() - } else { - let text = self - .supermaven - .read(cx) - .completion(buffer, cursor_position, cx)?; - self.completion_text = Some(text.to_string()); - text - }; - - // Check if the cursor is still at the same position as the completion request - // If we don't have a completion position stored, don't show the completion - if let Some(completion_position) = self.completion_position { - if cursor_position != completion_position { - return None; - } - } else { - return None; - } - - let completion_text = trim_to_end_of_line_unless_leading_newline(completion_text); - - let completion_text = completion_text.trim_end(); - - if !completion_text.trim().is_empty() { - let snapshot = buffer.read(cx).snapshot(); - - // Calculate the range from cursor to end of line correctly - let cursor_point = cursor_position.to_point(&snapshot); - let end_of_line = snapshot.anchor_after(language::Point::new( - cursor_point.row, - snapshot.line_len(cursor_point.row), - )); - let delete_range = cursor_position..end_of_line; - - Some(completion_from_diff( - snapshot, - completion_text, - 
cursor_position, - delete_range, - )) - } else { - None - } - } -} - -fn reset_completion_cache( - provider: &mut SupermavenEditPredictionDelegate, - _cx: &mut Context, -) { - provider.pending_refresh = None; - provider.completion_id = None; - provider.completion_text = None; - provider.completion_position = None; - provider.buffer_id = None; -} - -fn trim_to_end_of_line_unless_leading_newline(text: &str) -> &str { - if has_leading_newline(text) { - text - } else if let Some(i) = text.find('\n') { - &text[..i] - } else { - text - } -} - -fn has_leading_newline(text: &str) -> bool { - for c in text.chars() { - if c == '\n' { - return true; - } - if !c.is_whitespace() { - return false; - } - } - false -} diff --git a/crates/supermaven_api/Cargo.toml b/crates/supermaven_api/Cargo.toml deleted file mode 100644 index 28868a9a7433f995e99b861cf7f6e9aeeb28942f..0000000000000000000000000000000000000000 --- a/crates/supermaven_api/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "supermaven_api" -version = "0.1.0" -edition.workspace = true -publish.workspace = true -license = "GPL-3.0-or-later" - -[lints] -workspace = true - -[lib] -path = "src/supermaven_api.rs" -doctest = false - -[dependencies] -anyhow.workspace = true -futures.workspace = true -http_client.workspace = true -paths.workspace = true -serde.workspace = true -serde_json.workspace = true -smol.workspace = true -util.workspace = true diff --git a/crates/supermaven_api/LICENSE-GPL b/crates/supermaven_api/LICENSE-GPL deleted file mode 120000 index 89e542f750cd3860a0598eff0dc34b56d7336dc4..0000000000000000000000000000000000000000 --- a/crates/supermaven_api/LICENSE-GPL +++ /dev/null @@ -1 +0,0 @@ -../../LICENSE-GPL \ No newline at end of file diff --git a/crates/supermaven_api/src/supermaven_api.rs b/crates/supermaven_api/src/supermaven_api.rs deleted file mode 100644 index 97e70e58a18fc277d8cb17e2fb8fd3c71b884420..0000000000000000000000000000000000000000 --- 
a/crates/supermaven_api/src/supermaven_api.rs +++ /dev/null @@ -1,125 +0,0 @@ -use anyhow::{Context as _, Result}; -use futures::AsyncReadExt; -use futures::io::BufReader; -use http_client::{AsyncBody, HttpClient, Request as HttpRequest}; -use paths::supermaven_dir; -use serde::Deserialize; -use smol::fs::{self, File}; -use std::path::{Path, PathBuf}; -use std::sync::Arc; - -use util::fs::{make_file_executable, remove_matching}; - -#[derive(Deserialize)] -pub struct SupermavenApiError { - pub message: String, -} - -#[derive(Debug, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct SupermavenDownloadResponse { - pub download_url: String, - pub version: u64, - pub sha256_hash: String, -} - -pub async fn latest_release( - client: Arc, - platform: &str, - arch: &str, -) -> Result { - let uri = format!( - "https://supermaven.com/api/download-path?platform={}&arch={}", - platform, arch - ); - - // Download is not authenticated - let request = HttpRequest::get(&uri); - - let mut response = client - .send(request.body(AsyncBody::default())?) 
- .await - .with_context(|| "Unable to acquire Supermaven Agent".to_string())?; - - let mut body = Vec::new(); - response.body_mut().read_to_end(&mut body).await?; - - if response.status().is_client_error() || response.status().is_server_error() { - let body_str = std::str::from_utf8(&body)?; - let error: SupermavenApiError = serde_json::from_str(body_str)?; - anyhow::bail!("Supermaven API error: {}", error.message); - } - - serde_json::from_slice::(&body) - .with_context(|| "Unable to parse Supermaven Agent response".to_string()) -} - -pub fn version_path(version: u64) -> PathBuf { - supermaven_dir().join(format!( - "sm-agent-{}{}", - version, - std::env::consts::EXE_SUFFIX - )) -} - -pub async fn has_version(version_path: &Path) -> bool { - fs::metadata(version_path).await.is_ok_and(|m| m.is_file()) -} - -pub async fn get_supermaven_agent_path(client: Arc) -> Result { - fs::create_dir_all(supermaven_dir()) - .await - .with_context(|| { - format!( - "Could not create Supermaven Agent Directory at {:?}", - supermaven_dir() - ) - })?; - - let platform = match std::env::consts::OS { - "macos" => "darwin", - "windows" => "windows", - "linux" => "linux", - unsupported => anyhow::bail!("unsupported platform {unsupported}"), - }; - - let arch = match std::env::consts::ARCH { - "x86_64" => "amd64", - "aarch64" => "arm64", - unsupported => anyhow::bail!("unsupported architecture {unsupported}"), - }; - - let download_info = latest_release(client.clone(), platform, arch).await?; - - let binary_path = version_path(download_info.version); - - if has_version(&binary_path).await { - // Due to an issue with the Supermaven binary not being made executable on - // earlier Zed versions and Supermaven releases not occurring that frequently, - // we ensure here that the found binary is actually executable. 
- make_file_executable(&binary_path).await?; - - return Ok(binary_path); - } - - let request = HttpRequest::get(&download_info.download_url); - - let mut response = client - .send(request.body(AsyncBody::default())?) - .await - .with_context(|| "Unable to download Supermaven Agent".to_string())?; - - let mut file = File::create(&binary_path) - .await - .with_context(|| format!("Unable to create file at {:?}", binary_path))?; - - futures::io::copy(BufReader::new(response.body_mut()), &mut file) - .await - .with_context(|| format!("Unable to write binary to file at {:?}", binary_path))?; - - make_file_executable(&binary_path).await?; - - remove_matching(supermaven_dir(), |file| file != binary_path).await; - - Ok(binary_path) -} diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 82b730ee8f1b50f6f46a7400be908a9442e115d1..16385ccf75e245b4e4bf17cf37a1d04ef3ed9c6b 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -189,7 +189,6 @@ sidebar.workspace = true smol.workspace = true snippet_provider.workspace = true snippets_ui.workspace = true -supermaven.workspace = true svg_preview.workspace = true sysinfo.workspace = true tab_switcher.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 0921c12c2f06cea32ccba0e0bc58553d2fa91ab2..fa07355d69e1e9d6511301464e344533f6bdbd7d 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -638,7 +638,6 @@ fn main() { ); copilot_ui::init(&app_state, cx); - supermaven::init(app_state.client.clone(), cx); language_model::init(app_state.client.clone(), cx); language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); acp_tools::init(cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index aa1870ba2a9cb07cd7ffee040c68ffa73759e728..55f185aae13e49c6b90610a50ad197ee47ee8a98 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -4860,7 +4860,6 @@ mod tests { "settings_profile_selector", "snippets", "stash_picker", - "supermaven", 
"svg", "syntax_tree_view", "tab_switcher", diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 9381dae22b055b4bd008ee63d0d283581bd513f4..79b33093d86b306c3b0420f919bd555d9ea4ca7a 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -12,7 +12,6 @@ use settings::{ EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, EditPredictionPromptFormat, SettingsStore, }; use std::{cell::RefCell, rc::Rc, sync::Arc}; -use supermaven::{Supermaven, SupermavenEditPredictionDelegate}; use ui::Window; pub fn init(client: Arc, user_store: Entity, cx: &mut App) { @@ -132,7 +131,6 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option None, EditPredictionProvider::Copilot => Some(EditPredictionProviderConfig::Copilot), - EditPredictionProvider::Supermaven => Some(EditPredictionProviderConfig::Supermaven), EditPredictionProvider::Zed => Some(EditPredictionProviderConfig::Zed( EditPredictionModel::Zeta1, )), @@ -204,7 +202,6 @@ fn infer_prompt_format(model: &str) -> Option { #[derive(Copy, Clone, PartialEq, Eq)] enum EditPredictionProviderConfig { Copilot, - Supermaven, Codestral, Zed(EditPredictionModel), } @@ -213,7 +210,6 @@ impl EditPredictionProviderConfig { fn name(&self) -> &'static str { match self { EditPredictionProviderConfig::Copilot => "Copilot", - EditPredictionProviderConfig::Supermaven => "Supermaven", EditPredictionProviderConfig::Codestral => "Codestral", EditPredictionProviderConfig::Zed(model) => match model { EditPredictionModel::Zeta1 => "Zeta1", @@ -306,12 +302,6 @@ fn assign_edit_prediction_provider( editor.set_edit_prediction_provider(Some(provider), window, cx); } } - Some(EditPredictionProviderConfig::Supermaven) => { - if let Some(supermaven) = Supermaven::global(cx) { - let provider = cx.new(|_| SupermavenEditPredictionDelegate::new(supermaven)); - editor.set_edit_prediction_provider(Some(provider), window, cx); - } - } 
Some(EditPredictionProviderConfig::Codestral) => { let http_client = client.http_client(); let provider = cx.new(|_| CodestralEditPredictionDelegate::new(http_client)); diff --git a/docs/src/ai/overview.md b/docs/src/ai/overview.md index b05b3ac6a7a3c9ce42e226e75d5e9e28420f8b03..9463f7bbb11cdcb204915fca138e584baa1f9640 100644 --- a/docs/src/ai/overview.md +++ b/docs/src/ai/overview.md @@ -28,7 +28,7 @@ The [Inline Assistant](./inline-assistant.md) works differently: select code or [Edit Prediction](./edit-prediction.md) provides AI code completions on every keystroke. Each keypress sends a request to the prediction provider, which returns single or multi-line suggestions you accept with `tab`. -The default provider is Zeta, Zed's open-source model trained on open data. You can also use GitHub Copilot, Supermaven, or Codestral. +The default provider is Zeta, Zed's open-source model trained on open data. You can also use GitHub Copilot, or Codestral. ## Text threads diff --git a/docs/src/completions.md b/docs/src/completions.md index 9962fd5f24c604bb22f73ba5a797de936f9cb0d4..81c2efa3514a4623408b2869325ab0991ce382d6 100644 --- a/docs/src/completions.md +++ b/docs/src/completions.md @@ -8,7 +8,7 @@ description: Zed's code completions from language servers and edit predictions. Zed supports two sources for completions: 1. "Code Completions" provided by Language Servers (LSPs) automatically installed by Zed or via [Zed Language Extensions](languages.md). -2. "Edit Predictions" provided by Zed's own Zeta model or by external providers like [GitHub Copilot](#github-copilot) or [Supermaven](#supermaven). +2. "Edit Predictions" provided by Zed's own Zeta model or by external providers like [GitHub Copilot](#github-copilot). 
## Language Server Code Completions {#code-completions} diff --git a/docs/src/reference/all-settings.md b/docs/src/reference/all-settings.md index 23b59f0b91002c0a920df0df8d61088652281735..32fec4a84d56cf996dc85cf112e4daec7893311b 100644 --- a/docs/src/reference/all-settings.md +++ b/docs/src/reference/all-settings.md @@ -1800,17 +1800,7 @@ While other options may be changed at a runtime and should be placed under `sett } ``` -3. Use Supermaven as the edit prediction provider: - -```json [settings] -{ - "edit_predictions": { - "provider": "supermaven" - } -} -``` - -4. Turn off edit predictions across all providers +3. Turn off edit predictions across all providers ```json [settings] { From ed3553491be9cb4af889575aa580daafc79c7e49 Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Mon, 2 Mar 2026 16:23:30 -0600 Subject: [PATCH 248/548] ep: Include more context (#50533) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... 
--------- Co-authored-by: Max --- crates/edit_prediction/src/cursor_excerpt.rs | 32 ++++++++++------- crates/edit_prediction/src/edit_prediction.rs | 35 +++++++++++++++++-- crates/edit_prediction/src/zeta.rs | 10 ++---- crates/zeta_prompt/src/zeta_prompt.rs | 6 ++++ 4 files changed, 60 insertions(+), 23 deletions(-) diff --git a/crates/edit_prediction/src/cursor_excerpt.rs b/crates/edit_prediction/src/cursor_excerpt.rs index 73a906e4fc18080bdeb469019f8ec6a3a87c3bb6..690e7001bd45ab3d9a995b4dfd43c2e8e297dbe9 100644 --- a/crates/edit_prediction/src/cursor_excerpt.rs +++ b/crates/edit_prediction/src/cursor_excerpt.rs @@ -13,7 +13,7 @@ pub fn compute_excerpt_ranges( let editable_150 = compute_editable_range(snapshot, position, 150); let editable_180 = compute_editable_range(snapshot, position, 180); let editable_350 = compute_editable_range(snapshot, position, 350); - let full_512 = compute_editable_range(snapshot, position, 512); + let editable_512 = compute_editable_range(snapshot, position, 512); let editable_150_context_350 = expand_context_syntactically_then_linewise(snapshot, editable_150.clone(), 350); @@ -21,19 +21,20 @@ pub fn compute_excerpt_ranges( expand_context_syntactically_then_linewise(snapshot, editable_180.clone(), 350); let editable_350_context_150 = expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 150); + let editable_350_context_512 = + expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 512); + let editable_350_context_1024 = + expand_context_syntactically_then_linewise(snapshot, editable_350.clone(), 1024); + let context_4096 = expand_context_syntactically_then_linewise( + snapshot, + editable_350_context_1024.clone(), + 4096 - 1024, + ); + let context_8192 = + expand_context_syntactically_then_linewise(snapshot, context_4096.clone(), 8192 - 4096); - let full_start_row = full_512 - .start - .row - .min(editable_150_context_350.start.row) - .min(editable_180_context_350.start.row) - 
.min(editable_350_context_150.start.row); - let full_end_row = full_512 - .end - .row - .max(editable_150_context_350.end.row) - .max(editable_180_context_350.end.row) - .max(editable_350_context_150.end.row); + let full_start_row = context_8192.start.row; + let full_end_row = context_8192.end.row; let full_context = Point::new(full_start_row, 0)..Point::new(full_end_row, snapshot.line_len(full_end_row)); @@ -50,9 +51,14 @@ pub fn compute_excerpt_ranges( editable_150: to_offset(&editable_150), editable_180: to_offset(&editable_180), editable_350: to_offset(&editable_350), + editable_512: Some(to_offset(&editable_512)), editable_150_context_350: to_offset(&editable_150_context_350), editable_180_context_350: to_offset(&editable_180_context_350), editable_350_context_150: to_offset(&editable_350_context_150), + editable_350_context_512: Some(to_offset(&editable_350_context_512)), + editable_350_context_1024: Some(to_offset(&editable_350_context_1024)), + context_4096: Some(to_offset(&context_4096)), + context_8192: Some(to_offset(&context_8192)), }; (full_context, full_context_offset_range, ranges) diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index a29779d30de007043141b3958c0c449b230cc384..b25ccee37970f2dc0dfa8bcbec4b1cdcdfe6d506 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -173,6 +173,8 @@ pub struct EditPredictionModelInput { trigger: PredictEditsRequestTrigger, diagnostic_search_range: Range, debug_tx: Option>, + can_collect_data: bool, + is_open_source: bool, pub user_actions: Vec, } @@ -2058,7 +2060,7 @@ impl EditPredictionStore { let stored_events = project_state.events(cx); let has_events = !stored_events.is_empty(); let events: Vec> = - stored_events.into_iter().map(|e| e.event).collect(); + stored_events.iter().map(|e| e.event.clone()).collect(); let debug_tx = project_state.debug_tx.clone(); let snapshot = 
active_buffer.read(cx).snapshot(); @@ -2092,9 +2094,23 @@ impl EditPredictionStore { let related_files = self.context_for_project(&project, cx); + let is_open_source = snapshot + .file() + .map_or(false, |file| self.is_file_open_source(&project, file, cx)) + && events.iter().all(|event| event.in_open_source_repo()) + && related_files.iter().all(|file| file.in_open_source_repo); + + let can_collect_data = !cfg!(test) + && is_open_source + && self.is_data_collection_enabled(cx) + && matches!( + self.edit_prediction_model, + EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 + ); + let inputs = EditPredictionModelInput { project: project.clone(), - buffer: active_buffer, + buffer: active_buffer.clone(), snapshot: snapshot, position, events, @@ -2104,8 +2120,23 @@ impl EditPredictionStore { diagnostic_search_range: diagnostic_search_range, debug_tx, user_actions, + can_collect_data, + is_open_source, }; + if can_collect_data && rand::random_ratio(1, 1000) { + if let Some(task) = capture_example( + project.clone(), + active_buffer, + position, + stored_events, + false, + cx, + ) { + task.detach(); + } + } + let task = match self.edit_prediction_model { EditPredictionModel::Zeta1 => zeta::request_prediction_with_zeta( self, diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 9c6e9e30d94c5e1988d54da7966a58fd8e69e233..abcfeabec44b26405153c10c43e6c2739e5e802e 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -35,6 +35,8 @@ pub fn request_prediction_with_zeta( debug_tx, trigger, project, + can_collect_data, + is_open_source, .. 
}: EditPredictionModelInput, preferred_model: Option, @@ -63,14 +65,6 @@ pub fn request_prediction_with_zeta( let llm_token = store.llm_token.clone(); let app_version = AppVersion::global(cx); - let is_open_source = snapshot - .file() - .map_or(false, |file| store.is_file_open_source(&project, file, cx)) - && events.iter().all(|event| event.in_open_source_repo()) - && related_files.iter().all(|file| file.in_open_source_repo); - - let can_collect_data = is_open_source && store.is_data_collection_enabled(cx); - let request_task = cx.background_spawn({ async move { let zeta_version = raw_config diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index bdd5afffa975adc11176928a89e4cb52b4cd69c3..abb1c3ddc74d58d0b300e5e64d77a60a48b83283 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -36,12 +36,18 @@ pub struct ExcerptRanges { pub editable_180: Range, /// Editable region computed with a 350-token budget. pub editable_350: Range, + /// Editable region computed with a 350-token budget. + pub editable_512: Option>, /// Context boundary when using editable_150 with 350 tokens of additional context. pub editable_150_context_350: Range, /// Context boundary when using editable_180 with 350 tokens of additional context. pub editable_180_context_350: Range, /// Context boundary when using editable_350 with 150 tokens of additional context. 
pub editable_350_context_150: Range, + pub editable_350_context_512: Option>, + pub editable_350_context_1024: Option>, + pub context_4096: Option>, + pub context_8192: Option>, } #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] From 465e2b5ffd9ac8553341bd3989ce960b4793f917 Mon Sep 17 00:00:00 2001 From: it-education-md <128720033+it-education-md@users.noreply.github.com> Date: Mon, 2 Mar 2026 17:45:19 -0500 Subject: [PATCH 249/548] editor: Avoid autoscroll in SplitSelectionIntoLines (#49399) Closes #48812 ## Summary `editor::SplitSelectionIntoLines` currently triggers autoscroll and can jump to the end of a long file. This PR makes the action explicitly no-scroll. ## What changed - Disabled autoscroll in the `unfold_ranges` call inside `split_selection_into_lines`. - Switched selection update to `SelectionEffects::no_scroll()`. ## Testing - Added `test_split_selection_into_lines_does_not_scroll`. ### Screenshots: - Before: see issue video - After: image Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Stop scrolling on `editor::SplitSelectionIntoLines` called in the long files --- crates/editor/src/editor.rs | 4 ++-- crates/editor/src/editor_tests.rs | 20 ++++++++++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 0c2699304830482ba5a9ac23d561d9ea9d8c5b61..efc3cddcc8549df6d832e726c77f2dda600adaa4 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -15397,7 +15397,7 @@ impl Editor { .into_iter() .map(|selection| selection.start..selection.end) .collect::>(); - self.unfold_ranges(&selections, true, true, cx); + 
self.unfold_ranges(&selections, true, false, cx); let mut new_selection_ranges = Vec::new(); { @@ -15439,7 +15439,7 @@ impl Editor { } } } - self.change_selections(Default::default(), window, cx, |s| { + self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { s.select_ranges(new_selection_ranges); }); } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 2898954b75a97c7d7d0a922eae8e71c8b598a7d5..38abff942acf8717000090a90654f1117ba5005d 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -8538,6 +8538,26 @@ async fn test_split_selection_into_lines(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_split_selection_into_lines_does_not_scroll(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + let large_body = "\nline".repeat(300); + cx.set_state(&format!("«ˇstart{large_body}\nend»")); + let initial_scroll_position = cx.update_editor(|editor, _, cx| editor.scroll_position(cx)); + + cx.update_editor(|editor, window, cx| { + editor.split_selection_into_lines(&Default::default(), window, cx); + }); + + let scroll_position_after_split = cx.update_editor(|editor, _, cx| editor.scroll_position(cx)); + assert_eq!( + initial_scroll_position, scroll_position_after_split, + "Scroll position should not change after splitting selection into lines" + ); +} + #[gpui::test] async fn test_split_selection_into_lines_interacting_with_creases(cx: &mut TestAppContext) { init_test(cx, |_| {}); From 1d443f69f6cb4914ff0ca3309a5b5bac1af0e934 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 2 Mar 2026 16:04:40 -0700 Subject: [PATCH 250/548] Add error handling to keep Zed running in the face of transient wgpu errors (#50539) Release Notes: - linux: Reduce crashes in the face of transient GPU errors --- crates/gpui_wgpu/src/wgpu_renderer.rs | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git 
a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index bbecca198eb3ae46b739ab4c42267e7f04b0f7a9..5beeef6ad1238f25db7c50f739053e138b2e1295 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -11,7 +11,7 @@ use log::warn; #[cfg(not(target_family = "wasm"))] use raw_window_handle::{HasDisplayHandle, HasWindowHandle}; use std::num::NonZeroU64; -use std::sync::Arc; +use std::sync::{Arc, Mutex}; #[repr(C)] #[derive(Clone, Copy, Pod, Zeroable)] @@ -122,6 +122,8 @@ pub struct WgpuRenderer { transparent_alpha_mode: wgpu::CompositeAlphaMode, opaque_alpha_mode: wgpu::CompositeAlphaMode, max_texture_size: u32, + last_error: Arc>>, + failed_frame_count: u32, } impl WgpuRenderer { @@ -367,6 +369,13 @@ impl WgpuRenderer { let adapter_info = context.adapter.get_info(); + let last_error: Arc>> = Arc::new(Mutex::new(None)); + let last_error_clone = Arc::clone(&last_error); + device.on_uncaptured_error(Arc::new(move |error| { + let mut guard = last_error_clone.lock().unwrap(); + *guard = Some(error.to_string()); + })); + Ok(Self { device, queue, @@ -398,6 +407,8 @@ impl WgpuRenderer { transparent_alpha_mode, opaque_alpha_mode, max_texture_size, + last_error, + failed_frame_count: 0, }) } @@ -961,6 +972,20 @@ impl WgpuRenderer { } pub fn draw(&mut self, scene: &Scene) { + let last_error = self.last_error.lock().unwrap().take(); + if let Some(error) = last_error { + self.failed_frame_count += 1; + log::error!( + "GPU error during frame (failure {} of 20): {error}", + self.failed_frame_count + ); + if self.failed_frame_count > 20 { + panic!("Too many consecutive GPU errors. 
Last error: {error}"); + } + } else { + self.failed_frame_count = 0; + } + self.atlas.before_frame(); let frame = match self.surface.get_current_texture() { From 67b375cc1e38d971c8e84a3c9248a962c8b52a01 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 2 Mar 2026 16:16:17 -0700 Subject: [PATCH 251/548] Restore accidental change (#50543) I'm not sure why, but my dev builds on linux don't output to stdout Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/zed/src/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index fa07355d69e1e9d6511301464e344533f6bdbd7d..e93bd92d041a18e927e1560379bcdb2886605874 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -276,7 +276,7 @@ fn main() { zlog::init(); - if true { + if stdout_is_a_pty() { zlog::init_output_stdout(); } else { let result = zlog::init_output_file(paths::log_file(), Some(paths::old_log_file())); From 4392e3a9fcb35a8d796076ea10bbbe4b9e48319f Mon Sep 17 00:00:00 2001 From: Sergei Zharinov Date: Mon, 2 Mar 2026 21:18:56 -0300 Subject: [PATCH 252/548] workspace: Fix scrolling to active tab when pinned tabs are present (#50538) When pinned tabs are present, activating an unpinned tab passed the absolute tab index to the scroll handle, which only contains unpinned tabs. This caused the scroll-into-view to silently fail. Subtract `pinned_tab_count` from the index so it maps to the correct child in the unpinned tabs scroll container. Release Notes: - Fixed tab bar not reliably scrolling to the active tab when pinned tabs are present. 
--- crates/workspace/src/pane.rs | 68 +++++++++++++++++++++++++++++++++++- 1 file changed, 67 insertions(+), 1 deletion(-) diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 66e5eeb4734557c818f42b6537859634435fd295..a39be125a5784b8c9d995bb750b9d7ff57a67191 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -1468,7 +1468,8 @@ impl Pane { fn update_active_tab(&mut self, index: usize) { if !self.is_tab_pinned(index) { self.suppress_scroll = false; - self.tab_bar_scroll_handle.scroll_to_item(index); + self.tab_bar_scroll_handle + .scroll_to_item(index - self.pinned_tab_count); } } @@ -7935,6 +7936,71 @@ mod tests { ); } + #[gpui::test] + async fn test_pinned_tabs_scroll_to_item_uses_correct_index(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + let project = Project::test(fs, None, cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone()); + + cx.simulate_resize(size(px(400.), px(300.))); + + for label in ["A", "B", "C"] { + add_labeled_item(&pane, label, false, cx); + } + + pane.update_in(cx, |pane, window, cx| { + pane.pin_tab_at(0, window, cx); + pane.pin_tab_at(1, window, cx); + pane.pin_tab_at(2, window, cx); + }); + + for label in ["D", "E", "F", "G", "H", "I", "J", "K"] { + add_labeled_item(&pane, label, false, cx); + } + + assert_item_labels( + &pane, + ["A!", "B!", "C!", "D", "E", "F", "G", "H", "I", "J", "K*"], + cx, + ); + + cx.run_until_parked(); + + // Verify overflow exists (precondition for scroll test) + let scroll_handle = + pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone()); + assert!( + scroll_handle.max_offset().width > px(0.), + "Test requires tab overflow to verify scrolling. Increase tab count or reduce window width." 
+ ); + + // Activate a different tab first, then activate K + // This ensures we're not just re-activating an already-active tab + pane.update_in(cx, |pane, window, cx| { + pane.activate_item(3, true, true, window, cx); + }); + cx.run_until_parked(); + + pane.update_in(cx, |pane, window, cx| { + pane.activate_item(10, true, true, window, cx); + }); + cx.run_until_parked(); + + let scroll_handle = + pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone()); + let k_tab_bounds = cx.debug_bounds("TAB-10").unwrap(); + let scroll_bounds = scroll_handle.bounds(); + + assert!( + k_tab_bounds.left() >= scroll_bounds.left(), + "Active tab K should be scrolled into view" + ); + } + #[gpui::test] async fn test_close_all_items_including_pinned(cx: &mut TestAppContext) { init_test(cx); From 42ba961075b16aaf35d48631f0ce4e1a4196d983 Mon Sep 17 00:00:00 2001 From: Eric Holk Date: Mon, 2 Mar 2026 16:57:15 -0800 Subject: [PATCH 253/548] Persist unsent draft prompt across Zed restarts (#49541) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Store the user's unsent message editor text in DbThread so it survives quitting and reloading Zed. The draft flows through Thread → AcpThread → AcpThreadView on load, and back via a debounced observer on the message editor for saves. Currently works for native Zed agents only; external ACP agents will pick this up once general ACP history persistence lands. 
## Changes - **`DbThread`** / **`Thread`**: New `draft_prompt: Option` field, included in `to_db()`/`from_db()` - **`AcpThread`**: Bridge field with getter/setter, populated during `register_session()` - **`NativeAgent::save_thread()`**: Copies draft from `AcpThread` → `Thread` before persisting - **`AcpThreadView`**: Restores draft into `MessageEditor` on load; syncs editor text → `AcpThread` via observer; debounced (500ms) Thread notify triggers DB save Co-authored-by: Anthony Eid Co-authored-by: Mikayla Maki --- crates/acp_thread/src/acp_thread.rs | 11 ++++++ crates/agent/src/agent.rs | 34 ++++++++++++++--- crates/agent/src/db.rs | 21 +++++++++++ crates/agent/src/thread.rs | 13 +++++++ crates/agent/src/thread_store.rs | 1 + .../src/connection_view/thread_view.rs | 37 +++++++++++++++++++ crates/agent_ui/src/message_editor.rs | 24 ++++++++++-- 7 files changed, 133 insertions(+), 8 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index e6da8f3f901b41c0a59d73920c3036fc72d1b906..f57ce1f4d188e260624bd90187a21890379fe6b6 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -970,6 +970,8 @@ pub struct AcpThread { pending_terminal_output: HashMap>>, pending_terminal_exit: HashMap, had_error: bool, + /// The user's unsent prompt text, persisted so it can be restored when reloading the thread. 
+ draft_prompt: Option>, } impl From<&AcpThread> for ActionLogTelemetry { @@ -1207,6 +1209,7 @@ impl AcpThread { pending_terminal_output: HashMap::default(), pending_terminal_exit: HashMap::default(), had_error: false, + draft_prompt: None, } } @@ -1218,6 +1221,14 @@ impl AcpThread { self.prompt_capabilities.clone() } + pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> { + self.draft_prompt.as_deref() + } + + pub fn set_draft_prompt(&mut self, prompt: Option>) { + self.draft_prompt = prompt; + } + pub fn connection(&self) -> &Rc { &self.connection } diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 0bb0f2c8790a5e07b97976ba391105554ad03307..7cf9416840a6bd2870327c9c68135857c01f7c9b 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -351,11 +351,12 @@ impl NativeAgent { let session_id = thread.id().clone(); let parent_session_id = thread.parent_thread_id(); let title = thread.title(); + let draft_prompt = thread.draft_prompt().map(Vec::from); let project = thread.project.clone(); let action_log = thread.action_log.clone(); let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone(); let acp_thread = cx.new(|cx| { - acp_thread::AcpThread::new( + let mut acp_thread = acp_thread::AcpThread::new( parent_session_id, title, connection, @@ -364,7 +365,9 @@ impl NativeAgent { session_id.clone(), prompt_capabilities_rx, cx, - ) + ); + acp_thread.set_draft_prompt(draft_prompt); + acp_thread }); let registry = LanguageModelRegistry::read_global(cx); @@ -844,9 +847,7 @@ impl NativeAgent { return; } - let database_future = ThreadsDatabase::connect(cx); - let (id, db_thread) = - thread.update(cx, |thread, cx| (thread.id().clone(), thread.to_db(cx))); + let id = thread.read(cx).id().clone(); let Some(session) = self.sessions.get_mut(&id) else { return; }; @@ -860,6 +861,12 @@ impl NativeAgent { .collect::>(), ); + let draft_prompt = session.acp_thread.read(cx).draft_prompt().map(Vec::from); + let database_future = 
ThreadsDatabase::connect(cx); + let db_thread = thread.update(cx, |thread, cx| { + thread.set_draft_prompt(draft_prompt); + thread.to_db(cx) + }); let thread_store = self.thread_store.clone(); session.pending_save = cx.spawn(async move |_, cx| { let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else { @@ -2571,6 +2578,18 @@ mod internal_tests { cx.run_until_parked(); + // Set a draft prompt with rich content blocks before saving. + let draft_blocks = vec![ + acp::ContentBlock::Text(acp::TextContent::new("Check out ")), + acp::ContentBlock::ResourceLink(acp::ResourceLink::new("b.md", uri.to_string())), + acp::ContentBlock::Text(acp::TextContent::new(" please")), + ]; + acp_thread.update(cx, |thread, _cx| { + thread.set_draft_prompt(Some(draft_blocks.clone())); + }); + thread.update(cx, |_thread, cx| cx.notify()); + cx.run_until_parked(); + // Close the session so it can be reloaded from disk. cx.update(|cx| connection.clone().close_session(&session_id, cx)) .await @@ -2608,6 +2627,11 @@ mod internal_tests { "} ) }); + + // Ensure the draft prompt with rich content blocks survived the round-trip. 
+ acp_thread.read_with(cx, |thread, _| { + assert_eq!(thread.draft_prompt(), Some(draft_blocks.as_slice())); + }); } fn thread_entries( diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index 5a14e920e52c18fb6341e09fa9f747b3c5019f1d..3a7af37cac85065d8853fbb5332093ef3fd20592 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -64,6 +64,8 @@ pub struct DbThread { pub thinking_enabled: bool, #[serde(default)] pub thinking_effort: Option, + #[serde(default)] + pub draft_prompt: Option>, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -105,6 +107,7 @@ impl SharedThread { speed: None, thinking_enabled: false, thinking_effort: None, + draft_prompt: None, } } @@ -282,6 +285,7 @@ impl DbThread { speed: None, thinking_enabled: false, thinking_effort: None, + draft_prompt: None, }) } } @@ -632,6 +636,7 @@ mod tests { speed: None, thinking_enabled: false, thinking_effort: None, + draft_prompt: None, } } @@ -715,6 +720,22 @@ mod tests { ); } + #[test] + fn test_draft_prompt_defaults_to_none() { + let json = r#"{ + "title": "Old Thread", + "messages": [], + "updated_at": "2024-01-01T00:00:00Z" + }"#; + + let db_thread: DbThread = serde_json::from_str(json).expect("Failed to deserialize"); + + assert!( + db_thread.draft_prompt.is_none(), + "Legacy threads without draft_prompt field should default to None" + ); + } + #[gpui::test] async fn test_subagent_context_roundtrips_through_save_load(cx: &mut TestAppContext) { let database = ThreadsDatabase::new(cx.executor()).unwrap(); diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 4c43a66fe5bb67c11fe5f0438d54cc86a498c55c..c5ca1118ace28b66d555d67aa40c718da292f644 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -899,6 +899,8 @@ pub struct Thread { imported: bool, /// If this is a subagent thread, contains context about the parent subagent_context: Option, + /// The user's unsent prompt text, persisted so it can be restored when reloading the thread. 
+ draft_prompt: Option>, /// Weak references to running subagent threads for cancellation propagation running_subagents: Vec>, } @@ -1014,6 +1016,7 @@ impl Thread { file_read_times: HashMap::default(), imported: false, subagent_context: None, + draft_prompt: None, running_subagents: Vec::new(), } } @@ -1229,6 +1232,7 @@ impl Thread { file_read_times: HashMap::default(), imported: db_thread.imported, subagent_context: db_thread.subagent_context, + draft_prompt: db_thread.draft_prompt, running_subagents: Vec::new(), } } @@ -1253,6 +1257,7 @@ impl Thread { speed: self.speed, thinking_enabled: self.thinking_enabled, thinking_effort: self.thinking_effort.clone(), + draft_prompt: self.draft_prompt.clone(), }; cx.background_spawn(async move { @@ -1294,6 +1299,14 @@ impl Thread { self.messages.is_empty() && self.title.is_none() } + pub fn draft_prompt(&self) -> Option<&[acp::ContentBlock]> { + self.draft_prompt.as_deref() + } + + pub fn set_draft_prompt(&mut self, prompt: Option>) { + self.draft_prompt = prompt; + } + pub fn model(&self) -> Option<&Arc> { self.model.as_ref() } diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index 5cdce12125da8f7d26677388169e899f94b7e7f1..f944377e489a88ac0fa6dbb802edf9702e86f5f2 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -145,6 +145,7 @@ mod tests { speed: None, thinking_enabled: false, thinking_effort: None, + draft_prompt: None, } } diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index b8403f8052e32fbeeceb4594438eecf32aa4e2e7..2544305bc8f8666b897d11285ffa7711f3af8794 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -5,6 +5,7 @@ use gpui::{Corner, List}; use language_model::{LanguageModelEffortLevel, Speed}; use settings::update_settings_file; use ui::{ButtonLike, SplitButton, SplitButtonStyle, Tab}; +use 
workspace::SERIALIZATION_THROTTLE_TIME; use super::*; @@ -239,6 +240,7 @@ pub struct ThreadView { pub resumed_without_history: bool, pub resume_thread_metadata: Option, pub _cancel_task: Option>, + _draft_save_task: Option>, pub skip_queue_processing_count: usize, pub user_interrupted_generation: bool, pub can_fast_track_queue: bool, @@ -345,6 +347,8 @@ impl ThreadView { editor.set_message(blocks, window, cx); } } + } else if let Some(draft) = thread.read(cx).draft_prompt() { + editor.set_message(draft.to_vec(), window, cx); } editor }); @@ -377,6 +381,38 @@ impl ThreadView { Self::handle_message_editor_event, )); + subscriptions.push(cx.observe(&message_editor, |this, editor, cx| { + let is_empty = editor.read(cx).text(cx).is_empty(); + let draft_contents_task = if is_empty { + None + } else { + Some(editor.update(cx, |editor, cx| editor.draft_contents(cx))) + }; + this._draft_save_task = Some(cx.spawn(async move |this, cx| { + let draft = if let Some(task) = draft_contents_task { + let blocks = task.await.ok().filter(|b| !b.is_empty()); + blocks + } else { + None + }; + this.update(cx, |this, cx| { + this.thread.update(cx, |thread, _cx| { + thread.set_draft_prompt(draft); + }); + }) + .ok(); + cx.background_executor() + .timer(SERIALIZATION_THROTTLE_TIME) + .await; + this.update(cx, |this, cx| { + if let Some(thread) = this.as_native_thread(cx) { + thread.update(cx, |_thread, cx| cx.notify()); + } + }) + .ok(); + })); + })); + let recent_history_entries = history.read(cx).get_recent_sessions(3); let mut this = Self { @@ -427,6 +463,7 @@ impl ThreadView { is_loading_contents: false, new_server_version_available: None, _cancel_task: None, + _draft_save_task: None, skip_queue_processing_count: 0, user_interrupted_generation: false, can_fast_track_queue: false, diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index 274b076eafbcfab4620c66c027c374025242f821..50b297847b43e4d147978fbcf14dce492fc572d0 100644 --- 
a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -416,7 +416,27 @@ impl MessageEditor { let text = self.editor.read(cx).text(cx); let available_commands = self.available_commands.borrow().clone(); let agent_name = self.agent_name.clone(); + let build_task = self.build_content_blocks(full_mention_content, cx); + cx.spawn(async move |_, _cx| { + Self::validate_slash_commands(&text, &available_commands, &agent_name)?; + build_task.await + }) + } + + pub fn draft_contents(&self, cx: &mut Context) -> Task>> { + let build_task = self.build_content_blocks(false, cx); + cx.spawn(async move |_, _cx| { + let (blocks, _tracked_buffers) = build_task.await?; + Ok(blocks) + }) + } + + fn build_content_blocks( + &self, + full_mention_content: bool, + cx: &mut Context, + ) -> Task, Vec>)>> { let contents = self .mention_set .update(cx, |store, cx| store.contents(full_mention_content, cx)); @@ -424,18 +444,16 @@ impl MessageEditor { let supports_embedded_context = self.prompt_capabilities.borrow().embedded_context; cx.spawn(async move |_, cx| { - Self::validate_slash_commands(&text, &available_commands, &agent_name)?; - let contents = contents.await?; let mut all_tracked_buffers = Vec::new(); let result = editor.update(cx, |editor, cx| { + let text = editor.text(cx); let (mut ix, _) = text .char_indices() .find(|(_, c)| !c.is_whitespace()) .unwrap_or((0, '\0')); let mut chunks: Vec = Vec::new(); - let text = editor.text(cx); editor.display_map.update(cx, |map, cx| { let snapshot = map.snapshot(cx); for (crease_id, crease) in snapshot.crease_snapshot.creases() { From 3f6a6cfabf9f4a58a235e102c04040e6b736d113 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 2 Mar 2026 19:50:52 -0700 Subject: [PATCH 254/548] Truncate tabs in filenames (#50550) Closes #19208 Authored-By: @ngauder Release Notes: - Editor: truncate long file names in tab titles --------- Co-authored-by: Nikolas Gauder Co-authored-by: Zed Zippy 
<234243425+zed-zippy[bot]@users.noreply.github.com> --- crates/editor/src/items.rs | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index 685387342caf8e705a3648cb07acaa1867db55d8..1a79414ddc3aa57397d964d4e0af0d87bedc9c3b 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -735,10 +735,13 @@ impl Item for Editor { h_flex() .gap_2() .child( - Label::new(self.title(cx).to_string()) - .color(label_color) - .when(params.preview, |this| this.italic()) - .when(was_deleted, |this| this.strikethrough()), + Label::new(util::truncate_and_trailoff( + &self.title(cx), + MAX_TAB_TITLE_LEN, + )) + .color(label_color) + .when(params.preview, |this| this.italic()) + .when(was_deleted, |this| this.strikethrough()), ) .when_some(description, |this, description| { this.child( From 815c6f5141bd556c0c642e08f658a9f614142410 Mon Sep 17 00:00:00 2001 From: dybucc <149513579+dybucc@users.noreply.github.com> Date: Tue, 3 Mar 2026 05:28:12 +0100 Subject: [PATCH 255/548] Add small alloc and lookup optimizations (#49041) A vector was being instantiated when the callsite only required an iterable. Another part of the code was performing multiple `contains()` lookups on a vector, and now it does it on a hashed set. This change has required some extra modifications across the codebase, but the affected sites are minimal and have been adjusted without major issues. The changes include some `Hash` derived implementations, which were proposed in the original `lsp-types` in [[1]], and maybe could be merged into Zed's fork. I went ahead and used a newtype with a custom `Hash` implementation that simply called on the structure's public members' implementations of `Hash`. The next change includes the removal of a check of the request capabilities after having already checked the same thing in the call to `to_lsp_params_or_response()` right before. 
The result of the `match` expression should already have returned a `Task::ready()` if the above mentioned function failed in performing the check that was later repeated and now removed. Finally, in the `edits_from_lsp()` method, stable sorting was being performed when only unstable sorting would suffice. The method can only sort with respect to the key data, and not the satellite data, as the latter are the literal strings of the edit. It matters not which one of a sequence of overlapping edits (with same ranges that thus resolve the edits for equivalence) should come before the other. [1]: https://github.com/gluon-lang/lsp-types/pull/295/changes#diff-b1a35a68f14e696205874893c07fd24fdb88882b47c23cc0e0c80a30c7d53759R540 - [ ] Tests or screenshots needed? - [ ] Code Reviewed - [ ] Manual QA Release Notes: - Removed a vector allocation where the callsite only required an iterable. - Improved multiple lookup operations when deserializing LSP edit operations. - Removed a double-check of capabilities after requesting and thus determining LSP capabilities. - Replaced stable sorting with unstable sorting of edits returned by the LSP. 
--- crates/editor/src/editor.rs | 2 +- crates/project/src/lsp_command.rs | 2 +- crates/project/src/lsp_store.rs | 6 +----- 3 files changed, 3 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index efc3cddcc8549df6d832e726c77f2dda600adaa4..28d96e721257eaad898408cafba67f9f991e4909 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -15365,7 +15365,7 @@ impl Editor { pub fn select_all(&mut self, _: &SelectAll, window: &mut Window, cx: &mut Context) { self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { - s.select_ranges(vec![Anchor::min()..Anchor::max()]); + s.select_ranges([Anchor::min()..Anchor::max()]); }); } diff --git a/crates/project/src/lsp_command.rs b/crates/project/src/lsp_command.rs index bd94378433d7a8d992b913258999a6004b8031f2..67edd6c13ca5a850a99f28dee849718d9e7ec9ae 100644 --- a/crates/project/src/lsp_command.rs +++ b/crates/project/src/lsp_command.rs @@ -533,7 +533,7 @@ impl LspCommand for PerformRename { .rename_provider .is_some_and(|capability| match capability { OneOf::Left(enabled) => enabled, - OneOf::Right(_options) => true, + OneOf::Right(_) => true, }) } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 45111adf9eb45c3a2595ab557e1fbe986d041610..75f9702e12cf31ce4f555940d7d1918884bbc22a 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -3158,7 +3158,7 @@ impl LocalLspStore { .map(|edit| (range_from_lsp(edit.range), edit.new_text)) .collect::>(); - lsp_edits.sort_by_key(|(range, _)| (range.start, range.end)); + lsp_edits.sort_unstable_by_key(|(range, _)| (range.start, range.end)); let mut lsp_edits = lsp_edits.into_iter().peekable(); let mut edits = Vec::new(); @@ -5001,10 +5001,6 @@ impl LspStore { }; let status = request.status(); - if !request.check_capabilities(language_server.adapter_server_capabilities()) { - 
return Task::ready(Ok(Default::default())); - } - let request_timeout = ProjectSettings::get_global(cx) .global_lsp_settings .get_request_timeout(); From 880b2e512b31158a43637a1bfe47cb4492884a5f Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 2 Mar 2026 22:24:55 -0800 Subject: [PATCH 256/548] Remove Zeta1 logic, allow choosing zeta2 experiment (#50560) Release Notes: - N/A --- crates/edit_prediction/src/edit_prediction.rs | 105 +++++-- .../src/edit_prediction_tests.rs | 17 +- crates/edit_prediction/src/fim.rs | 6 +- crates/edit_prediction/src/mercury.rs | 34 ++- crates/edit_prediction/src/prediction.rs | 5 +- crates/edit_prediction/src/sweep_ai.rs | 14 +- crates/edit_prediction/src/zeta.rs | 260 ++++++++---------- .../edit_prediction_cli/src/format_prompt.rs | 6 +- .../edit_prediction_cli/src/load_project.rs | 7 +- crates/edit_prediction_cli/src/predict.rs | 4 +- .../edit_prediction_cli/src/pull_examples.rs | 9 +- .../src/reversal_tracking.rs | 14 +- .../src/edit_prediction_button.rs | 50 ++++ crates/settings_content/src/language.rs | 5 +- .../zed/src/zed/edit_prediction_registry.rs | 44 ++- crates/zeta_prompt/src/zeta_prompt.rs | 85 +++--- 16 files changed, 379 insertions(+), 286 deletions(-) diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index b25ccee37970f2dc0dfa8bcbec4b1cdcdfe6d506..e6e3a9abdf83deb785cd56d358b065973682b8cc 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -137,10 +137,13 @@ pub struct EditPredictionStore { user_store: Entity, llm_token: LlmApiToken, _llm_token_subscription: Subscription, + _fetch_experiments_task: Task<()>, projects: HashMap, update_required: bool, edit_prediction_model: EditPredictionModel, zeta2_raw_config: Option, + preferred_experiment: Option, + available_experiments: Vec, pub sweep_ai: SweepAi, pub mercury: Mercury, data_collection_choice: DataCollectionChoice, @@ -154,8 +157,7 @@ pub struct 
EditPredictionStore { #[derive(Copy, Clone, PartialEq, Eq)] pub enum EditPredictionModel { - Zeta1, - Zeta2, + Zeta, Fim { format: EditPredictionPromptFormat }, Sweep, Mercury, @@ -699,11 +701,23 @@ impl EditPredictionStore { }) .detach(); + let mut current_user = user_store.read(cx).watch_current_user(); + let fetch_experiments_task = cx.spawn(async move |this, cx| { + while current_user.borrow().is_none() { + current_user.next().await; + } + this.update(cx, |this, cx| { + this.refresh_available_experiments(cx); + }) + .log_err(); + }); + let this = Self { projects: HashMap::default(), client, user_store, llm_token, + _fetch_experiments_task: fetch_experiments_task, _llm_token_subscription: cx.subscribe( &refresh_llm_token_listener, |this, _listener, _event, cx| { @@ -717,8 +731,10 @@ impl EditPredictionStore { }, ), update_required: false, - edit_prediction_model: EditPredictionModel::Zeta2, + edit_prediction_model: EditPredictionModel::Zeta, zeta2_raw_config: Self::zeta2_raw_config_from_env(), + preferred_experiment: None, + available_experiments: Vec::new(), sweep_ai: SweepAi::new(cx), mercury: Mercury::new(cx), @@ -753,6 +769,60 @@ impl EditPredictionStore { self.zeta2_raw_config.as_ref() } + pub fn preferred_experiment(&self) -> Option<&str> { + self.preferred_experiment.as_deref() + } + + pub fn set_preferred_experiment(&mut self, experiment: Option) { + self.preferred_experiment = experiment; + } + + pub fn available_experiments(&self) -> &[String] { + &self.available_experiments + } + + pub fn refresh_available_experiments(&mut self, cx: &mut Context) { + let client = self.client.clone(); + let llm_token = self.llm_token.clone(); + let app_version = AppVersion::global(cx); + cx.spawn(async move |this, cx| { + let experiments = cx + .background_spawn(async move { + let http_client = client.http_client(); + let token = llm_token.acquire(&client).await?; + let url = http_client.build_zed_llm_url("/edit_prediction_experiments", &[])?; + let request = 
http_client::Request::builder() + .method(Method::GET) + .uri(url.as_ref()) + .header("Authorization", format!("Bearer {}", token)) + .header(ZED_VERSION_HEADER_NAME, app_version.to_string()) + .body(Default::default())?; + let mut response = http_client.send(request).await?; + if response.status().is_success() { + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; + let experiments: Vec = serde_json::from_slice(&body)?; + Ok(experiments) + } else { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + anyhow::bail!( + "Failed to fetch experiments: {:?}\nBody: {}", + response.status(), + body + ); + } + }) + .await?; + this.update(cx, |this, cx| { + this.available_experiments = experiments; + cx.notify(); + })?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + pub fn icons(&self, cx: &App) -> edit_prediction_types::EditPredictionIconSet { use ui::IconName; match self.edit_prediction_model { @@ -766,7 +836,7 @@ impl EditPredictionStore { EditPredictionModel::Mercury => { edit_prediction_types::EditPredictionIconSet::new(IconName::Inception) } - EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => { + EditPredictionModel::Zeta => { edit_prediction_types::EditPredictionIconSet::new(IconName::ZedPredict) .with_disabled(IconName::ZedPredictDisabled) .with_up(IconName::ZedPredictUp) @@ -895,10 +965,7 @@ impl EditPredictionStore { } pub fn usage(&self, cx: &App) -> Option { - if matches!( - self.edit_prediction_model, - EditPredictionModel::Zeta2 | EditPredictionModel::Zeta1 - ) { + if matches!(self.edit_prediction_model, EditPredictionModel::Zeta) { self.user_store.read(cx).edit_prediction_usage() } else { None @@ -1347,7 +1414,7 @@ impl EditPredictionStore { cx, ); } - EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => { + EditPredictionModel::Zeta => { let is_cloud = !matches!( all_language_settings(None, cx).edit_predictions.provider, EditPredictionProvider::Ollama | 
EditPredictionProvider::OpenAiCompatibleApi @@ -1608,7 +1675,7 @@ impl EditPredictionStore { cx: &App, ) { match self.edit_prediction_model { - EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 => { + EditPredictionModel::Zeta => { let is_cloud = !matches!( all_language_settings(None, cx).edit_predictions.provider, EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi @@ -2103,10 +2170,7 @@ impl EditPredictionStore { let can_collect_data = !cfg!(test) && is_open_source && self.is_data_collection_enabled(cx) - && matches!( - self.edit_prediction_model, - EditPredictionModel::Zeta1 | EditPredictionModel::Zeta2 - ); + && matches!(self.edit_prediction_model, EditPredictionModel::Zeta); let inputs = EditPredictionModelInput { project: project.clone(), @@ -2138,18 +2202,7 @@ impl EditPredictionStore { } let task = match self.edit_prediction_model { - EditPredictionModel::Zeta1 => zeta::request_prediction_with_zeta( - self, - inputs, - Some(zeta_prompt::EditPredictionModelKind::Zeta1), - cx, - ), - EditPredictionModel::Zeta2 => zeta::request_prediction_with_zeta( - self, - inputs, - Some(zeta_prompt::EditPredictionModelKind::Zeta2), - cx, - ), + EditPredictionModel::Zeta => zeta::request_prediction_with_zeta(self, inputs, cx), EditPredictionModel::Fim { format } => fim::request_prediction(inputs, format, cx), EditPredictionModel::Sweep => self.sweep_ai.request_prediction_with_sweep(inputs, cx), EditPredictionModel::Mercury => self.mercury.request_prediction(inputs, cx), diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index beeb855c7b84bae53ea2f8f8bd6a117403e77db1..cc3bb84808981fd1430f9e71aa796e590cc78169 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -1704,12 +1704,8 @@ async fn test_rejections_flushing(cx: &mut TestAppContext) { // Generate a model response that would apply the given diff to the 
active file. fn model_response(request: &PredictEditsV3Request, diff_to_apply: &str) -> PredictEditsV3Response { - let editable_range = request - .input - .excerpt_ranges - .as_ref() - .map(|r| zeta_prompt::excerpt_range_for_format(Default::default(), r).1) - .unwrap_or(request.input.editable_range_in_excerpt.clone()); + let editable_range = + zeta_prompt::excerpt_range_for_format(Default::default(), &request.input.excerpt_ranges).1; let excerpt = request.input.cursor_excerpt[editable_range.clone()].to_string(); let new_excerpt = apply_diff_to_string(diff_to_apply, &excerpt).unwrap(); @@ -1846,11 +1842,10 @@ async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) { related_files: Default::default(), cursor_path: Path::new("").into(), cursor_excerpt: "".into(), - editable_range_in_excerpt: 0..0, cursor_offset_in_excerpt: 0, excerpt_start_row: None, - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: Default::default(), + experiment: None, in_open_source_repo: false, can_collect_data: false, }, @@ -2183,7 +2178,7 @@ async fn make_test_ep_store( let ep_store = cx.new(|cx| { let mut ep_store = EditPredictionStore::new(client, project.read(cx).user_store(), cx); - ep_store.set_edit_prediction_model(EditPredictionModel::Zeta1); + ep_store.set_edit_prediction_model(EditPredictionModel::Zeta); let worktrees = project.read(cx).worktrees(cx).collect::>(); for worktree in worktrees { @@ -2282,7 +2277,7 @@ async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut cx.background_executor.run_until_parked(); let completion_task = ep_store.update(cx, |ep_store, cx| { - ep_store.set_edit_prediction_model(EditPredictionModel::Zeta1); + ep_store.set_edit_prediction_model(EditPredictionModel::Zeta); ep_store.request_prediction(&project, &buffer, cursor, Default::default(), cx) }); diff --git a/crates/edit_prediction/src/fim.rs b/crates/edit_prediction/src/fim.rs index 
dda008133d3726f5e7ba32ec05c770878d16585f..66f2e58a3b01b4fbf49b11864db4daec6b4dc1c2 100644 --- a/crates/edit_prediction/src/fim.rs +++ b/crates/edit_prediction/src/fim.rs @@ -72,16 +72,14 @@ pub fn request_prediction( events, related_files: Vec::new(), cursor_offset_in_excerpt: cursor_offset - excerpt_offset_range.start, - editable_range_in_excerpt: cursor_offset - excerpt_offset_range.start - ..cursor_offset - excerpt_offset_range.start, cursor_path: full_path.clone(), excerpt_start_row: Some(excerpt_range.start.row), cursor_excerpt: snapshot .text_for_range(excerpt_range) .collect::() .into(), - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: Default::default(), + experiment: None, in_open_source_repo: false, can_collect_data: false, }; diff --git a/crates/edit_prediction/src/mercury.rs b/crates/edit_prediction/src/mercury.rs index f3adba55e620e77ffd7bb12b0e950fd4d3f011fc..bf9b43d528db1717f54143e4805e41aefc81f64a 100644 --- a/crates/edit_prediction/src/mercury.rs +++ b/crates/edit_prediction/src/mercury.rs @@ -16,7 +16,7 @@ use release_channel::AppVersion; use serde::Serialize; use std::{mem, ops::Range, path::Path, sync::Arc, time::Instant}; -use zeta_prompt::ZetaPromptInput; +use zeta_prompt::{ExcerptRanges, ZetaPromptInput}; const MERCURY_API_URL: &str = "https://api.inceptionlabs.ai/v1/edit/completions"; const MAX_REWRITE_TOKENS: usize = 150; @@ -83,6 +83,12 @@ impl Mercury { let editable_offset_range = editable_range.to_offset(&snapshot); + let editable_range_in_excerpt = (editable_offset_range.start + - context_offset_range.start) + ..(editable_offset_range.end - context_offset_range.start); + let context_range_in_excerpt = + 0..(context_offset_range.end - context_offset_range.start); + let inputs = zeta_prompt::ZetaPromptInput { events, related_files, @@ -93,12 +99,17 @@ impl Mercury { .text_for_range(context_range) .collect::() .into(), - editable_range_in_excerpt: (editable_offset_range.start - - context_offset_range.start) - 
..(editable_offset_range.end - context_offset_range.start), + experiment: None, excerpt_start_row: Some(context_start_row), - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: editable_range_in_excerpt.clone(), + editable_180: editable_range_in_excerpt.clone(), + editable_350: editable_range_in_excerpt.clone(), + editable_150_context_350: context_range_in_excerpt.clone(), + editable_180_context_350: context_range_in_excerpt.clone(), + editable_350_context_150: context_range_in_excerpt.clone(), + ..Default::default() + }, in_open_source_repo: false, can_collect_data: false, }; @@ -273,19 +284,18 @@ fn build_prompt(inputs: &ZetaPromptInput) -> String { prompt.push_str(inputs.cursor_path.as_os_str().to_string_lossy().as_ref()); prompt.push('\n'); - prompt.push_str(&inputs.cursor_excerpt[0..inputs.editable_range_in_excerpt.start]); + let editable_range = &inputs.excerpt_ranges.editable_350; + prompt.push_str(&inputs.cursor_excerpt[0..editable_range.start]); push_delimited(prompt, CODE_TO_EDIT_START..CODE_TO_EDIT_END, |prompt| { prompt.push_str( - &inputs.cursor_excerpt - [inputs.editable_range_in_excerpt.start..inputs.cursor_offset_in_excerpt], + &inputs.cursor_excerpt[editable_range.start..inputs.cursor_offset_in_excerpt], ); prompt.push_str(CURSOR_TAG); prompt.push_str( - &inputs.cursor_excerpt - [inputs.cursor_offset_in_excerpt..inputs.editable_range_in_excerpt.end], + &inputs.cursor_excerpt[inputs.cursor_offset_in_excerpt..editable_range.end], ); }); - prompt.push_str(&inputs.cursor_excerpt[inputs.editable_range_in_excerpt.end..]); + prompt.push_str(&inputs.cursor_excerpt[editable_range.end..]); }, ); diff --git a/crates/edit_prediction/src/prediction.rs b/crates/edit_prediction/src/prediction.rs index 9c17f29fe29bc711f6750cf6fe24586067bfc619..0dd33c03a95d77ec680d47d96daa8e6a44f51b62 100644 --- a/crates/edit_prediction/src/prediction.rs +++ b/crates/edit_prediction/src/prediction.rs @@ -160,10 +160,9 @@ mod tests { 
cursor_path: Path::new("path.txt").into(), cursor_offset_in_excerpt: 0, cursor_excerpt: "".into(), - editable_range_in_excerpt: 0..0, excerpt_start_row: None, - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: Default::default(), + experiment: None, in_open_source_repo: false, can_collect_data: false, }, diff --git a/crates/edit_prediction/src/sweep_ai.rs b/crates/edit_prediction/src/sweep_ai.rs index 5a9fcf0e6ce7bfa5476d6c48245068994178f7bc..d88a159a47aa7633a5b064e72a75dd61604710e1 100644 --- a/crates/edit_prediction/src/sweep_ai.rs +++ b/crates/edit_prediction/src/sweep_ai.rs @@ -215,12 +215,18 @@ impl SweepAi { related_files: inputs.related_files.clone(), cursor_path: full_path.clone(), cursor_excerpt: request_body.file_contents.clone().into(), - // we actually don't know - editable_range_in_excerpt: 0..inputs.snapshot.len(), cursor_offset_in_excerpt: request_body.cursor_position, excerpt_start_row: Some(0), - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: zeta_prompt::ExcerptRanges { + editable_150: 0..inputs.snapshot.len(), + editable_180: 0..inputs.snapshot.len(), + editable_350: 0..inputs.snapshot.len(), + editable_150_context_350: 0..inputs.snapshot.len(), + editable_180_context_350: 0..inputs.snapshot.len(), + editable_350_context_150: 0..inputs.snapshot.len(), + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, }; diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index abcfeabec44b26405153c10c43e6c2739e5e802e..f6a786572736908556535b9131c1cf7814a6126f 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -13,14 +13,15 @@ use gpui::{App, AppContext as _, Task, http_client, prelude::*}; use language::language_settings::{OpenAiCompatibleEditPredictionSettings, all_language_settings}; use language::{BufferSnapshot, ToOffset as _, ToPoint, text_diff}; use release_channel::AppVersion; +use 
settings::EditPredictionPromptFormat; use text::{Anchor, Bias}; use std::env; use std::ops::Range; use std::{path::Path, sync::Arc, time::Instant}; use zeta_prompt::{ - CURSOR_MARKER, EditPredictionModelKind, ZetaFormat, clean_zeta2_model_output, - format_zeta_prompt, get_prefill, prompt_input_contains_special_tokens, + CURSOR_MARKER, ZetaFormat, clean_zeta2_model_output, format_zeta_prompt, get_prefill, + prompt_input_contains_special_tokens, zeta1::{self, EDITABLE_REGION_END_MARKER}, }; @@ -39,7 +40,6 @@ pub fn request_prediction_with_zeta( is_open_source, .. }: EditPredictionModelInput, - preferred_model: Option, cx: &mut Context, ) -> Task>> { let settings = &all_language_settings(None, cx).edit_predictions; @@ -55,6 +55,7 @@ pub fn request_prediction_with_zeta( let http_client = cx.http_client(); let buffer_snapshotted_at = Instant::now(); let raw_config = store.zeta2_raw_config().cloned(); + let preferred_experiment = store.preferred_experiment().map(|s| s.to_owned()); let excerpt_path: Arc = snapshot .file() @@ -80,8 +81,7 @@ pub fn request_prediction_with_zeta( events, excerpt_path, cursor_offset, - zeta_version, - preferred_model, + preferred_experiment, is_open_source, can_collect_data, ); @@ -90,22 +90,8 @@ pub fn request_prediction_with_zeta( return Ok((None, None)); } - let is_zeta1 = preferred_model == Some(EditPredictionModelKind::Zeta1); - let excerpt_ranges = prompt_input - .excerpt_ranges - .as_ref() - .ok_or_else(|| anyhow::anyhow!("excerpt_ranges missing from prompt input"))?; - if let Some(debug_tx) = &debug_tx { - let prompt = if is_zeta1 { - zeta1::format_zeta1_from_input( - &prompt_input, - excerpt_ranges.editable_350.clone(), - excerpt_ranges.editable_350_context_150.clone(), - ) - } else { - format_zeta_prompt(&prompt_input, zeta_version) - }; + let prompt = format_zeta_prompt(&prompt_input, zeta_version); debug_tx .unbounded_send(DebugEvent::EditPredictionStarted( EditPredictionStartedDebugEvent { @@ -119,130 +105,133 @@ pub fn 
request_prediction_with_zeta( log::trace!("Sending edit prediction request"); - let (request_id, output_text, model_version, usage) = if let Some(custom_settings) = - &custom_server_settings - { - let max_tokens = custom_settings.max_output_tokens * 4; - - if is_zeta1 { - let ranges = excerpt_ranges; - let prompt = zeta1::format_zeta1_from_input( - &prompt_input, - ranges.editable_350.clone(), - ranges.editable_350_context_150.clone(), - ); - editable_range_in_excerpt = ranges.editable_350.clone(); - let stop_tokens = vec![ - EDITABLE_REGION_END_MARKER.to_string(), - format!("{EDITABLE_REGION_END_MARKER}\n"), - format!("{EDITABLE_REGION_END_MARKER}\n\n"), - format!("{EDITABLE_REGION_END_MARKER}\n\n\n"), - ]; - - let (response_text, request_id) = send_custom_server_request( - provider, - custom_settings, + let (request_id, output_text, model_version, usage) = + if let Some(custom_settings) = &custom_server_settings { + let max_tokens = custom_settings.max_output_tokens * 4; + + match custom_settings.prompt_format { + EditPredictionPromptFormat::Zeta => { + let ranges = &prompt_input.excerpt_ranges; + let prompt = zeta1::format_zeta1_from_input( + &prompt_input, + ranges.editable_350.clone(), + ranges.editable_350_context_150.clone(), + ); + editable_range_in_excerpt = ranges.editable_350.clone(); + let stop_tokens = vec![ + EDITABLE_REGION_END_MARKER.to_string(), + format!("{EDITABLE_REGION_END_MARKER}\n"), + format!("{EDITABLE_REGION_END_MARKER}\n\n"), + format!("{EDITABLE_REGION_END_MARKER}\n\n\n"), + ]; + + let (response_text, request_id) = send_custom_server_request( + provider, + custom_settings, + prompt, + max_tokens, + stop_tokens, + &http_client, + ) + .await?; + + let request_id = EditPredictionId(request_id.into()); + let output_text = zeta1::clean_zeta1_model_output(&response_text); + + (request_id, output_text, None, None) + } + EditPredictionPromptFormat::Zeta2 => { + let prompt = format_zeta_prompt(&prompt_input, zeta_version); + let prefill = 
get_prefill(&prompt_input, zeta_version); + let prompt = format!("{prompt}{prefill}"); + + editable_range_in_excerpt = zeta_prompt::excerpt_range_for_format( + zeta_version, + &prompt_input.excerpt_ranges, + ) + .0; + + let (response_text, request_id) = send_custom_server_request( + provider, + custom_settings, + prompt, + max_tokens, + vec![], + &http_client, + ) + .await?; + + let request_id = EditPredictionId(request_id.into()); + let output_text = if response_text.is_empty() { + None + } else { + let output = format!("{prefill}{response_text}"); + Some(clean_zeta2_model_output(&output, zeta_version).to_string()) + }; + + (request_id, output_text, None, None) + } + _ => anyhow::bail!("unsupported prompt format"), + } + } else if let Some(config) = &raw_config { + let prompt = format_zeta_prompt(&prompt_input, config.format); + let prefill = get_prefill(&prompt_input, config.format); + let prompt = format!("{prompt}{prefill}"); + let request = RawCompletionRequest { + model: config.model_id.clone().unwrap_or_default(), prompt, - max_tokens, - stop_tokens, - &http_client, + temperature: None, + stop: vec![], + max_tokens: Some(2048), + environment: Some(config.format.to_string().to_lowercase()), + }; + + editable_range_in_excerpt = zeta_prompt::excerpt_range_for_format( + config.format, + &prompt_input.excerpt_ranges, + ) + .1; + + let (mut response, usage) = EditPredictionStore::send_raw_llm_request( + request, + client, + None, + llm_token, + app_version, ) .await?; - let request_id = EditPredictionId(request_id.into()); - let output_text = zeta1::clean_zeta1_model_output(&response_text); + let request_id = EditPredictionId(response.id.clone().into()); + let output_text = response.choices.pop().map(|choice| { + let response = &choice.text; + let output = format!("{prefill}{response}"); + clean_zeta2_model_output(&output, config.format).to_string() + }); - (request_id, output_text, None, None) + (request_id, output_text, None, usage) } else { - let prompt = 
format_zeta_prompt(&prompt_input, zeta_version); - let prefill = get_prefill(&prompt_input, zeta_version); - let prompt = format!("{prompt}{prefill}"); - - editable_range_in_excerpt = prompt_input - .excerpt_ranges - .as_ref() - .map(|ranges| zeta_prompt::excerpt_range_for_format(zeta_version, ranges).0) - .unwrap_or(prompt_input.editable_range_in_excerpt.clone()); - - let (response_text, request_id) = send_custom_server_request( - provider, - custom_settings, - prompt, - max_tokens, - vec![], - &http_client, + // Use V3 endpoint - server handles model/version selection and suffix stripping + let (response, usage) = EditPredictionStore::send_v3_request( + prompt_input.clone(), + client, + llm_token, + app_version, + trigger, ) .await?; - let request_id = EditPredictionId(request_id.into()); - let output_text = if response_text.is_empty() { + let request_id = EditPredictionId(response.request_id.into()); + let output_text = if response.output.is_empty() { None } else { - let output = format!("{prefill}{response_text}"); - Some(clean_zeta2_model_output(&output, zeta_version).to_string()) + Some(response.output) }; + editable_range_in_excerpt = response.editable_range; + let model_version = response.model_version; - (request_id, output_text, None, None) - } - } else if let Some(config) = &raw_config { - let prompt = format_zeta_prompt(&prompt_input, config.format); - let prefill = get_prefill(&prompt_input, config.format); - let prompt = format!("{prompt}{prefill}"); - let request = RawCompletionRequest { - model: config.model_id.clone().unwrap_or_default(), - prompt, - temperature: None, - stop: vec![], - max_tokens: Some(2048), - environment: Some(config.format.to_string().to_lowercase()), + (request_id, output_text, model_version, usage) }; - editable_range_in_excerpt = prompt_input - .excerpt_ranges - .as_ref() - .map(|ranges| zeta_prompt::excerpt_range_for_format(config.format, ranges).1) - .unwrap_or(prompt_input.editable_range_in_excerpt.clone()); - - let (mut 
response, usage) = EditPredictionStore::send_raw_llm_request( - request, - client, - None, - llm_token, - app_version, - ) - .await?; - - let request_id = EditPredictionId(response.id.clone().into()); - let output_text = response.choices.pop().map(|choice| { - let response = &choice.text; - let output = format!("{prefill}{response}"); - clean_zeta2_model_output(&output, config.format).to_string() - }); - - (request_id, output_text, None, usage) - } else { - // Use V3 endpoint - server handles model/version selection and suffix stripping - let (response, usage) = EditPredictionStore::send_v3_request( - prompt_input.clone(), - client, - llm_token, - app_version, - trigger, - ) - .await?; - - let request_id = EditPredictionId(response.request_id.into()); - let output_text = if response.output.is_empty() { - None - } else { - Some(response.output) - }; - editable_range_in_excerpt = response.editable_range; - let model_version = response.model_version; - - (request_id, output_text, model_version, usage) - }; - let received_response_at = Instant::now(); log::trace!("Got edit prediction response"); @@ -373,8 +362,7 @@ pub fn zeta2_prompt_input( events: Vec>, excerpt_path: Arc, cursor_offset: usize, - zeta_format: ZetaFormat, - preferred_model: Option, + preferred_experiment: Option, is_open_source: bool, can_collect_data: bool, ) -> (Range, zeta_prompt::ZetaPromptInput) { @@ -392,11 +380,6 @@ pub fn zeta2_prompt_input( let full_context_start_offset = full_context_offset_range.start; let full_context_start_row = full_context.start.row; - let editable_offset_range = match preferred_model { - Some(EditPredictionModelKind::Zeta1) => excerpt_ranges.editable_350.clone(), - _ => zeta_prompt::excerpt_range_for_format(zeta_format, &excerpt_ranges).0, - }; - let cursor_offset_in_excerpt = cursor_offset - full_context_start_offset; let prompt_input = zeta_prompt::ZetaPromptInput { @@ -405,13 +388,12 @@ pub fn zeta2_prompt_input( .text_for_range(full_context) .collect::() .into(), - 
editable_range_in_excerpt: editable_offset_range, cursor_offset_in_excerpt, excerpt_start_row: Some(full_context_start_row), events, related_files, - excerpt_ranges: Some(excerpt_ranges), - preferred_model, + excerpt_ranges, + experiment: preferred_experiment, in_open_source_repo: is_open_source, can_collect_data, }; diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index 6cdfeef8f569df9277d3417c0134b2c7047bee30..ecacd963023d7d113ea5ad77b61fd1d88306fc95 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -36,12 +36,8 @@ pub async fn run_format_prompt( step_progress.set_substatus("formatting teacher prompt"); let zeta_format = ZetaFormat::default(); - let excerpt_ranges = prompt_inputs - .excerpt_ranges - .as_ref() - .context("prompt_inputs must have excerpt_ranges")?; let (editable_range, context_range) = - excerpt_range_for_format(zeta_format, excerpt_ranges); + excerpt_range_for_format(zeta_format, &prompt_inputs.excerpt_ranges); let prompt = TeacherPrompt::format_prompt(example, editable_range, context_range); example.prompt = Some(ExamplePrompt { diff --git a/crates/edit_prediction_cli/src/load_project.rs b/crates/edit_prediction_cli/src/load_project.rs index 680af6f0168c766c6066a91a8f57fe4573b46403..dcf417c2e8cc70dfcaffdf4b96dbe3b17daa61d4 100644 --- a/crates/edit_prediction_cli/src/load_project.rs +++ b/crates/edit_prediction_cli/src/load_project.rs @@ -93,21 +93,18 @@ pub async fn run_load_project( let cursor_offset_in_excerpt = cursor_offset - full_context_offset_range.start; let excerpt_start_row = Some(full_context_point_range.start.row); - let editable_range_in_excerpt = excerpt_ranges.editable_350.clone(); - ( ZetaPromptInput { cursor_path: example.spec.cursor_path.clone(), cursor_excerpt, - editable_range_in_excerpt, cursor_offset_in_excerpt, excerpt_start_row, events, related_files: existing_related_files, - excerpt_ranges: 
Some(excerpt_ranges), - preferred_model: None, + excerpt_ranges, in_open_source_repo: false, can_collect_data: false, + experiment: None, }, language_name, ) diff --git a/crates/edit_prediction_cli/src/predict.rs b/crates/edit_prediction_cli/src/predict.rs index e02fcbdb425a62fb478b8be36fdd034eede27622..02ba24b8a4f2627b9542254e3d118981737f8318 100644 --- a/crates/edit_prediction_cli/src/predict.rs +++ b/crates/edit_prediction_cli/src/predict.rs @@ -110,8 +110,8 @@ pub async fn run_prediction( ep_store.update(&mut cx, |store, _cx| { let model = match provider { - PredictionProvider::Zeta1 => edit_prediction::EditPredictionModel::Zeta1, - PredictionProvider::Zeta2(_) => edit_prediction::EditPredictionModel::Zeta2, + PredictionProvider::Zeta1 => edit_prediction::EditPredictionModel::Zeta, + PredictionProvider::Zeta2(_) => edit_prediction::EditPredictionModel::Zeta, PredictionProvider::Sweep => edit_prediction::EditPredictionModel::Sweep, PredictionProvider::Mercury => edit_prediction::EditPredictionModel::Mercury, PredictionProvider::Teacher(..) 
diff --git a/crates/edit_prediction_cli/src/pull_examples.rs b/crates/edit_prediction_cli/src/pull_examples.rs index b53a3d5546e1a5697550ed24715f049c36c98178..2f371675b29015795beef550ce5e3956c63751f9 100644 --- a/crates/edit_prediction_cli/src/pull_examples.rs +++ b/crates/edit_prediction_cli/src/pull_examples.rs @@ -1115,11 +1115,8 @@ fn build_settled_example( requested_format: ZetaFormat, zed_version: Option, ) -> Example { - let requested_editable_range = input - .excerpt_ranges - .as_ref() - .map(|ranges| excerpt_range_for_format(requested_format, ranges).0) - .unwrap_or_else(|| input.editable_range_in_excerpt.clone()); + let requested_editable_range = + excerpt_range_for_format(requested_format, &input.excerpt_ranges).0; let base_cursor_excerpt = input.cursor_excerpt.to_string(); @@ -1268,7 +1265,7 @@ fn build_rejected_example( let rejected_patch = build_output_patch( &input.cursor_path, input.cursor_excerpt.as_ref(), - &input.editable_range_in_excerpt, + &input.excerpt_ranges.editable_350, &output, ); let mut example = build_example_from_snowflake( diff --git a/crates/edit_prediction_cli/src/reversal_tracking.rs b/crates/edit_prediction_cli/src/reversal_tracking.rs index cc558939e9aecf826afce77d6205b0ff49ab87bc..2d578c8666f217365ed2ed24ff766ed6f19566d7 100644 --- a/crates/edit_prediction_cli/src/reversal_tracking.rs +++ b/crates/edit_prediction_cli/src/reversal_tracking.rs @@ -655,6 +655,7 @@ mod tests { use super::*; use edit_prediction::udiff::apply_diff_to_string; use indoc::indoc; + use zeta_prompt::ExcerptRanges; fn make_test_prompt_inputs( content: &str, @@ -664,13 +665,20 @@ mod tests { ZetaPromptInput { cursor_path: Arc::from(Path::new("src/test.rs")), cursor_excerpt: content.into(), - editable_range_in_excerpt: 0..content.len(), cursor_offset_in_excerpt: 0, excerpt_start_row, events, related_files: Vec::new(), - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: 0..content.len(), + editable_180: 
0..content.len(), + editable_350: 0..content.len(), + editable_150_context_350: 0..content.len(), + editable_180_context_350: 0..content.len(), + editable_350_context_150: 0..content.len(), + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, } diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index c1fcd78f3f0cee24e6e8d936bf6af56f8d1ebda0..6339c7d6cd9fa1cc40101cc1bf14650a6904b3c7 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -1194,6 +1194,56 @@ impl EditPredictionButton { menu = self.build_language_settings_menu(menu, window, cx); } menu = self.add_provider_switching_section(menu, provider, cx); + + if cx.is_staff() { + if let Some(store) = EditPredictionStore::try_global(cx) { + let store = store.read(cx); + let experiments = store.available_experiments().to_vec(); + let preferred = store.preferred_experiment().map(|s| s.to_owned()); + + let preferred_for_submenu = preferred.clone(); + menu = menu + .separator() + .submenu("Experiment", move |menu, _window, _cx| { + let mut menu = menu.toggleable_entry( + "Default", + preferred_for_submenu.is_none(), + IconPosition::Start, + None, + { + move |_window, cx| { + if let Some(store) = EditPredictionStore::try_global(cx) { + store.update(cx, |store, _cx| { + store.set_preferred_experiment(None); + }); + } + } + }, + ); + for experiment in &experiments { + let is_selected = preferred.as_deref() == Some(experiment.as_str()); + let experiment_name = experiment.clone(); + menu = menu.toggleable_entry( + experiment.clone(), + is_selected, + IconPosition::Start, + None, + move |_window, cx| { + if let Some(store) = EditPredictionStore::try_global(cx) { + store.update(cx, |store, _cx| { + store.set_preferred_experiment(Some( + experiment_name.clone(), + )); + }); + } + }, + ); + } + menu + }); + } + } + menu = 
menu.separator().item( ContextMenuEntry::new("Configure Providers") .icon(IconName::Settings) diff --git a/crates/settings_content/src/language.rs b/crates/settings_content/src/language.rs index db22f3a9e1448dbc529c133fb0195c422f02bc40..d429f53824fd0f4f0a5810bce01b05badcfb9a51 100644 --- a/crates/settings_content/src/language.rs +++ b/crates/settings_content/src/language.rs @@ -123,9 +123,7 @@ impl<'de> Deserialize<'de> for EditPredictionProvider { Content::Experimental(name) if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME => { - EditPredictionProvider::Experimental( - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - ) + EditPredictionProvider::Zed } Content::Experimental(name) => { return Err(D::Error::custom(format!( @@ -240,6 +238,7 @@ pub enum EditPredictionPromptFormat { #[default] Infer, Zeta, + Zeta2, CodeLlama, StarCoder, DeepseekCoder, diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 79b33093d86b306c3b0420f919bd555d9ea4ca7a..67b0d26c88cf0bd254a776834de09fb89d6ea195 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -15,6 +15,8 @@ use std::{cell::RefCell, rc::Rc, sync::Arc}; use ui::Window; pub fn init(client: Arc, user_store: Entity, cx: &mut App) { + edit_prediction::EditPredictionStore::global(&client, &user_store, cx); + let editors: Rc, AnyWindowHandle>>> = Rc::default(); cx.observe_new({ let editors = editors.clone(); @@ -131,9 +133,9 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option None, EditPredictionProvider::Copilot => Some(EditPredictionProviderConfig::Copilot), - EditPredictionProvider::Zed => Some(EditPredictionProviderConfig::Zed( - EditPredictionModel::Zeta1, - )), + EditPredictionProvider::Zed => { + Some(EditPredictionProviderConfig::Zed(EditPredictionModel::Zeta)) + } EditPredictionProvider::Codestral => Some(EditPredictionProviderConfig::Codestral), 
EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi => { let custom_settings = if provider == EditPredictionProvider::Ollama { @@ -153,9 +155,7 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option Option() { - Some(EditPredictionProviderConfig::Zed( - EditPredictionModel::Zeta2, - )) + Some(EditPredictionProviderConfig::Zed(EditPredictionModel::Zeta)) } else { None } @@ -212,8 +210,7 @@ impl EditPredictionProviderConfig { EditPredictionProviderConfig::Copilot => "Copilot", EditPredictionProviderConfig::Codestral => "Codestral", EditPredictionProviderConfig::Zed(model) => match model { - EditPredictionModel::Zeta1 => "Zeta1", - EditPredictionModel::Zeta2 => "Zeta2", + EditPredictionModel::Zeta => "Zeta", EditPredictionModel::Fim { .. } => "FIM", EditPredictionModel::Sweep => "Sweep", EditPredictionModel::Mercury => "Mercury", @@ -311,26 +308,23 @@ fn assign_edit_prediction_provider( let ep_store = edit_prediction::EditPredictionStore::global(client, &user_store, cx); if let Some(project) = editor.project() { - let has_model = ep_store.update(cx, |ep_store, cx| { + ep_store.update(cx, |ep_store, cx| { ep_store.set_edit_prediction_model(model); if let Some(buffer) = &singleton_buffer { ep_store.register_buffer(buffer, project, cx); } - true }); - if has_model { - let provider = cx.new(|cx| { - ZedEditPredictionDelegate::new( - project.clone(), - singleton_buffer, - &client, - &user_store, - cx, - ) - }); - editor.set_edit_prediction_provider(Some(provider), window, cx); - } + let provider = cx.new(|cx| { + ZedEditPredictionDelegate::new( + project.clone(), + singleton_buffer, + &client, + &user_store, + cx, + ) + }); + editor.set_edit_prediction_provider(Some(provider), window, cx); } } } diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index abb1c3ddc74d58d0b300e5e64d77a60a48b83283..0cd37a455397334933dbfa2464c2dbcb72bba456 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ 
b/crates/zeta_prompt/src/zeta_prompt.rs @@ -18,17 +18,10 @@ fn estimate_tokens(bytes: usize) -> usize { bytes / 3 } -/// The client's preferred edit prediction model. The server may override this. -#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] -pub enum EditPredictionModelKind { - Zeta1, - Zeta2, -} - /// Pre-computed byte offset ranges within `cursor_excerpt` for different /// editable and context token budgets. Allows the server to select the /// appropriate ranges for whichever model it uses. -#[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, PartialEq, Hash, Serialize, Deserialize)] pub struct ExcerptRanges { /// Editable region computed with a 150-token budget. pub editable_150: Range, @@ -54,21 +47,16 @@ pub struct ExcerptRanges { pub struct ZetaPromptInput { pub cursor_path: Arc, pub cursor_excerpt: Arc, - pub editable_range_in_excerpt: Range, pub cursor_offset_in_excerpt: usize, #[serde(default, skip_serializing_if = "Option::is_none")] pub excerpt_start_row: Option, pub events: Vec>, pub related_files: Vec, - /// When set, the excerpt was computed with a larger budget (~512 tokens) - /// and these ranges let the server select model-appropriate subsets. - /// When absent, the excerpt IS the context region and - /// `editable_range_in_excerpt` is the only editable range. + /// These ranges let the server select model-appropriate subsets. + pub excerpt_ranges: ExcerptRanges, + /// The name of the edit prediction model experiment to use. #[serde(default, skip_serializing_if = "Option::is_none")] - pub excerpt_ranges: Option, - /// Client's preferred model. The server may override. 
- #[serde(default, skip_serializing_if = "Option::is_none")] - pub preferred_model: Option, + pub experiment: Option, #[serde(default)] pub in_open_source_repo: bool, #[serde(default)] @@ -274,15 +262,7 @@ pub fn resolve_cursor_region( input: &ZetaPromptInput, format: ZetaFormat, ) -> (&str, Range, usize) { - let Some(ranges) = &input.excerpt_ranges else { - return ( - &input.cursor_excerpt, - input.editable_range_in_excerpt.clone(), - input.cursor_offset_in_excerpt, - ); - }; - - let (editable_range, context_range) = excerpt_range_for_format(format, ranges); + let (editable_range, context_range) = excerpt_range_for_format(format, &input.excerpt_ranges); let context_start = context_range.start; let context_text = &input.cursor_excerpt[context_range]; let adjusted_editable = @@ -1159,16 +1139,24 @@ mod tests { events: Vec, related_files: Vec, ) -> ZetaPromptInput { + let context_range = 0..cursor_excerpt.len(); ZetaPromptInput { cursor_path: Path::new("test.rs").into(), cursor_excerpt: cursor_excerpt.into(), - editable_range_in_excerpt: editable_range, cursor_offset_in_excerpt: cursor_offset, excerpt_start_row: None, events: events.into_iter().map(Arc::new).collect(), related_files, - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: editable_range.clone(), + editable_180: editable_range.clone(), + editable_350: editable_range, + editable_150_context_350: context_range.clone(), + editable_180_context_350: context_range.clone(), + editable_350_context_150: context_range, + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, } @@ -1752,13 +1740,20 @@ mod tests { let input = ZetaPromptInput { cursor_path: Path::new("src/main.rs").into(), cursor_excerpt: excerpt.into(), - editable_range_in_excerpt: 15..41, cursor_offset_in_excerpt: 30, excerpt_start_row: Some(0), events: vec![Arc::new(make_event("other.rs", "-old\n+new\n"))], related_files: vec![], - excerpt_ranges: None, - 
preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: 15..41, + editable_180: 15..41, + editable_350: 15..41, + editable_150_context_350: 0..excerpt.len(), + editable_180_context_350: 0..excerpt.len(), + editable_350_context_150: 0..excerpt.len(), + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, }; @@ -1807,13 +1802,20 @@ mod tests { let input = ZetaPromptInput { cursor_path: Path::new("src/main.rs").into(), cursor_excerpt: excerpt.into(), - editable_range_in_excerpt: 0..28, cursor_offset_in_excerpt: 15, excerpt_start_row: Some(10), events: vec![], related_files: vec![], - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: 0..28, + editable_180: 0..28, + editable_350: 0..28, + editable_150_context_350: 0..28, + editable_180_context_350: 0..28, + editable_350_context_150: 0..28, + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, }; @@ -1857,13 +1859,20 @@ mod tests { let input = ZetaPromptInput { cursor_path: Path::new("test.rs").into(), cursor_excerpt: excerpt.into(), - editable_range_in_excerpt: editable_range.clone(), cursor_offset_in_excerpt: 25, excerpt_start_row: Some(0), events: vec![], related_files: vec![], - excerpt_ranges: None, - preferred_model: None, + excerpt_ranges: ExcerptRanges { + editable_150: editable_range.clone(), + editable_180: editable_range.clone(), + editable_350: editable_range.clone(), + editable_150_context_350: context_range.clone(), + editable_180_context_350: context_range.clone(), + editable_350_context_150: context_range.clone(), + ..Default::default() + }, + experiment: None, in_open_source_repo: false, can_collect_data: false, }; From 6808acce93fe170cb40e0dfb99604805336b61aa Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 2 Mar 2026 23:31:11 -0700 Subject: [PATCH 257/548] Fix a few cases where we weren't escaping shell vars correctly (#50562) Release Notes: 
- N/A --- Cargo.lock | 1 + crates/gpui_linux/src/linux/platform.rs | 15 ++-- crates/remote/Cargo.toml | 1 + crates/remote/src/transport/docker.rs | 2 +- crates/remote/src/transport/ssh.rs | 49 +++++++++--- crates/remote/src/transport/wsl.rs | 11 +-- crates/repl/Cargo.toml | 1 + crates/repl/src/kernels/wsl_kernel.rs | 99 +++++++++++++------------ 8 files changed, 108 insertions(+), 71 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9fce90755106d9159fe2bd206058a5a86761fdf1..9f56a58f68f39671c03d591cc8535cbdc4cde6d6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -14240,6 +14240,7 @@ dependencies = [ "serde", "serde_json", "settings", + "shlex", "smol", "telemetry", "terminal", diff --git a/crates/gpui_linux/src/linux/platform.rs b/crates/gpui_linux/src/linux/platform.rs index 924303cc84b5c662847bdde96979239073adbe19..4cd89f35d1e757ca30acd33b1362d147a95b63ef 100644 --- a/crates/gpui_linux/src/linux/platform.rs +++ b/crates/gpui_linux/src/linux/platform.rs @@ -229,17 +229,14 @@ impl Platform for LinuxPlatform

{ log::info!("Restarting process, using app path: {:?}", app_path); // Script to wait for the current process to exit and then restart the app. - let script = format!( - r#" - while kill -0 {pid} 2>/dev/null; do + // Pass dynamic values as positional parameters to avoid shell interpolation issues. + let script = r#" + while kill -0 "$0" 2>/dev/null; do sleep 0.1 done - {app_path} - "#, - pid = app_pid, - app_path = app_path.display() - ); + "$1" + "#; #[allow( clippy::disallowed_methods, @@ -249,6 +246,8 @@ impl Platform for LinuxPlatform

{ .arg("bash") .arg("-c") .arg(script) + .arg(&app_pid) + .arg(&app_path) .process_group(0) .spawn(); diff --git a/crates/remote/Cargo.toml b/crates/remote/Cargo.toml index 50026904a8f1ae9bf1954b8c41383487f59a001b..c08561954ebc0ba47a7bf1ab58092275161679a0 100644 --- a/crates/remote/Cargo.toml +++ b/crates/remote/Cargo.toml @@ -48,3 +48,4 @@ which.workspace = true [dev-dependencies] gpui = { workspace = true, features = ["test-support"] } fs = { workspace = true, features = ["test-support"] } +util = { workspace = true, features = ["test-support"] } diff --git a/crates/remote/src/transport/docker.rs b/crates/remote/src/transport/docker.rs index 1bcf80880ab17ddea63bd56fb54acfddc48db2dd..74076b58e35bd1ea7759927bad255925e7f7d9b9 100644 --- a/crates/remote/src/transport/docker.rs +++ b/crates/remote/src/transport/docker.rs @@ -635,7 +635,7 @@ impl RemoteConnection for DockerExecConnection { for env_var in ["RUST_LOG", "RUST_BACKTRACE", "ZED_GENERATE_MINIDUMPS"] { if let Some(value) = std::env::var(env_var).ok() { docker_args.push("-e".to_string()); - docker_args.push(format!("{}='{}'", env_var, value)); + docker_args.push(format!("{env_var}={value}")); } } diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index d27662dde3656de1e2434273bee554a168198371..42cfc8f86dc34712e6b2cd0e4b5d8f379e443834 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -463,7 +463,7 @@ impl RemoteConnection for SshRemoteConnection { let mut proxy_args = vec![]; for env_var in VARS { if let Some(value) = std::env::var(env_var).ok() { - proxy_args.push(format!("{}='{}'", env_var, value)); + proxy_args.push(format!("{env_var}={value}")); } } proxy_args.push(remote_binary_path.display(self.path_style()).into_owned()); @@ -1666,12 +1666,11 @@ fn build_command_posix( write!(exec, "exec env ")?; for (k, v) in input_env.iter() { - write!( - exec, - "{}={} ", - k, - ssh_shell_kind.try_quote(v).context("shell quoting")? 
- )?; + let assignment = format!("{k}={v}"); + let assignment = ssh_shell_kind + .try_quote(&assignment) + .context("shell quoting")?; + write!(exec, "{assignment} ")?; } if let Some(input_program) = input_program { @@ -1882,7 +1881,7 @@ mod tests { "-q", "-t", "user@host", - "cd \"$HOME/work\" && exec env INPUT_VA=val remote_program arg1 arg2" + "cd \"$HOME/work\" && exec env 'INPUT_VA=val' remote_program arg1 arg2" ] ); assert_eq!(command.env, env); @@ -1918,7 +1917,7 @@ mod tests { "-q", "-t", "user@host", - "cd && exec env INPUT_VA=val /bin/fish -l" + "cd && exec env 'INPUT_VA=val' /bin/fish -l" ] ); assert_eq!(command.env, env); @@ -1926,6 +1925,38 @@ mod tests { Ok(()) } + #[test] + fn test_build_command_quotes_env_assignment() -> Result<()> { + let mut input_env = HashMap::default(); + input_env.insert("ZED$(echo foo)".to_string(), "value".to_string()); + + let command = build_command_posix( + Some("remote_program".to_string()), + &[], + &input_env, + None, + None, + HashMap::default(), + PathStyle::Posix, + "/bin/bash", + ShellKind::Posix, + vec![], + "user@host", + Interactive::No, + )?; + + let remote_command = command + .args + .last() + .context("missing remote command argument")?; + assert!( + remote_command.contains("exec env 'ZED$(echo foo)=value' remote_program"), + "expected env assignment to be quoted, got: {remote_command}" + ); + + Ok(()) + } + #[test] fn scp_args_exclude_port_forward_flags() { let options = SshConnectionOptions { diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index 2eb2aea59abdbe24a3dae168d4399aaa59a9c6e3..5a37e1c65bfe11221b60499779c57f0ce7dca364 100644 --- a/crates/remote/src/transport/wsl.rs +++ b/crates/remote/src/transport/wsl.rs @@ -450,13 +450,10 @@ impl RemoteConnection for WslRemoteConnection { let mut exec = String::from("exec env "); - for (k, v) in env.iter() { - write!( - exec, - "{}={} ", - k, - shell_kind.try_quote(v).context("shell quoting")? 
- )?; + for (key, value) in env.iter() { + let assignment = format!("{key}={value}"); + let assignment = shell_kind.try_quote(&assignment).context("shell quoting")?; + write!(exec, "{assignment} ")?; } if let Some(program) = program { diff --git a/crates/repl/Cargo.toml b/crates/repl/Cargo.toml index 7bf63657bdea126d7a3f77681e587521356f9eb1..c2d6f745d9272651bd90bcdfdc689263958b8b09 100644 --- a/crates/repl/Cargo.toml +++ b/crates/repl/Cargo.toml @@ -47,6 +47,7 @@ runtimelib.workspace = true serde.workspace = true serde_json.workspace = true settings.workspace = true +shlex.workspace = true smol.workspace = true telemetry.workspace = true terminal.workspace = true diff --git a/crates/repl/src/kernels/wsl_kernel.rs b/crates/repl/src/kernels/wsl_kernel.rs index 1cdb774008d6a40e57b0abeeec73e294896c221a..34340c74feeb76cc4822a6ca5d669693cc448334 100644 --- a/crates/repl/src/kernels/wsl_kernel.rs +++ b/crates/repl/src/kernels/wsl_kernel.rs @@ -21,6 +21,7 @@ use std::{ path::PathBuf, sync::Arc, }; + use uuid::Uuid; // Find a set of open ports. This creates a listener with port set to 0. The listener will be closed at the end when it goes out of scope. @@ -56,6 +57,15 @@ impl Debug for WslRunningKernel { } } +fn quote_posix_shell_arguments(arguments: &[String]) -> Result { + let mut quoted_arguments = Vec::with_capacity(arguments.len()); + for argument in arguments { + let quoted = shlex::try_quote(argument).map(|quoted| quoted.into_owned())?; + quoted_arguments.push(quoted); + } + Ok(quoted_arguments.join(" ")) +} + impl WslRunningKernel { pub fn new( kernel_specification: WslKernelSpecification, @@ -129,9 +139,8 @@ impl WslRunningKernel { // `wsl -d --exec ...` // But we need to replace {connection_file} with wsl_connection_path. 
- let argv = kernel_specification.kernelspec.argv; anyhow::ensure!( - !argv.is_empty(), + !kernel_specification.kernelspec.argv.is_empty(), "Empty argv in kernelspec {}", kernel_specification.name ); @@ -182,50 +191,57 @@ impl WslRunningKernel { // We use bash -lc to run in a login shell for proper environment setup let mut kernel_args: Vec = Vec::new(); - if let Some(env) = &kernel_specification.kernelspec.env { - if !env.is_empty() { - kernel_args.push("env".to_string()); - for (k, v) in env { - kernel_args.push(format!("{}={}", k, v)); + let resolved_argv: Vec = kernel_specification + .kernelspec + .argv + .iter() + .map(|arg| { + if arg == "{connection_file}" { + wsl_connection_path.clone() + } else { + arg.clone() } + }) + .collect(); + + let executable = resolved_argv.first().map(String::as_str); + let needs_python_resolution = executable.map_or(false, |executable| { + executable == "python" || executable == "python3" || !executable.starts_with('/') + }); + + let mut env_assignments: Vec = Vec::new(); + if let Some(env) = &kernel_specification.kernelspec.env { + env_assignments.reserve(env.len()); + for (key, value) in env { + let assignment = format!("{key}={value}"); + let assignment = shlex::try_quote(&assignment) + .map(|quoted| quoted.into_owned())?; + env_assignments.push(assignment); } - } - for arg in argv { - if arg == "{connection_file}" { - kernel_args.push(wsl_connection_path.clone()); - } else { - kernel_args.push(arg.clone()); + if !env_assignments.is_empty() { + kernel_args.push("env".to_string()); + kernel_args.extend(env_assignments.iter().cloned()); } } - // because first command is python/python3 we need make sure it's present in the env - let first_cmd = kernel_args.first().map(|arg| { - arg.split_whitespace().next().unwrap_or(arg) - }); - - let needs_python_resolution = first_cmd.map_or(false, |cmd| { - cmd == "python" || cmd == "python3" || !cmd.starts_with('/') - }); + kernel_args.extend(resolved_argv.iter().cloned()); let shell_command 
= if needs_python_resolution { // 1. Check for .venv/bin/python or .venv/bin/python3 in working directory // 2. Fall back to system python3 or python - let rest_args: Vec = kernel_args.iter().skip(1).cloned().collect(); - let rest_string = rest_args - .iter() - .map(|arg| { - if arg.contains(' ') || arg.contains('\'') || arg.contains('"') { - format!("'{}'", arg.replace('\'', "'\\''")) - } else { - arg.clone() - } - }) - .collect::>() - .join(" "); + let rest_args: Vec = resolved_argv.iter().skip(1).cloned().collect(); + let arg_string = quote_posix_shell_arguments(&rest_args)?; + let set_env_command = if env_assignments.is_empty() { + String::new() + } else { + format!("export {}; ", env_assignments.join(" ")) + }; let cd_command = if let Some(wd) = wsl_working_directory.as_ref() { - format!("cd '{}' && ", wd.replace('\'', "'\\''")) + let quoted_wd = shlex::try_quote(wd) + .map(|quoted| quoted.into_owned())?; + format!("cd {quoted_wd} && ") } else { String::new() }; @@ -233,6 +249,7 @@ impl WslRunningKernel { format!( "set -e; \ + {} \ {} \ echo \"Working directory: $(pwd)\" >&2; \ if [ -x .venv/bin/python ]; then \ @@ -254,20 +271,10 @@ impl WslRunningKernel { echo 'PATH:' \"$PATH\" >&2; \ exit 127; \ fi", - cd_command, rest_string, rest_string, rest_string, rest_string + cd_command, set_env_command, arg_string, arg_string, arg_string, arg_string ) } else { - kernel_args - .iter() - .map(|arg| { - if arg.contains(' ') || arg.contains('\'') || arg.contains('"') { - format!("'{}'", arg.replace('\'', "'\\''")) - } else { - arg.clone() - } - }) - .collect::>() - .join(" ") + quote_posix_shell_arguments(&kernel_args)? 
}; cmd.arg("bash") From 502460a00594fad4fb7911a3c3fb5c458aa6a25a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 3 Mar 2026 09:37:58 +0100 Subject: [PATCH 258/548] zlog: Fix log file rotation never truncating original file (#50571) Release Notes: - Fixed an issue where the log file and log file backup would keep grow indefinitely --- crates/zlog/src/sink.rs | 122 ++++++++++++++++++++++------------------ 1 file changed, 67 insertions(+), 55 deletions(-) diff --git a/crates/zlog/src/sink.rs b/crates/zlog/src/sink.rs index 07e87be1b071f2538e716bb8fd2b692527363fc4..2aea9c957756011689d81618eedcf22979ea2077 100644 --- a/crates/zlog/src/sink.rs +++ b/crates/zlog/src/sink.rs @@ -56,10 +56,9 @@ pub fn init_output_file( path: &'static PathBuf, path_rotate: Option<&'static PathBuf>, ) -> io::Result<()> { - let mut file = std::fs::OpenOptions::new() - .create(true) - .append(true) - .open(path)?; + let mut enabled_sinks_file = ENABLED_SINKS_FILE + .try_lock() + .expect("Log file lock is available during init"); SINK_FILE_PATH .set(path) @@ -70,22 +69,30 @@ pub fn init_output_file( .expect("Init file output should only be called once"); } - let mut enabled_sinks_file = ENABLED_SINKS_FILE - .try_lock() - .expect("Log file lock is available during init"); - - let size_bytes = file.metadata().map_or(0, |metadata| metadata.len()); - if size_bytes >= SINK_FILE_SIZE_BYTES_MAX { - rotate_log_file(&mut file, Some(path), path_rotate, &SINK_FILE_SIZE_BYTES); - } else { - SINK_FILE_SIZE_BYTES.store(size_bytes, Ordering::Release); - } - + let file = open_or_create_log_file(path, path_rotate, SINK_FILE_SIZE_BYTES_MAX)?; + SINK_FILE_SIZE_BYTES.store(file.metadata().map_or(0, |m| m.len()), Ordering::Release); *enabled_sinks_file = Some(file); Ok(()) } +fn open_or_create_log_file( + path: &PathBuf, + path_rotate: Option<&PathBuf>, + sink_file_size_bytes_max: u64, +) -> Result { + let size_bytes = std::fs::metadata(path).map(|metadata| metadata.len()); + match size_bytes { + 
Ok(size_bytes) if size_bytes >= sink_file_size_bytes_max => { + rotate_log_file(Some(path), path_rotate).map(|it| it.unwrap()) + } + _ => std::fs::OpenOptions::new() + .create(true) + .append(true) + .open(path), + } +} + const LEVEL_OUTPUT_STRINGS: [&str; 6] = [ " ", // nop: ERROR = 1 "ERROR", // @@ -144,11 +151,11 @@ pub fn submit(mut record: Record) { record.message ); } - let mut file = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| { + let mut file_guard = ENABLED_SINKS_FILE.lock().unwrap_or_else(|handle| { ENABLED_SINKS_FILE.clear_poison(); handle.into_inner() }); - if let Some(file) = file.as_mut() { + if let Some(file) = file_guard.as_mut() { struct SizedWriter<'a> { file: &'a mut std::fs::File, written: u64, @@ -182,12 +189,16 @@ pub fn submit(mut record: Record) { SINK_FILE_SIZE_BYTES.fetch_add(writer.written, Ordering::AcqRel) + writer.written }; if file_size_bytes > SINK_FILE_SIZE_BYTES_MAX { - rotate_log_file( - file, - SINK_FILE_PATH.get(), - SINK_FILE_PATH_ROTATE.get(), - &SINK_FILE_SIZE_BYTES, - ); + *file_guard = None; + let file = rotate_log_file(SINK_FILE_PATH.get(), SINK_FILE_PATH_ROTATE.get()); + match file { + Ok(Some(file)) => *file_guard = Some(file), + Ok(None) => {} + Err(e) => { + eprintln!("Failed to open log file: {e}") + } + } + SINK_FILE_SIZE_BYTES.store(0, Ordering::Release); } } } @@ -247,19 +258,13 @@ impl std::fmt::Display for SourceFmt<'_> { } fn rotate_log_file( - file: &mut fs::File, path: Option, path_rotate: Option, - atomic_size: &AtomicU64, -) where +) -> std::io::Result> +where PathRef: AsRef, { - if let Err(err) = file.flush() { - eprintln!( - "Failed to flush log file before rotating, some logs may be lost: {}", - err - ); - } + let path = path.as_ref().map(PathRef::as_ref); let rotation_error = match (path, path_rotate) { (Some(_), None) => Some(anyhow::anyhow!("No rotation log file path configured")), (None, _) => Some(anyhow::anyhow!("No log file path configured")), @@ -270,46 +275,53 @@ fn rotate_log_file( if let 
Some(err) = rotation_error { eprintln!("Log file rotation failed. Truncating log file anyways: {err}",); } - _ = file.set_len(0); - - // SAFETY: It is safe to set size to 0 even if set_len fails as - // according to the documentation, it only fails if: - // - the file is not writeable: should never happen, - // - the size would cause an overflow (implementation specific): 0 should never cause an overflow - atomic_size.store(0, Ordering::Release); + path.map(|path| { + fs::OpenOptions::new() + .create(true) + .write(true) + .truncate(true) + .open(path) + }) + .transpose() } #[cfg(test)] mod tests { + use super::*; #[test] - fn test_rotate_log_file() { + fn test_open_or_create_log_file_rotate() { let temp_dir = tempfile::tempdir().unwrap(); let log_file_path = temp_dir.path().join("log.txt"); let rotation_log_file_path = temp_dir.path().join("log_rotated.txt"); - let mut file = fs::File::create(&log_file_path).unwrap(); let contents = String::from("Hello, world!"); - file.write_all(contents.as_bytes()).unwrap(); + std::fs::write(&log_file_path, &contents).unwrap(); - let size = AtomicU64::new(contents.len() as u64); - - rotate_log_file( - &mut file, - Some(&log_file_path), - Some(&rotation_log_file_path), - &size, - ); + open_or_create_log_file(&log_file_path, Some(&rotation_log_file_path), 4).unwrap(); assert!(log_file_path.exists()); assert_eq!(log_file_path.metadata().unwrap().len(), 0); assert!(rotation_log_file_path.exists()); - assert_eq!( - std::fs::read_to_string(&rotation_log_file_path).unwrap(), - contents, - ); - assert_eq!(size.load(Ordering::Acquire), 0); + assert_eq!(std::fs::read_to_string(&log_file_path).unwrap(), ""); + } + + #[test] + fn test_open_or_create_log_file() { + let temp_dir = tempfile::tempdir().unwrap(); + let log_file_path = temp_dir.path().join("log.txt"); + let rotation_log_file_path = temp_dir.path().join("log_rotated.txt"); + + let contents = String::from("Hello, world!"); + std::fs::write(&log_file_path, &contents).unwrap(); + + 
open_or_create_log_file(&log_file_path, Some(&rotation_log_file_path), !0).unwrap(); + + assert!(log_file_path.exists()); + assert_eq!(log_file_path.metadata().unwrap().len(), 13); + assert!(!rotation_log_file_path.exists()); + assert_eq!(std::fs::read_to_string(&log_file_path).unwrap(), contents); } /// Regression test, ensuring that if log level values change we are made aware From 3bb5aec7009fcb3f8ffc9a8359920d9a50b00a05 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 3 Mar 2026 10:24:07 +0100 Subject: [PATCH 259/548] agent: Remove unused `edit_agent_output` from edit file tool (#50576) Removes unused `edit_agent_output` from `EditFileTool`. This makes it easier to maintain compatibility between the `EditFileTool` and `StreamingEditFileTool`. Release Notes: - N/A --- crates/agent/src/tools/edit_file_tool.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index b680e3b885f7d002657ee4b0bc384d6d9afaa055..d8c380eba326d089b848563cca04557e903ba0f4 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -3,7 +3,7 @@ use super::save_file_tool::SaveFileTool; use super::tool_permissions::authorize_file_edit; use crate::{ AgentTool, Templates, Thread, ToolCallEventStream, ToolInput, - edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat}, + edit_agent::{EditAgent, EditAgentOutputEvent, EditFormat}, }; use acp_thread::Diff; use agent_client_protocol::{self as acp, ToolCallLocation, ToolCallUpdateFields}; @@ -104,8 +104,6 @@ pub enum EditFileToolOutput { old_text: Arc, #[serde(default)] diff: String, - #[serde(alias = "raw_output")] - edit_agent_output: EditAgentOutput, }, Error { error: String, @@ -436,7 +434,7 @@ impl AgentTool for EditFileTool { } } - let edit_agent_output = output.await?; + output.await?; let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| { let settings = 
language_settings::language_settings( @@ -528,7 +526,6 @@ impl AgentTool for EditFileTool { new_text, old_text, diff: unified_diff, - edit_agent_output, }) }.await; result From 9ba65944df02890b36de6fe38888e3b5154f2c8d Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Tue, 3 Mar 2026 10:25:01 +0100 Subject: [PATCH 260/548] livekit: Correctly handle runtime deps: libva and libva-drm (#50527) Release Notes: - N/A --- Cargo.lock | 15 ++++++++------- Cargo.toml | 4 ++-- crates/zed/Cargo.toml | 3 +++ crates/zed/build.rs | 19 +++++++++++++++++++ nix/build.nix | 1 + script/linux | 4 ++++ 6 files changed, 37 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9f56a58f68f39671c03d591cc8535cbdc4cde6d6..c4dcfa054efa372259880c3a813a5d203e9c1be7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9699,7 +9699,7 @@ dependencies = [ [[package]] name = "libwebrtc" version = "0.3.26" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "cxx", "glib", @@ -9797,7 +9797,7 @@ checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" [[package]] name = "livekit" version = "0.7.32" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "base64 0.22.1", "bmrng", @@ -9823,7 +9823,7 @@ dependencies = [ [[package]] name = "livekit-api" version = "0.4.14" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" +source = 
"git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "base64 0.21.7", "futures-util", @@ -9850,7 +9850,7 @@ dependencies = [ [[package]] name = "livekit-protocol" version = "0.7.1" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "futures-util", "livekit-runtime", @@ -9866,7 +9866,7 @@ dependencies = [ [[package]] name = "livekit-runtime" version = "0.4.0" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "tokio", "tokio-stream", @@ -19841,7 +19841,7 @@ dependencies = [ [[package]] name = "webrtc-sys" version = "0.3.23" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "cc", "cxx", @@ -19855,7 +19855,7 @@ dependencies = [ [[package]] name = "webrtc-sys-build" version = "0.3.13" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=fccf29d1d0d0a139b2e50746b0b9a1bac828fa04#fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" dependencies = [ "anyhow", "fs2", @@ -21791,6 +21791,7 @@ dependencies 
= [ "parking_lot", "paths", "picker", + "pkg-config", "pretty_assertions", "profiling", "project", diff --git a/Cargo.toml b/Cargo.toml index c50b329772669105a7ae3a5f19562fbd186d23ea..98fccfaeb21bc6107323378605c8299d5bd5838f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -835,8 +835,8 @@ notify = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24c notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24cad542c28e04ced02e20325a4ec28a31d" } windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" } calloop = { git = "https://github.com/zed-industries/calloop" } -livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" } -libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "fccf29d1d0d0a139b2e50746b0b9a1bac828fa04" } +livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "37835f840d0070d45ac8b31cce6a6ae7aca3f459" } +libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "37835f840d0070d45ac8b31cce6a6ae7aca3f459" } [profile.dev] split-debuginfo = "unpacked" diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 16385ccf75e245b4e4bf17cf37a1d04ef3ed9c6b..cf8df08c010bfe643b93b5628cf520ee2ec1dd8b 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -241,6 +241,9 @@ gpui = { workspace = true, features = [ ] } ashpd.workspace = true +[target.'cfg(target_os = "linux")'.build-dependencies] +pkg-config = "0.3.22" + [dev-dependencies] call = { workspace = true, features = ["test-support"] } dap = { workspace = true, features = ["test-support"] } diff --git a/crates/zed/build.rs b/crates/zed/build.rs index 7e22752d35d9115efd735bfc7b2690f4bf5680d3..e169760acf16d6caa44aeb2004cd823a355f36ee 100644 --- a/crates/zed/build.rs +++ b/crates/zed/build.rs @@ -2,6 +2,25 @@ use std::process::Command; fn 
main() { + #[cfg(target_os = "linux")] + { + // Add rpaths for libraries that webrtc-sys dlopens at runtime. + // This is mostly required for hosts with non-standard SO installation + // locations such as NixOS. + let dlopened_libs = ["libva", "libva-drm"]; + + let mut rpath_dirs = std::collections::BTreeSet::new(); + for lib in &dlopened_libs { + if let Some(libdir) = pkg_config::get_variable(lib, "libdir").ok() { + rpath_dirs.insert(libdir); + } + } + + for dir in &rpath_dirs { + println!("cargo:rustc-link-arg=-Wl,-rpath,{dir}"); + } + } + if cfg!(target_os = "macos") { println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7"); diff --git a/nix/build.nix b/nix/build.nix index 3ae9ca95506baf05b8a433d1232190773b41321e..68f8a4acdbe83f7e8981659dd0376ec87ef52dfe 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -233,6 +233,7 @@ let lib.makeLibraryPath [ gpu-lib wayland + libva ] }"; diff --git a/script/linux b/script/linux index b3bd03eacc2baf976744ff19b049b7781c330a8e..706fa63b037e290cd7991d3adfa42fac0c0cfe25 100755 --- a/script/linux +++ b/script/linux @@ -155,6 +155,7 @@ if [[ -n $zyp ]]; then cmake fontconfig-devel gcc + libva-devel gcc-c++ glib2-devel git @@ -191,6 +192,7 @@ if [[ -n $pacman ]]; then alsa-lib fontconfig glib2 + libva wayland libgit2 libxcb @@ -222,6 +224,7 @@ if [[ -n $xbps ]]; then alsa-lib-devel fontconfig-devel glib-devel + libva-devel libxcb-devel libxkbcommon-devel libzstd-devel @@ -249,6 +252,7 @@ if [[ -n $emerge ]]; then dev-util/cmake media-libs/alsa-lib media-libs/fontconfig + media-libs/libva media-libs/vulkan-loader x11-libs/libxcb x11-libs/libxkbcommon From 906f5a64e9057e9df19a76248b09c13e668798bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Tue, 3 Mar 2026 11:16:49 +0100 Subject: [PATCH 261/548] agent: Cancel retries when the turn is cancelled (#50580) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When a completion 
request fails with a retryable error (e.g. a 500 from the upstream provider), the retry loop waits on a timer before trying again. This timer did not race with the cancellation signal, so if the user switched models and submitted a new message during the retry delay, the old turn would continue retrying with the stale model for up to 15 seconds — making requests to the wrong provider and corrupting the thread's message list with spurious Resume entries. Now the retry delay races with the cancellation receiver, so the old turn exits immediately when cancelled. Release Notes: - Fixed cancelled turns in a conversation that failed (e.g. 500 from the LLM provider) bein retried even after cancellation --- crates/agent/src/tests/mod.rs | 78 +++++++++++++++++++++++++++++++++++ crates/agent/src/thread.rs | 10 ++++- 2 files changed, 87 insertions(+), 1 deletion(-) diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index 8d75aae7e2948ef9c0934a72da112b926f633941..23ebe41d3c42654cb8fcdc0266009416686858aa 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -2631,6 +2631,84 @@ async fn test_in_progress_send_canceled_by_next_send(cx: &mut TestAppContext) { assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]); } +#[gpui::test] +async fn test_retry_cancelled_promptly_on_new_send(cx: &mut TestAppContext) { + // Regression test: when a completion fails with a retryable error (e.g. upstream 500), + // the retry loop waits on a timer. If the user switches models and sends a new message + // during that delay, the old turn should exit immediately instead of retrying with the + // stale model. + let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; + let model_a = model.as_fake(); + + // Start a turn with model_a. 
+ let events_1 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Hello"], cx) + }) + .unwrap(); + cx.run_until_parked(); + assert_eq!(model_a.completion_count(), 1); + + // Model returns a retryable upstream 500. The turn enters the retry delay. + model_a.send_last_completion_stream_error( + LanguageModelCompletionError::UpstreamProviderError { + message: "Internal server error".to_string(), + status: http_client::StatusCode::INTERNAL_SERVER_ERROR, + retry_after: None, + }, + ); + model_a.end_last_completion_stream(); + cx.run_until_parked(); + + // The old completion was consumed; model_a has no pending requests yet because the + // retry timer hasn't fired. + assert_eq!(model_a.completion_count(), 0); + + // Switch to model_b and send a new message. This cancels the old turn. + let model_b = Arc::new(FakeLanguageModel::with_id_and_thinking( + "fake", "model-b", "Model B", false, + )); + thread.update(cx, |thread, cx| { + thread.set_model(model_b.clone(), cx); + }); + let events_2 = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Continue"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // model_b should have received its completion request. + assert_eq!(model_b.as_fake().completion_count(), 1); + + // Advance the clock well past the retry delay (BASE_RETRY_DELAY = 5s). + cx.executor().advance_clock(Duration::from_secs(10)); + cx.run_until_parked(); + + // model_a must NOT have received another completion request — the cancelled turn + // should have exited during the retry delay rather than retrying with the old model. + assert_eq!( + model_a.completion_count(), + 0, + "old model should not receive a retry request after cancellation" + ); + + // Complete model_b's turn. 
+ model_b + .as_fake() + .send_last_completion_stream_text_chunk("Done!"); + model_b + .as_fake() + .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)); + model_b.as_fake().end_last_completion_stream(); + + let events_1 = events_1.collect::>().await; + assert_eq!(stop_events(events_1), vec![acp::StopReason::Cancelled]); + + let events_2 = events_2.collect::>().await; + assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]); +} + #[gpui::test] async fn test_subsequent_successful_sends_dont_cancel(cx: &mut TestAppContext) { let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await; diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index c5ca1118ace28b66d555d67aa40c718da292f644..2e693a85cd1f86d232e392860d8bd83509ce131a 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -1940,7 +1940,15 @@ impl Thread { })??; let timer = cx.background_executor().timer(retry.duration); event_stream.send_retry(retry); - timer.await; + futures::select! { + _ = timer.fuse() => {} + _ = cancellation_rx.changed().fuse() => { + if *cancellation_rx.borrow() { + log::debug!("Turn cancelled during retry delay, exiting"); + return Ok(()); + } + } + } this.update(cx, |this, _cx| { if let Some(Message::Agent(message)) = this.messages.last() { if message.tool_results.is_empty() { From 197cf60d05ea7d2b3e9126c70b05e9554691aa12 Mon Sep 17 00:00:00 2001 From: Rabi Mishra Date: Tue, 3 Mar 2026 15:51:40 +0530 Subject: [PATCH 262/548] agent_ui: Refresh ACP history after thread create/load (#49796) Moves loading of the history connection to once we have a new connection, not on every thread view. 
Release Notes: - N/A --------- Signed-off-by: rabi Co-authored-by: Ben Brandt --- crates/agent_ui/src/connection_view.rs | 205 +++++++++++++++++++------ 1 file changed, 159 insertions(+), 46 deletions(-) diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index 93bf7c98098530b23522c60f987f9e341ebc69ca..bc58120a964b7cb10eb4c779eb24fa8507030bc6 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -728,6 +728,14 @@ impl ConnectionView { } let id = current.read(cx).thread.read(cx).session_id().clone(); + let session_list = if connection.supports_session_history() { + connection.session_list(cx) + } else { + None + }; + this.history.update(cx, |history, cx| { + history.set_session_list(session_list, cx); + }); this.set_server_state( ServerState::Connected(ConnectedServerState { connection, @@ -833,14 +841,6 @@ impl ConnectionView { let connection = thread.read(cx).connection().clone(); let session_id = thread.read(cx).session_id().clone(); - let session_list = if connection.supports_session_history() { - connection.session_list(cx) - } else { - None - }; - self.history.update(cx, |history, cx| { - history.set_session_list(session_list, cx); - }); // Check for config options first // Config options take precedence over legacy mode/model selectors @@ -2835,6 +2835,33 @@ pub(crate) mod tests { }); } + #[gpui::test] + async fn test_new_thread_creation_triggers_session_list_refresh(cx: &mut TestAppContext) { + init_test(cx); + + let session = AgentSessionInfo::new(SessionId::new("history-session")); + let (thread_view, history, cx) = setup_thread_view_with_history( + StubAgentServer::new(SessionHistoryConnection::new(vec![session.clone()])), + cx, + ) + .await; + + history.read_with(cx, |history, _cx| { + assert!( + history.has_session_list(), + "session list should be attached after thread creation" + ); + }); + + active_thread(&thread_view, cx).read_with(cx, |view, _cx| { + 
assert_eq!(view.recent_history_entries.len(), 1); + assert_eq!( + view.recent_history_entries[0].session_id, + session.session_id + ); + }); + } + #[gpui::test] async fn test_resume_without_history_adds_notice(cx: &mut TestAppContext) { init_test(cx); @@ -3482,6 +3509,18 @@ pub(crate) mod tests { agent: impl AgentServer + 'static, cx: &mut TestAppContext, ) -> (Entity, &mut VisualTestContext) { + let (thread_view, _history, cx) = setup_thread_view_with_history(agent, cx).await; + (thread_view, cx) + } + + async fn setup_thread_view_with_history( + agent: impl AgentServer + 'static, + cx: &mut TestAppContext, + ) -> ( + Entity, + Entity, + &mut VisualTestContext, + ) { let fs = FakeFs::new(cx.executor()); let project = Project::test(fs, [], cx).await; let (multi_workspace, cx) = @@ -3501,14 +3540,14 @@ pub(crate) mod tests { project, Some(thread_store), None, - history, + history.clone(), window, cx, ) }) }); cx.run_until_parked(); - (thread_view, cx) + (thread_view, history, cx) } fn add_to_workspace(thread_view: Entity, cx: &mut VisualTestContext) { @@ -3648,6 +3687,102 @@ pub(crate) mod tests { ) -> Task> { Task::ready(Ok(AgentSessionListResponse::new(self.sessions.clone()))) } + + fn into_any(self: Rc) -> Rc { + self + } + } + + #[derive(Clone)] + struct SessionHistoryConnection { + sessions: Vec, + } + + impl SessionHistoryConnection { + fn new(sessions: Vec) -> Self { + Self { sessions } + } + } + + fn build_test_thread( + connection: Rc, + project: Entity, + name: &'static str, + session_id: SessionId, + cx: &mut App, + ) -> Entity { + let action_log = cx.new(|_| ActionLog::new(project.clone())); + cx.new(|cx| { + AcpThread::new( + None, + name, + connection, + project, + action_log, + session_id, + watch::Receiver::constant( + acp::PromptCapabilities::new() + .image(true) + .audio(true) + .embedded_context(true), + ), + cx, + ) + }) + } + + impl AgentConnection for SessionHistoryConnection { + fn telemetry_id(&self) -> SharedString { + 
"history-connection".into() + } + + fn new_session( + self: Rc, + project: Entity, + _cwd: &Path, + cx: &mut App, + ) -> Task>> { + let thread = build_test_thread( + self, + project, + "SessionHistoryConnection", + SessionId::new("history-session"), + cx, + ); + Task::ready(Ok(thread)) + } + + fn supports_load_session(&self) -> bool { + true + } + + fn session_list(&self, _cx: &mut App) -> Option> { + Some(Rc::new(StubSessionList::new(self.sessions.clone()))) + } + + fn auth_methods(&self) -> &[acp::AuthMethod] { + &[] + } + + fn authenticate( + &self, + _method_id: acp::AuthMethodId, + _cx: &mut App, + ) -> Task> { + Task::ready(Ok(())) + } + + fn prompt( + &self, + _id: Option, + _params: acp::PromptRequest, + _cx: &mut App, + ) -> Task> { + Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))) + } + + fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) {} + fn into_any(self: Rc) -> Rc { self } @@ -3667,24 +3802,13 @@ pub(crate) mod tests { _cwd: &Path, cx: &mut gpui::App, ) -> Task>> { - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let thread = cx.new(|cx| { - AcpThread::new( - None, - "ResumeOnlyAgentConnection", - self.clone(), - project, - action_log, - SessionId::new("new-session"), - watch::Receiver::constant( - acp::PromptCapabilities::new() - .image(true) - .audio(true) - .embedded_context(true), - ), - cx, - ) - }); + let thread = build_test_thread( + self, + project, + "ResumeOnlyAgentConnection", + SessionId::new("new-session"), + cx, + ); Task::ready(Ok(thread)) } @@ -3699,24 +3823,13 @@ pub(crate) mod tests { _cwd: &Path, cx: &mut App, ) -> Task>> { - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let thread = cx.new(|cx| { - AcpThread::new( - None, - "ResumeOnlyAgentConnection", - self.clone(), - project, - action_log, - session.session_id, - watch::Receiver::constant( - acp::PromptCapabilities::new() - .image(true) - .audio(true) - .embedded_context(true), - ), - cx, - ) - }); + let thread 
= build_test_thread( + self, + project, + "ResumeOnlyAgentConnection", + session.session_id, + cx, + ); Task::ready(Ok(thread)) } From 58ad0ff69184c48439a4ae58a8e7d20b84a6b8b7 Mon Sep 17 00:00:00 2001 From: Tom Zaspel <40226087+tzabbi@users.noreply.github.com> Date: Tue, 3 Mar 2026 13:58:13 +0100 Subject: [PATCH 263/548] Add file icons for YAML, Helm and GitLab (#50529) Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) I used the icons from here: - GitLab: https://about.gitlab.com/press/press-kit/ - Helm: https://www.svgrepo.com/svg/330624/helm - Yaml: https://icons.getbootstrap.com/icons/filetype-yml/ FYI: I'm not familiar with Rust please review the rust code. Release Notes: - Added file icons for YAML, Helm and GitLab files, and used the Docker icon for `Containerfile`. 
--------- Co-authored-by: Danilo Leal --- assets/icons/file_icons/gitlab.svg | 1 + assets/icons/file_icons/helm.svg | 1 + assets/icons/file_icons/yaml.svg | 1 + crates/theme/src/icon_theme.rs | 35 ++++++++++++++++++++++++++---- 4 files changed, 34 insertions(+), 4 deletions(-) create mode 100644 assets/icons/file_icons/gitlab.svg create mode 100644 assets/icons/file_icons/helm.svg create mode 100644 assets/icons/file_icons/yaml.svg diff --git a/assets/icons/file_icons/gitlab.svg b/assets/icons/file_icons/gitlab.svg new file mode 100644 index 0000000000000000000000000000000000000000..f0faf570b125c7764e769ae60f7a6ce6f7825ceb --- /dev/null +++ b/assets/icons/file_icons/gitlab.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/file_icons/helm.svg b/assets/icons/file_icons/helm.svg new file mode 100644 index 0000000000000000000000000000000000000000..03e702f2d5081c4e96ff4db7ba7428817b08748f --- /dev/null +++ b/assets/icons/file_icons/helm.svg @@ -0,0 +1 @@ + diff --git a/assets/icons/file_icons/yaml.svg b/assets/icons/file_icons/yaml.svg new file mode 100644 index 0000000000000000000000000000000000000000..2c3efd46cd45ff67d6c46d84476d563dd5ac3a73 --- /dev/null +++ b/assets/icons/file_icons/yaml.svg @@ -0,0 +1 @@ + diff --git a/crates/theme/src/icon_theme.rs b/crates/theme/src/icon_theme.rs index 8415462595cb93a19365a929660b4e8e3f78f8d8..7c2d603281ec50c1daa6f21e1dc3487bfc394a67 100644 --- a/crates/theme/src/icon_theme.rs +++ b/crates/theme/src/icon_theme.rs @@ -66,7 +66,7 @@ pub struct IconDefinition { } const FILE_STEMS_BY_ICON_KEY: &[(&str, &[&str])] = &[ - ("docker", &["Dockerfile"]), + ("docker", &["Containerfile", "Dockerfile"]), ("ruby", &["Podfile"]), ("heroku", &["Procfile"]), ]; @@ -99,6 +99,15 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("cue", &["cue"]), ("dart", &["dart"]), ("diff", &["diff"]), + ( + "docker", + &[ + "docker-compose.yml", + "docker-compose.yaml", + "compose.yml", + "compose.yaml", + ], + ), ( "document", &[ @@ -138,12 +147,27 @@ 
const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("font", &["otf", "ttf", "woff", "woff2"]), ("fsharp", &["fs"]), ("fsproj", &["fsproj"]), - ("gitlab", &["gitlab-ci.yml"]), + ("gitlab", &["gitlab-ci.yml", "gitlab-ci.yaml"]), ("gleam", &["gleam"]), ("go", &["go", "mod", "work"]), ("graphql", &["gql", "graphql", "graphqls"]), ("haskell", &["hs"]), ("hcl", &["hcl"]), + ( + "helm", + &[ + "helmfile.yaml", + "helmfile.yml", + "Chart.yaml", + "Chart.yml", + "Chart.lock", + "values.yaml", + "values.yml", + "requirements.yaml", + "requirements.yml", + "tpl", + ], + ), ("html", &["htm", "html"]), ( "image", @@ -198,7 +222,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("rust", &["rs"]), ("sass", &["sass", "scss"]), ("scala", &["scala", "sc"]), - ("settings", &["conf", "ini", "yaml", "yml"]), + ("settings", &["conf", "ini"]), ("solidity", &["sol"]), ( "storage", @@ -279,6 +303,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ("vue", &["vue"]), ("vyper", &["vy", "vyi"]), ("wgsl", &["wgsl"]), + ("yaml", &["yaml", "yml"]), ("zig", &["zig"]), ]; @@ -310,12 +335,13 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("font", "icons/file_icons/font.svg"), ("fsharp", "icons/file_icons/fsharp.svg"), ("fsproj", "icons/file_icons/file.svg"), - ("gitlab", "icons/file_icons/settings.svg"), + ("gitlab", "icons/file_icons/gitlab.svg"), ("gleam", "icons/file_icons/gleam.svg"), ("go", "icons/file_icons/go.svg"), ("graphql", "icons/file_icons/graphql.svg"), ("haskell", "icons/file_icons/haskell.svg"), ("hcl", "icons/file_icons/hcl.svg"), + ("helm", "icons/file_icons/helm.svg"), ("heroku", "icons/file_icons/heroku.svg"), ("html", "icons/file_icons/html.svg"), ("image", "icons/file_icons/image.svg"), @@ -371,6 +397,7 @@ const FILE_ICONS: &[(&str, &str)] = &[ ("vue", "icons/file_icons/vue.svg"), ("vyper", "icons/file_icons/vyper.svg"), ("wgsl", "icons/file_icons/wgsl.svg"), + ("yaml", "icons/file_icons/yaml.svg"), ("zig", "icons/file_icons/zig.svg"), ]; From 
83fd8fa4dff43fda82567343f7fa8639f7e8b3b8 Mon Sep 17 00:00:00 2001 From: Umesh Yadav <23421535+imumesh18@users.noreply.github.com> Date: Tue, 3 Mar 2026 19:28:47 +0530 Subject: [PATCH 264/548] language_models: Handle usage-only events with empty choices in OpenRouter (#50603) Closes #50569 Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Previously, OpenRouter responses containing only usage data (without any choices) would cause an error. Now the mapper properly emits usage updates for these events without failing. Release Notes: - Fixed an error when OpenRouter returns a usage-only event with empty choices. --- .../src/provider/open_router.rs | 54 +++++++++++++------ 1 file changed, 39 insertions(+), 15 deletions(-) diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index a044c7c25d7858f69dc8c4ac9fa0c8bda73f6e91..3e5128fcc5a366b4156afe6b28f3efc7bd697e12 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -1,4 +1,4 @@ -use anyhow::{Result, anyhow}; +use anyhow::Result; use collections::HashMap; use futures::{FutureExt, Stream, StreamExt, future::BoxFuture}; use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task}; @@ -591,14 +591,21 @@ impl OpenRouterEventMapper { &mut self, event: ResponseStreamEvent, ) -> Vec> { + let mut events = Vec::new(); + + if let Some(usage) = event.usage { + events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { + input_tokens: usage.prompt_tokens, + output_tokens: usage.completion_tokens, + cache_creation_input_tokens: 0, + cache_read_input_tokens: 0, + }))); + } + 
let Some(choice) = event.choices.first() else { - return vec![Err(LanguageModelCompletionError::from(anyhow!( - "Response contained no choices" - )))]; + return events; }; - let mut events = Vec::new(); - if let Some(details) = choice.delta.reasoning_details.clone() { // Emit reasoning_details immediately events.push(Ok(LanguageModelCompletionEvent::ReasoningDetails( @@ -646,15 +653,6 @@ impl OpenRouterEventMapper { } } - if let Some(usage) = event.usage { - events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage { - input_tokens: usage.prompt_tokens, - output_tokens: usage.completion_tokens, - cache_creation_input_tokens: 0, - cache_read_input_tokens: 0, - }))); - } - match choice.finish_reason.as_deref() { Some("stop") => { // Don't emit reasoning_details here - already emitted immediately when captured @@ -1055,6 +1053,32 @@ mod tests { ); } + #[gpui::test] + async fn test_usage_only_chunk_with_empty_choices_does_not_error() { + let mut mapper = OpenRouterEventMapper::new(); + + let events = mapper.map_event(ResponseStreamEvent { + id: Some("response_123".into()), + created: 1234567890, + model: "google/gemini-3-flash-preview".into(), + choices: Vec::new(), + usage: Some(open_router::Usage { + prompt_tokens: 12, + completion_tokens: 7, + total_tokens: 19, + }), + }); + + assert_eq!(events.len(), 1); + match events.into_iter().next().unwrap() { + Ok(LanguageModelCompletionEvent::UsageUpdate(usage)) => { + assert_eq!(usage.input_tokens, 12); + assert_eq!(usage.output_tokens, 7); + } + other => panic!("Expected usage update event, got: {other:?}"), + } + } + #[gpui::test] async fn test_agent_prevents_empty_reasoning_details_overwrite() { // This test verifies that the agent layer prevents empty reasoning_details From e652d967b839e6a0f88dd6025b98e7a8a3d9a967 Mon Sep 17 00:00:00 2001 From: Oleksandr Kholiavko <43780952+HalavicH@users.noreply.github.com> Date: Tue, 3 Mar 2026 15:49:40 +0100 Subject: [PATCH 265/548] Add CSV preview with live table view and 
interactive features (#48207) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## **Description:** **Context:** This PR introduces an initial CSV preview feature for Zed, building upon two previously merged infrastructure PRs: - [#46341](https://github.com/zed-industries/zed/pull/46341) - Data table dynamic column support (removed const generics) - [#46190](https://github.com/zed-industries/zed/pull/46190) - Variable row height mode for data tables This implementation is based on the [original draft PR #44344](https://github.com/zed-industries/zed/pull/44344), which has been carefully decomposed into smaller, reviewable pieces. --- #### **Features Included:** **Core Infrastructure:** - Live CSV parsing with smart debouncing (200ms cooldown) - Performance monitoring with built-in timing metrics (not displayed in UI yet) - Automatic file change detection and re-parsing - Support for quoted fields, multiline cells, and escaped characters **Table Display:** - Variable row height rendering with fallback to uniform mode (switchable via settings) - Draggable column resizing (reusing existing data table infrastructure) - Row identifiers supporting both source line numbers and sequential row numbers - Configurable font rendering (UI font vs monospace) - Tooltips showing full cell content on hover **Interactive Features:** - Column sorting (ascending/descending) with visual indicators **Settings Panel:** - Toggle between variable/uniform row rendering - Font type selection (UI/monospace) - Row identifier type configuration - Debug information display - Multiline cell rendering options --- #### **Features Intentionally Removed for This PR:** To reduce complexity and review scope, the following features were temporarily reverted and will be reintroduced in subsequent PRs: - ❌ Settings pannel with performance metrics overlay - ❌ Cell selection (single, multiple, and range selections) - ❌ Keyboard navigation with arrow keys and selection 
extension - ❌ Copy functionality supporting CSV, TSV, and Markdown table formats - ❌ Inline cell editing with file persistence - ❌ Viewport following for large datasets - ❌ Column filtering and search capabilities These removals were done via "time-machine" commits that cleanly nuked vertical slices of functionality from the complete implementation. --- **Technical Implementation:** The feature is organized into a dedicated `csv_preview` crate with the following structure: ``` crates/csv_preview/ ├── src/ │ ├── csv_preview.rs # Main view and coordination logic │ ├── parser.rs # CSV parsing and editor integration │ ├── settings.rs # Configuration types and defaults │ ├── table_data_engine.rs # Data transformation logic │ ├── renderer/ # UI rendering modules │ │ ├── preview_view.rs # Main render implementation │ │ ├── render_table.rs # Table component assembly │ │ ├── table_cell.rs # Individual cell rendering │ │ ├── table_header.rs # Header with sorting controls │ │ └── row_identifiers.rs # Line number column │ └── types/ # Core data structures │ ├── table_like_content.rs │ ├── coordinates.rs # Display vs data coordinate systems │ └── table_cell.rs ``` **Key architectural decisions:** - **Dual coordinate system**: Separates data indices from display indices to support sorting/filtering - **Component reuse**: Leverages existing `data_table` infrastructure from the keymap editor --- **Integration:** - Registers `csv::OpenPreview` action (currently without default keybindings) - Follows the same workspace integration pattern as `markdown_preview` and `svg_preview` - Automatically detects `.csv` file extensions - Tab integration with appropriate icons and naming --- **Code Structure Note:** Some code structures, types, and documentation may appear redundant or over-engineered in this initial implementation. This is intentional - the feature was developed as a complete system and then decomposed by functionality rather than being built incrementally. 
The "extra" infrastructure supports features that were removed for this PR but will be reintroduced in subsequent ones. This approach was chosen over extensive refactoring because: 1. The complete feature took 200+ commits to develop with significant rewrites 2. Clean extraction of vertical slices was more feasible than rebuilding incrementally 3. The end state will utilize all these components, making current "redundancy" temporary I apologize for any inconvenience this may cause during review, but the alternative would have required significant refactoring effort just to make intermediate states "prettier," which seemed counterproductive. --- **Future Work:** This lays the groundwork for upcoming PRs that will reintroduce the removed features: - Cell selection and keyboard navigation - Copy functionality with multiple output formats - Inline editing capabilities with undo/redo - Column filtering and search - TSV and other delimiter support - Improved horizontal scrolling behavior - Settings persistence **Testing:** Includes test fixtures demonstrating multiline cell handling, various column counts, and edge cases. 
--- **Release Notes:** - N/A This is feature flagged --------- Co-authored-by: Anthony Eid --- Cargo.lock | 15 + Cargo.toml | 2 + crates/csv_preview/Cargo.toml | 21 + crates/csv_preview/LICENSE-GPL | 1 + crates/csv_preview/src/csv_preview.rs | 302 +++++++++++ crates/csv_preview/src/parser.rs | 513 ++++++++++++++++++ crates/csv_preview/src/renderer.rs | 5 + .../csv_preview/src/renderer/preview_view.rs | 50 ++ .../csv_preview/src/renderer/render_table.rs | 193 +++++++ .../src/renderer/row_identifiers.rs | 189 +++++++ crates/csv_preview/src/renderer/table_cell.rs | 72 +++ .../csv_preview/src/renderer/table_header.rs | 94 ++++ crates/csv_preview/src/settings.rs | 46 ++ crates/csv_preview/src/table_data_engine.rs | 90 +++ .../table_data_engine/sorting_by_column.rs | 49 ++ crates/csv_preview/src/types.rs | 17 + crates/csv_preview/src/types/coordinates.rs | 127 +++++ crates/csv_preview/src/types/table_cell.rs | 54 ++ .../src/types/table_like_content.rs | 32 ++ crates/ui/src/components/data_table.rs | 43 +- crates/zed/Cargo.toml | 1 + crates/zed/src/main.rs | 1 + crates/zed/src/zed.rs | 1 + .../zed/src/zed/quick_action_bar/preview.rs | 17 + 24 files changed, 1928 insertions(+), 7 deletions(-) create mode 100644 crates/csv_preview/Cargo.toml create mode 120000 crates/csv_preview/LICENSE-GPL create mode 100644 crates/csv_preview/src/csv_preview.rs create mode 100644 crates/csv_preview/src/parser.rs create mode 100644 crates/csv_preview/src/renderer.rs create mode 100644 crates/csv_preview/src/renderer/preview_view.rs create mode 100644 crates/csv_preview/src/renderer/render_table.rs create mode 100644 crates/csv_preview/src/renderer/row_identifiers.rs create mode 100644 crates/csv_preview/src/renderer/table_cell.rs create mode 100644 crates/csv_preview/src/renderer/table_header.rs create mode 100644 crates/csv_preview/src/settings.rs create mode 100644 crates/csv_preview/src/table_data_engine.rs create mode 100644 crates/csv_preview/src/table_data_engine/sorting_by_column.rs 
create mode 100644 crates/csv_preview/src/types.rs create mode 100644 crates/csv_preview/src/types/coordinates.rs create mode 100644 crates/csv_preview/src/types/table_cell.rs create mode 100644 crates/csv_preview/src/types/table_like_content.rs diff --git a/Cargo.lock b/Cargo.lock index c4dcfa054efa372259880c3a813a5d203e9c1be7..99347bd08f0d5b3ae13ab352612e3876a3cf6a11 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4340,6 +4340,20 @@ dependencies = [ "syn 2.0.106", ] +[[package]] +name = "csv_preview" +version = "0.1.0" +dependencies = [ + "anyhow", + "editor", + "feature_flags", + "gpui", + "log", + "text", + "ui", + "workspace", +] + [[package]] name = "ctor" version = "0.4.3" @@ -21727,6 +21741,7 @@ dependencies = [ "copilot_chat", "copilot_ui", "crashes", + "csv_preview", "dap", "dap_adapters", "db", diff --git a/Cargo.toml b/Cargo.toml index 98fccfaeb21bc6107323378605c8299d5bd5838f..8e1312f032e19b2c2c189677f144f04dd7f4589c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -45,6 +45,7 @@ members = [ "crates/copilot_chat", "crates/crashes", "crates/credentials_provider", + "crates/csv_preview", "crates/dap", "crates/dap_adapters", "crates/db", @@ -298,6 +299,7 @@ copilot_ui = { path = "crates/copilot_ui" } crashes = { path = "crates/crashes" } credentials_provider = { path = "crates/credentials_provider" } crossbeam = "0.8.4" +csv_preview = { path = "crates/csv_preview"} dap = { path = "crates/dap" } dap_adapters = { path = "crates/dap_adapters" } db = { path = "crates/db" } diff --git a/crates/csv_preview/Cargo.toml b/crates/csv_preview/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..7e9ce2c4d515cfce9586a0686475a8dfed0ddc95 --- /dev/null +++ b/crates/csv_preview/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "csv_preview" +version = "0.1.0" +publish.workspace = true +edition.workspace = true + +[lib] +path = "src/csv_preview.rs" + +[dependencies] +anyhow.workspace = true +feature_flags.workspace = true +gpui.workspace = true 
+editor.workspace = true +ui.workspace = true +workspace.workspace = true +log.workspace = true +text.workspace = true + +[lints] +workspace = true diff --git a/crates/csv_preview/LICENSE-GPL b/crates/csv_preview/LICENSE-GPL new file mode 120000 index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4 --- /dev/null +++ b/crates/csv_preview/LICENSE-GPL @@ -0,0 +1 @@ +../../LICENSE-GPL \ No newline at end of file diff --git a/crates/csv_preview/src/csv_preview.rs b/crates/csv_preview/src/csv_preview.rs new file mode 100644 index 0000000000000000000000000000000000000000..f056f5a12225b000527b9087760e3d683bda1b5b --- /dev/null +++ b/crates/csv_preview/src/csv_preview.rs @@ -0,0 +1,302 @@ +use editor::{Editor, EditorEvent}; +use feature_flags::{FeatureFlag, FeatureFlagAppExt as _}; +use gpui::{ + AppContext, Entity, EventEmitter, FocusHandle, Focusable, ListAlignment, Task, actions, +}; +use std::{ + collections::HashMap, + time::{Duration, Instant}, +}; + +use crate::table_data_engine::TableDataEngine; +use ui::{SharedString, TableColumnWidths, TableInteractionState, prelude::*}; +use workspace::{Item, SplitDirection, Workspace}; + +use crate::{parser::EditorState, settings::CsvPreviewSettings, types::TableLikeContent}; + +mod parser; +mod renderer; +mod settings; +mod table_data_engine; +mod types; + +actions!(csv, [OpenPreview, OpenPreviewToTheSide]); + +pub struct TabularDataPreviewFeatureFlag; + +impl FeatureFlag for TabularDataPreviewFeatureFlag { + const NAME: &'static str = "tabular-data-preview"; +} + +pub struct CsvPreviewView { + pub(crate) engine: TableDataEngine, + + pub(crate) focus_handle: FocusHandle, + active_editor_state: EditorState, + pub(crate) table_interaction_state: Entity, + pub(crate) column_widths: ColumnWidths, + pub(crate) parsing_task: Option>>, + pub(crate) settings: CsvPreviewSettings, + /// Performance metrics for debugging and monitoring CSV operations. 
+ pub(crate) performance_metrics: PerformanceMetrics, + pub(crate) list_state: gpui::ListState, + /// Time when the last parsing operation ended, used for smart debouncing + pub(crate) last_parse_end_time: Option, +} + +pub fn init(cx: &mut App) { + cx.observe_new(|workspace: &mut Workspace, _, _| { + CsvPreviewView::register(workspace); + }) + .detach() +} + +impl CsvPreviewView { + pub fn register(workspace: &mut Workspace) { + workspace.register_action_renderer(|div, _, _, cx| { + div.when(cx.has_flag::(), |div| { + div.on_action(cx.listener(|workspace, _: &OpenPreview, window, cx| { + if let Some(editor) = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .filter(|editor| Self::is_csv_file(editor, cx)) + { + let csv_preview = Self::new(&editor, cx); + workspace.active_pane().update(cx, |pane, cx| { + let existing = pane + .items_of_type::() + .find(|view| view.read(cx).active_editor_state.editor == editor); + if let Some(idx) = existing.and_then(|e| pane.index_for_item(&e)) { + pane.activate_item(idx, true, true, window, cx); + } else { + pane.add_item(Box::new(csv_preview), true, true, None, window, cx); + } + }); + cx.notify(); + } + })) + .on_action(cx.listener( + |workspace, _: &OpenPreviewToTheSide, window, cx| { + if let Some(editor) = workspace + .active_item(cx) + .and_then(|item| item.act_as::(cx)) + .filter(|editor| Self::is_csv_file(editor, cx)) + { + let csv_preview = Self::new(&editor, cx); + let pane = workspace + .find_pane_in_direction(SplitDirection::Right, cx) + .unwrap_or_else(|| { + workspace.split_pane( + workspace.active_pane().clone(), + SplitDirection::Right, + window, + cx, + ) + }); + pane.update(cx, |pane, cx| { + let existing = + pane.items_of_type::().find(|view| { + view.read(cx).active_editor_state.editor == editor + }); + if let Some(idx) = existing.and_then(|e| pane.index_for_item(&e)) { + pane.activate_item(idx, true, true, window, cx); + } else { + pane.add_item( + Box::new(csv_preview), + false, + false, + 
None, + window, + cx, + ); + } + }); + cx.notify(); + } + }, + )) + }) + }); + } + + fn new(editor: &Entity, cx: &mut Context) -> Entity { + let contents = TableLikeContent::default(); + let table_interaction_state = cx.new(|cx| { + TableInteractionState::new(cx) + .with_custom_scrollbar(ui::Scrollbars::for_settings::()) + }); + + cx.new(|cx| { + let subscription = cx.subscribe( + editor, + |this: &mut CsvPreviewView, _editor, event: &EditorEvent, cx| { + match event { + EditorEvent::Edited { .. } + | EditorEvent::DirtyChanged + | EditorEvent::ExcerptsEdited { .. } => { + this.parse_csv_from_active_editor(true, cx); + } + _ => {} + }; + }, + ); + + let mut view = CsvPreviewView { + focus_handle: cx.focus_handle(), + active_editor_state: EditorState { + editor: editor.clone(), + _subscription: subscription, + }, + table_interaction_state, + column_widths: ColumnWidths::new(cx, 1), + parsing_task: None, + performance_metrics: PerformanceMetrics::default(), + list_state: gpui::ListState::new(contents.rows.len(), ListAlignment::Top, px(1.)), + settings: CsvPreviewSettings::default(), + last_parse_end_time: None, + engine: TableDataEngine::default(), + }; + + view.parse_csv_from_active_editor(false, cx); + view + }) + } + + pub(crate) fn editor_state(&self) -> &EditorState { + &self.active_editor_state + } + pub(crate) fn apply_sort(&mut self) { + self.performance_metrics.record("Sort", || { + self.engine.apply_sort(); + }); + } + + /// Update ordered indices when ordering or content changes + pub(crate) fn apply_filter_sort(&mut self) { + self.performance_metrics.record("Filter&sort", || { + self.engine.calculate_d2d_mapping(); + }); + + // Update list state with filtered row count + let visible_rows = self.engine.d2d_mapping().visible_row_count(); + self.list_state = gpui::ListState::new(visible_rows, ListAlignment::Top, px(1.)); + } + + pub fn resolve_active_item_as_csv_editor( + workspace: &Workspace, + cx: &mut Context, + ) -> Option> { + let editor = workspace + 
.active_item(cx) + .and_then(|item| item.act_as::(cx))?; + Self::is_csv_file(&editor, cx).then_some(editor) + } + + fn is_csv_file(editor: &Entity, cx: &App) -> bool { + editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .and_then(|buffer| { + buffer + .read(cx) + .file() + .and_then(|file| file.path().extension()) + .map(|ext| ext.eq_ignore_ascii_case("csv")) + }) + .unwrap_or(false) + } +} + +impl Focusable for CsvPreviewView { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl EventEmitter<()> for CsvPreviewView {} + +impl Item for CsvPreviewView { + type Event = (); + + fn tab_icon(&self, _window: &Window, _cx: &App) -> Option { + Some(Icon::new(IconName::FileDoc)) + } + + fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString { + self.editor_state() + .editor + .read(cx) + .buffer() + .read(cx) + .as_singleton() + .and_then(|b| { + let file = b.read(cx).file()?; + let local_file = file.as_local()?; + local_file + .abs_path(cx) + .file_name() + .map(|name| format!("Preview {}", name.to_string_lossy()).into()) + }) + .unwrap_or_else(|| SharedString::from("CSV Preview")) + } +} + +#[derive(Debug, Default)] +pub struct PerformanceMetrics { + /// Map of timing metrics with their duration and measurement time. + pub timings: HashMap<&'static str, (Duration, Instant)>, + /// List of display indices that were rendered in the current frame. 
+ pub rendered_indices: Vec, +} +impl PerformanceMetrics { + pub fn record(&mut self, name: &'static str, mut f: F) -> R + where + F: FnMut() -> R, + { + let start_time = Instant::now(); + let ret = f(); + let duration = start_time.elapsed(); + self.timings.insert(name, (duration, Instant::now())); + ret + } + + /// Displays all metrics sorted A-Z in format: `{name}: {took}ms {ago}s ago` + pub fn display(&self) -> String { + let mut metrics = self.timings.iter().collect::>(); + metrics.sort_by_key(|&(name, _)| *name); + metrics + .iter() + .map(|(name, (duration, time))| { + let took = duration.as_secs_f32() * 1000.; + let ago = time.elapsed().as_secs(); + format!("{name}: {took:.2}ms {ago}s ago") + }) + .collect::>() + .join("\n") + } + + /// Get timing for a specific metric + pub fn get_timing(&self, name: &str) -> Option { + self.timings.get(name).map(|(duration, _)| *duration) + } +} + +/// Holds state of column widths for a table component in CSV preview. +pub(crate) struct ColumnWidths { + pub widths: Entity, +} + +impl ColumnWidths { + pub(crate) fn new(cx: &mut Context, cols: usize) -> Self { + Self { + widths: cx.new(|cx| TableColumnWidths::new(cols, cx)), + } + } + /// Replace the current `TableColumnWidths` entity with a new one for the given column count. 
+ pub(crate) fn replace(&self, cx: &mut Context, cols: usize) { + self.widths + .update(cx, |entity, cx| *entity = TableColumnWidths::new(cols, cx)); + } +} diff --git a/crates/csv_preview/src/parser.rs b/crates/csv_preview/src/parser.rs new file mode 100644 index 0000000000000000000000000000000000000000..b087404e0ebbd13cdaf20cab692f5470ea6ce292 --- /dev/null +++ b/crates/csv_preview/src/parser.rs @@ -0,0 +1,513 @@ +use crate::{ + CsvPreviewView, + types::TableLikeContent, + types::{LineNumber, TableCell}, +}; +use editor::Editor; +use gpui::{AppContext, Context, Entity, Subscription, Task}; +use std::time::{Duration, Instant}; +use text::BufferSnapshot; +use ui::{SharedString, table_row::TableRow}; + +pub(crate) const REPARSE_DEBOUNCE: Duration = Duration::from_millis(200); + +pub(crate) struct EditorState { + pub editor: Entity, + pub _subscription: Subscription, +} + +impl CsvPreviewView { + pub(crate) fn parse_csv_from_active_editor( + &mut self, + wait_for_debounce: bool, + cx: &mut Context, + ) { + let editor = self.active_editor_state.editor.clone(); + self.parsing_task = Some(self.parse_csv_in_background(wait_for_debounce, editor, cx)); + } + + fn parse_csv_in_background( + &mut self, + wait_for_debounce: bool, + editor: Entity, + cx: &mut Context, + ) -> Task> { + cx.spawn(async move |view, cx| { + if wait_for_debounce { + // Smart debouncing: check if cooldown period has already passed + let now = Instant::now(); + let should_wait = view.update(cx, |view, _| { + if let Some(last_end) = view.last_parse_end_time { + let cooldown_until = last_end + REPARSE_DEBOUNCE; + if now < cooldown_until { + Some(cooldown_until - now) + } else { + None // Cooldown already passed, parse immediately + } + } else { + None // First parse, no debounce + } + })?; + + if let Some(wait_duration) = should_wait { + cx.background_executor().timer(wait_duration).await; + } + } + + let buffer_snapshot = view.update(cx, |_, cx| { + editor + .read(cx) + .buffer() + .read(cx) + 
.as_singleton() + .map(|b| b.read(cx).text_snapshot()) + })?; + + let Some(buffer_snapshot) = buffer_snapshot else { + return Ok(()); + }; + + let instant = Instant::now(); + let parsed_csv = cx + .background_spawn(async move { from_buffer(&buffer_snapshot) }) + .await; + let parse_duration = instant.elapsed(); + let parse_end_time: Instant = Instant::now(); + log::debug!("Parsed CSV in {}ms", parse_duration.as_millis()); + view.update(cx, move |view, cx| { + view.performance_metrics + .timings + .insert("Parsing", (parse_duration, Instant::now())); + + log::debug!("Parsed {} rows", parsed_csv.rows.len()); + // Update table width so it can be rendered properly + let cols = parsed_csv.headers.cols(); + view.column_widths.replace(cx, cols + 1); // Add 1 for the line number column + + view.engine.contents = parsed_csv; + view.last_parse_end_time = Some(parse_end_time); + + view.apply_filter_sort(); + cx.notify(); + }) + }) + } +} + +pub fn from_buffer(buffer_snapshot: &BufferSnapshot) -> TableLikeContent { + let text = buffer_snapshot.text(); + + if text.trim().is_empty() { + return TableLikeContent::default(); + } + + let (parsed_cells_with_positions, line_numbers) = parse_csv_with_positions(&text); + if parsed_cells_with_positions.is_empty() { + return TableLikeContent::default(); + } + let raw_headers = parsed_cells_with_positions[0].clone(); + + // Calculating the longest row, as CSV might have less headers than max row width + let Some(max_number_of_cols) = parsed_cells_with_positions.iter().map(|r| r.len()).max() else { + return TableLikeContent::default(); + }; + + // Convert to TableCell objects with buffer positions + let headers = create_table_row(&buffer_snapshot, max_number_of_cols, raw_headers); + + let rows = parsed_cells_with_positions + .into_iter() + .skip(1) + .map(|row| create_table_row(&buffer_snapshot, max_number_of_cols, row)) + .collect(); + + let row_line_numbers = line_numbers.into_iter().skip(1).collect(); + + TableLikeContent { + headers, + 
rows, + line_numbers: row_line_numbers, + number_of_cols: max_number_of_cols, + } +} + +/// Parse CSV and track byte positions for each cell +fn parse_csv_with_positions( + text: &str, +) -> ( + Vec)>>, + Vec, +) { + let mut rows = Vec::new(); + let mut line_numbers = Vec::new(); + let mut current_row: Vec<(SharedString, std::ops::Range)> = Vec::new(); + let mut current_field = String::new(); + let mut field_start_offset = 0; + let mut current_offset = 0; + let mut in_quotes = false; + let mut current_line = 1; // 1-based line numbering + let mut row_start_line = 1; + let mut chars = text.chars().peekable(); + + while let Some(ch) = chars.next() { + let char_byte_len = ch.len_utf8(); + + match ch { + '"' => { + if in_quotes { + if chars.peek() == Some(&'"') { + // Escaped quote + chars.next(); + current_field.push('"'); + current_offset += 1; // Skip the second quote + } else { + // End of quoted field + in_quotes = false; + } + } else { + // Start of quoted field + in_quotes = true; + if current_field.is_empty() { + // Include the opening quote in the range + field_start_offset = current_offset; + } + } + } + ',' if !in_quotes => { + // Field separator + let field_end_offset = current_offset; + if current_field.is_empty() && !in_quotes { + field_start_offset = current_offset; + } + current_row.push(( + current_field.clone().into(), + field_start_offset..field_end_offset, + )); + current_field.clear(); + field_start_offset = current_offset + char_byte_len; + } + '\n' => { + current_line += 1; + if !in_quotes { + // Row separator (only when not inside quotes) + let field_end_offset = current_offset; + if current_field.is_empty() && current_row.is_empty() { + field_start_offset = 0; + } + current_row.push(( + current_field.clone().into(), + field_start_offset..field_end_offset, + )); + current_field.clear(); + + // Only add non-empty rows + if !current_row.is_empty() + && !current_row.iter().all(|(field, _)| field.trim().is_empty()) + { + rows.push(current_row); + // 
Add line number info for this row + let line_info = if row_start_line == current_line - 1 { + LineNumber::Line(row_start_line) + } else { + LineNumber::LineRange(row_start_line, current_line - 1) + }; + line_numbers.push(line_info); + } + current_row = Vec::new(); + row_start_line = current_line; + field_start_offset = current_offset + char_byte_len; + } else { + // Newline inside quotes - preserve it + current_field.push(ch); + } + } + '\r' => { + if chars.peek() == Some(&'\n') { + // Handle Windows line endings (\r\n): account for \r byte, let \n be handled next + current_offset += char_byte_len; + continue; + } else { + // Standalone \r + current_line += 1; + if !in_quotes { + // Row separator (only when not inside quotes) + let field_end_offset = current_offset; + current_row.push(( + current_field.clone().into(), + field_start_offset..field_end_offset, + )); + current_field.clear(); + + // Only add non-empty rows + if !current_row.is_empty() + && !current_row.iter().all(|(field, _)| field.trim().is_empty()) + { + rows.push(current_row); + // Add line number info for this row + let line_info = if row_start_line == current_line - 1 { + LineNumber::Line(row_start_line) + } else { + LineNumber::LineRange(row_start_line, current_line - 1) + }; + line_numbers.push(line_info); + } + current_row = Vec::new(); + row_start_line = current_line; + field_start_offset = current_offset + char_byte_len; + } else { + // \r inside quotes - preserve it + current_field.push(ch); + } + } + } + _ => { + if current_field.is_empty() && !in_quotes { + field_start_offset = current_offset; + } + current_field.push(ch); + } + } + + current_offset += char_byte_len; + } + + // Add the last field and row if not empty + if !current_field.is_empty() || !current_row.is_empty() { + let field_end_offset = current_offset; + current_row.push(( + current_field.clone().into(), + field_start_offset..field_end_offset, + )); + } + if !current_row.is_empty() && !current_row.iter().all(|(field, _)| 
field.trim().is_empty()) { + rows.push(current_row); + // Add line number info for the last row + let line_info = if row_start_line == current_line { + LineNumber::Line(row_start_line) + } else { + LineNumber::LineRange(row_start_line, current_line) + }; + line_numbers.push(line_info); + } + + (rows, line_numbers) +} + +fn create_table_row( + buffer_snapshot: &BufferSnapshot, + max_number_of_cols: usize, + row: Vec<(SharedString, std::ops::Range)>, +) -> TableRow { + let mut raw_row = row + .into_iter() + .map(|(content, range)| { + TableCell::from_buffer_position(content, range.start, range.end, &buffer_snapshot) + }) + .collect::>(); + + let append_elements = max_number_of_cols - raw_row.len(); + if append_elements > 0 { + for _ in 0..append_elements { + raw_row.push(TableCell::Virtual); + } + } + + TableRow::from_vec(raw_row, max_number_of_cols) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_csv_parsing_basic() { + let csv_data = "Name,Age,City\nJohn,30,New York\nJane,25,Los Angeles"; + let parsed = TableLikeContent::from_str(csv_data.to_string()); + + assert_eq!(parsed.headers.cols(), 3); + assert_eq!(parsed.headers[0].display_value().unwrap().as_ref(), "Name"); + assert_eq!(parsed.headers[1].display_value().unwrap().as_ref(), "Age"); + assert_eq!(parsed.headers[2].display_value().unwrap().as_ref(), "City"); + + assert_eq!(parsed.rows.len(), 2); + assert_eq!(parsed.rows[0][0].display_value().unwrap().as_ref(), "John"); + assert_eq!(parsed.rows[0][1].display_value().unwrap().as_ref(), "30"); + assert_eq!( + parsed.rows[0][2].display_value().unwrap().as_ref(), + "New York" + ); + } + + #[test] + fn test_csv_parsing_with_quotes() { + let csv_data = r#"Name,Description +"John Doe","A person with ""special"" characters" +Jane,"Simple name""#; + let parsed = TableLikeContent::from_str(csv_data.to_string()); + + assert_eq!(parsed.headers.cols(), 2); + assert_eq!(parsed.rows.len(), 2); + assert_eq!( + 
parsed.rows[0][1].display_value().unwrap().as_ref(), + r#"A person with "special" characters"# + ); + } + + #[test] + fn test_csv_parsing_with_newlines_in_quotes() { + let csv_data = "Name,Description,Status\n\"John\nDoe\",\"A person with\nmultiple lines\",Active\n\"Jane Smith\",\"Simple\",\"Also\nActive\""; + let parsed = TableLikeContent::from_str(csv_data.to_string()); + + assert_eq!(parsed.headers.cols(), 3); + assert_eq!(parsed.headers[0].display_value().unwrap().as_ref(), "Name"); + assert_eq!( + parsed.headers[1].display_value().unwrap().as_ref(), + "Description" + ); + assert_eq!( + parsed.headers[2].display_value().unwrap().as_ref(), + "Status" + ); + + assert_eq!(parsed.rows.len(), 2); + assert_eq!( + parsed.rows[0][0].display_value().unwrap().as_ref(), + "John\nDoe" + ); + assert_eq!( + parsed.rows[0][1].display_value().unwrap().as_ref(), + "A person with\nmultiple lines" + ); + assert_eq!( + parsed.rows[0][2].display_value().unwrap().as_ref(), + "Active" + ); + + assert_eq!( + parsed.rows[1][0].display_value().unwrap().as_ref(), + "Jane Smith" + ); + assert_eq!( + parsed.rows[1][1].display_value().unwrap().as_ref(), + "Simple" + ); + assert_eq!( + parsed.rows[1][2].display_value().unwrap().as_ref(), + "Also\nActive" + ); + + // Check line numbers + assert_eq!(parsed.line_numbers.len(), 2); + match &parsed.line_numbers[0] { + LineNumber::LineRange(start, end) => { + assert_eq!(start, &2); + assert_eq!(end, &4); + } + _ => panic!("Expected LineRange for multiline row"), + } + match &parsed.line_numbers[1] { + LineNumber::LineRange(start, end) => { + assert_eq!(start, &5); + assert_eq!(end, &6); + } + _ => panic!("Expected LineRange for second multiline row"), + } + } + + #[test] + fn test_empty_csv() { + let parsed = TableLikeContent::from_str("".to_string()); + assert_eq!(parsed.headers.cols(), 0); + assert!(parsed.rows.is_empty()); + } + + #[test] + fn test_csv_parsing_quote_offset_handling() { + let csv_data = r#"first,"se,cond",third"#; + let 
(parsed_cells, _) = parse_csv_with_positions(csv_data); + + assert_eq!(parsed_cells.len(), 1); // One row + assert_eq!(parsed_cells[0].len(), 3); // Three cells + + // first: 0..5 (no quotes) + let (content1, range1) = &parsed_cells[0][0]; + assert_eq!(content1.as_ref(), "first"); + assert_eq!(*range1, 0..5); + + // "se,cond": 6..15 (includes quotes in range, content without quotes) + let (content2, range2) = &parsed_cells[0][1]; + assert_eq!(content2.as_ref(), "se,cond"); + assert_eq!(*range2, 6..15); + + // third: 16..21 (no quotes) + let (content3, range3) = &parsed_cells[0][2]; + assert_eq!(content3.as_ref(), "third"); + assert_eq!(*range3, 16..21); + } + + #[test] + fn test_csv_parsing_complex_quotes() { + let csv_data = r#"id,"name with spaces","description, with commas",status +1,"John Doe","A person with ""quotes"" and, commas",active +2,"Jane Smith","Simple description",inactive"#; + let (parsed_cells, _) = parse_csv_with_positions(csv_data); + + assert_eq!(parsed_cells.len(), 3); // header + 2 rows + + // Check header row + let header_row = &parsed_cells[0]; + assert_eq!(header_row.len(), 4); + + // id: 0..2 + assert_eq!(header_row[0].0.as_ref(), "id"); + assert_eq!(header_row[0].1, 0..2); + + // "name with spaces": 3..21 (includes quotes) + assert_eq!(header_row[1].0.as_ref(), "name with spaces"); + assert_eq!(header_row[1].1, 3..21); + + // "description, with commas": 22..48 (includes quotes) + assert_eq!(header_row[2].0.as_ref(), "description, with commas"); + assert_eq!(header_row[2].1, 22..48); + + // status: 49..55 + assert_eq!(header_row[3].0.as_ref(), "status"); + assert_eq!(header_row[3].1, 49..55); + + // Check first data row + let first_row = &parsed_cells[1]; + assert_eq!(first_row.len(), 4); + + // 1: 56..57 + assert_eq!(first_row[0].0.as_ref(), "1"); + assert_eq!(first_row[0].1, 56..57); + + // "John Doe": 58..68 (includes quotes) + assert_eq!(first_row[1].0.as_ref(), "John Doe"); + assert_eq!(first_row[1].1, 58..68); + + // Content should 
be stripped of quotes but include escaped quotes + assert_eq!( + first_row[2].0.as_ref(), + r#"A person with "quotes" and, commas"# + ); + // The range should include the outer quotes: 69..107 + assert_eq!(first_row[2].1, 69..107); + + // active: 108..114 + assert_eq!(first_row[3].0.as_ref(), "active"); + assert_eq!(first_row[3].1, 108..114); + } +} + +impl TableLikeContent { + #[cfg(test)] + pub fn from_str(text: String) -> Self { + use text::{Buffer, BufferId, ReplicaId}; + + let buffer_id = BufferId::new(1).unwrap(); + let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, text); + let snapshot = buffer.snapshot(); + from_buffer(snapshot) + } +} diff --git a/crates/csv_preview/src/renderer.rs b/crates/csv_preview/src/renderer.rs new file mode 100644 index 0000000000000000000000000000000000000000..42ae05936c7ebd3fb9c619793376998b6d33e2c1 --- /dev/null +++ b/crates/csv_preview/src/renderer.rs @@ -0,0 +1,5 @@ +mod preview_view; +mod render_table; +mod row_identifiers; +mod table_cell; +mod table_header; diff --git a/crates/csv_preview/src/renderer/preview_view.rs b/crates/csv_preview/src/renderer/preview_view.rs new file mode 100644 index 0000000000000000000000000000000000000000..55e62d03806b578f59c2542cf997f90ec22a1f8f --- /dev/null +++ b/crates/csv_preview/src/renderer/preview_view.rs @@ -0,0 +1,50 @@ +use std::time::Instant; + +use ui::{div, prelude::*}; + +use crate::{CsvPreviewView, settings::FontType}; + +impl Render for CsvPreviewView { + fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { + let theme = cx.theme(); + + self.performance_metrics.rendered_indices.clear(); + let render_prep_start = Instant::now(); + let table_with_settings = v_flex() + .size_full() + .p_4() + .bg(theme.colors().editor_background) + .track_focus(&self.focus_handle) + .child({ + if self.engine.contents.number_of_cols == 0 { + div() + .flex() + .items_center() + .justify_center() + .h_32() + .text_ui(cx) + .map(|div| match self.settings.font_type { + 
FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .text_color(cx.theme().colors().text_muted) + .child("No CSV content to display") + .into_any_element() + } else { + self.create_table(&self.column_widths.widths, cx) + } + }); + + let render_prep_duration = render_prep_start.elapsed(); + self.performance_metrics.timings.insert( + "render_prep", + (render_prep_duration, std::time::Instant::now()), + ); + + div() + .relative() + .w_full() + .h_full() + .child(table_with_settings) + } +} diff --git a/crates/csv_preview/src/renderer/render_table.rs b/crates/csv_preview/src/renderer/render_table.rs new file mode 100644 index 0000000000000000000000000000000000000000..0cc3bc3c46fb24570b3c99c9121dff3860c6b820 --- /dev/null +++ b/crates/csv_preview/src/renderer/render_table.rs @@ -0,0 +1,193 @@ +use crate::types::TableCell; +use gpui::{AnyElement, Entity}; +use std::ops::Range; +use ui::Table; +use ui::TableColumnWidths; +use ui::TableResizeBehavior; +use ui::UncheckedTableRow; +use ui::{DefiniteLength, div, prelude::*}; + +use crate::{ + CsvPreviewView, + settings::RowRenderMechanism, + types::{AnyColumn, DisplayCellId, DisplayRow}, +}; + +impl CsvPreviewView { + /// Creates a new table. + /// Column number is derived from the `TableColumnWidths` entity. + pub(crate) fn create_table( + &self, + current_widths: &Entity, + cx: &mut Context, + ) -> AnyElement { + let cols = current_widths.read(cx).cols(); + let remaining_col_number = cols - 1; + let fraction = if remaining_col_number > 0 { + 1. / remaining_col_number as f32 + } else { + 1. // only column with line numbers is present. 
Put 100%, but it will be overwritten anyways :D + }; + let mut widths = vec![DefiniteLength::Fraction(fraction); cols]; + let line_number_width = self.calculate_row_identifier_column_width(); + widths[0] = DefiniteLength::Absolute(AbsoluteLength::Pixels(line_number_width.into())); + + let mut resize_behaviors = vec![TableResizeBehavior::Resizable; cols]; + resize_behaviors[0] = TableResizeBehavior::None; + + self.create_table_inner( + self.engine.contents.rows.len(), + widths, + resize_behaviors, + current_widths, + cx, + ) + } + + fn create_table_inner( + &self, + row_count: usize, + widths: UncheckedTableRow, + resize_behaviors: UncheckedTableRow, + current_widths: &Entity, + cx: &mut Context, + ) -> AnyElement { + let cols = widths.len(); + // Create headers array with interactive elements + let mut headers = Vec::with_capacity(cols); + + headers.push(self.create_row_identifier_header(cx)); + + // Add the actual CSV headers with sort buttons + for i in 0..(cols - 1) { + let header_text = self + .engine + .contents + .headers + .get(AnyColumn(i)) + .and_then(|h| h.display_value().cloned()) + .unwrap_or_else(|| format!("Col {}", i + 1).into()); + + headers.push(self.create_header_element_with_sort_button( + header_text, + cx, + AnyColumn::from(i), + )); + } + + Table::new(cols) + .interactable(&self.table_interaction_state) + .striped() + .column_widths(widths) + .resizable_columns(resize_behaviors, current_widths, cx) + .header(headers) + .disable_base_style() + .map(|table| { + let row_identifier_text_color = cx.theme().colors().editor_line_number; + match self.settings.rendering_with { + RowRenderMechanism::VariableList => { + table.variable_row_height_list(row_count, self.list_state.clone(), { + cx.processor(move |this, display_row: usize, _window, cx| { + this.performance_metrics.rendered_indices.push(display_row); + + let display_row = DisplayRow(display_row); + Self::render_single_table_row( + this, + cols, + display_row, + row_identifier_text_color, + cx, 
+ ) + .unwrap_or_else(|| panic!("Expected to render a table row")) + }) + }) + } + RowRenderMechanism::UniformList => { + table.uniform_list("csv-table", row_count, { + cx.processor(move |this, range: Range, _window, cx| { + // Record all display indices in the range for performance metrics + this.performance_metrics + .rendered_indices + .extend(range.clone()); + + range + .filter_map(|display_index| { + Self::render_single_table_row( + this, + cols, + DisplayRow(display_index), + row_identifier_text_color, + cx, + ) + }) + .collect() + }) + }) + } + } + }) + .into_any_element() + } + + /// Render a single table row + /// + /// Used both by UniformList and VariableRowHeightList + fn render_single_table_row( + this: &CsvPreviewView, + cols: usize, + display_row: DisplayRow, + row_identifier_text_color: gpui::Hsla, + cx: &Context, + ) -> Option> { + // Get the actual row index from our sorted indices + let data_row = this.engine.d2d_mapping().get_data_row(display_row)?; + let row = this.engine.contents.get_row(data_row)?; + + let mut elements = Vec::with_capacity(cols); + elements.push(this.create_row_identifier_cell(display_row, data_row, cx)?); + + // Remaining columns: actual CSV data + for col in (0..this.engine.contents.number_of_cols).map(AnyColumn) { + let table_cell = row.expect_get(col); + + // TODO: Introduce `` cell type + let cell_content = table_cell.display_value().cloned().unwrap_or_default(); + + let display_cell_id = DisplayCellId::new(display_row, col); + + let cell = div().size_full().whitespace_nowrap().text_ellipsis().child( + CsvPreviewView::create_selectable_cell( + display_cell_id, + cell_content, + this.settings.vertical_alignment, + this.settings.font_type, + cx, + ), + ); + + elements.push( + div() + .size_full() + .when(this.settings.show_debug_info, |parent| { + parent.child(div().text_color(row_identifier_text_color).child( + match table_cell { + TableCell::Real { position: pos, .. 
} => { + let slv = pos.start.timestamp().value; + let so = pos.start.offset; + let elv = pos.end.timestamp().value; + let eo = pos.end.offset; + format!("Pos {so}(L{slv})-{eo}(L{elv})") + } + TableCell::Virtual => "Virtual cell".into(), + }, + )) + }) + .text_ui(cx) + .child(cell) + .into_any_element(), + ); + } + + Some(elements) + } +} diff --git a/crates/csv_preview/src/renderer/row_identifiers.rs b/crates/csv_preview/src/renderer/row_identifiers.rs new file mode 100644 index 0000000000000000000000000000000000000000..a122aa9bf3d803b9deb9c6211e117ba4aa593d93 --- /dev/null +++ b/crates/csv_preview/src/renderer/row_identifiers.rs @@ -0,0 +1,189 @@ +use ui::{ + ActiveTheme as _, AnyElement, Button, ButtonCommon as _, ButtonSize, ButtonStyle, + Clickable as _, Context, ElementId, FluentBuilder as _, IntoElement as _, ParentElement as _, + SharedString, Styled as _, StyledTypography as _, Tooltip, div, +}; + +use crate::{ + CsvPreviewView, + settings::{FontType, RowIdentifiers}, + types::{DataRow, DisplayRow, LineNumber}, +}; + +pub enum RowIdentDisplayMode { + /// E.g + /// ```text + /// 1 + /// ... + /// 5 + /// ``` + Vertical, + /// E.g. + /// ```text + /// 1-5 + /// ``` + Horizontal, +} + +impl LineNumber { + pub fn display_string(&self, mode: RowIdentDisplayMode) -> String { + match *self { + LineNumber::Line(line) => line.to_string(), + LineNumber::LineRange(start, end) => match mode { + RowIdentDisplayMode::Vertical => { + if start + 1 == end { + format!("{start}\n{end}") + } else { + format!("{start}\n...\n{end}") + } + } + RowIdentDisplayMode::Horizontal => { + format!("{start}-{end}") + } + }, + } + } +} + +impl CsvPreviewView { + /// Calculate the optimal width for the row identifier column (line numbers or row numbers). + /// + /// This ensures the column is wide enough to display the largest identifier comfortably, + /// but not wastefully wide for small files. 
+ pub(crate) fn calculate_row_identifier_column_width(&self) -> f32 { + match self.settings.numbering_type { + RowIdentifiers::SrcLines => self.calculate_line_number_width(), + RowIdentifiers::RowNum => self.calculate_row_number_width(), + } + } + + /// Calculate width needed for line numbers (can be multi-line) + fn calculate_line_number_width(&self) -> f32 { + // Find the maximum line number that could be displayed + let max_line_number = self + .engine + .contents + .line_numbers + .iter() + .map(|ln| match ln { + LineNumber::Line(n) => *n, + LineNumber::LineRange(_, end) => *end, + }) + .max() + .unwrap_or_default(); + + let digit_count = if max_line_number == 0 { + 1 + } else { + (max_line_number as f32).log10().floor() as usize + 1 + }; + + // if !self.settings.multiline_cells_enabled { + // // Uses horizontal line numbers layout like `123-456`. Needs twice the size + // digit_count *= 2; + // } + + let char_width_px = 9.0; // TODO: get real width of the characters + let base_width = (digit_count as f32) * char_width_px; + let padding = 20.0; + let min_width = 60.0; + (base_width + padding).max(min_width) + } + + /// Calculate width needed for sequential row numbers + fn calculate_row_number_width(&self) -> f32 { + let max_row_number = self.engine.contents.rows.len(); + + let digit_count = if max_row_number == 0 { + 1 + } else { + (max_row_number as f32).log10().floor() as usize + 1 + }; + + let char_width_px = 9.0; // TODO: get real width of the characters + let base_width = (digit_count as f32) * char_width_px; + let padding = 20.0; + let min_width = 60.0; + (base_width + padding).max(min_width) + } + + pub(crate) fn create_row_identifier_header( + &self, + cx: &mut Context<'_, CsvPreviewView>, + ) -> AnyElement { + // First column: row identifier (clickable to toggle between Lines and Rows) + let row_identifier_text = match self.settings.numbering_type { + RowIdentifiers::SrcLines => "Lines", + RowIdentifiers::RowNum => "Rows", + }; + + let view = 
cx.entity(); + let value = div() + .map(|div| match self.settings.font_type { + FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .child( + Button::new( + ElementId::Name("row-identifier-toggle".into()), + row_identifier_text, + ) + .style(ButtonStyle::Subtle) + .size(ButtonSize::Compact) + .tooltip(Tooltip::text( + "Toggle between: file line numbers or sequential row numbers", + )) + .on_click(move |_event, _window, cx| { + view.update(cx, |this, cx| { + this.settings.numbering_type = match this.settings.numbering_type { + RowIdentifiers::SrcLines => RowIdentifiers::RowNum, + RowIdentifiers::RowNum => RowIdentifiers::SrcLines, + }; + cx.notify(); + }); + }), + ) + .into_any_element(); + value + } + + pub(crate) fn create_row_identifier_cell( + &self, + display_row: DisplayRow, + data_row: DataRow, + cx: &Context<'_, CsvPreviewView>, + ) -> Option { + let row_identifier: SharedString = match self.settings.numbering_type { + RowIdentifiers::SrcLines => self + .engine + .contents + .line_numbers + .get(*data_row)? 
+ .display_string(if self.settings.multiline_cells_enabled { + RowIdentDisplayMode::Vertical + } else { + RowIdentDisplayMode::Horizontal + }) + .into(), + RowIdentifiers::RowNum => (*display_row + 1).to_string().into(), + }; + + let value = div() + .flex() + .px_1() + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .h_full() + .text_ui(cx) + // Row identifiers are always centered + .items_center() + .justify_end() + .map(|div| match self.settings.font_type { + FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .child(row_identifier) + .into_any_element(); + Some(value) + } +} diff --git a/crates/csv_preview/src/renderer/table_cell.rs b/crates/csv_preview/src/renderer/table_cell.rs new file mode 100644 index 0000000000000000000000000000000000000000..32900ab77708936e218e9af10a4de5fba796e6a7 --- /dev/null +++ b/crates/csv_preview/src/renderer/table_cell.rs @@ -0,0 +1,72 @@ +//! Table Cell Rendering + +use gpui::{AnyElement, ElementId}; +use ui::{SharedString, Tooltip, div, prelude::*}; + +use crate::{ + CsvPreviewView, + settings::{FontType, VerticalAlignment}, + types::DisplayCellId, +}; + +impl CsvPreviewView { + /// Create selectable table cell with mouse event handlers. + pub fn create_selectable_cell( + display_cell_id: DisplayCellId, + cell_content: SharedString, + vertical_alignment: VerticalAlignment, + font_type: FontType, + cx: &Context, + ) -> AnyElement { + create_table_cell( + display_cell_id, + cell_content, + vertical_alignment, + font_type, + cx, + ) + // Mouse events handlers will be here + .into_any_element() + } +} + +/// Create styled table cell div element. +fn create_table_cell( + display_cell_id: DisplayCellId, + cell_content: SharedString, + vertical_alignment: VerticalAlignment, + font_type: FontType, + cx: &Context<'_, CsvPreviewView>, +) -> gpui::Stateful

{ + div() + .id(ElementId::NamedInteger( + format!( + "csv-display-cell-{}-{}", + *display_cell_id.row, *display_cell_id.col + ) + .into(), + 0, + )) + .cursor_pointer() + .flex() + .h_full() + .px_1() + .bg(cx.theme().colors().editor_background) + .border_b_1() + .border_r_1() + .border_color(cx.theme().colors().border_variant) + .map(|div| match vertical_alignment { + VerticalAlignment::Top => div.items_start(), + VerticalAlignment::Center => div.items_center(), + }) + .map(|div| match vertical_alignment { + VerticalAlignment::Top => div.content_start(), + VerticalAlignment::Center => div.content_center(), + }) + .map(|div| match font_type { + FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .tooltip(Tooltip::text(cell_content.clone())) + .child(div().child(cell_content)) +} diff --git a/crates/csv_preview/src/renderer/table_header.rs b/crates/csv_preview/src/renderer/table_header.rs new file mode 100644 index 0000000000000000000000000000000000000000..52a16be9fc81ef1c3f001513b652a33c3b06dc82 --- /dev/null +++ b/crates/csv_preview/src/renderer/table_header.rs @@ -0,0 +1,94 @@ +use gpui::ElementId; +use ui::{Tooltip, prelude::*}; + +use crate::{ + CsvPreviewView, + settings::FontType, + table_data_engine::sorting_by_column::{AppliedSorting, SortDirection}, + types::AnyColumn, +}; + +impl CsvPreviewView { + /// Create header for data, which is orderable with text on the left and sort button on the right + pub(crate) fn create_header_element_with_sort_button( + &self, + header_text: SharedString, + cx: &mut Context<'_, CsvPreviewView>, + col_idx: AnyColumn, + ) -> AnyElement { + // CSV data columns: text + filter/sort buttons + h_flex() + .justify_between() + .items_center() + .w_full() + .map(|div| match self.settings.font_type { + FontType::Ui => div.font_ui(cx), + FontType::Monospace => div.font_buffer(cx), + }) + .child(div().child(header_text)) + .child(h_flex().gap_1().child(self.create_sort_button(cx, col_idx))) + 
.into_any_element() + } + + fn create_sort_button( + &self, + cx: &mut Context<'_, CsvPreviewView>, + col_idx: AnyColumn, + ) -> Button { + let sort_btn = Button::new( + ElementId::NamedInteger("sort-button".into(), col_idx.get() as u64), + match self.engine.applied_sorting { + Some(ordering) if ordering.col_idx == col_idx => match ordering.direction { + SortDirection::Asc => "↓", + SortDirection::Desc => "↑", + }, + _ => "↕", // Unsorted/available for sorting + }, + ) + .size(ButtonSize::Compact) + .style( + if self + .engine + .applied_sorting + .is_some_and(|o| o.col_idx == col_idx) + { + ButtonStyle::Filled + } else { + ButtonStyle::Subtle + }, + ) + .tooltip(Tooltip::text(match self.engine.applied_sorting { + Some(ordering) if ordering.col_idx == col_idx => match ordering.direction { + SortDirection::Asc => "Sorted A-Z. Click to sort Z-A", + SortDirection::Desc => "Sorted Z-A. Click to disable sorting", + }, + _ => "Not sorted. Click to sort A-Z", + })) + .on_click(cx.listener(move |this, _event, _window, cx| { + let new_sorting = match this.engine.applied_sorting { + Some(ordering) if ordering.col_idx == col_idx => { + // Same column clicked - cycle through states + match ordering.direction { + SortDirection::Asc => Some(AppliedSorting { + col_idx, + direction: SortDirection::Desc, + }), + SortDirection::Desc => None, // Clear sorting + } + } + _ => { + // Different column or no sorting - start with ascending + Some(AppliedSorting { + col_idx, + direction: SortDirection::Asc, + }) + } + }; + + this.engine.applied_sorting = new_sorting; + this.apply_sort(); + cx.notify(); + })); + sort_btn + } +} diff --git a/crates/csv_preview/src/settings.rs b/crates/csv_preview/src/settings.rs new file mode 100644 index 0000000000000000000000000000000000000000..e627b3cc994a84f54268a05ba17534789f631fe0 --- /dev/null +++ b/crates/csv_preview/src/settings.rs @@ -0,0 +1,46 @@ +#[derive(Default, Clone, Copy)] +pub enum RowRenderMechanism { + /// Default behaviour + #[default] + 
VariableList, + /// More performance oriented, but all rows are same height + #[allow(dead_code)] // Will be used when settings ui is added + UniformList, +} + +#[derive(Default, Clone, Copy)] +pub enum VerticalAlignment { + /// Align text to the top of cells + #[default] + Top, + /// Center text vertically in cells + Center, +} + +#[derive(Default, Clone, Copy)] +pub enum FontType { + /// Use the default UI font + #[default] + Ui, + /// Use monospace font (same as buffer/editor font) + Monospace, +} + +#[derive(Default, Clone, Copy)] +pub enum RowIdentifiers { + /// Show original line numbers from CSV file + #[default] + SrcLines, + /// Show sequential row numbers starting from 1 + RowNum, +} + +#[derive(Clone, Default)] +pub(crate) struct CsvPreviewSettings { + pub(crate) rendering_with: RowRenderMechanism, + pub(crate) vertical_alignment: VerticalAlignment, + pub(crate) font_type: FontType, + pub(crate) numbering_type: RowIdentifiers, + pub(crate) show_debug_info: bool, + pub(crate) multiline_cells_enabled: bool, +} diff --git a/crates/csv_preview/src/table_data_engine.rs b/crates/csv_preview/src/table_data_engine.rs new file mode 100644 index 0000000000000000000000000000000000000000..382b41a28507213dcc5993adb49a1fddc5e7b64c --- /dev/null +++ b/crates/csv_preview/src/table_data_engine.rs @@ -0,0 +1,90 @@ +//! This module defines core operations and config of tabular data view (CSV table) +//! It operates in 2 coordinate systems: +//! - `DataCellId` - indices of src data cells +//! - `DisplayCellId` - indices of data after applied transformations like sorting/filtering, which is used to render cell on the screen +//! +//! It's designed to contain core logic of operations without relying on `CsvPreviewView`, context or window handles. 
+ +use std::{collections::HashMap, sync::Arc}; + +use ui::table_row::TableRow; + +use crate::{ + table_data_engine::sorting_by_column::{AppliedSorting, sort_data_rows}, + types::{DataRow, DisplayRow, TableCell, TableLikeContent}, +}; + +pub mod sorting_by_column; + +#[derive(Default)] +pub(crate) struct TableDataEngine { + pub applied_sorting: Option, + d2d_mapping: DisplayToDataMapping, + pub contents: TableLikeContent, +} + +impl TableDataEngine { + pub(crate) fn d2d_mapping(&self) -> &DisplayToDataMapping { + &self.d2d_mapping + } + + pub(crate) fn apply_sort(&mut self) { + self.d2d_mapping + .apply_sorting(self.applied_sorting, &self.contents.rows); + self.d2d_mapping.merge_mappings(); + } + + /// Applies sorting and filtering to the data and produces display to data mapping + pub(crate) fn calculate_d2d_mapping(&mut self) { + self.d2d_mapping + .apply_sorting(self.applied_sorting, &self.contents.rows); + self.d2d_mapping.merge_mappings(); + } +} + +/// Relation of Display (rendered) rows to Data (src) rows with applied transformations +/// Transformations applied: +/// - sorting by column +#[derive(Debug, Default)] +pub struct DisplayToDataMapping { + /// All rows sorted, regardless of applied filtering. Applied every time sorting changes + pub sorted_rows: Vec, + /// Filtered and sorted rows. 
Computed cheaply from `sorted_mapping` and `filtered_out_rows` + pub mapping: Arc>, +} + +impl DisplayToDataMapping { + /// Get the data row for a given display row + pub fn get_data_row(&self, display_row: DisplayRow) -> Option { + self.mapping.get(&display_row).copied() + } + + /// Get the number of filtered rows + pub fn visible_row_count(&self) -> usize { + self.mapping.len() + } + + /// Computes sorting + fn apply_sorting(&mut self, sorting: Option, rows: &[TableRow]) { + let data_rows: Vec = (0..rows.len()).map(DataRow).collect(); + + let sorted_rows = if let Some(sorting) = sorting { + sort_data_rows(&rows, data_rows, sorting) + } else { + data_rows + }; + + self.sorted_rows = sorted_rows; + } + + /// Take pre-computed sorting and filtering results, and apply them to the mapping + fn merge_mappings(&mut self) { + self.mapping = Arc::new( + self.sorted_rows + .iter() + .enumerate() + .map(|(display, data)| (DisplayRow(display), *data)) + .collect(), + ); + } +} diff --git a/crates/csv_preview/src/table_data_engine/sorting_by_column.rs b/crates/csv_preview/src/table_data_engine/sorting_by_column.rs new file mode 100644 index 0000000000000000000000000000000000000000..52d61351a3d4a8fad0cec60d8c6c594fec05c545 --- /dev/null +++ b/crates/csv_preview/src/table_data_engine/sorting_by_column.rs @@ -0,0 +1,49 @@ +use ui::table_row::TableRow; + +use crate::types::{AnyColumn, DataRow, TableCell}; + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum SortDirection { + Asc, + Desc, +} + +/// Config or currently active sorting +#[derive(Debug, Clone, Copy)] +pub struct AppliedSorting { + /// 0-based column index + pub col_idx: AnyColumn, + /// Direction of sorting (asc/desc) + pub direction: SortDirection, +} + +pub fn sort_data_rows( + content_rows: &[TableRow], + mut data_row_ids: Vec, + sorting: AppliedSorting, +) -> Vec { + data_row_ids.sort_by(|&a, &b| { + let row_a = &content_rows[*a]; + let row_b = &content_rows[*b]; + + // TODO: Decide how to handle nulls (on 
top or on bottom) + let val_a = row_a + .get(sorting.col_idx) + .and_then(|tc| tc.display_value()) + .map(|tc| tc.as_str()) + .unwrap_or(""); + let val_b = row_b + .get(sorting.col_idx) + .and_then(|tc| tc.display_value()) + .map(|tc| tc.as_str()) + .unwrap_or(""); + + let cmp = val_a.cmp(val_b); + match sorting.direction { + SortDirection::Asc => cmp, + SortDirection::Desc => cmp.reverse(), + } + }); + + data_row_ids +} diff --git a/crates/csv_preview/src/types.rs b/crates/csv_preview/src/types.rs new file mode 100644 index 0000000000000000000000000000000000000000..87fc513f53e61db996d39dcb05409c765fd0c6dc --- /dev/null +++ b/crates/csv_preview/src/types.rs @@ -0,0 +1,17 @@ +use std::fmt::Debug; + +pub use coordinates::*; +mod coordinates; +pub use table_cell::*; +mod table_cell; +pub use table_like_content::*; +mod table_like_content; + +/// Line number information for CSV rows +#[derive(Debug, Clone, Copy)] +pub enum LineNumber { + /// Single line row + Line(usize), + /// Multi-line row spanning from start to end line. Incluisive + LineRange(usize, usize), +} diff --git a/crates/csv_preview/src/types/coordinates.rs b/crates/csv_preview/src/types/coordinates.rs new file mode 100644 index 0000000000000000000000000000000000000000..d800bef6ce0dd54d5ae65301163f79013e447ce3 --- /dev/null +++ b/crates/csv_preview/src/types/coordinates.rs @@ -0,0 +1,127 @@ +//! Type definitions for CSV table coordinates and cell identifiers. +//! +//! Provides newtypes for self-documenting coordinate systems: +//! - Display coordinates: Visual positions in rendered table +//! - Data coordinates: Original CSV data positions + +use std::ops::Deref; + +///// Rows ///// +/// Visual row position in rendered table. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct DisplayRow(pub usize); + +impl DisplayRow { + /// Create a new display row + pub fn new(row: usize) -> Self { + Self(row) + } + + /// Get the inner row value + pub fn get(self) -> usize { + self.0 + } +} + +impl Deref for DisplayRow { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +/// Original CSV row position. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct DataRow(pub usize); + +impl DataRow { + /// Create a new data row + pub fn new(row: usize) -> Self { + Self(row) + } +} + +impl Deref for DataRow { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for DisplayRow { + fn from(row: usize) -> Self { + DisplayRow::new(row) + } +} + +impl From for DataRow { + fn from(row: usize) -> Self { + DataRow::new(row) + } +} + +///// Columns ///// +/// Data column position in CSV table. 0-based +/// +/// Currently represents both display and data coordinate systems since +/// column reordering is not yet implemented. When column reordering is added, +/// this will need to be split into `DisplayColumn` and `DataColumn` types. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct AnyColumn(pub usize); + +impl AnyColumn { + /// Create a new column ID + pub fn new(col: usize) -> Self { + Self(col) + } + + /// Get the inner column value + pub fn get(self) -> usize { + self.0 + } +} + +impl Deref for AnyColumn { + type Target = usize; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for AnyColumn { + fn from(col: usize) -> Self { + AnyColumn::new(col) + } +} + +impl From for usize { + fn from(value: AnyColumn) -> Self { + *value + } +} + +///// Cells ///// +/// Visual cell position in rendered table. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct DisplayCellId { + pub row: DisplayRow, + pub col: AnyColumn, +} + +impl DisplayCellId { + /// Create a new display cell ID + pub fn new(row: impl Into, col: impl Into) -> Self { + Self { + row: row.into(), + col: col.into(), + } + } + + /// Returns (row, column) + pub fn to_raw(&self) -> (usize, usize) { + (self.row.0, self.col.0) + } +} diff --git a/crates/csv_preview/src/types/table_cell.rs b/crates/csv_preview/src/types/table_cell.rs new file mode 100644 index 0000000000000000000000000000000000000000..b6f9adb3fe82b0d468d1ffc8404e707a762e94ea --- /dev/null +++ b/crates/csv_preview/src/types/table_cell.rs @@ -0,0 +1,54 @@ +use text::Anchor; +use ui::SharedString; + +/// Position of a cell within the source CSV buffer +#[derive(Clone, Debug)] +pub struct CellContentSpan { + /// Start anchor of the cell content in the source buffer + pub start: Anchor, + /// End anchor of the cell content in the source buffer + pub end: Anchor, +} + +/// A table cell with its content and position in the source buffer +#[derive(Clone, Debug)] +pub enum TableCell { + /// Cell existing in the CSV + Real { + /// Position of this cell in the source buffer + position: CellContentSpan, + /// Cached display value (for performance) + cached_value: SharedString, + }, + /// Virtual cell, created to pad malformed row + Virtual, +} + +impl TableCell { + /// Create a TableCell with buffer position tracking + pub fn from_buffer_position( + content: SharedString, + start_offset: usize, + end_offset: usize, + buffer_snapshot: &text::BufferSnapshot, + ) -> Self { + let start_anchor = buffer_snapshot.anchor_before(start_offset); + let end_anchor = buffer_snapshot.anchor_after(end_offset); + + Self::Real { + position: CellContentSpan { + start: start_anchor, + end: end_anchor, + }, + cached_value: content, + } + } + + /// Get the display value for this cell + pub fn display_value(&self) -> Option<&SharedString> { + match self { + 
TableCell::Real { cached_value, .. } => Some(cached_value), + TableCell::Virtual => None, + } + } +} diff --git a/crates/csv_preview/src/types/table_like_content.rs b/crates/csv_preview/src/types/table_like_content.rs new file mode 100644 index 0000000000000000000000000000000000000000..7bf205af812c24d70f33157f8ab7acc454c3b0d5 --- /dev/null +++ b/crates/csv_preview/src/types/table_like_content.rs @@ -0,0 +1,32 @@ +use ui::table_row::TableRow; + +use crate::types::{DataRow, LineNumber, TableCell}; + +/// Generic container struct of table-like data (CSV, TSV, etc) +#[derive(Clone)] +pub struct TableLikeContent { + /// Number of data columns. + /// Defines table width used to validate `TableRow` on creation + pub number_of_cols: usize, + pub headers: TableRow, + pub rows: Vec>, + /// Follows the same indices as `rows` + pub line_numbers: Vec, +} + +impl Default for TableLikeContent { + fn default() -> Self { + Self { + number_of_cols: 0, + headers: TableRow::::from_vec(vec![], 0), + rows: vec![], + line_numbers: vec![], + } + } +} + +impl TableLikeContent { + pub(crate) fn get_row(&self, data_row: DataRow) -> Option<&TableRow> { + self.rows.get(*data_row) + } +} diff --git a/crates/ui/src/components/data_table.rs b/crates/ui/src/components/data_table.rs index 8a40c246ca44ea9dbb25e61bb611882343ba7f94..76ed64850c92e274bd8aeca483dd197cfbccbf52 100644 --- a/crates/ui/src/components/data_table.rs +++ b/crates/ui/src/components/data_table.rs @@ -36,6 +36,13 @@ pub mod table_row { pub struct TableRow(Vec); impl TableRow { + pub fn from_element(element: T, length: usize) -> Self + where + T: Clone, + { + Self::from_vec(vec![element; length], length) + } + /// Constructs a `TableRow` from a `Vec`, panicking if the length does not match `expected_length`. /// /// Use this when you want to ensure at construction time that the row has the correct number of columns. 
@@ -70,7 +77,8 @@ pub mod table_row { /// /// # Panics /// Panics if `col` is out of bounds (i.e., `col >= self.cols()`). - pub fn expect_get(&self, col: usize) -> &T { + pub fn expect_get(&self, col: impl Into) -> &T { + let col = col.into(); self.0.get(col).unwrap_or_else(|| { panic!( "Expected table row of `{}` to have {col:?}", @@ -79,8 +87,8 @@ pub mod table_row { }) } - pub fn get(&self, col: usize) -> Option<&T> { - self.0.get(col) + pub fn get(&self, col: impl Into) -> Option<&T> { + self.0.get(col.into()) } pub fn as_slice(&self) -> &[T] { @@ -735,6 +743,7 @@ pub struct Table { empty_table_callback: Option AnyElement>>, /// The number of columns in the table. Used to assert column numbers in `TableRow` collections cols: usize, + disable_base_cell_style: bool, } impl Table { @@ -753,9 +762,19 @@ impl Table { use_ui_font: true, empty_table_callback: None, col_widths: None, + disable_base_cell_style: false, } } + /// Disables based styling of row cell (paddings, text ellipsis, nowrap, etc), keeping width settings + /// + /// Doesn't affect base style of header cell. + /// Doesn't remove overflow-hidden + pub fn disable_base_style(mut self) -> Self { + self.disable_base_cell_style = true; + self + } + /// Enables uniform list rendering. /// The provided function will be passed directly to the `uniform_list` element. 
/// Therefore, if this method is called, any calls to [`Table::row`] before or after @@ -973,10 +992,18 @@ pub fn render_table_row( .into_iter() .zip(column_widths.into_vec()) .map(|(cell, width)| { - base_cell_style_text(width, table_context.use_ui_font, cx) - .px_1() - .py_0p5() - .child(cell) + if table_context.disable_base_cell_style { + div() + .when_some(width, |this, width| this.w(width)) + .when(width.is_none(), |this| this.flex_1()) + .overflow_hidden() + .child(cell) + } else { + base_cell_style_text(width, table_context.use_ui_font, cx) + .px_1() + .py_0p5() + .child(cell) + } }), ); @@ -1071,6 +1098,7 @@ pub struct TableRenderContext { pub column_widths: Option>, pub map_row: Option), &mut Window, &mut App) -> AnyElement>>, pub use_ui_font: bool, + pub disable_base_cell_style: bool, } impl TableRenderContext { @@ -1083,6 +1111,7 @@ impl TableRenderContext { column_widths: table.col_widths.as_ref().map(|widths| widths.lengths(cx)), map_row: table.map_row.clone(), use_ui_font: table.use_ui_font, + disable_base_cell_style: table.disable_base_cell_style, } } } diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index cf8df08c010bfe643b93b5628cf520ee2ec1dd8b..c04e10636f9088cf5f12dbda526a4e933a5e37e3 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -94,6 +94,7 @@ copilot.workspace = true copilot_chat.workspace = true copilot_ui.workspace = true crashes.workspace = true +csv_preview.workspace = true dap_adapters.workspace = true db.workspace = true debug_adapter_extension.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index e93bd92d041a18e927e1560379bcdb2886605874..38238d8af519c0506ab451bccaa1abe3a893e4c9 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -715,6 +715,7 @@ fn main() { git_graph::init(cx); feedback::init(cx); markdown_preview::init(cx); + csv_preview::init(cx); svg_preview::init(cx); onboarding::init(cx); settings_ui::init(cx); diff --git a/crates/zed/src/zed.rs 
b/crates/zed/src/zed.rs index 55f185aae13e49c6b90610a50ad197ee47ee8a98..a0a6e424d46790ad49c860377c5d1e711aae6b61 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -4809,6 +4809,7 @@ mod tests { "console", "context_server", "copilot", + "csv", "debug_panel", "debugger", "dev", diff --git a/crates/zed/src/zed/quick_action_bar/preview.rs b/crates/zed/src/zed/quick_action_bar/preview.rs index 5d43e79542357977b06fbbd884472f94ad3595c8..01e2d164d7d7a8a81e64ab77ad646111e4baacd7 100644 --- a/crates/zed/src/zed/quick_action_bar/preview.rs +++ b/crates/zed/src/zed/quick_action_bar/preview.rs @@ -1,3 +1,8 @@ +use csv_preview::{ + CsvPreviewView, OpenPreview as CsvOpenPreview, OpenPreviewToTheSide as CsvOpenPreviewToTheSide, + TabularDataPreviewFeatureFlag, +}; +use feature_flags::FeatureFlagAppExt as _; use gpui::{AnyElement, Modifiers, WeakEntity}; use markdown_preview::{ OpenPreview as MarkdownOpenPreview, OpenPreviewToTheSide as MarkdownOpenPreviewToTheSide, @@ -16,6 +21,7 @@ use super::QuickActionBar; enum PreviewType { Markdown, Svg, + Csv, } impl QuickActionBar { @@ -35,6 +41,10 @@ impl QuickActionBar { } else if SvgPreviewView::resolve_active_item_as_svg_buffer(workspace, cx).is_some() { preview_type = Some(PreviewType::Svg); + } else if cx.has_flag::() + && CsvPreviewView::resolve_active_item_as_csv_editor(workspace, cx).is_some() + { + preview_type = Some(PreviewType::Csv); } }); } @@ -57,6 +67,13 @@ impl QuickActionBar { Box::new(SvgOpenPreviewToTheSide) as Box, &svg_preview::OpenPreview as &dyn gpui::Action, ), + PreviewType::Csv => ( + "toggle-csv-preview", + "Preview CSV", + Box::new(CsvOpenPreview) as Box, + Box::new(CsvOpenPreviewToTheSide) as Box, + &csv_preview::OpenPreview as &dyn gpui::Action, + ), }; let alt_click = gpui::Keystroke { From 62b9a98ddbc73c5a0f9feb56b6f5b410903aa418 Mon Sep 17 00:00:00 2001 From: Daniel Llamas Date: Tue, 3 Mar 2026 09:06:22 -0600 Subject: [PATCH 266/548] agent_ui: Make file mention chips clickable to open files 
(#46751) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Summary Makes file mention chips in the AI chat input clickable to open the referenced files. Previously, chips like `@README.md` were purely visual indicators with no interaction. ### Changes - **Clickable mention chips**: Users can now click on file mentions in the chat input to open those files in the editor - **Support for all mention types**: - Files → Opens in editor - Files with line numbers → Opens and scrolls to line - Directories → Reveals in project panel - Threads → Navigates to thread - Rules → Opens rules library - URLs → Opens in browser - **Handles files outside workspace**: Falls back to `open_abs_path()` for files not in the current workspace ### Implementation Threads `MentionUri` and `WeakEntity` through the crease rendering pipeline: 1. Updated `insert_crease_for_mention()` to accept mention URI and workspace references 2. Added click handler to `MentionCrease` component using `.when()` for conditional attachment 3. 
Implemented file opening helpers that mirror the existing `thread_view.rs::open_link()` logic ### Demo https://github.com/user-attachments/assets/21b2afb7-7a86-4a0a-aba1-e24bb1b650c2 ### Testing Manually tested: - [x] Clicking `@README.md` opens file - [x] Clicking file with line numbers navigates correctly - [x] Clicking directory reveals in project panel - [x] Files outside workspace open via absolute path ### Files Changed - `crates/agent_ui/src/mention_set.rs` - Thread URI/workspace through pipeline - `crates/agent_ui/src/ui/mention_crease.rs` - Add click handler and file opening logic - `crates/agent_ui/src/acp/message_editor.rs` - Update call sites ### Review feedback addressed - Replaced `.when()` + `unwrap()` with `.when_some()` + `Option::zip()` (`0e36efb4eb`) - De-duplicated `open_file` and `open_file_at_line` into a single function with `Option>` (`dbcbb69a4b`) - Rebased onto latest `main` and resolved conflicts Also update item 2 under Implementation from: _Added click handler to MentionCrease component using `.when()` for conditional attachment_ to: _Added click handler to MentionCrease component using `.when_some()` with `Option::zip()` for conditional attachment_ ### Release Notes: - agent: File mention chips in the chat input are now clickable and will open the referenced files in the editor. 
Closes #46746 --------- Co-authored-by: Claude Opus 4.6 --- crates/agent_ui/src/mention_set.rs | 18 ++ crates/agent_ui/src/message_editor.rs | 8 + crates/agent_ui/src/ui/mention_crease.rs | 199 ++++++++++++++++++++++- 3 files changed, 223 insertions(+), 2 deletions(-) diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs index 58e7e4cdfc196862bb3b8936f8582ba1ad54bda5..792bfc11a63471e02b22835823fa8c59cdfc9bcf 100644 --- a/crates/agent_ui/src/mention_set.rs +++ b/crates/agent_ui/src/mention_set.rs @@ -234,6 +234,8 @@ impl MentionSet { mention_uri.name().into(), IconName::Image.path().into(), mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(workspace.downgrade()), Some(image), editor.clone(), window, @@ -247,6 +249,8 @@ impl MentionSet { crease_text, mention_uri.icon_path(cx), mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(workspace.downgrade()), None, editor.clone(), window, @@ -699,6 +703,8 @@ pub(crate) async fn insert_images_as_context( MentionUri::PastedImage.name().into(), IconName::Image.path().into(), None, + None, + None, Some(Task::ready(Ok(image.clone())).shared()), editor.clone(), window, @@ -810,6 +816,8 @@ pub(crate) fn insert_crease_for_mention( crease_label: SharedString, crease_icon: SharedString, crease_tooltip: Option, + mention_uri: Option, + workspace: Option>, image: Option, String>>>>, editor: Entity, window: &mut Window, @@ -830,6 +838,8 @@ pub(crate) fn insert_crease_for_mention( crease_label.clone(), crease_icon.clone(), crease_tooltip, + mention_uri.clone(), + workspace.clone(), start..end, rx, image, @@ -1029,6 +1039,8 @@ fn render_mention_fold_button( label: SharedString, icon: SharedString, tooltip: Option, + mention_uri: Option, + workspace: Option>, range: Range, mut loading_finished: postage::barrier::Receiver, image_task: Option, String>>>>, @@ -1049,6 +1061,8 @@ fn render_mention_fold_button( label, icon, tooltip, + mention_uri: mention_uri.clone(), + workspace: 
workspace.clone(), range, editor, loading: Some(loading), @@ -1063,6 +1077,8 @@ struct LoadingContext { label: SharedString, icon: SharedString, tooltip: Option, + mention_uri: Option, + workspace: Option>, range: Range, editor: WeakEntity, loading: Option>, @@ -1079,6 +1095,8 @@ impl Render for LoadingContext { let id = ElementId::from(("loading_context", self.id)); MentionCrease::new(id, self.icon.clone(), self.label.clone()) + .mention_uri(self.mention_uri.clone()) + .workspace(self.workspace.clone()) .is_toggled(is_in_text_selection) .is_loading(self.loading.is_some()) .when_some(self.tooltip.clone(), |this, tooltip_text| { diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index 50b297847b43e4d147978fbcf14dce492fc572d0..36d18a5843dac6d7ae52b591a2e5a402093ac118 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -722,6 +722,8 @@ impl MessageEditor { crease_text.into(), mention_uri.icon_path(cx), mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(self.workspace.clone()), None, self.editor.clone(), window, @@ -833,6 +835,8 @@ impl MessageEditor { mention_uri.name().into(), mention_uri.icon_path(cx), mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(self.workspace.clone()), None, self.editor.clone(), window, @@ -1014,6 +1018,8 @@ impl MessageEditor { mention_uri.name().into(), mention_uri.icon_path(cx), mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(self.workspace.clone()), None, self.editor.clone(), window, @@ -1370,6 +1376,8 @@ impl MessageEditor { mention_uri.name().into(), mention_uri.icon_path(cx), mention_uri.tooltip_text(), + Some(mention_uri.clone()), + Some(self.workspace.clone()), None, self.editor.clone(), window, diff --git a/crates/agent_ui/src/ui/mention_crease.rs b/crates/agent_ui/src/ui/mention_crease.rs index 2d464039dc552203ad76979239673ec27d5568c7..0a61b8e4ef2ec69714f158a72f83cc0528cc8a8f 100644 --- 
a/crates/agent_ui/src/ui/mention_crease.rs +++ b/crates/agent_ui/src/ui/mention_crease.rs @@ -1,15 +1,25 @@ -use std::time::Duration; +use std::{ops::RangeInclusive, path::PathBuf, time::Duration}; -use gpui::{Animation, AnimationExt, AnyView, IntoElement, Window, pulsating_between}; +use acp_thread::MentionUri; +use agent_client_protocol as acp; +use editor::{Editor, SelectionEffects, scroll::Autoscroll}; +use gpui::{ + Animation, AnimationExt, AnyView, Context, IntoElement, WeakEntity, Window, pulsating_between, +}; +use prompt_store::PromptId; +use rope::Point; use settings::Settings; use theme::ThemeSettings; use ui::{ButtonLike, TintColor, Tooltip, prelude::*}; +use workspace::{OpenOptions, Workspace}; #[derive(IntoElement)] pub struct MentionCrease { id: ElementId, icon: SharedString, label: SharedString, + mention_uri: Option, + workspace: Option>, is_toggled: bool, is_loading: bool, tooltip: Option, @@ -26,6 +36,8 @@ impl MentionCrease { id: id.into(), icon: icon.into(), label: label.into(), + mention_uri: None, + workspace: None, is_toggled: false, is_loading: false, tooltip: None, @@ -33,6 +45,16 @@ impl MentionCrease { } } + pub fn mention_uri(mut self, mention_uri: Option) -> Self { + self.mention_uri = mention_uri; + self + } + + pub fn workspace(mut self, workspace: Option>) -> Self { + self.workspace = workspace; + self + } + pub fn is_toggled(mut self, is_toggled: bool) -> Self { self.is_toggled = is_toggled; self @@ -76,6 +98,14 @@ impl RenderOnce for MentionCrease { .height(button_height) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .toggle_state(self.is_toggled) + .when_some( + self.mention_uri.clone().zip(self.workspace.clone()), + |this, (mention_uri, workspace)| { + this.on_click(move |_event, window, cx| { + open_mention_uri(mention_uri.clone(), &workspace, window, cx); + }) + }, + ) .child( h_flex() .pb_px() @@ -114,3 +144,168 @@ impl RenderOnce for MentionCrease { }) } } + +fn open_mention_uri( + mention_uri: MentionUri, + 
workspace: &WeakEntity, + window: &mut Window, + cx: &mut App, +) { + let Some(workspace) = workspace.upgrade() else { + return; + }; + + workspace.update(cx, |workspace, cx| match mention_uri { + MentionUri::File { abs_path } => { + open_file(workspace, abs_path, None, window, cx); + } + MentionUri::Symbol { + abs_path, + line_range, + .. + } + | MentionUri::Selection { + abs_path: Some(abs_path), + line_range, + } => { + open_file(workspace, abs_path, Some(line_range), window, cx); + } + MentionUri::Directory { abs_path } => { + reveal_in_project_panel(workspace, abs_path, cx); + } + MentionUri::Thread { id, name } => { + open_thread(workspace, id, name, window, cx); + } + MentionUri::TextThread { .. } => {} + MentionUri::Rule { id, .. } => { + open_rule(workspace, id, window, cx); + } + MentionUri::Fetch { url } => { + cx.open_url(url.as_str()); + } + MentionUri::PastedImage + | MentionUri::Selection { abs_path: None, .. } + | MentionUri::Diagnostics { .. } + | MentionUri::TerminalSelection { .. } + | MentionUri::GitDiff { .. 
} => {} + }); +} + +fn open_file( + workspace: &mut Workspace, + abs_path: PathBuf, + line_range: Option>, + window: &mut Window, + cx: &mut Context, +) { + let project = workspace.project(); + + if let Some(project_path) = + project.update(cx, |project, cx| project.find_project_path(&abs_path, cx)) + { + let item = workspace.open_path(project_path, None, true, window, cx); + if let Some(line_range) = line_range { + window + .spawn(cx, async move |cx| { + let Some(editor) = item.await?.downcast::() else { + return Ok(()); + }; + editor + .update_in(cx, |editor, window, cx| { + let range = Point::new(*line_range.start(), 0) + ..Point::new(*line_range.start(), 0); + editor.change_selections( + SelectionEffects::scroll(Autoscroll::center()), + window, + cx, + |selections| selections.select_ranges(vec![range]), + ); + }) + .ok(); + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } else { + item.detach_and_log_err(cx); + } + } else if abs_path.exists() { + workspace + .open_abs_path( + abs_path, + OpenOptions { + focus: Some(true), + ..Default::default() + }, + window, + cx, + ) + .detach_and_log_err(cx); + } +} + +fn reveal_in_project_panel( + workspace: &mut Workspace, + abs_path: PathBuf, + cx: &mut Context, +) { + let project = workspace.project(); + let Some(entry_id) = project.update(cx, |project, cx| { + let path = project.find_project_path(&abs_path, cx)?; + project.entry_for_path(&path, cx).map(|entry| entry.id) + }) else { + return; + }; + + project.update(cx, |_, cx| { + cx.emit(project::Event::RevealInProjectPanel(entry_id)); + }); +} + +fn open_thread( + workspace: &mut Workspace, + id: acp::SessionId, + name: String, + window: &mut Window, + cx: &mut Context, +) { + use crate::AgentPanel; + use acp_thread::AgentSessionInfo; + + let Some(panel) = workspace.panel::(cx) else { + return; + }; + + panel.update(cx, |panel, cx| { + panel.load_agent_thread( + AgentSessionInfo { + session_id: id, + cwd: None, + title: Some(name.into()), + updated_at: None, + meta: 
None, + }, + window, + cx, + ) + }); +} + +fn open_rule( + _workspace: &mut Workspace, + id: PromptId, + window: &mut Window, + cx: &mut Context, +) { + use zed_actions::assistant::OpenRulesLibrary; + + let PromptId::User { uuid } = id else { + return; + }; + + window.dispatch_action( + Box::new(OpenRulesLibrary { + prompt_to_select: Some(uuid.0), + }), + cx, + ); +} From 7c9a9d40c06c58c30d555d4398224a4737cd5f98 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 3 Mar 2026 10:25:36 -0500 Subject: [PATCH 267/548] Add "Start Thread in New Worktree" (#49141) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add the thread target selector in the agent panel behind the `agent-git-worktrees` flag: Screenshot 2026-03-02 at 11 50 47 PM - Add a "Start Thread In..." dropdown to the agent panel toolbar, gated behind `AgentV2FeatureFlag` - Options: "Local Project" (default) and "New Worktree" - The "New Worktree" option is disabled when there's no git repository or in collab mode Closes AI-34 Release Notes: - N/A --------- Signed-off-by: Xiaobo Liu Co-authored-by: Oleksiy Syvokon Co-authored-by: Ben Brandt Co-authored-by: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Co-authored-by: Remco Smits Co-authored-by: morgankrey Co-authored-by: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Co-authored-by: Ben Kunkle Co-authored-by: Finn Evers Co-authored-by: Bennet Bo Fenner Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> Co-authored-by: MostlyK <135974627+MostlyKIGuess@users.noreply.github.com> Co-authored-by: cameron Co-authored-by: Max Brunsfeld Co-authored-by: John Tur Co-authored-by: Conrad Irwin Co-authored-by: Wuji Chen Co-authored-by: Claude Co-authored-by: Smit Barmase Co-authored-by: Cole Miller Co-authored-by: Kasper Nyhus Co-authored-by: dino Co-authored-by: Anthony Eid Co-authored-by: Josh Robson Chase Co-authored-by: ozacod <47009516+ozacod@users.noreply.github.com> 
Co-authored-by: ozacod Co-authored-by: Xiaobo Liu Co-authored-by: Lena <241371603+zelenenka@users.noreply.github.com> Co-authored-by: 0x2CA <2478557459@qq.com> Co-authored-by: Joseph T. Lyons Co-authored-by: Albab Hasan <155961300+Albab-Hasan@users.noreply.github.com> Co-authored-by: KyleBarton Co-authored-by: Kunall Banerjee Co-authored-by: Lukas Wirth Co-authored-by: Tom Houlé <13155277+tomhoule@users.noreply.github.com> Co-authored-by: Nikhil Pandey Co-authored-by: Mikayla Maki Co-authored-by: dancer <144584931+dancer@users.noreply.github.com> Co-authored-by: Kirill Bulatov Co-authored-by: Danilo Leal --- Cargo.lock | 1 + crates/agent_ui/Cargo.toml | 1 + crates/agent_ui/src/agent_panel.rs | 1507 +++++++++++++++-- crates/agent_ui/src/agent_ui.rs | 16 +- crates/agent_ui/src/connection_view.rs | 16 +- .../src/connection_view/thread_view.rs | 93 +- crates/collab/src/db/queries/projects.rs | 3 +- crates/collab/src/db/queries/rooms.rs | 3 +- crates/feature_flags/src/flags.rs | 10 + crates/git/src/repository.rs | 48 + crates/git_ui/src/worktree_picker.rs | 4 +- crates/project/src/git_store.rs | 71 +- crates/proto/proto/git.proto | 1 + crates/workspace/src/persistence/model.rs | 12 +- crates/workspace/src/workspace.rs | 171 +- crates/zed/src/visual_test_runner.rs | 649 ++++++- crates/zed/src/zed.rs | 28 +- 17 files changed, 2379 insertions(+), 255 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 99347bd08f0d5b3ae13ab352612e3876a3cf6a11..96caec077edd4bdf8c02a3e1ff1fc10340d2b9b0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -368,6 +368,7 @@ dependencies = [ "fs", "futures 0.3.31", "fuzzy", + "git", "gpui", "gpui_tokio", "html_to_markdown", diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 2a31781054fd29b30a3c8119e87491edbfb1e658..3e46e14b53c46a2aec3ac9552246a10ffc2aeee9 100644 --- a/crates/agent_ui/Cargo.toml +++ b/crates/agent_ui/Cargo.toml @@ -58,6 +58,7 @@ feature_flags.workspace = true file_icons.workspace = true fs.workspace = true 
futures.workspace = true +git.workspace = true fuzzy.workspace = true gpui.workspace = true gpui_tokio.workspace = true diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 7097e5be156eb33382a1a0f47c1b4256c84ce9b1..c5c1c345318b6f88c59ba2886507324e83d36ad3 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -1,6 +1,6 @@ use std::{ ops::Range, - path::Path, + path::{Path, PathBuf}, rc::Rc, sync::{ Arc, @@ -22,15 +22,18 @@ use project::{ use serde::{Deserialize, Serialize}; use settings::{LanguageModelProviderSetting, LanguageModelSelection}; +use feature_flags::{AgentGitWorktreesFeatureFlag, AgentV2FeatureFlag, FeatureFlagAppExt as _}; use zed_actions::agent::{OpenClaudeAgentOnboardingModal, ReauthenticateAgent, ReviewBranchDiff}; +use crate::ManageProfiles; use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal}; use crate::{ AddContextServer, AgentDiffPane, ConnectionView, CopyThreadToClipboard, Follow, InlineAssistant, LoadThreadFromClipboard, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, - OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, ToggleNavigationMenu, - ToggleNewThreadMenu, ToggleOptionsMenu, + OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, StartThreadIn, + ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu, agent_configuration::{AgentConfiguration, AssistantConfigurationEvent}, + connection_view::{AcpThreadViewEvent, ThreadView}, slash_command::SlashCommandCompletionProvider, text_thread_editor::{AgentPanelDelegate, TextThreadEditor, make_lsp_adapter_delegate}, ui::EndTrialUpsell, @@ -42,7 +45,6 @@ use crate::{ ExpandMessageEditor, ThreadHistory, ThreadHistoryEvent, text_thread_history::{TextThreadHistory, TextThreadHistoryEvent}, }; -use crate::{ManageProfiles, connection_view::ThreadView}; use agent_settings::AgentSettings; use ai_onboarding::AgentPanelOnboarding; use anyhow::{Result, anyhow}; @@ -54,6 +56,7 @@ 
use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer}; use extension::ExtensionEvents; use extension_host::ExtensionStore; use fs::Fs; +use git::repository::validate_worktree_directory; use gpui::{ Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, ClipboardItem, Corner, DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, @@ -61,15 +64,17 @@ use gpui::{ }; use language::LanguageRegistry; use language_model::{ConfigurationError, LanguageModelRegistry}; +use project::project_settings::ProjectSettings; use project::{Project, ProjectPath, Worktree}; use prompt_store::{PromptBuilder, PromptStore, UserPromptId}; +use rand::Rng as _; use rules_library::{RulesLibrary, open_rules_library}; use search::{BufferSearchBar, buffer_search}; use settings::{Settings, update_settings_file}; use theme::ThemeSettings; use ui::{ - Callout, ContextMenu, ContextMenuEntry, KeyBinding, PopoverMenu, PopoverMenuHandle, Tab, - Tooltip, prelude::*, utils::WithRemSize, + Button, Callout, ContextMenu, ContextMenuEntry, DocumentationSide, KeyBinding, PopoverMenu, + PopoverMenuHandle, SpinnerLabel, Tab, Tooltip, prelude::*, utils::WithRemSize, }; use util::ResultExt as _; use workspace::{ @@ -123,6 +128,8 @@ struct SerializedAgentPanel { selected_agent: Option, #[serde(default)] last_active_thread: Option, + #[serde(default)] + start_thread_in: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -324,6 +331,13 @@ pub fn init(cx: &mut App) { cx, ); }); + }) + .register_action(|workspace, action: &StartThreadIn, _window, cx| { + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.set_start_thread_in(action, cx); + }); + } }); }, ) @@ -371,6 +385,10 @@ pub enum AgentType { } impl AgentType { + pub fn is_native(&self) -> bool { + matches!(self, Self::NativeAgent) + } + fn label(&self) -> SharedString { match self { Self::NativeAgent | Self::TextThread => "Zed Agent".into(), @@ 
-395,6 +413,29 @@ impl From for AgentType { } } +impl StartThreadIn { + fn label(&self) -> SharedString { + match self { + Self::LocalProject => "Local Project".into(), + Self::NewWorktree => "New Worktree".into(), + } + } + + fn icon(&self) -> IconName { + match self { + Self::LocalProject => IconName::Screen, + Self::NewWorktree => IconName::GitBranchPlus, + } + } +} + +#[derive(Clone, Debug)] +#[allow(dead_code)] +pub enum WorktreeCreationStatus { + Creating, + Error(SharedString), +} + impl ActiveView { pub fn which_font_size_used(&self) -> WhichFontSize { match self { @@ -515,6 +556,7 @@ pub struct AgentPanel { previous_view: Option, _active_view_observation: Option, new_thread_menu_handle: PopoverMenuHandle, + start_thread_in_menu_handle: PopoverMenuHandle, agent_panel_menu_handle: PopoverMenuHandle, agent_navigation_menu_handle: PopoverMenuHandle, agent_navigation_menu: Option>, @@ -525,6 +567,10 @@ pub struct AgentPanel { pending_serialization: Option>>, onboarding: Entity, selected_agent: AgentType, + start_thread_in: StartThreadIn, + worktree_creation_status: Option, + _thread_view_subscription: Option, + _worktree_creation_task: Option>, show_trust_workspace_message: bool, last_configuration_error_telemetry: Option, on_boarding_upsell_dismissed: AtomicBool, @@ -538,6 +584,7 @@ impl AgentPanel { let width = self.width; let selected_agent = self.selected_agent.clone(); + let start_thread_in = Some(self.start_thread_in); let last_active_thread = self.active_agent_thread(cx).map(|thread| { let thread = thread.read(cx); @@ -561,6 +608,7 @@ impl AgentPanel { width, selected_agent: Some(selected_agent), last_active_thread, + start_thread_in, }, ) .await?; @@ -605,6 +653,37 @@ impl AgentPanel { })? 
.await?; + let last_active_thread = if let Some(thread_info) = serialized_panel + .as_ref() + .and_then(|p| p.last_active_thread.clone()) + { + if thread_info.agent_type.is_native() { + let session_id = acp::SessionId::new(thread_info.session_id.clone()); + let load_result = cx.update(|_window, cx| { + let thread_store = ThreadStore::global(cx); + thread_store.update(cx, |store, cx| store.load_thread(session_id, cx)) + }); + let thread_exists = if let Ok(task) = load_result { + task.await.ok().flatten().is_some() + } else { + false + }; + if thread_exists { + Some(thread_info) + } else { + log::warn!( + "last active thread {} not found in database, skipping restoration", + thread_info.session_id + ); + None + } + } else { + Some(thread_info) + } + } else { + None + }; + let panel = workspace.update_in(cx, |workspace, window, cx| { let panel = cx.new(|cx| Self::new(workspace, text_thread_store, prompt_store, window, cx)); @@ -615,44 +694,45 @@ impl AgentPanel { if let Some(selected_agent) = serialized_panel.selected_agent.clone() { panel.selected_agent = selected_agent; } + if let Some(start_thread_in) = serialized_panel.start_thread_in { + let is_worktree_flag_enabled = + cx.has_flag::(); + let is_valid = match &start_thread_in { + StartThreadIn::LocalProject => true, + StartThreadIn::NewWorktree => { + let project = panel.project.read(cx); + is_worktree_flag_enabled && !project.is_via_collab() + } + }; + if is_valid { + panel.start_thread_in = start_thread_in; + } else { + log::info!( + "deserialized start_thread_in {:?} is no longer valid, falling back to LocalProject", + start_thread_in, + ); + } + } cx.notify(); }); } - panel - })?; - - if let Some(thread_info) = serialized_panel.and_then(|p| p.last_active_thread) { - let session_id = acp::SessionId::new(thread_info.session_id.clone()); - let load_task = panel.update(cx, |panel, cx| { - let thread_store = panel.thread_store.clone(); - thread_store.update(cx, |store, cx| store.load_thread(session_id, cx)) - }); 
- let thread_exists = load_task - .await - .map(|thread: Option| thread.is_some()) - .unwrap_or(false); - - if thread_exists { - panel.update_in(cx, |panel, window, cx| { - panel.selected_agent = thread_info.agent_type.clone(); - let session_info = AgentSessionInfo { - session_id: acp::SessionId::new(thread_info.session_id), - cwd: thread_info.cwd, - title: thread_info.title.map(SharedString::from), - updated_at: None, - meta: None, - }; + if let Some(thread_info) = last_active_thread { + let agent_type = thread_info.agent_type.clone(); + let session_info = AgentSessionInfo { + session_id: acp::SessionId::new(thread_info.session_id), + cwd: thread_info.cwd, + title: thread_info.title.map(SharedString::from), + updated_at: None, + meta: None, + }; + panel.update(cx, |panel, cx| { + panel.selected_agent = agent_type; panel.load_agent_thread(session_info, window, cx); - })?; - } else { - log::error!( - "could not restore last active thread: \ - no thread found in database with ID {:?}", - thread_info.session_id - ); + }); } - } + panel + })?; Ok(panel) }) @@ -800,6 +880,7 @@ impl AgentPanel { previous_view: None, _active_view_observation: None, new_thread_menu_handle: PopoverMenuHandle::default(), + start_thread_in_menu_handle: PopoverMenuHandle::default(), agent_panel_menu_handle: PopoverMenuHandle::default(), agent_navigation_menu_handle: PopoverMenuHandle::default(), agent_navigation_menu: None, @@ -813,6 +894,10 @@ impl AgentPanel { text_thread_history, thread_store, selected_agent: AgentType::default(), + start_thread_in: StartThreadIn::default(), + worktree_creation_status: None, + _thread_view_subscription: None, + _worktree_creation_task: None, show_trust_workspace_message: false, last_configuration_error_telemetry: None, on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed()), @@ -1044,7 +1129,7 @@ impl AgentPanel { let server = ext_agent.server(fs, thread_store); this.update_in(cx, |agent_panel, window, cx| { - 
agent_panel._external_thread( + agent_panel.create_external_thread( server, resume_thread, initial_content, @@ -1618,15 +1703,28 @@ impl AgentPanel { self.active_view = new_view; } + // Subscribe to the active ThreadView's events (e.g. FirstSendRequested) + // so the panel can intercept the first send for worktree creation. + // Re-subscribe whenever the ConnectionView changes, since the inner + // ThreadView may have been replaced (e.g. navigating between threads). self._active_view_observation = match &self.active_view { ActiveView::AgentThread { server_view } => { - Some(cx.observe(server_view, |this, _, cx| { - cx.emit(AgentPanelEvent::ActiveViewChanged); - this.serialize(cx); - cx.notify(); - })) + self._thread_view_subscription = + Self::subscribe_to_active_thread_view(server_view, window, cx); + Some( + cx.observe_in(server_view, window, |this, server_view, window, cx| { + this._thread_view_subscription = + Self::subscribe_to_active_thread_view(&server_view, window, cx); + cx.emit(AgentPanelEvent::ActiveViewChanged); + this.serialize(cx); + cx.notify(); + }), + ) + } + _ => { + self._thread_view_subscription = None; + None } - _ => None, }; let is_in_agent_history = matches!( @@ -1740,6 +1838,56 @@ impl AgentPanel { self.selected_agent.clone() } + fn subscribe_to_active_thread_view( + server_view: &Entity, + window: &mut Window, + cx: &mut Context, + ) -> Option { + server_view.read(cx).active_thread().cloned().map(|tv| { + cx.subscribe_in( + &tv, + window, + |this, view, event: &AcpThreadViewEvent, window, cx| match event { + AcpThreadViewEvent::FirstSendRequested { content } => { + this.handle_first_send_requested(view.clone(), content.clone(), window, cx); + } + }, + ) + }) + } + + pub fn start_thread_in(&self) -> &StartThreadIn { + &self.start_thread_in + } + + fn set_start_thread_in(&mut self, action: &StartThreadIn, cx: &mut Context) { + if matches!(action, StartThreadIn::NewWorktree) + && !cx.has_flag::() + { + return; + } + + let new_target = match 
*action { + StartThreadIn::LocalProject => StartThreadIn::LocalProject, + StartThreadIn::NewWorktree => { + if !self.project_has_git_repository(cx) { + log::error!( + "set_start_thread_in: cannot use NewWorktree without a git repository" + ); + return; + } + if self.project.read(cx).is_via_collab() { + log::error!("set_start_thread_in: cannot use NewWorktree in a collab project"); + return; + } + StartThreadIn::NewWorktree + } + }; + self.start_thread_in = new_target; + self.serialize(cx); + cx.notify(); + } + fn selected_external_agent(&self) -> Option { match &self.selected_agent { AgentType::NativeAgent => Some(ExternalAgent::NativeAgent), @@ -1830,7 +1978,7 @@ impl AgentPanel { self.external_thread(Some(agent), Some(thread), None, window, cx); } - fn _external_thread( + pub(crate) fn create_external_thread( &mut self, server: Rc, resume_thread: Option, @@ -1869,140 +2017,616 @@ impl AgentPanel { self.set_active_view(ActiveView::AgentThread { server_view }, true, window, cx); } -} -impl Focusable for AgentPanel { - fn focus_handle(&self, cx: &App) -> FocusHandle { - match &self.active_view { - ActiveView::Uninitialized => self.focus_handle.clone(), - ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx), - ActiveView::History { kind } => match kind { - HistoryKind::AgentThreads => self.acp_history.focus_handle(cx), - HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx), - }, - ActiveView::TextThread { - text_thread_editor, .. 
- } => text_thread_editor.focus_handle(cx), - ActiveView::Configuration => { - if let Some(configuration) = self.configuration.as_ref() { - configuration.focus_handle(cx) - } else { - self.focus_handle.clone() - } - } + fn active_thread_has_messages(&self, cx: &App) -> bool { + self.active_agent_thread(cx) + .is_some_and(|thread| !thread.read(cx).entries().is_empty()) + } + + fn handle_first_send_requested( + &mut self, + thread_view: Entity, + content: Vec, + window: &mut Window, + cx: &mut Context, + ) { + if self.start_thread_in == StartThreadIn::NewWorktree { + self.handle_worktree_creation_requested(content, window, cx); + } else { + cx.defer_in(window, move |_this, window, cx| { + thread_view.update(cx, |thread_view, cx| { + let editor = thread_view.message_editor.clone(); + thread_view.send_impl(editor, window, cx); + }); + }); } } -} -fn agent_panel_dock_position(cx: &App) -> DockPosition { - AgentSettings::get_global(cx).dock.into() -} + fn generate_agent_branch_name() -> String { + let mut rng = rand::rng(); + let id: String = (0..8) + .map(|_| { + let idx: u8 = rng.random_range(0..36); + if idx < 10 { + (b'0' + idx) as char + } else { + (b'a' + idx - 10) as char + } + }) + .collect(); + format!("agent-{id}") + } -pub enum AgentPanelEvent { - ActiveViewChanged, -} + /// Partitions the project's visible worktrees into git-backed repositories + /// and plain (non-git) paths. Git repos will have worktrees created for + /// them; non-git paths are carried over to the new workspace as-is. + /// + /// When multiple worktrees map to the same repository, the most specific + /// match wins (deepest work directory path), with a deterministic + /// tie-break on entity id. Each repository appears at most once. 
+ fn classify_worktrees( + &self, + cx: &App, + ) -> (Vec>, Vec) { + let project = &self.project; + let repositories = project.read(cx).repositories(cx).clone(); + let mut git_repos: Vec> = Vec::new(); + let mut non_git_paths: Vec = Vec::new(); + let mut seen_repo_ids = std::collections::HashSet::new(); + + for worktree in project.read(cx).visible_worktrees(cx) { + let wt_path = worktree.read(cx).abs_path(); + + let matching_repo = repositories + .iter() + .filter_map(|(id, repo)| { + let work_dir = repo.read(cx).work_directory_abs_path.clone(); + if wt_path.starts_with(work_dir.as_ref()) + || work_dir.starts_with(wt_path.as_ref()) + { + Some((*id, repo.clone(), work_dir.as_ref().components().count())) + } else { + None + } + }) + .max_by( + |(left_id, _left_repo, left_depth), (right_id, _right_repo, right_depth)| { + left_depth + .cmp(right_depth) + .then_with(|| left_id.cmp(right_id)) + }, + ); -impl EventEmitter for AgentPanel {} -impl EventEmitter for AgentPanel {} + if let Some((id, repo, _)) = matching_repo { + if seen_repo_ids.insert(id) { + git_repos.push(repo); + } + } else { + non_git_paths.push(wt_path.to_path_buf()); + } + } -impl Panel for AgentPanel { - fn persistent_name() -> &'static str { - "AgentPanel" + (git_repos, non_git_paths) } - fn panel_key() -> &'static str { - AGENT_PANEL_KEY - } + /// Kicks off an async git-worktree creation for each repository. Returns: + /// + /// - `creation_infos`: a vec of `(repo, new_path, receiver)` tuples—the + /// receiver resolves once the git worktree command finishes. + /// - `path_remapping`: `(old_work_dir, new_worktree_path)` pairs used + /// later to remap open editor tabs into the new workspace. 
+ fn start_worktree_creations( + git_repos: &[Entity], + branch_name: &str, + worktree_directory_setting: &str, + cx: &mut Context, + ) -> Result<( + Vec<( + Entity, + PathBuf, + futures::channel::oneshot::Receiver>, + )>, + Vec<(PathBuf, PathBuf)>, + )> { + let mut creation_infos = Vec::new(); + let mut path_remapping = Vec::new(); + + for repo in git_repos { + let (work_dir, new_path, receiver) = repo.update(cx, |repo, _cx| { + let original_repo = repo.original_repo_abs_path.clone(); + let directory = + validate_worktree_directory(&original_repo, worktree_directory_setting)?; + let new_path = directory.join(branch_name); + let receiver = repo.create_worktree(branch_name.to_string(), directory, None); + let work_dir = repo.work_directory_abs_path.clone(); + anyhow::Ok((work_dir, new_path, receiver)) + })?; + path_remapping.push((work_dir.to_path_buf(), new_path.clone())); + creation_infos.push((repo.clone(), new_path, receiver)); + } - fn position(&self, _window: &Window, cx: &App) -> DockPosition { - agent_panel_dock_position(cx) + Ok((creation_infos, path_remapping)) } - fn position_is_valid(&self, position: DockPosition) -> bool { - position != DockPosition::Bottom - } + /// Waits for every in-flight worktree creation to complete. If any + /// creation fails, all successfully-created worktrees are rolled back + /// (removed) so the project isn't left in a half-migrated state. 
+ async fn await_and_rollback_on_failure( + creation_infos: Vec<( + Entity, + PathBuf, + futures::channel::oneshot::Receiver>, + )>, + cx: &mut AsyncWindowContext, + ) -> Result> { + let mut created_paths: Vec = Vec::new(); + let mut repos_and_paths: Vec<(Entity, PathBuf)> = + Vec::new(); + let mut first_error: Option = None; + + for (repo, new_path, receiver) in creation_infos { + match receiver.await { + Ok(Ok(())) => { + created_paths.push(new_path.clone()); + repos_and_paths.push((repo, new_path)); + } + Ok(Err(err)) => { + if first_error.is_none() { + first_error = Some(err); + } + } + Err(_canceled) => { + if first_error.is_none() { + first_error = Some(anyhow!("Worktree creation was canceled")); + } + } + } + } - fn set_position(&mut self, position: DockPosition, _: &mut Window, cx: &mut Context) { - settings::update_settings_file(self.fs.clone(), cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_dock(position.into()); - }); - } + let Some(err) = first_error else { + return Ok(created_paths); + }; - fn size(&self, window: &Window, cx: &App) -> Pixels { - let settings = AgentSettings::get_global(cx); - match self.position(window, cx) { - DockPosition::Left | DockPosition::Right => { - self.width.unwrap_or(settings.default_width) + // Rollback all successfully created worktrees + let mut rollback_receivers = Vec::new(); + for (rollback_repo, rollback_path) in &repos_and_paths { + if let Ok(receiver) = cx.update(|_, cx| { + rollback_repo.update(cx, |repo, _cx| { + repo.remove_worktree(rollback_path.clone(), true) + }) + }) { + rollback_receivers.push((rollback_path.clone(), receiver)); } - DockPosition::Bottom => self.height.unwrap_or(settings.default_height), } - } - - fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) { - match self.position(window, cx) { - DockPosition::Left | DockPosition::Right => self.width = size, - DockPosition::Bottom => self.height = size, + let mut rollback_failures: Vec = 
Vec::new(); + for (path, receiver) in rollback_receivers { + match receiver.await { + Ok(Ok(())) => {} + Ok(Err(rollback_err)) => { + log::error!( + "failed to rollback worktree at {}: {rollback_err}", + path.display() + ); + rollback_failures.push(format!("{}: {rollback_err}", path.display())); + } + Err(rollback_err) => { + log::error!( + "failed to rollback worktree at {}: {rollback_err}", + path.display() + ); + rollback_failures.push(format!("{}: {rollback_err}", path.display())); + } + } } - self.serialize(cx); - cx.notify(); + let mut error_message = format!("Failed to create worktree: {err}"); + if !rollback_failures.is_empty() { + error_message.push_str("\n\nFailed to clean up: "); + error_message.push_str(&rollback_failures.join(", ")); + } + Err(anyhow!(error_message)) } - fn set_active(&mut self, active: bool, window: &mut Window, cx: &mut Context) { - if active && matches!(self.active_view, ActiveView::Uninitialized) { + fn set_worktree_creation_error( + &mut self, + message: SharedString, + window: &mut Window, + cx: &mut Context, + ) { + self.worktree_creation_status = Some(WorktreeCreationStatus::Error(message)); + if matches!(self.active_view, ActiveView::Uninitialized) { let selected_agent = self.selected_agent.clone(); self.new_agent_thread(selected_agent, window, cx); } + cx.notify(); } - fn remote_id() -> Option { - Some(proto::PanelId::AssistantPanel) - } + fn handle_worktree_creation_requested( + &mut self, + content: Vec, + window: &mut Window, + cx: &mut Context, + ) { + if matches!( + self.worktree_creation_status, + Some(WorktreeCreationStatus::Creating) + ) { + return; + } - fn icon(&self, _window: &Window, cx: &App) -> Option { - (self.enabled(cx) && AgentSettings::get_global(cx).button).then_some(IconName::ZedAssistant) - } + self.worktree_creation_status = Some(WorktreeCreationStatus::Creating); + cx.notify(); - fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> { - Some("Agent Panel") - } + let branch_name = 
Self::generate_agent_branch_name(); - fn toggle_action(&self) -> Box { - Box::new(ToggleFocus) - } + let (git_repos, non_git_paths) = self.classify_worktrees(cx); - fn activation_priority(&self) -> u32 { - 3 - } + if git_repos.is_empty() { + self.set_worktree_creation_error( + "No git repositories found in the project".into(), + window, + cx, + ); + return; + } - fn enabled(&self, cx: &App) -> bool { - AgentSettings::get_global(cx).enabled(cx) - } + let worktree_directory_setting = ProjectSettings::get_global(cx) + .git + .worktree_directory + .clone(); - fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool { - self.zoomed - } + let (creation_infos, path_remapping) = match Self::start_worktree_creations( + &git_repos, + &branch_name, + &worktree_directory_setting, + cx, + ) { + Ok(result) => result, + Err(err) => { + self.set_worktree_creation_error( + format!("Failed to validate worktree directory: {err}").into(), + window, + cx, + ); + return; + } + }; - fn set_zoomed(&mut self, zoomed: bool, _window: &mut Window, cx: &mut Context) { - self.zoomed = zoomed; - cx.notify(); - } -} + let (dock_structure, open_file_paths) = self + .workspace + .upgrade() + .map(|workspace| { + let dock_structure = workspace.read(cx).capture_dock_state(window, cx); + let open_file_paths = workspace.read(cx).open_item_abs_paths(cx); + (dock_structure, open_file_paths) + }) + .unwrap_or_default(); -impl AgentPanel { - fn render_title_view(&self, _window: &mut Window, cx: &Context) -> AnyElement { - const LOADING_SUMMARY_PLACEHOLDER: &str = "Loading Summary…"; + let workspace = self.workspace.clone(); + let window_handle = window + .window_handle() + .downcast::(); - let content = match &self.active_view { - ActiveView::AgentThread { server_view } => { - let is_generating_title = server_view - .read(cx) - .as_native_thread(cx) - .map_or(false, |t| t.read(cx).is_generating_title()); + let task = cx.spawn_in(window, async move |this, cx| { + let created_paths = match 
Self::await_and_rollback_on_failure(creation_infos, cx).await + { + Ok(paths) => paths, + Err(err) => { + this.update_in(cx, |this, window, cx| { + this.set_worktree_creation_error(format!("{err}").into(), window, cx); + })?; + return anyhow::Ok(()); + } + }; - if let Some(title_editor) = server_view + let mut all_paths = created_paths; + let has_non_git = !non_git_paths.is_empty(); + all_paths.extend(non_git_paths.iter().cloned()); + + let app_state = match workspace.upgrade() { + Some(workspace) => cx.update(|_, cx| workspace.read(cx).app_state().clone())?, + None => { + this.update_in(cx, |this, window, cx| { + this.set_worktree_creation_error( + "Workspace no longer available".into(), + window, + cx, + ); + })?; + return anyhow::Ok(()); + } + }; + + let this_for_error = this.clone(); + if let Err(err) = Self::setup_new_workspace( + this, + all_paths, + app_state, + window_handle, + dock_structure, + open_file_paths, + path_remapping, + non_git_paths, + has_non_git, + content, + cx, + ) + .await + { + this_for_error + .update_in(cx, |this, window, cx| { + this.set_worktree_creation_error( + format!("Failed to set up workspace: {err}").into(), + window, + cx, + ); + }) + .log_err(); + } + anyhow::Ok(()) + }); + + self._worktree_creation_task = Some(cx.foreground_executor().spawn(async move { + task.await.log_err(); + })); + } + + async fn setup_new_workspace( + this: WeakEntity, + all_paths: Vec, + app_state: Arc, + window_handle: Option>, + dock_structure: workspace::DockStructure, + open_file_paths: Vec, + path_remapping: Vec<(PathBuf, PathBuf)>, + non_git_paths: Vec, + has_non_git: bool, + content: Vec, + cx: &mut AsyncWindowContext, + ) -> Result<()> { + let init: Option< + Box) + Send>, + > = Some(Box::new(move |workspace, window, cx| { + workspace.set_dock_structure(dock_structure, window, cx); + })); + + let (new_window_handle, _) = cx + .update(|_window, cx| { + Workspace::new_local(all_paths, app_state, window_handle, None, init, false, cx) + })? 
+ .await?; + + let new_workspace = new_window_handle.update(cx, |multi_workspace, _window, _cx| { + let workspaces = multi_workspace.workspaces(); + workspaces.last().cloned() + })?; + + let Some(new_workspace) = new_workspace else { + anyhow::bail!("New workspace was not added to MultiWorkspace"); + }; + + let panels_task = new_window_handle.update(cx, |_, _, cx| { + new_workspace.update(cx, |workspace, _cx| workspace.take_panels_task()) + })?; + if let Some(task) = panels_task { + task.await.log_err(); + } + + let initial_content = AgentInitialContent::ContentBlock { + blocks: content, + auto_submit: true, + }; + + new_window_handle.update(cx, |_multi_workspace, window, cx| { + new_workspace.update(cx, |workspace, cx| { + if has_non_git { + let toast_id = workspace::notifications::NotificationId::unique::(); + workspace.show_toast( + workspace::Toast::new( + toast_id, + "Some project folders are not git repositories. \ + They were included as-is without creating a worktree.", + ), + cx, + ); + } + + let remapped_paths: Vec = open_file_paths + .iter() + .filter_map(|original_path| { + let best_match = path_remapping + .iter() + .filter_map(|(old_root, new_root)| { + original_path.strip_prefix(old_root).ok().map(|relative| { + (old_root.components().count(), new_root.join(relative)) + }) + }) + .max_by_key(|(depth, _)| *depth); + + if let Some((_, remapped_path)) = best_match { + return Some(remapped_path); + } + + for non_git in &non_git_paths { + if original_path.starts_with(non_git) { + return Some(original_path.clone()); + } + } + None + }) + .collect(); + + if !remapped_paths.is_empty() { + workspace + .open_paths( + remapped_paths, + workspace::OpenOptions::default(), + None, + window, + cx, + ) + .detach(); + } + + workspace.focus_panel::(window, cx); + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.external_thread(None, None, Some(initial_content), window, cx); + }); + } + }); + })?; + + new_window_handle.update(cx, 
|multi_workspace, _window, cx| { + multi_workspace.activate(new_workspace.clone(), cx); + })?; + + this.update_in(cx, |this, _window, cx| { + this.worktree_creation_status = None; + cx.notify(); + })?; + + anyhow::Ok(()) + } +} + +impl Focusable for AgentPanel { + fn focus_handle(&self, cx: &App) -> FocusHandle { + match &self.active_view { + ActiveView::Uninitialized => self.focus_handle.clone(), + ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx), + ActiveView::History { kind } => match kind { + HistoryKind::AgentThreads => self.acp_history.focus_handle(cx), + HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx), + }, + ActiveView::TextThread { + text_thread_editor, .. + } => text_thread_editor.focus_handle(cx), + ActiveView::Configuration => { + if let Some(configuration) = self.configuration.as_ref() { + configuration.focus_handle(cx) + } else { + self.focus_handle.clone() + } + } + } + } +} + +fn agent_panel_dock_position(cx: &App) -> DockPosition { + AgentSettings::get_global(cx).dock.into() +} + +pub enum AgentPanelEvent { + ActiveViewChanged, +} + +impl EventEmitter for AgentPanel {} +impl EventEmitter for AgentPanel {} + +impl Panel for AgentPanel { + fn persistent_name() -> &'static str { + "AgentPanel" + } + + fn panel_key() -> &'static str { + AGENT_PANEL_KEY + } + + fn position(&self, _window: &Window, cx: &App) -> DockPosition { + agent_panel_dock_position(cx) + } + + fn position_is_valid(&self, position: DockPosition) -> bool { + position != DockPosition::Bottom + } + + fn set_position(&mut self, position: DockPosition, _: &mut Window, cx: &mut Context) { + settings::update_settings_file(self.fs.clone(), cx, move |settings, _| { + settings + .agent + .get_or_insert_default() + .set_dock(position.into()); + }); + } + + fn size(&self, window: &Window, cx: &App) -> Pixels { + let settings = AgentSettings::get_global(cx); + match self.position(window, cx) { + DockPosition::Left | DockPosition::Right => { + 
self.width.unwrap_or(settings.default_width) + } + DockPosition::Bottom => self.height.unwrap_or(settings.default_height), + } + } + + fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) { + match self.position(window, cx) { + DockPosition::Left | DockPosition::Right => self.width = size, + DockPosition::Bottom => self.height = size, + } + self.serialize(cx); + cx.notify(); + } + + fn set_active(&mut self, active: bool, window: &mut Window, cx: &mut Context) { + if active + && matches!(self.active_view, ActiveView::Uninitialized) + && !matches!( + self.worktree_creation_status, + Some(WorktreeCreationStatus::Creating) + ) + { + let selected_agent = self.selected_agent.clone(); + self.new_agent_thread(selected_agent, window, cx); + } + } + + fn remote_id() -> Option { + Some(proto::PanelId::AssistantPanel) + } + + fn icon(&self, _window: &Window, cx: &App) -> Option { + (self.enabled(cx) && AgentSettings::get_global(cx).button).then_some(IconName::ZedAssistant) + } + + fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> { + Some("Agent Panel") + } + + fn toggle_action(&self) -> Box { + Box::new(ToggleFocus) + } + + fn activation_priority(&self) -> u32 { + 3 + } + + fn enabled(&self, cx: &App) -> bool { + AgentSettings::get_global(cx).enabled(cx) + } + + fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool { + self.zoomed + } + + fn set_zoomed(&mut self, zoomed: bool, _window: &mut Window, cx: &mut Context) { + self.zoomed = zoomed; + cx.notify(); + } +} + +impl AgentPanel { + fn render_title_view(&self, _window: &mut Window, cx: &Context) -> AnyElement { + const LOADING_SUMMARY_PLACEHOLDER: &str = "Loading Summary…"; + + let content = match &self.active_view { + ActiveView::AgentThread { server_view } => { + let is_generating_title = server_view + .read(cx) + .as_native_thread(cx) + .map_or(false, |t| t.read(cx).is_generating_title()); + + if let Some(title_editor) = server_view .read(cx) .parent_thread(cx) 
.map(|r| r.read(cx).title_editor.clone()) @@ -2331,6 +2955,99 @@ impl AgentPanel { }) } + fn project_has_git_repository(&self, cx: &App) -> bool { + !self.project.read(cx).repositories(cx).is_empty() + } + + fn render_start_thread_in_selector(&self, cx: &mut Context) -> impl IntoElement { + let has_git_repo = self.project_has_git_repository(cx); + let is_via_collab = self.project.read(cx).is_via_collab(); + + let is_creating = matches!( + self.worktree_creation_status, + Some(WorktreeCreationStatus::Creating) + ); + + let current_target = self.start_thread_in; + let trigger_label = self.start_thread_in.label(); + + let icon = if self.start_thread_in_menu_handle.is_deployed() { + IconName::ChevronUp + } else { + IconName::ChevronDown + }; + + let trigger_button = Button::new("thread-target-trigger", trigger_label) + .label_size(LabelSize::Small) + .color(Color::Muted) + .icon(icon) + .icon_size(IconSize::XSmall) + .icon_position(IconPosition::End) + .icon_color(Color::Muted) + .disabled(is_creating); + + let dock_position = AgentSettings::get_global(cx).dock; + let documentation_side = match dock_position { + settings::DockPosition::Left => DocumentationSide::Right, + settings::DockPosition::Bottom | settings::DockPosition::Right => { + DocumentationSide::Left + } + }; + + PopoverMenu::new("thread-target-selector") + .trigger(trigger_button) + .anchor(gpui::Corner::BottomRight) + .with_handle(self.start_thread_in_menu_handle.clone()) + .menu(move |window, cx| { + let current_target = current_target; + Some(ContextMenu::build(window, cx, move |menu, _window, _cx| { + let is_local_selected = current_target == StartThreadIn::LocalProject; + let is_new_worktree_selected = current_target == StartThreadIn::NewWorktree; + + let new_worktree_disabled = !has_git_repo || is_via_collab; + + menu.header("Start Thread In…") + .item( + ContextMenuEntry::new("Local Project") + .icon(StartThreadIn::LocalProject.icon()) + .icon_color(Color::Muted) + .toggleable(IconPosition::End, 
is_local_selected) + .handler(|window, cx| { + window + .dispatch_action(Box::new(StartThreadIn::LocalProject), cx); + }), + ) + .item({ + let entry = ContextMenuEntry::new("New Worktree") + .icon(StartThreadIn::NewWorktree.icon()) + .icon_color(Color::Muted) + .toggleable(IconPosition::End, is_new_worktree_selected) + .disabled(new_worktree_disabled) + .handler(|window, cx| { + window + .dispatch_action(Box::new(StartThreadIn::NewWorktree), cx); + }); + + if new_worktree_disabled { + entry.documentation_aside(documentation_side, move |_| { + let reason = if !has_git_repo { + "No git repository found in this project." + } else { + "Not available for remote/collab projects yet." + }; + Label::new(reason) + .color(Color::Muted) + .size(LabelSize::Small) + .into_any_element() + }) + } else { + entry + } + }) + })) + }) + } + fn render_toolbar(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let agent_server_store = self.project.read(cx).agent_server_store().clone(); let focus_handle = self.focus_handle(cx); @@ -2718,6 +3435,7 @@ impl AgentPanel { }; let show_history_menu = self.history_kind_for_selected_agent(cx).is_some(); + let has_v2_flag = cx.has_flag::(); h_flex() .id("agent-panel-toolbar") @@ -2748,6 +3466,12 @@ impl AgentPanel { .gap(DynamicSpacing::Base02.rems(cx)) .pl(DynamicSpacing::Base04.rems(cx)) .pr(DynamicSpacing::Base06.rems(cx)) + .when( + has_v2_flag + && cx.has_flag::() + && !self.active_thread_has_messages(cx), + |this| this.child(self.render_start_thread_in_selector(cx)), + ) .child(new_thread_menu) .when(show_history_menu, |this| { this.child(self.render_recent_entries_menu( @@ -2760,6 +3484,51 @@ impl AgentPanel { ) } + fn render_worktree_creation_status(&self, cx: &mut Context) -> Option { + let status = self.worktree_creation_status.as_ref()?; + match status { + WorktreeCreationStatus::Creating => Some( + h_flex() + .w_full() + .px(DynamicSpacing::Base06.rems(cx)) + .py(DynamicSpacing::Base02.rems(cx)) + .gap_2() + 
.bg(cx.theme().colors().surface_background) + .border_b_1() + .border_color(cx.theme().colors().border) + .child(SpinnerLabel::new().size(LabelSize::Small)) + .child( + Label::new("Creating worktree…") + .color(Color::Muted) + .size(LabelSize::Small), + ) + .into_any_element(), + ), + WorktreeCreationStatus::Error(message) => Some( + h_flex() + .w_full() + .px(DynamicSpacing::Base06.rems(cx)) + .py(DynamicSpacing::Base02.rems(cx)) + .gap_2() + .bg(cx.theme().colors().surface_background) + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + Icon::new(IconName::Warning) + .size(IconSize::Small) + .color(Color::Warning), + ) + .child( + Label::new(message.clone()) + .color(Color::Warning) + .size(LabelSize::Small) + .truncate(), + ) + .into_any_element(), + ), + } + } + fn should_render_trial_end_upsell(&self, cx: &mut Context) -> bool { if TrialEndUpsell::dismissed() { return false; @@ -3191,6 +3960,7 @@ impl Render for AgentPanel { } })) .child(self.render_toolbar(window, cx)) + .children(self.render_worktree_creation_status(cx)) .children(self.render_workspace_trust_message(cx)) .children(self.render_onboarding(window, cx)) .map(|parent| { @@ -3456,7 +4226,7 @@ impl AgentPanel { name: server.name(), }; - self._external_thread( + self.create_external_thread( server, None, None, workspace, project, ext_agent, window, cx, ); } @@ -3468,6 +4238,61 @@ impl AgentPanel { pub fn active_thread_view_for_tests(&self) -> Option<&Entity> { self.active_thread_view() } + + /// Sets the start_thread_in value directly, bypassing validation. + /// + /// This is a test-only helper for visual tests that need to show specific + /// start_thread_in states without requiring a real git repository. + pub fn set_start_thread_in_for_tests(&mut self, target: StartThreadIn, cx: &mut Context) { + self.start_thread_in = target; + cx.notify(); + } + + /// Returns the current worktree creation status. + /// + /// This is a test-only helper for visual tests. 
+ pub fn worktree_creation_status_for_tests(&self) -> Option<&WorktreeCreationStatus> { + self.worktree_creation_status.as_ref() + } + + /// Sets the worktree creation status directly. + /// + /// This is a test-only helper for visual tests that need to show the + /// "Creating worktree…" spinner or error banners. + pub fn set_worktree_creation_status_for_tests( + &mut self, + status: Option, + cx: &mut Context, + ) { + self.worktree_creation_status = status; + cx.notify(); + } + + /// Opens the history view. + /// + /// This is a test-only helper that exposes the private `open_history()` + /// method for visual tests. + pub fn open_history_for_tests(&mut self, window: &mut Window, cx: &mut Context) { + self.open_history(window, cx); + } + + /// Opens the start_thread_in selector popover menu. + /// + /// This is a test-only helper for visual tests. + pub fn open_start_thread_in_menu_for_tests( + &mut self, + window: &mut Window, + cx: &mut Context, + ) { + self.start_thread_in_menu_handle.show(window, cx); + } + + /// Dismisses the start_thread_in dropdown menu. + /// + /// This is a test-only helper for visual tests. + pub fn close_start_thread_in_menu_for_tests(&mut self, cx: &mut Context) { + self.start_thread_in_menu_handle.hide(cx); + } } #[cfg(test)] @@ -3479,6 +4304,7 @@ mod tests { use fs::FakeFs; use gpui::{TestAppContext, VisualTestContext}; use project::Project; + use serde_json::json; use workspace::MultiWorkspace; #[gpui::test] @@ -3581,9 +4407,7 @@ mod tests { .expect("panel B load should succeed"); cx.run_until_parked(); - // Workspace A should restore width and agent type, but the thread - // should NOT be restored because the stub agent never persisted it - // to the database (the load-side validation skips missing threads). 
+ // Workspace A should restore its thread, width, and agent type loaded_a.read_with(cx, |panel, _cx| { assert_eq!( panel.width, @@ -3594,6 +4418,10 @@ mod tests { panel.selected_agent, agent_type_a, "workspace A agent type should be restored" ); + assert!( + panel.active_thread_view().is_some(), + "workspace A should have its active thread restored" + ); }); // Workspace B should restore its own width and agent type, with no thread @@ -3663,4 +4491,383 @@ mod tests { cx.run_until_parked(); } + + #[gpui::test] + async fn test_thread_target_local_project(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + fs.set_branch_name(Path::new("/project/.git"), Some("main")); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + // Wait for the project to discover the git repository. + cx.run_until_parked(); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let panel = + cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }); + + cx.run_until_parked(); + + // Default thread target should be LocalProject. 
+ panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::LocalProject, + "default thread target should be LocalProject" + ); + }); + + // Start a new thread with the default LocalProject target. + // Use StubAgentServer so the thread connects immediately in tests. + panel.update_in(cx, |panel, window, cx| { + panel.open_external_thread_with_server( + Rc::new(StubAgentServer::default_response()), + window, + cx, + ); + }); + + cx.run_until_parked(); + + // MultiWorkspace should still have exactly one workspace (no worktree created). + multi_workspace + .read_with(cx, |multi_workspace, _cx| { + assert_eq!( + multi_workspace.workspaces().len(), + 1, + "LocalProject should not create a new workspace" + ); + }) + .unwrap(); + + // The thread should be active in the panel. + panel.read_with(cx, |panel, cx| { + assert!( + panel.active_agent_thread(cx).is_some(), + "a thread should be running in the current workspace" + ); + }); + + // The thread target should still be LocalProject (unchanged). + panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::LocalProject, + "thread target should remain LocalProject" + ); + }); + + // No worktree creation status should be set. 
+ panel.read_with(cx, |panel, _cx| { + assert!( + panel.worktree_creation_status.is_none(), + "no worktree creation should have occurred" + ); + }); + } + + #[gpui::test] + async fn test_thread_target_serialization_round_trip(cx: &mut TestAppContext) { + init_test(cx); + cx.update(|cx| { + cx.update_flags( + true, + vec!["agent-v2".to_string(), "agent-git-worktrees".to_string()], + ); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + fs.set_branch_name(Path::new("/project/.git"), Some("main")); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + // Wait for the project to discover the git repository. + cx.run_until_parked(); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let panel = + cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }); + + cx.run_until_parked(); + + // Default should be LocalProject. + panel.read_with(cx, |panel, _cx| { + assert_eq!(*panel.start_thread_in(), StartThreadIn::LocalProject); + }); + + // Change thread target to NewWorktree. 
+ panel.update(cx, |panel, cx| { + panel.set_start_thread_in(&StartThreadIn::NewWorktree, cx); + }); + + panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::NewWorktree, + "thread target should be NewWorktree after set_thread_target" + ); + }); + + // Let serialization complete. + cx.run_until_parked(); + + // Load a fresh panel from the serialized data. + let prompt_builder = Arc::new(prompt_store::PromptBuilder::new(None).unwrap()); + let async_cx = cx.update(|window, cx| window.to_async(cx)); + let loaded_panel = + AgentPanel::load(workspace.downgrade(), prompt_builder.clone(), async_cx) + .await + .expect("panel load should succeed"); + cx.run_until_parked(); + + loaded_panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::NewWorktree, + "thread target should survive serialization round-trip" + ); + }); + } + + #[gpui::test] + async fn test_thread_target_deserialization_falls_back_when_worktree_flag_disabled( + cx: &mut TestAppContext, + ) { + init_test(cx); + cx.update(|cx| { + cx.update_flags( + true, + vec!["agent-v2".to_string(), "agent-git-worktrees".to_string()], + ); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + fs.set_branch_name(Path::new("/project/.git"), Some("main")); + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + + let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + workspace.update(cx, |workspace, _cx| { + workspace.set_random_database_id(); + }); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + // Wait 
for the project to discover the git repository. + cx.run_until_parked(); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let panel = + cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }); + + cx.run_until_parked(); + + panel.update(cx, |panel, cx| { + panel.set_start_thread_in(&StartThreadIn::NewWorktree, cx); + }); + + panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::NewWorktree, + "thread target should be NewWorktree before reload" + ); + }); + + // Let serialization complete. + cx.run_until_parked(); + + // Disable worktree flag and reload panel from serialized data. + cx.update(|_, cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + }); + + let prompt_builder = Arc::new(prompt_store::PromptBuilder::new(None).unwrap()); + let async_cx = cx.update(|window, cx| window.to_async(cx)); + let loaded_panel = + AgentPanel::load(workspace.downgrade(), prompt_builder.clone(), async_cx) + .await + .expect("panel load should succeed"); + cx.run_until_parked(); + + loaded_panel.read_with(cx, |panel, _cx| { + assert_eq!( + *panel.start_thread_in(), + StartThreadIn::LocalProject, + "thread target should fall back to LocalProject when worktree flag is disabled" + ); + }); + } + + #[gpui::test] + async fn test_set_active_blocked_during_worktree_creation(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + cx.update(|cx| { + cx.update_flags(true, vec!["agent-v2".to_string()]); + agent::ThreadStore::init_global(cx); + language_model::LanguageModelRegistry::test(cx); + ::set_global(fs.clone(), cx); + }); + + fs.insert_tree( + "/project", + json!({ + ".git": {}, + "src": { + "main.rs": "fn main() {}" + } + }), + ) + .await; + + let project = Project::test(fs.clone(), [Path::new("/project")], cx).await; + + 
let multi_workspace = + cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + + let workspace = multi_workspace + .read_with(cx, |multi_workspace, _cx| { + multi_workspace.workspace().clone() + }) + .unwrap(); + + let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx); + + let panel = workspace.update_in(cx, |workspace, window, cx| { + let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx)); + let panel = + cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx)); + workspace.add_panel(panel.clone(), window, cx); + panel + }); + + cx.run_until_parked(); + + // Simulate worktree creation in progress and reset to Uninitialized + panel.update_in(cx, |panel, window, cx| { + panel.worktree_creation_status = Some(WorktreeCreationStatus::Creating); + panel.active_view = ActiveView::Uninitialized; + Panel::set_active(panel, true, window, cx); + assert!( + matches!(panel.active_view, ActiveView::Uninitialized), + "set_active should not create a thread while worktree is being created" + ); + }); + + // Clear the creation status and use open_external_thread_with_server + // (which bypasses new_agent_thread) to verify the panel can transition + // out of Uninitialized. We can't call set_active directly because + // new_agent_thread requires full agent server infrastructure. 
+ panel.update_in(cx, |panel, window, cx| { + panel.worktree_creation_status = None; + panel.active_view = ActiveView::Uninitialized; + panel.open_external_thread_with_server( + Rc::new(StubAgentServer::default_response()), + window, + cx, + ); + }); + + cx.run_until_parked(); + + panel.read_with(cx, |panel, _cx| { + assert!( + !matches!(panel.active_view, ActiveView::Uninitialized), + "panel should transition out of Uninitialized once worktree creation is cleared" + ); + }); + } } diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index ad778ca496f7815d0155f98187c8fad3e81365eb..58a8edca779daa50862549058a0068e2ddb7c5bf 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -55,7 +55,9 @@ use std::any::TypeId; use workspace::Workspace; use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal}; -pub use crate::agent_panel::{AgentPanel, AgentPanelEvent, ConcreteAssistantPanelDelegate}; +pub use crate::agent_panel::{ + AgentPanel, AgentPanelEvent, ConcreteAssistantPanelDelegate, WorktreeCreationStatus, +}; use crate::agent_registry_ui::AgentRegistryPage; pub use crate::inline_assistant::InlineAssistant; pub use agent_diff::{AgentDiffPane, AgentDiffToolbar}; @@ -222,6 +224,18 @@ impl ExternalAgent { } } +/// Sets where new threads will run. +#[derive( + Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Action, +)] +#[action(namespace = agent)] +#[serde(rename_all = "snake_case", tag = "kind")] +pub enum StartThreadIn { + #[default] + LocalProject, + NewWorktree, +} + /// Content to initialize new external agent with. 
pub enum AgentInitialContent { ThreadSummary(acp_thread::AgentSessionInfo), diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index bc58120a964b7cb10eb4c779eb24fa8507030bc6..835ff611288c2bf6867a885ed2be8c6a66679cdb 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -26,10 +26,10 @@ use fs::Fs; use futures::FutureExt as _; use gpui::{ Action, Animation, AnimationExt, AnyView, App, ClickEvent, ClipboardItem, CursorStyle, - ElementId, Empty, Entity, FocusHandle, Focusable, Hsla, ListOffset, ListState, ObjectFit, - PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TextStyle, WeakEntity, Window, - WindowHandle, div, ease_in_out, img, linear_color_stop, linear_gradient, list, point, - pulsating_between, + ElementId, Empty, Entity, EventEmitter, FocusHandle, Focusable, Hsla, ListOffset, ListState, + ObjectFit, PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TextStyle, + WeakEntity, Window, WindowHandle, div, ease_in_out, img, linear_color_stop, linear_gradient, + list, point, pulsating_between, }; use language::Buffer; use language_model::LanguageModelRegistry; @@ -295,6 +295,12 @@ impl Conversation { } } +pub enum AcpServerViewEvent { + ActiveThreadChanged, +} + +impl EventEmitter for ConnectionView {} + pub struct ConnectionView { agent: Rc, agent_server_store: Entity, @@ -386,6 +392,7 @@ impl ConnectionView { if let Some(view) = self.active_thread() { view.focus_handle(cx).focus(window, cx); } + cx.emit(AcpServerViewEvent::ActiveThreadChanged); cx.notify(); } } @@ -524,6 +531,7 @@ impl ConnectionView { } self.server_state = state; + cx.emit(AcpServerViewEvent::ActiveThreadChanged); cx.notify(); } diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 2544305bc8f8666b897d11285ffa7711f3af8794..8ce4da360664774342c4167f7c8dfbce914b647e 100644 --- 
a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -1,6 +1,8 @@ use acp_thread::ContentBlock; use cloud_api_types::{SubmitAgentThreadFeedbackBody, SubmitAgentThreadFeedbackCommentsBody}; use editor::actions::OpenExcerpts; + +use crate::StartThreadIn; use gpui::{Corner, List}; use language_model::{LanguageModelEffortLevel, Speed}; use settings::update_settings_file; @@ -191,6 +193,12 @@ impl DiffStats { } } +pub enum AcpThreadViewEvent { + FirstSendRequested { content: Vec }, +} + +impl EventEmitter for ThreadView {} + pub struct ThreadView { pub id: acp::SessionId, pub parent_id: Option, @@ -518,6 +526,24 @@ impl ThreadView { .thread(acp_thread.session_id(), cx) } + /// Resolves the message editor's contents into content blocks. For profiles + /// that do not enable any tools, directory mentions are expanded to inline + /// file contents since the agent can't read files on its own. + fn resolve_message_contents( + &self, + message_editor: &Entity, + cx: &mut App, + ) -> Task, Vec>)>> { + let expand = self.as_native_thread(cx).is_some_and(|thread| { + let thread = thread.read(cx); + AgentSettings::get_global(cx) + .profiles + .get(thread.profile()) + .is_some_and(|profile| profile.tools.is_empty()) + }); + message_editor.update(cx, |message_editor, cx| message_editor.contents(expand, cx)) + } + pub fn current_model_id(&self, cx: &App) -> Option { let selector = self.model_selector.as_ref()?; let model = selector.read(cx).active_model(cx)?; @@ -731,6 +757,46 @@ impl ThreadView { } let message_editor = self.message_editor.clone(); + + // Intercept the first send so the agent panel can capture the full + // content blocks — needed for "Start thread in New Worktree", + // which must create a workspace before sending the message there. 
+ let intercept_first_send = self.thread.read(cx).entries().is_empty() + && !message_editor.read(cx).is_empty(cx) + && self + .workspace + .upgrade() + .and_then(|workspace| workspace.read(cx).panel::(cx)) + .is_some_and(|panel| { + panel.read(cx).start_thread_in() == &StartThreadIn::NewWorktree + }); + + if intercept_first_send { + let content_task = self.resolve_message_contents(&message_editor, cx); + + cx.spawn(async move |this, cx| match content_task.await { + Ok((content, _tracked_buffers)) => { + if content.is_empty() { + return; + } + + this.update(cx, |_, cx| { + cx.emit(AcpThreadViewEvent::FirstSendRequested { content }); + }) + .ok(); + } + Err(error) => { + this.update(cx, |this, cx| { + this.handle_thread_error(error, cx); + }) + .ok(); + } + }) + .detach(); + + return; + } + let is_editor_empty = message_editor.read(cx).is_empty(cx); let is_generating = thread.read(cx).status() != ThreadStatus::Idle; @@ -794,18 +860,7 @@ impl ThreadView { window: &mut Window, cx: &mut Context, ) { - let full_mention_content = self.as_native_thread(cx).is_some_and(|thread| { - // Include full contents when using minimal profile - let thread = thread.read(cx); - AgentSettings::get_global(cx) - .profiles - .get(thread.profile()) - .is_some_and(|profile| profile.tools.is_empty()) - }); - - let contents = message_editor.update(cx, |message_editor, cx| { - message_editor.contents(full_mention_content, cx) - }); + let contents = self.resolve_message_contents(&message_editor, cx); self.thread_error.take(); self.thread_feedback.clear(); @@ -1140,21 +1195,11 @@ impl ThreadView { let is_idle = self.thread.read(cx).status() == acp_thread::ThreadStatus::Idle; if is_idle { - self.send_impl(message_editor.clone(), window, cx); + self.send_impl(message_editor, window, cx); return; } - let full_mention_content = self.as_native_thread(cx).is_some_and(|thread| { - let thread = thread.read(cx); - AgentSettings::get_global(cx) - .profiles - .get(thread.profile()) - .is_some_and(|profile| 
profile.tools.is_empty()) - }); - - let contents = message_editor.update(cx, |message_editor, cx| { - message_editor.contents(full_mention_content, cx) - }); + let contents = self.resolve_message_contents(&message_editor, cx); cx.spawn_in(window, async move |this, cx| { let (content, tracked_buffers) = contents.await?; diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index ed6325c62173358c8deac2dcd6289ce0b8ae5e71..fa3f99e1483e8a5d8410378493556b189eff78f1 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -1002,7 +1002,7 @@ impl Database { repositories.push(proto::UpdateRepository { project_id: db_repository_entry.project_id.0 as u64, id: db_repository_entry.id as u64, - abs_path: db_repository_entry.abs_path, + abs_path: db_repository_entry.abs_path.clone(), entry_ids, updated_statuses, removed_statuses: Vec::new(), @@ -1015,6 +1015,7 @@ impl Database { stash_entries: Vec::new(), remote_upstream_url: db_repository_entry.remote_upstream_url.clone(), remote_origin_url: db_repository_entry.remote_origin_url.clone(), + original_repo_abs_path: Some(db_repository_entry.abs_path), }); } } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index d8fca0306f5b2ae5668a735db578061275192b58..7c007a570a0cb25c5302495d7342882eec0e1942 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -791,13 +791,14 @@ impl Database { head_commit_details, project_id: project_id.to_proto(), id: db_repository.id as u64, - abs_path: db_repository.abs_path, + abs_path: db_repository.abs_path.clone(), scan_id: db_repository.scan_id as u64, is_last_update: true, merge_message: db_repository.merge_message, stash_entries: Vec::new(), remote_upstream_url: db_repository.remote_upstream_url.clone(), remote_origin_url: db_repository.remote_origin_url.clone(), + original_repo_abs_path: Some(db_repository.abs_path), }); } } 
diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index eab9f8c1036a83451fc3201f97cfb1cc8c885043..c8524022d9d8295900638a09c528dfc3fdb85afd 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -37,6 +37,16 @@ impl FeatureFlag for AgentSharingFeatureFlag { const NAME: &'static str = "agent-sharing"; } +pub struct AgentGitWorktreesFeatureFlag; + +impl FeatureFlag for AgentGitWorktreesFeatureFlag { + const NAME: &'static str = "agent-git-worktrees"; + + fn enabled_for_staff() -> bool { + false + } +} + pub struct DiffReviewFeatureFlag; impl FeatureFlag for DiffReviewFeatureFlag { diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index ba77199d75f624c0dd44ad0b2ba4eec812d9a711..bd07555d05b759a33080b9ae9f166145c3d26d14 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -55,6 +55,26 @@ pub const GRAPH_CHUNK_SIZE: usize = 1000; /// Default value for the `git.worktree_directory` setting. pub const DEFAULT_WORKTREE_DIRECTORY: &str = "../worktrees"; +/// Given the git common directory (from `commondir()`), derive the original +/// repository's working directory. +/// +/// For a standard checkout, `common_dir` is `/.git`, so the parent +/// is the working directory. For a git worktree, `common_dir` is the **main** +/// repo's `.git` directory, so the parent is the original repo's working directory. +/// +/// Falls back to returning `common_dir` itself if it doesn't end with `.git` +/// (e.g. bare repos or unusual layouts). +pub fn original_repo_path_from_common_dir(common_dir: &Path) -> PathBuf { + if common_dir.file_name() == Some(OsStr::new(".git")) { + common_dir + .parent() + .map(|p| p.to_path_buf()) + .unwrap_or_else(|| common_dir.to_path_buf()) + } else { + common_dir.to_path_buf() + } +} + /// Resolves the configured worktree directory to an absolute path. 
/// /// `worktree_directory_setting` is the raw string from the user setting @@ -4272,6 +4292,34 @@ mod tests { ); } + #[test] + fn test_original_repo_path_from_common_dir() { + // Normal repo: common_dir is /.git + assert_eq!( + original_repo_path_from_common_dir(Path::new("/code/zed5/.git")), + PathBuf::from("/code/zed5") + ); + + // Worktree: common_dir is the main repo's .git + // (same result — that's the point, it always traces back to the original) + assert_eq!( + original_repo_path_from_common_dir(Path::new("/code/zed5/.git")), + PathBuf::from("/code/zed5") + ); + + // Bare repo: no .git suffix, returns as-is + assert_eq!( + original_repo_path_from_common_dir(Path::new("/code/zed5.git")), + PathBuf::from("/code/zed5.git") + ); + + // Root-level .git directory + assert_eq!( + original_repo_path_from_common_dir(Path::new("/.git")), + PathBuf::from("/") + ); + } + #[test] fn test_validate_worktree_directory() { let work_dir = Path::new("/code/my-project"); diff --git a/crates/git_ui/src/worktree_picker.rs b/crates/git_ui/src/worktree_picker.rs index f2826a2b543a73c5341653c42bbb5f1540213b2a..9f70c29da86ee52668984f92b247331524fc5936 100644 --- a/crates/git_ui/src/worktree_picker.rs +++ b/crates/git_ui/src/worktree_picker.rs @@ -275,9 +275,9 @@ impl WorktreeListDelegate { .git .worktree_directory .clone(); - let work_dir = repo.work_directory_abs_path.clone(); + let original_repo = repo.original_repo_abs_path.clone(); let directory = - validate_worktree_directory(&work_dir, &worktree_directory_setting)?; + validate_worktree_directory(&original_repo, &worktree_directory_setting)?; let new_worktree_path = directory.join(&branch); let receiver = repo.create_worktree(branch.clone(), directory, commit); anyhow::Ok((receiver, new_worktree_path)) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index ae776966a770ccadcffdbf9b140ed10d4871b317..487e7f5f9699382ce4930141f7a0c7c50a1d23b8 100644 --- a/crates/project/src/git_store.rs +++ 
b/crates/project/src/git_store.rs @@ -266,6 +266,11 @@ pub struct RepositorySnapshot { pub id: RepositoryId, pub statuses_by_path: SumTree, pub work_directory_abs_path: Arc, + /// The working directory of the original repository. For a normal + /// checkout this equals `work_directory_abs_path`. For a git worktree + /// checkout, this is the original repo's working directory — used to + /// anchor new worktree creation so they don't nest. + pub original_repo_abs_path: Arc, pub path_style: PathStyle, pub branch: Option, pub head_commit: Option, @@ -1505,16 +1510,19 @@ impl GitStore { new_work_directory_abs_path: Some(work_directory_abs_path), dot_git_abs_path: Some(dot_git_abs_path), repository_dir_abs_path: Some(_repository_dir_abs_path), - common_dir_abs_path: Some(_common_dir_abs_path), + common_dir_abs_path: Some(common_dir_abs_path), .. } = update { + let original_repo_abs_path: Arc = + git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into(); let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release)); let git_store = cx.weak_entity(); let repo = cx.new(|cx| { let mut repo = Repository::local( id, work_directory_abs_path.clone(), + original_repo_abs_path.clone(), dot_git_abs_path.clone(), project_environment.downgrade(), fs.clone(), @@ -1840,6 +1848,11 @@ impl GitStore { let id = RepositoryId::from_proto(update.id); let client = this.upstream_client().context("no upstream client")?; + let original_repo_abs_path: Option> = update + .original_repo_abs_path + .as_deref() + .map(|p| Path::new(p).into()); + let mut repo_subscription = None; let repo = this.repositories.entry(id).or_insert_with(|| { let git_store = cx.weak_entity(); @@ -1847,6 +1860,7 @@ impl GitStore { Repository::remote( id, Path::new(&update.abs_path).into(), + original_repo_abs_path.clone(), path_style, ProjectId(update.project_id), client, @@ -3481,10 +3495,17 @@ impl RepositoryId { } impl RepositorySnapshot { - fn empty(id: RepositoryId, 
work_directory_abs_path: Arc, path_style: PathStyle) -> Self { + fn empty( + id: RepositoryId, + work_directory_abs_path: Arc, + original_repo_abs_path: Option>, + path_style: PathStyle, + ) -> Self { Self { id, statuses_by_path: Default::default(), + original_repo_abs_path: original_repo_abs_path + .unwrap_or_else(|| work_directory_abs_path.clone()), work_directory_abs_path, branch: None, head_commit: None, @@ -3528,6 +3549,9 @@ impl RepositorySnapshot { .collect(), remote_upstream_url: self.remote_upstream_url.clone(), remote_origin_url: self.remote_origin_url.clone(), + original_repo_abs_path: Some( + self.original_repo_abs_path.to_string_lossy().into_owned(), + ), } } @@ -3599,6 +3623,9 @@ impl RepositorySnapshot { .collect(), remote_upstream_url: self.remote_upstream_url.clone(), remote_origin_url: self.remote_origin_url.clone(), + original_repo_abs_path: Some( + self.original_repo_abs_path.to_string_lossy().into_owned(), + ), } } @@ -3757,14 +3784,19 @@ impl Repository { fn local( id: RepositoryId, work_directory_abs_path: Arc, + original_repo_abs_path: Arc, dot_git_abs_path: Arc, project_environment: WeakEntity, fs: Arc, git_store: WeakEntity, cx: &mut Context, ) -> Self { - let snapshot = - RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local()); + let snapshot = RepositorySnapshot::empty( + id, + work_directory_abs_path.clone(), + Some(original_repo_abs_path), + PathStyle::local(), + ); let state = cx .spawn(async move |_, cx| { LocalRepositoryState::new( @@ -3818,13 +3850,19 @@ impl Repository { fn remote( id: RepositoryId, work_directory_abs_path: Arc, + original_repo_abs_path: Option>, path_style: PathStyle, project_id: ProjectId, client: AnyProtoClient, git_store: WeakEntity, cx: &mut Context, ) -> Self { - let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style); + let snapshot = RepositorySnapshot::empty( + id, + work_directory_abs_path, + original_repo_abs_path, + path_style, + ); let 
repository_state = RemoteRepositoryState { project_id, client }; let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx); let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared(); @@ -5650,6 +5688,24 @@ impl Repository { ) } + pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver> { + self.send_job( + Some("git worktree remove".into()), + move |repo, _cx| async move { + match repo { + RepositoryState::Local(LocalRepositoryState { backend, .. }) => { + backend.remove_worktree(path, force).await + } + RepositoryState::Remote(_) => { + anyhow::bail!( + "Removing worktrees on remote repositories is not yet supported" + ) + } + } + }, + ) + } + pub fn default_branch( &mut self, include_remote_name: bool, @@ -5988,6 +6044,10 @@ impl Repository { update: proto::UpdateRepository, cx: &mut Context, ) -> Result<()> { + if let Some(main_path) = &update.original_repo_abs_path { + self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into(); + } + let new_branch = update.branch_summary.as_ref().map(proto_to_branch); let new_head_commit = update .head_commit_details @@ -6784,6 +6844,7 @@ async fn compute_snapshot( id, statuses_by_path, work_directory_abs_path, + original_repo_abs_path: prev_snapshot.original_repo_abs_path, path_style: prev_snapshot.path_style, scan_id: prev_snapshot.scan_id + 1, branch, diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index 86f3d4c328af06e1a3f4f7cc406ac84272577cd0..6cb3acfcd878c8f970c4e99789939424a3835709 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -125,6 +125,7 @@ message UpdateRepository { repeated StashEntry stash_entries = 13; optional string remote_upstream_url = 14; optional string remote_origin_url = 15; + optional string original_repo_abs_path = 16; } message RemoveRepository { diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 
cdb646ec3b8248bdd0b5784424ed7b8df8ac0ee8..0971ebd0ddc9265ccf9ea10da7745ba59914db30 100644 --- a/crates/workspace/src/persistence/model.rs +++ b/crates/workspace/src/persistence/model.rs @@ -93,9 +93,9 @@ pub(crate) struct SerializedWorkspace { #[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)] pub struct DockStructure { - pub(crate) left: DockData, - pub(crate) right: DockData, - pub(crate) bottom: DockData, + pub left: DockData, + pub right: DockData, + pub bottom: DockData, } impl RemoteConnectionKind { @@ -143,9 +143,9 @@ impl Bind for DockStructure { #[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)] pub struct DockData { - pub(crate) visible: bool, - pub(crate) active_panel: Option, - pub(crate) zoom: bool, + pub visible: bool, + pub active_panel: Option, + pub zoom: bool, } impl Column for DockData { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b62f6b5eb60eafb7177f7883b825a208e7c81d62..3839b4446e7399536a12e7951c004cce81d5c4e6 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -79,7 +79,10 @@ pub use pane_group::{ use persistence::{DB, SerializedWindowBounds, model::SerializedWorkspace}; pub use persistence::{ DB as WORKSPACE_DB, WorkspaceDb, delete_unloaded_items, - model::{ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation, SessionWorkspace}, + model::{ + DockStructure, ItemId, SerializedMultiWorkspace, SerializedWorkspaceLocation, + SessionWorkspace, + }, read_serialized_multi_workspaces, }; use postage::stream::Stream; @@ -149,7 +152,7 @@ use crate::{item::ItemBufferKind, notifications::NotificationId}; use crate::{ persistence::{ SerializedAxis, - model::{DockData, DockStructure, SerializedItem, SerializedPane, SerializedPaneGroup}, + model::{DockData, SerializedItem, SerializedPane, SerializedPaneGroup}, }, security_modal::SecurityModal, }; @@ -628,7 +631,7 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, 
c }) .ok(); } else { - let task = Workspace::new_local(Vec::new(), app_state.clone(), None, None, None, cx); + let task = Workspace::new_local(Vec::new(), app_state.clone(), None, None, None, true, cx); cx.spawn(async move |cx| { let (window, _) = task.await?; window.update(cx, |multi_workspace, window, cx| { @@ -1290,6 +1293,7 @@ pub struct Workspace { scheduled_tasks: Vec>, last_open_dock_positions: Vec, removing: bool, + _panels_task: Option>>, } impl EventEmitter for Workspace {} @@ -1660,6 +1664,7 @@ impl Workspace { left_dock, bottom_dock, right_dock, + _panels_task: None, project: project.clone(), follower_states: Default::default(), last_leaders_by_pane: Default::default(), @@ -1703,6 +1708,7 @@ impl Workspace { requesting_window: Option>, env: Option>, init: Option) + Send>>, + activate: bool, cx: &mut App, ) -> Task< anyhow::Result<( @@ -1830,7 +1836,11 @@ impl Workspace { workspace }); - multi_workspace.activate(workspace.clone(), cx); + if activate { + multi_workspace.activate(workspace.clone(), cx); + } else { + multi_workspace.add_workspace(workspace.clone(), cx); + } workspace })?; (window, workspace) @@ -1984,6 +1994,76 @@ impl Workspace { [&self.left_dock, &self.bottom_dock, &self.right_dock] } + pub fn capture_dock_state(&self, _window: &Window, cx: &App) -> DockStructure { + let left_dock = self.left_dock.read(cx); + let left_visible = left_dock.is_open(); + let left_active_panel = left_dock + .active_panel() + .map(|panel| panel.persistent_name().to_string()); + // `zoomed_position` is kept in sync with individual panel zoom state + // by the dock code in `Dock::new` and `Dock::add_panel`. 
+ let left_dock_zoom = self.zoomed_position == Some(DockPosition::Left); + + let right_dock = self.right_dock.read(cx); + let right_visible = right_dock.is_open(); + let right_active_panel = right_dock + .active_panel() + .map(|panel| panel.persistent_name().to_string()); + let right_dock_zoom = self.zoomed_position == Some(DockPosition::Right); + + let bottom_dock = self.bottom_dock.read(cx); + let bottom_visible = bottom_dock.is_open(); + let bottom_active_panel = bottom_dock + .active_panel() + .map(|panel| panel.persistent_name().to_string()); + let bottom_dock_zoom = self.zoomed_position == Some(DockPosition::Bottom); + + DockStructure { + left: DockData { + visible: left_visible, + active_panel: left_active_panel, + zoom: left_dock_zoom, + }, + right: DockData { + visible: right_visible, + active_panel: right_active_panel, + zoom: right_dock_zoom, + }, + bottom: DockData { + visible: bottom_visible, + active_panel: bottom_active_panel, + zoom: bottom_dock_zoom, + }, + } + } + + pub fn set_dock_structure( + &self, + docks: DockStructure, + window: &mut Window, + cx: &mut Context, + ) { + for (dock, data) in [ + (&self.left_dock, docks.left), + (&self.bottom_dock, docks.bottom), + (&self.right_dock, docks.right), + ] { + dock.update(cx, |dock, cx| { + dock.serialized_dock = Some(data); + dock.restore_state(window, cx); + }); + } + } + + pub fn open_item_abs_paths(&self, cx: &App) -> Vec { + self.items(cx) + .filter_map(|item| { + let project_path = item.project_path(cx)?; + self.project.read(cx).absolute_path(&project_path, cx) + }) + .collect() + } + pub fn dock_at_position(&self, position: DockPosition) -> &Entity { match position { DockPosition::Left => &self.left_dock, @@ -2043,6 +2123,14 @@ impl Workspace { &self.app_state } + pub fn set_panels_task(&mut self, task: Task>) { + self._panels_task = Some(task); + } + + pub fn take_panels_task(&mut self) -> Option>> { + self._panels_task.take() + } + pub fn user_store(&self) -> &Entity { 
&self.app_state.user_store } @@ -2548,7 +2636,15 @@ impl Workspace { Task::ready(Ok(callback(self, window, cx))) } else { let env = self.project.read(cx).cli_environment(cx); - let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, None, cx); + let task = Self::new_local( + Vec::new(), + self.app_state.clone(), + None, + env, + None, + true, + cx, + ); cx.spawn_in(window, async move |_vh, cx| { let (multi_workspace_window, _) = task.await?; multi_workspace_window.update(cx, |multi_workspace, window, cx| { @@ -2578,7 +2674,15 @@ impl Workspace { Task::ready(Ok(callback(self, window, cx))) } else { let env = self.project.read(cx).cli_environment(cx); - let task = Self::new_local(Vec::new(), self.app_state.clone(), None, env, None, cx); + let task = Self::new_local( + Vec::new(), + self.app_state.clone(), + None, + env, + None, + true, + cx, + ); cx.spawn_in(window, async move |_vh, cx| { let (multi_workspace_window, _) = task.await?; multi_workspace_window.update(cx, |multi_workspace, window, cx| { @@ -6012,53 +6116,7 @@ impl Workspace { window: &mut Window, cx: &mut App, ) -> DockStructure { - let left_dock = this.left_dock.read(cx); - let left_visible = left_dock.is_open(); - let left_active_panel = left_dock - .active_panel() - .map(|panel| panel.persistent_name().to_string()); - let left_dock_zoom = left_dock - .active_panel() - .map(|panel| panel.is_zoomed(window, cx)) - .unwrap_or(false); - - let right_dock = this.right_dock.read(cx); - let right_visible = right_dock.is_open(); - let right_active_panel = right_dock - .active_panel() - .map(|panel| panel.persistent_name().to_string()); - let right_dock_zoom = right_dock - .active_panel() - .map(|panel| panel.is_zoomed(window, cx)) - .unwrap_or(false); - - let bottom_dock = this.bottom_dock.read(cx); - let bottom_visible = bottom_dock.is_open(); - let bottom_active_panel = bottom_dock - .active_panel() - .map(|panel| panel.persistent_name().to_string()); - let bottom_dock_zoom = bottom_dock - 
.active_panel() - .map(|panel| panel.is_zoomed(window, cx)) - .unwrap_or(false); - - DockStructure { - left: DockData { - visible: left_visible, - active_panel: left_active_panel, - zoom: left_dock_zoom, - }, - right: DockData { - visible: right_visible, - active_panel: right_active_panel, - zoom: right_dock_zoom, - }, - bottom: DockData { - visible: bottom_visible, - active_panel: bottom_active_panel, - zoom: bottom_dock_zoom, - }, - } + this.capture_dock_state(window, cx) } match self.workspace_location(cx) { @@ -8087,6 +8145,7 @@ pub async fn restore_multiworkspace( None, None, None, + true, cx, ) }) @@ -8116,6 +8175,7 @@ pub async fn restore_multiworkspace( Some(window_handle), None, None, + true, cx, ) }) @@ -8385,6 +8445,7 @@ pub fn join_channel( requesting_window, None, None, + true, cx, ) }) @@ -8457,7 +8518,7 @@ pub async fn get_any_active_multi_workspace( // find an existing workspace to focus and show call controls let active_window = activate_any_workspace_window(&mut cx); if active_window.is_none() { - cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, cx)) + cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), None, None, None, true, cx)) .await?; } activate_any_workspace_window(&mut cx).context("could not open zed") @@ -8845,6 +8906,7 @@ pub fn open_paths( open_options.replace_window, open_options.env, None, + true, cx, ) }) @@ -8908,6 +8970,7 @@ pub fn open_new( open_options.replace_window, open_options.env, Some(Box::new(init)), + true, cx, ); cx.spawn(async move |cx| { diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index 0ae98d510aa34b05f7fa1766176f21ea353394d9..df673f0b4869af8fa55b0e83af10553df8afb4d8 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -71,7 +71,7 @@ use { time::Duration, }, util::ResultExt as _, - workspace::{AppState, MultiWorkspace, Workspace, WorkspaceId}, + workspace::{AppState, MultiWorkspace, Panel 
as _, Workspace, WorkspaceId}, zed_actions::OpenSettingsAt, }; @@ -548,6 +548,27 @@ fn run_visual_tests(project_path: PathBuf, update_baseline: bool) -> Result<()> } } + // Run Test 11: Thread target selector visual tests + #[cfg(feature = "visual-tests")] + { + println!("\n--- Test 11: start_thread_in_selector (6 variants) ---"); + match run_start_thread_in_selector_visual_tests(app_state.clone(), &mut cx, update_baseline) + { + Ok(TestResult::Passed) => { + println!("✓ start_thread_in_selector: PASSED"); + passed += 1; + } + Ok(TestResult::BaselineUpdated(_)) => { + println!("✓ start_thread_in_selector: Baselines updated"); + updated += 1; + } + Err(e) => { + eprintln!("✗ start_thread_in_selector: FAILED - {}", e); + failed += 1; + } + } + } + // Run Test 9: Tool Permissions Settings UI visual test println!("\n--- Test 9: tool_permissions_settings ---"); match run_tool_permissions_visual_tests(app_state.clone(), &mut cx, update_baseline) { @@ -3066,3 +3087,629 @@ fn run_error_wrapping_visual_tests( Ok(test_result) } + +#[cfg(all(target_os = "macos", feature = "visual-tests"))] +/// Runs a git command in the given directory and returns an error with +/// stderr/stdout context if the command fails (non-zero exit status). 
+fn run_git_command(args: &[&str], dir: &std::path::Path) -> Result<()> { + let output = std::process::Command::new("git") + .args(args) + .current_dir(dir) + .output() + .with_context(|| format!("failed to spawn `git {}`", args.join(" ")))?; + + if !output.status.success() { + let stdout = String::from_utf8_lossy(&output.stdout); + let stderr = String::from_utf8_lossy(&output.stderr); + anyhow::bail!( + "`git {}` failed (exit {})\nstdout: {}\nstderr: {}", + args.join(" "), + output.status, + stdout.trim(), + stderr.trim(), + ); + } + Ok(()) +} + +#[cfg(all(target_os = "macos", feature = "visual-tests"))] +fn run_start_thread_in_selector_visual_tests( + app_state: Arc, + cx: &mut VisualTestAppContext, + update_baseline: bool, +) -> Result { + use agent_ui::{AgentPanel, StartThreadIn, WorktreeCreationStatus}; + + // Enable feature flags so the thread target selector renders + cx.update(|cx| { + cx.update_flags( + true, + vec!["agent-v2".to_string(), "agent-git-worktrees".to_string()], + ); + }); + + // Create a temp directory with a real git repo so "New Worktree" is enabled + let temp_dir = tempfile::tempdir()?; + let temp_path = temp_dir.keep(); + let canonical_temp = temp_path.canonicalize()?; + let project_path = canonical_temp.join("project"); + std::fs::create_dir_all(&project_path)?; + + // Initialize git repo + run_git_command(&["init"], &project_path)?; + run_git_command(&["config", "user.email", "test@test.com"], &project_path)?; + run_git_command(&["config", "user.name", "Test User"], &project_path)?; + + // Create source files + let src_dir = project_path.join("src"); + std::fs::create_dir_all(&src_dir)?; + std::fs::write( + src_dir.join("main.rs"), + r#"fn main() { + println!("Hello, world!"); + + let x = 42; + let y = x * 2; + + if y > 50 { + println!("y is greater than 50"); + } else { + println!("y is not greater than 50"); + } + + for i in 0..10 { + println!("i = {}", i); + } +} + +fn helper_function(a: i32, b: i32) -> i32 { + a + b +} +"#, + )?; + 
+ std::fs::write( + project_path.join("Cargo.toml"), + r#"[package] +name = "test_project" +version = "0.1.0" +edition = "2021" +"#, + )?; + + // Commit so git status is clean + run_git_command(&["add", "."], &project_path)?; + run_git_command(&["commit", "-m", "Initial commit"], &project_path)?; + + let project = cx.update(|cx| { + project::Project::local( + app_state.client.clone(), + app_state.node_runtime.clone(), + app_state.user_store.clone(), + app_state.languages.clone(), + app_state.fs.clone(), + None, + project::LocalProjectFlags { + init_worktree_trust: false, + ..Default::default() + }, + cx, + ) + }); + + // Use a wide window so we see project panel + editor + agent panel + let window_size = size(px(1280.0), px(800.0)); + let bounds = Bounds { + origin: point(px(0.0), px(0.0)), + size: window_size, + }; + + let workspace_window: WindowHandle = cx + .update(|cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + focus: false, + show: false, + ..Default::default() + }, + |window, cx| { + let workspace = cx.new(|cx| { + Workspace::new(None, project.clone(), app_state.clone(), window, cx) + }); + cx.new(|cx| MultiWorkspace::new(workspace, window, cx)) + }, + ) + }) + .context("Failed to open thread target selector test window")?; + + cx.run_until_parked(); + + // Create and register the workspace sidebar + let sidebar = workspace_window + .update(cx, |_multi_workspace, window, cx| { + let multi_workspace_handle = cx.entity(); + cx.new(|cx| sidebar::Sidebar::new(multi_workspace_handle, window, cx)) + }) + .context("Failed to create sidebar")?; + + workspace_window + .update(cx, |multi_workspace, window, cx| { + multi_workspace.register_sidebar(sidebar.clone(), window, cx); + }) + .context("Failed to register sidebar")?; + + // Open the sidebar + workspace_window + .update(cx, |multi_workspace, window, cx| { + multi_workspace.toggle_sidebar(window, cx); + }) + .context("Failed to toggle sidebar")?; + + 
cx.run_until_parked(); + + // Add the git project as a worktree + let add_worktree_task = workspace_window + .update(cx, |multi_workspace, _window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + project.find_or_create_worktree(&project_path, true, cx) + }) + }) + .context("Failed to start adding worktree")?; + + cx.background_executor.allow_parking(); + cx.foreground_executor + .block_test(add_worktree_task) + .context("Failed to add worktree")?; + cx.background_executor.forbid_parking(); + + cx.run_until_parked(); + + // Wait for worktree scan and git status + for _ in 0..5 { + cx.advance_clock(Duration::from_millis(100)); + cx.run_until_parked(); + } + + // Open the project panel + let (weak_workspace, async_window_cx) = workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + (workspace.read(cx).weak_handle(), window.to_async(cx)) + }) + .context("Failed to get workspace handle")?; + + cx.background_executor.allow_parking(); + let project_panel = cx + .foreground_executor + .block_test(ProjectPanel::load(weak_workspace, async_window_cx)) + .context("Failed to load project panel")?; + cx.background_executor.forbid_parking(); + + workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + workspace.update(cx, |workspace, cx| { + workspace.add_panel(project_panel, window, cx); + workspace.open_panel::(window, cx); + }); + }) + .context("Failed to add project panel")?; + + cx.run_until_parked(); + + // Open main.rs in the editor + let open_file_task = workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + workspace.update(cx, |workspace, cx| { + let worktree = workspace.project().read(cx).worktrees(cx).next(); + if let Some(worktree) = worktree { + let worktree_id = 
worktree.read(cx).id(); + let rel_path: std::sync::Arc = + util::rel_path::rel_path("src/main.rs").into(); + let project_path: project::ProjectPath = (worktree_id, rel_path).into(); + Some(workspace.open_path(project_path, None, true, window, cx)) + } else { + None + } + }) + }) + .log_err() + .flatten(); + + if let Some(task) = open_file_task { + cx.background_executor.allow_parking(); + cx.foreground_executor.block_test(task).log_err(); + cx.background_executor.forbid_parking(); + } + + cx.run_until_parked(); + + // Load the AgentPanel + let (weak_workspace, async_window_cx) = workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + (workspace.read(cx).weak_handle(), window.to_async(cx)) + }) + .context("Failed to get workspace handle for agent panel")?; + + let prompt_builder = + cx.update(|cx| prompt_store::PromptBuilder::load(app_state.fs.clone(), false, cx)); + + // Register an observer so that workspaces created by the worktree creation + // flow get AgentPanel and ProjectPanel loaded automatically. Without this, + // `workspace.panel::(cx)` returns None in the new workspace and + // the creation flow's `focus_panel::` call is a no-op. 
+ let _workspace_observer = cx.update({ + let prompt_builder = prompt_builder.clone(); + |cx| { + cx.observe_new(move |workspace: &mut Workspace, window, cx| { + let Some(window) = window else { return }; + let prompt_builder = prompt_builder.clone(); + let panels_task = cx.spawn_in(window, async move |workspace_handle, cx| { + let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone()); + let agent_panel = + AgentPanel::load(workspace_handle.clone(), prompt_builder, cx.clone()); + if let Ok(panel) = project_panel.await { + workspace_handle + .update_in(cx, |workspace, window, cx| { + workspace.add_panel(panel, window, cx); + }) + .log_err(); + } + if let Ok(panel) = agent_panel.await { + workspace_handle + .update_in(cx, |workspace, window, cx| { + workspace.add_panel(panel, window, cx); + }) + .log_err(); + } + anyhow::Ok(()) + }); + workspace.set_panels_task(panels_task); + }) + } + }); + + cx.background_executor.allow_parking(); + let panel = cx + .foreground_executor + .block_test(AgentPanel::load( + weak_workspace, + prompt_builder, + async_window_cx, + )) + .context("Failed to load AgentPanel")?; + cx.background_executor.forbid_parking(); + + workspace_window + .update(cx, |multi_workspace, window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + workspace.update(cx, |workspace, cx| { + workspace.add_panel(panel.clone(), window, cx); + workspace.open_panel::(window, cx); + }); + }) + .context("Failed to add and open AgentPanel")?; + + cx.run_until_parked(); + + // Inject the stub server and open a thread so the toolbar is visible + let connection = StubAgentConnection::new(); + let stub_agent: Rc = Rc::new(StubAgentServer::new(connection)); + + cx.update_window(workspace_window.into(), |_, window, cx| { + panel.update(cx, |panel, cx| { + panel.open_external_thread_with_server(stub_agent.clone(), window, cx); + }); + })?; + + cx.run_until_parked(); + + // ---- Screenshot 1: Default "Local Project" selector (dropdown closed) ---- + 
cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_default = run_visual_test( + "start_thread_in_selector_default", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 2: Dropdown open showing menu entries ---- + cx.update_window(workspace_window.into(), |_, window, cx| { + panel.update(cx, |panel, cx| { + panel.open_start_thread_in_menu_for_tests(window, cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_open_dropdown = run_visual_test( + "start_thread_in_selector_open", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 3: "New Worktree" selected (dropdown closed, label changed) ---- + // First dismiss the dropdown, then change the target so the toolbar label is visible + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.close_start_thread_in_menu_for_tests(cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.set_start_thread_in_for_tests(StartThreadIn::NewWorktree, cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_new_worktree = run_visual_test( + "start_thread_in_selector_new_worktree", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 4: "Creating worktree…" status banner ---- + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel + .set_worktree_creation_status_for_tests(Some(WorktreeCreationStatus::Creating), cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + 
cx.run_until_parked(); + + let result_creating = run_visual_test( + "worktree_creation_status_creating", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 5: Error status banner ---- + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.set_worktree_creation_status_for_tests( + Some(WorktreeCreationStatus::Error( + "Failed to create worktree: branch already exists".into(), + )), + cx, + ); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_error = run_visual_test( + "worktree_creation_status_error", + workspace_window.into(), + cx, + update_baseline, + ); + + // ---- Screenshot 6: Worktree creation succeeded ---- + // Clear the error status and re-select New Worktree to ensure a clean state. + cx.update_window(workspace_window.into(), |_, _window, cx| { + panel.update(cx, |panel, cx| { + panel.set_worktree_creation_status_for_tests(None, cx); + }); + })?; + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, cx| { + window.dispatch_action(Box::new(StartThreadIn::NewWorktree), cx); + })?; + cx.run_until_parked(); + + // Insert a message into the active thread's message editor and submit. + let thread_view = cx + .read(|cx| panel.read(cx).as_active_thread_view(cx)) + .ok_or_else(|| anyhow::anyhow!("No active thread view"))?; + + cx.update_window(workspace_window.into(), |_, window, cx| { + let message_editor = thread_view.read(cx).message_editor.clone(); + message_editor.update(cx, |message_editor, cx| { + message_editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "Add a CLI flag to set the log level".to_string(), + ))], + window, + cx, + ); + message_editor.send(cx); + }); + })?; + cx.run_until_parked(); + + // Wait for the full worktree creation flow to complete. 
The creation status + // is cleared to `None` at the very end of the async task, after panels are + // loaded, the agent panel is focused, and the new workspace is activated. + cx.background_executor.allow_parking(); + let mut creation_complete = false; + for _ in 0..120 { + cx.run_until_parked(); + let status_cleared = cx.read(|cx| { + panel + .read(cx) + .worktree_creation_status_for_tests() + .is_none() + }); + let workspace_count = workspace_window.update(cx, |multi_workspace, _window, _cx| { + multi_workspace.workspaces().len() + })?; + if workspace_count == 2 && status_cleared { + creation_complete = true; + break; + } + cx.advance_clock(Duration::from_millis(100)); + } + cx.background_executor.forbid_parking(); + + if !creation_complete { + return Err(anyhow::anyhow!("Worktree creation did not complete")); + } + + // The creation flow called `external_thread` on the new workspace's agent + // panel, which tried to launch a real agent binary and failed. Replace the + // error state by injecting the stub server, and shrink the panel so the + // editor content is visible. + workspace_window.update(cx, |multi_workspace, window, cx| { + let new_workspace = &multi_workspace.workspaces()[1]; + new_workspace.update(cx, |workspace, cx| { + if let Some(new_panel) = workspace.panel::(cx) { + new_panel.update(cx, |panel, cx| { + panel.set_size(Some(px(480.0)), window, cx); + panel.open_external_thread_with_server(stub_agent.clone(), window, cx); + }); + } + }); + })?; + cx.run_until_parked(); + + // Type and send a message so the thread target dropdown disappears. 
+ let new_panel = workspace_window.update(cx, |multi_workspace, _window, cx| { + let new_workspace = &multi_workspace.workspaces()[1]; + new_workspace.read(cx).panel::(cx) + })?; + if let Some(new_panel) = new_panel { + let new_thread_view = cx.read(|cx| new_panel.read(cx).as_active_thread_view(cx)); + if let Some(new_thread_view) = new_thread_view { + cx.update_window(workspace_window.into(), |_, window, cx| { + let message_editor = new_thread_view.read(cx).message_editor.clone(); + message_editor.update(cx, |editor, cx| { + editor.set_message( + vec![acp::ContentBlock::Text(acp::TextContent::new( + "Add a CLI flag to set the log level".to_string(), + ))], + window, + cx, + ); + editor.send(cx); + }); + })?; + cx.run_until_parked(); + } + } + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.refresh(); + })?; + cx.run_until_parked(); + + let result_succeeded = run_visual_test( + "worktree_creation_succeeded", + workspace_window.into(), + cx, + update_baseline, + ); + + // Clean up — drop the workspace observer first so no new panels are + // registered on workspaces created during teardown. + drop(_workspace_observer); + + workspace_window + .update(cx, |multi_workspace, _window, cx| { + let workspace = &multi_workspace.workspaces()[0]; + let project = workspace.read(cx).project().clone(); + project.update(cx, |project, cx| { + let worktree_ids: Vec<_> = + project.worktrees(cx).map(|wt| wt.read(cx).id()).collect(); + for id in worktree_ids { + project.remove_worktree(id, cx); + } + }); + }) + .log_err(); + + cx.run_until_parked(); + + cx.update_window(workspace_window.into(), |_, window, _cx| { + window.remove_window(); + }) + .log_err(); + + cx.run_until_parked(); + + for _ in 0..15 { + cx.advance_clock(Duration::from_millis(100)); + cx.run_until_parked(); + } + + // Delete the preserved temp directory so visual-test runs don't + // accumulate filesystem artifacts. 
+ if let Err(err) = std::fs::remove_dir_all(&temp_path) { + log::warn!( + "failed to clean up visual-test temp dir {}: {err}", + temp_path.display() + ); + } + + // Reset feature flags + cx.update(|cx| { + cx.update_flags(false, vec![]); + }); + + let results = [ + ("default", result_default), + ("open_dropdown", result_open_dropdown), + ("new_worktree", result_new_worktree), + ("creating", result_creating), + ("error", result_error), + ("succeeded", result_succeeded), + ]; + + let mut has_baseline_update = None; + let mut failures = Vec::new(); + + for (name, result) in &results { + match result { + Ok(TestResult::Passed) => {} + Ok(TestResult::BaselineUpdated(p)) => { + has_baseline_update = Some(p.clone()); + } + Err(e) => { + failures.push(format!("{}: {}", name, e)); + } + } + } + + if !failures.is_empty() { + Err(anyhow::anyhow!( + "start_thread_in_selector failures: {}", + failures.join("; ") + )) + } else if let Some(p) = has_baseline_update { + Ok(TestResult::BaselineUpdated(p)) + } else { + Ok(TestResult::Passed) + } +} diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index a0a6e424d46790ad49c860377c5d1e711aae6b61..17832bdd1833cabb42af2195f9d9aab1a6bf3fab 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -496,7 +496,8 @@ pub fn initialize_workspace( status_bar.add_right_item(image_info, window, cx); }); - initialize_panels(prompt_builder.clone(), window, cx); + let panels_task = initialize_panels(prompt_builder.clone(), window, cx); + workspace.set_panels_task(panels_task); register_actions(app_state.clone(), workspace, window, cx); workspace.focus_handle(cx).focus(window, cx); @@ -620,7 +621,7 @@ fn initialize_panels( prompt_builder: Arc, window: &mut Window, cx: &mut Context, -) { +) -> Task> { cx.spawn_in(window, async move |workspace_handle, cx| { let project_panel = ProjectPanel::load(workspace_handle.clone(), cx.clone()); let outline_panel = OutlinePanel::load(workspace_handle.clone(), cx.clone()); @@ -662,7 +663,6 @@ fn 
initialize_panels( anyhow::Ok(()) }) - .detach(); } fn setup_or_teardown_ai_panel( @@ -1103,7 +1103,7 @@ fn register_actions( ); }, ) - .detach(); + .detach_and_log_err(cx); } } }) @@ -5808,7 +5808,15 @@ mod tests { // Window B: workspace for dir3 let (window_a, _) = cx .update(|cx| { - Workspace::new_local(vec![dir1.into()], app_state.clone(), None, None, None, cx) + Workspace::new_local( + vec![dir1.into()], + app_state.clone(), + None, + None, + None, + true, + cx, + ) }) .await .expect("failed to open first workspace"); @@ -5824,7 +5832,15 @@ mod tests { let (window_b, _) = cx .update(|cx| { - Workspace::new_local(vec![dir3.into()], app_state.clone(), None, None, None, cx) + Workspace::new_local( + vec![dir3.into()], + app_state.clone(), + None, + None, + None, + true, + cx, + ) }) .await .expect("failed to open third workspace"); From 38c7e63af3a4264598308b6ca07119e097777a8b Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Tue, 3 Mar 2026 10:52:00 -0500 Subject: [PATCH 268/548] git: Fix commit message buffer header not being disabled after cloning commit view (#50606) Release Notes: - Fixed extraneous buffer header when splitting the commit view. --- crates/editor/src/display_map.rs | 4 ++++ crates/editor/src/display_map/block_map.rs | 2 ++ crates/editor/src/editor.rs | 2 ++ 3 files changed, 8 insertions(+) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 57b8eb8ef6c1b29cb99da3e2a4e731d0c828038e..b666557b90a3c1181404d8f09b1d50ff9f8402a9 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -789,6 +789,9 @@ impl DisplayMap { .collect(), cx, ); + for buffer_id in &other.block_snapshot.buffers_with_disabled_headers { + self.disable_header_for_buffer(*buffer_id, cx); + } } /// Creates folds for the given creases. 
@@ -1003,6 +1006,7 @@ impl DisplayMap { &self.block_map.folded_buffers } + #[instrument(skip_all)] pub(super) fn clear_folded_buffer(&mut self, buffer_id: language::BufferId) { self.block_map.folded_buffers.remove(&buffer_id); } diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index db7eb53b39088c6026d3d36bef636f748c80d587..2673baae84ab74b2852004320cf1d94c5ed1ed42 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -78,6 +78,7 @@ pub struct BlockSnapshot { custom_blocks_by_id: TreeMap>, pub(super) buffer_header_height: u32, pub(super) excerpt_header_height: u32, + pub(super) buffers_with_disabled_headers: HashSet, } impl Deref for BlockSnapshot { @@ -657,6 +658,7 @@ impl BlockMap { custom_blocks_by_id: self.custom_blocks_by_id.clone(), buffer_header_height: self.buffer_header_height, excerpt_header_height: self.excerpt_header_height, + buffers_with_disabled_headers: self.buffers_with_disabled_headers.clone(), }, } } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 28d96e721257eaad898408cafba67f9f991e4909..5504305f86eb95dee000cec4099e366bbf86ffef 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -1973,6 +1973,8 @@ impl Editor { .clone_state(&self.scroll_manager, &my_snapshot, &clone_snapshot, cx); clone.searchable = self.searchable; clone.read_only = self.read_only; + clone.buffers_with_disabled_indent_guides = + self.buffers_with_disabled_indent_guides.clone(); clone } From c19cc4c51e0f64eec42168943050f2deeccaa076 Mon Sep 17 00:00:00 2001 From: Chriss4123 <87142779+Chriss4123@users.noreply.github.com> Date: Tue, 3 Mar 2026 18:11:51 +0200 Subject: [PATCH 269/548] Fix Linux watcher cleanup for recreated directories (#50412) ## Problem - On Linux, non-recursive watcher registrations remained path-cached after deleting and recreating a directory in the same session. 
- The recreated directory was not re-watched, so newly created child entries under that path could be missing. ## Summary - Remove directory watcher registrations when worktree paths are removed from snapshot state. - Ensure recreated directories can be watched again on Linux by allowing `scan_dir` to re-add fresh watches. - Add a Linux integration regression test for directory delete/recreate path reuse and child file creation. ## Testing - `cargo test -p project --features test-support --test integration test_recreated_directory_receives_child_events -- --exact` - `cargo test -p project --features test-support --test integration test_rescan_and_remote_updates -- --exact` ## Related - #46709 Release Notes: - Fixed Linux worktree file watching so child entries appear after deleting and recreating a directory at the same path. --- .../tests/integration/project_tests.rs | 46 +++++++++++++++++++ crates/worktree/src/worktree.rs | 18 ++++++-- 2 files changed, 61 insertions(+), 3 deletions(-) diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index 6092836c19ef280aa2d13abcb32932f3b47703b6..d597377910a2a837e456ac4384b06c333887dfb3 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -5359,6 +5359,52 @@ async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) { }); } +#[cfg(target_os = "linux")] +#[gpui::test(retries = 5)] +async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) { + init_test(cx); + cx.executor().allow_parking(); + + let dir = TempTree::new(json!({})); + let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await; + let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap()); + + tree.flush_fs_events(cx).await; + + let repro_dir = dir.path().join("repro"); + std::fs::create_dir(&repro_dir).unwrap(); + 
tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some()); + }); + + std::fs::remove_dir_all(&repro_dir).unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none()); + }); + + std::fs::create_dir(&repro_dir).unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some()); + }); + + std::fs::write(repro_dir.join("repro-marker"), "").unwrap(); + tree.flush_fs_events(cx).await; + + cx.update(|cx| { + assert!( + tree.read(cx) + .entry_for_path(rel_path("repro/repro-marker")) + .is_some() + ); + }); +} + #[gpui::test(iterations = 10)] async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) { init_test(cx); diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 69b0be24e7ffb09d3fe759ec0bd3d54b54db21d3..9e62beb3c375fb8d580be02382091cafe04d31e2 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -2945,7 +2945,7 @@ impl BackgroundScannerState { self.snapshot.check_invariants(false); } - fn remove_path(&mut self, path: &RelPath) { + fn remove_path(&mut self, path: &RelPath, watcher: &dyn Watcher) { log::trace!("background scanner removing path {path:?}"); let mut new_entries; let removed_entries; @@ -2961,7 +2961,12 @@ impl BackgroundScannerState { self.snapshot.entries_by_path = new_entries; let mut removed_ids = Vec::with_capacity(removed_entries.summary().count); + let mut removed_dir_abs_paths = Vec::new(); for entry in removed_entries.cursor::<()>(()) { + if entry.is_dir() { + removed_dir_abs_paths.push(self.snapshot.absolutize(&entry.path)); + } + match self.removed_entries.entry(entry.inode) { hash_map::Entry::Occupied(mut e) => { let prev_removed_entry = e.get_mut(); @@ -2997,6 +3002,10 @@ impl BackgroundScannerState { .git_repositories .retain(|id, _| 
removed_ids.binary_search(id).is_err()); + for removed_dir_abs_path in removed_dir_abs_paths { + watcher.remove(&removed_dir_abs_path).log_err(); + } + #[cfg(feature = "test-support")] self.snapshot.check_invariants(false); } @@ -4461,7 +4470,10 @@ impl BackgroundScanner { if self.settings.is_path_excluded(&child_path) { log::debug!("skipping excluded child entry {child_path:?}"); - self.state.lock().await.remove_path(&child_path); + self.state + .lock() + .await + .remove_path(&child_path, self.watcher.as_ref()); continue; } @@ -4651,7 +4663,7 @@ impl BackgroundScanner { // detected regardless of the order of the paths. for (path, metadata) in relative_paths.iter().zip(metadata.iter()) { if matches!(metadata, Ok(None)) || doing_recursive_update { - state.remove_path(path); + state.remove_path(path, self.watcher.as_ref()); } } From d2a71b0a6985cf4d2f76d8deee9f6d75a7917fa2 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Tue, 3 Mar 2026 17:12:51 +0100 Subject: [PATCH 270/548] auto_update_helper: Rollback for all errors including FileNotFound (#50607) We would mark `FileNotFound` as success and progress the update loop which does not make much sense. Release Notes: - Do not skip update roll back in presence of FileNotFound errors on Windows. Co-authored-by: Miguel Raz Guzman Macedo --- crates/auto_update_helper/src/updater.rs | 27 +++++++++++++----------- 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/crates/auto_update_helper/src/updater.rs b/crates/auto_update_helper/src/updater.rs index 076e11fb4eef1e5c53e2bdc290be7117330c3e61..70d5e97c67169ce9737c274f90bc72cbe7ceedf5 100644 --- a/crates/auto_update_helper/src/updater.rs +++ b/crates/auto_update_helper/src/updater.rs @@ -279,19 +279,22 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option, launch: bool) unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? 
}; break; } - Err(err) => { - // Check if it's a "not found" error - let io_err = err.downcast_ref::().unwrap(); - if io_err.kind() == std::io::ErrorKind::NotFound { - log::warn!("File or folder not found."); - last_successful_job = Some(i); - unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? }; - break; + Err(err) => match err.downcast_ref::() { + Some(io_err) => match io_err.kind() { + std::io::ErrorKind::NotFound => { + log::error!("Operation failed with file not found, aborting: {}", err); + break 'outer; + } + _ => { + log::error!("Operation failed (retrying): {}", err); + std::thread::sleep(Duration::from_millis(50)); + } + }, + None => { + log::error!("Operation failed with unexpected error, aborting: {}", err); + break 'outer; } - - log::error!("Operation failed: {} ({:?})", err, io_err.kind()); - std::thread::sleep(Duration::from_millis(50)); - } + }, } } } From 6195b702d644c4f21bffda1309f119d7846d409d Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 3 Mar 2026 09:47:35 -0700 Subject: [PATCH 271/548] Try to fix auto-updates when Explorer.exe holds Zed.exe (#50332) Release Notes: - Windows: make auto-update more robust in the face of apps holding the Zed.exe handle --------- Co-authored-by: Jakub Konka --- Cargo.lock | 1 + Cargo.toml | 1 + crates/auto_update_helper/Cargo.toml | 1 + crates/auto_update_helper/src/updater.rs | 112 ++++++++++++++++++++++- 4 files changed, 112 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 96caec077edd4bdf8c02a3e1ff1fc10340d2b9b0..dcecec352bf1426fb76956f04224c66b04143627 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1352,6 +1352,7 @@ version = "0.1.0" dependencies = [ "anyhow", "log", + "scopeguard", "simplelog", "tempfile", "windows 0.61.3", diff --git a/Cargo.toml b/Cargo.toml index 8e1312f032e19b2c2c189677f144f04dd7f4589c..35180020a8d70d83c113172051d12a85f33c55ca 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -815,6 +815,7 @@ features = [ "Win32_System_Ole", 
"Win32_System_Performance", "Win32_System_Pipes", + "Win32_System_RestartManager", "Win32_System_SystemInformation", "Win32_System_SystemServices", "Win32_System_Threading", diff --git a/crates/auto_update_helper/Cargo.toml b/crates/auto_update_helper/Cargo.toml index 73c38d80dd12e9c42daa42b7e6f2c9d6975cf47b..aa5bf6ac40b0e1ab20cbde510be5d7f389c7ade8 100644 --- a/crates/auto_update_helper/Cargo.toml +++ b/crates/auto_update_helper/Cargo.toml @@ -19,6 +19,7 @@ log.workspace = true simplelog.workspace = true [target.'cfg(target_os = "windows")'.dependencies] +scopeguard = "1.2" windows.workspace = true [target.'cfg(target_os = "windows")'.dev-dependencies] diff --git a/crates/auto_update_helper/src/updater.rs b/crates/auto_update_helper/src/updater.rs index 70d5e97c67169ce9737c274f90bc72cbe7ceedf5..7821c908c40873637c4ac3993c320416e2a4b978 100644 --- a/crates/auto_update_helper/src/updater.rs +++ b/crates/auto_update_helper/src/updater.rs @@ -1,13 +1,22 @@ use std::{ + ffi::OsStr, + os::windows::ffi::OsStrExt, path::Path, sync::LazyLock, time::{Duration, Instant}, }; use anyhow::{Context as _, Result}; -use windows::Win32::{ - Foundation::{HWND, LPARAM, WPARAM}, - UI::WindowsAndMessaging::PostMessageW, +use windows::{ + Win32::{ + Foundation::{HWND, LPARAM, WPARAM}, + System::RestartManager::{ + CCH_RM_SESSION_KEY, RmEndSession, RmGetList, RmRegisterResources, RmShutdown, + RmStartSession, + }, + UI::WindowsAndMessaging::PostMessageW, + }, + core::{PCWSTR, PWSTR}, }; use crate::windows_impl::WM_JOB_UPDATED; @@ -262,9 +271,106 @@ pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| { ] }); +/// Attempts to use Windows Restart Manager to release file handles held by other processes +/// (e.g., Explorer.exe) on the files we need to move during the update. +/// +/// This is a best-effort operation - if it fails, we'll still try the update and rely on +/// the retry logic. 
+fn release_file_handles(app_dir: &Path) -> Result<()> { + // Files that commonly get locked by Explorer or other processes + let files_to_release = [ + app_dir.join("Zed.exe"), + app_dir.join("bin\\Zed.exe"), + app_dir.join("bin\\zed"), + app_dir.join("conpty.dll"), + ]; + + log::info!("Attempting to release file handles using Restart Manager..."); + + let mut session: u32 = 0; + let mut session_key = [0u16; CCH_RM_SESSION_KEY as usize + 1]; + + // Start a Restart Manager session + let err = unsafe { + RmStartSession( + &mut session, + Some(0), + PWSTR::from_raw(session_key.as_mut_ptr()), + ) + }; + if err.is_err() { + anyhow::bail!("RmStartSession failed: {err:?}"); + } + + // Ensure we end the session when done + let _session_guard = scopeguard::guard(session, |s| { + let _ = unsafe { RmEndSession(s) }; + }); + + // Convert paths to wide strings for Windows API + let wide_paths: Vec> = files_to_release + .iter() + .filter(|p| p.exists()) + .map(|p| { + OsStr::new(p) + .encode_wide() + .chain(std::iter::once(0)) + .collect() + }) + .collect(); + + if wide_paths.is_empty() { + log::info!("No files to release handles for"); + return Ok(()); + } + + let pcwstr_paths: Vec = wide_paths + .iter() + .map(|p| PCWSTR::from_raw(p.as_ptr())) + .collect(); + + // Register the files we want to modify + let err = unsafe { RmRegisterResources(session, Some(&pcwstr_paths), None, None) }; + if err.is_err() { + anyhow::bail!("RmRegisterResources failed: {err:?}"); + } + + // Check if any processes are using these files + let mut needed: u32 = 0; + let mut count: u32 = 0; + let mut reboot_reasons: u32 = 0; + let _ = unsafe { RmGetList(session, &mut needed, &mut count, None, &mut reboot_reasons) }; + + if needed == 0 { + log::info!("No processes are holding handles to the files"); + return Ok(()); + } + + log::info!( + "{} process(es) are holding handles to the files, requesting release...", + needed + ); + + // Request processes to release their handles + // RmShutdown with flags=0 
asks applications to release handles gracefully + // For Explorer, this typically releases icon cache handles without closing Explorer + let err = unsafe { RmShutdown(session, 0, None) }; + if err.is_err() { + anyhow::bail!("RmShutdown failed: {:?}", err); + } + + log::info!("Successfully requested handle release"); + Ok(()) +} + pub(crate) fn perform_update(app_dir: &Path, hwnd: Option, launch: bool) -> Result<()> { let hwnd = hwnd.map(|ptr| HWND(ptr as _)); + // Try to release file handles before starting the update + if let Err(e) = release_file_handles(app_dir) { + log::warn!("Restart Manager failed (will continue anyway): {}", e); + } + let mut last_successful_job = None; 'outer: for (i, job) in JOBS.iter().enumerate() { let start = Instant::now(); From e20905f2848183c856146634bc5e387cb1c831cb Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 3 Mar 2026 18:16:42 +0100 Subject: [PATCH 272/548] Only use `StreamingEditFileTool` when streaming is available (#50616) Release Notes: - N/A --- crates/agent/src/thread.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 2e693a85cd1f86d232e392860d8bd83509ce131a..c57bd1e99b9ae4fd1a93214e2a5d5937d1ab0274 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -2617,7 +2617,8 @@ impl Thread { } } - let use_streaming_edit_tool = cx.has_flag::(); + let use_streaming_edit_tool = + cx.has_flag::() && model.supports_streaming_tools(); let mut tools = self .tools From 528bf7c251362ce75f9f272a3dfafa21bf7fe695 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Tue, 3 Mar 2026 19:27:52 +0200 Subject: [PATCH 273/548] ep: Fix fetching rated-after (#50617) Release Notes: - N/A --- crates/edit_prediction_cli/src/pull_examples.rs | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/crates/edit_prediction_cli/src/pull_examples.rs b/crates/edit_prediction_cli/src/pull_examples.rs index 
2f371675b29015795beef550ce5e3956c63751f9..cccd351dcdeda0dbf059d851a44b02bc1e558654 100644 --- a/crates/edit_prediction_cli/src/pull_examples.rs +++ b/crates/edit_prediction_cli/src/pull_examples.rs @@ -34,7 +34,7 @@ pub struct MinCaptureVersion { pub patch: u32, } -const DEFAULT_STATEMENT_TIMEOUT_SECONDS: u64 = 120; +const DEFAULT_STATEMENT_TIMEOUT_SECONDS: u64 = 240; const SETTLED_STATEMENT_TIMEOUT_SECONDS: u64 = 240; pub(crate) const POLL_INTERVAL: Duration = Duration::from_secs(2); pub(crate) const MAX_POLL_ATTEMPTS: usize = 120; @@ -715,7 +715,7 @@ pub async fn fetch_rated_examples_after( AND rated.event_properties:inputs IS NOT NULL AND rated.event_properties:inputs:cursor_excerpt IS NOT NULL AND rated.event_properties:output IS NOT NULL - AND rated.event_properties:can_collect_data = true + AND rated.event_properties:inputs:can_collect_data = true ORDER BY rated.time ASC LIMIT ? OFFSET ? @@ -823,11 +823,11 @@ fn rated_examples_from_response<'a>( let environment = get_string("environment"); let zed_version = get_string("zed_version"); - match (inputs, output.clone(), rating.clone(), device_id.clone(), time.clone()) { - (Some(inputs), Some(output), Some(rating), Some(device_id), Some(time)) => { + match (inputs, output.clone(), rating.clone(), time.clone()) { + (Some(inputs), Some(output), Some(rating), Some(time)) => { Some(build_rated_example( request_id, - device_id, + device_id.unwrap_or_default(), time, inputs, output, @@ -840,11 +840,10 @@ fn rated_examples_from_response<'a>( } _ => { log::warn!( - "skipping row {row_index}: missing fields - inputs={:?} output={:?} rating={:?} device_id={:?} time={:?}", + "skipping row {row_index}: missing fields - inputs={:?} output={:?} rating={:?} time={:?}", inputs_json.is_some(), output.is_some(), rating.is_some(), - device_id.is_some(), time.is_some(), ); None From d312312fa8ded4ea0e9c1ef1f355d7b3f2735e94 Mon Sep 17 00:00:00 2001 From: Justin Su Date: Tue, 3 Mar 2026 12:56:51 -0500 Subject: [PATCH 274/548] Add 
ctrl-enter keybind (macOS) to type newline in search bars (#50420) I've been using https://github.com/zed-industries/zed/issues/15046#issuecomment-3259286451 for half a year now, and it seems worthy of inclusion in the default keymap. Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Added `ctrl-enter` keybind on macOS to type a newline in search bars --- assets/keymaps/default-macos.json | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 5f210cb4da35f9909767035c941289ee24a2ee3f..410c13687fbe0c19fbcb4c155ebba36dd068354c 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -448,6 +448,13 @@ "down": "search::NextHistoryQuery", }, }, + { + "context": "BufferSearchBar || ProjectSearchBar", + "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "editor::Newline", + }, + }, { "context": "ProjectSearchBar", "use_key_equivalents": true, From 0a1a92131fc7bbe6b80c7b590d672814d7bbff5e Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Tue, 3 Mar 2026 19:01:28 +0100 Subject: [PATCH 275/548] git: Fix remote worktree support (#50614) The main issue is that we weren't forwarding the proto messages through the collab server to the host. After fixing that I added integration tests to cover local worktrees, remote worktrees, and ssh worktrees. I also fixed a bug with FakeRepository where it wouldn't name its current branch as a worktree when calling git worktree, which doesn't match the behavior of the git binary. 
Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects Release Notes: - git: Fix bug that caused the git worktree picker from displaying and creating worktrees over collab --- crates/collab/src/rpc.rs | 2 + crates/collab/tests/integration/git_tests.rs | 143 ++++++++++++++- .../remote_editing_collaboration_tests.rs | 126 ++++++++++++- crates/fs/Cargo.toml | 2 +- crates/fs/src/fake_git_repo.rs | 168 +++--------------- crates/fs/tests/integration/fake_git_repo.rs | 141 ++++++++++++++- crates/git/src/repository.rs | 3 + crates/project/tests/integration/git_store.rs | 119 +++++++++++++ 8 files changed, 555 insertions(+), 149 deletions(-) diff --git a/crates/collab/src/rpc.rs b/crates/collab/src/rpc.rs index 087dbe2a0ba23851689e75401c62b64775cf2282..b521f6b083ae311d98ec46c900ce821fd8042e4a 100644 --- a/crates/collab/src/rpc.rs +++ b/crates/collab/src/rpc.rs @@ -437,6 +437,8 @@ impl Server { .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) + .add_request_handler(forward_read_only_project_request::) + .add_request_handler(forward_mutating_project_request::) .add_request_handler(forward_mutating_project_request::) .add_message_handler(broadcast_project_message_from_host::) .add_message_handler(update_context) diff --git a/crates/collab/tests/integration/git_tests.rs b/crates/collab/tests/integration/git_tests.rs index f3abb5bc3f3e1a12e7ecb56c985f2cff46582cee..6792eb92484d34f3085287b57f48a5761e760c92 100644 --- a/crates/collab/tests/integration/git_tests.rs +++ b/crates/collab/tests/integration/git_tests.rs @@ -1,9 +1,9 @@ -use std::path::Path; +use std::path::{Path, PathBuf}; use call::ActiveCall; use git::status::{FileStatus, StatusCode, TrackedStatus}; use 
git_ui::project_diff::ProjectDiff; -use gpui::{AppContext as _, TestAppContext, VisualTestContext}; +use gpui::{AppContext as _, BackgroundExecutor, TestAppContext, VisualTestContext}; use project::ProjectPath; use serde_json::json; use util::{path, rel_path::rel_path}; @@ -141,3 +141,142 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) ); }); } + +#[gpui::test] +async fn test_remote_git_worktrees( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + client_a + .fs() + .insert_tree( + path!("/project"), + json!({ ".git": {}, "file.txt": "content" }), + ) + .await; + + let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await; + + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + executor.run_until_parked(); + + let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap()); + + // Initially only the main worktree (the repo itself) should be present + let worktrees = cx_b + .update(|cx| repo_b.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from(path!("/project"))); + + // Client B creates a git worktree via the remote project + let worktree_directory = PathBuf::from(path!("/project")); + cx_b.update(|cx| { + repo_b.update(cx, |repository, _| { + repository.create_worktree( + "feature-branch".to_string(), + worktree_directory.clone(), + Some("abc123".to_string()), + ) + }) + }) + 
.await + .unwrap() + .unwrap(); + + executor.run_until_parked(); + + // Client B lists worktrees — should see main + the one just created + let worktrees = cx_b + .update(|cx| repo_b.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from(path!("/project"))); + assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch")); + assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch"); + assert_eq!(worktrees[1].sha.as_ref(), "abc123"); + + // Verify from the host side that the worktree was actually created + let host_worktrees = { + let repo_a = cx_a.update(|cx| { + project_a + .read(cx) + .repositories(cx) + .values() + .next() + .unwrap() + .clone() + }); + cx_a.update(|cx| repo_a.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap() + }; + assert_eq!(host_worktrees.len(), 2); + assert_eq!(host_worktrees[0].path, PathBuf::from(path!("/project"))); + assert_eq!( + host_worktrees[1].path, + worktree_directory.join("feature-branch") + ); + + // Client B creates a second git worktree without an explicit commit + cx_b.update(|cx| { + repo_b.update(cx, |repository, _| { + repository.create_worktree( + "bugfix-branch".to_string(), + worktree_directory.clone(), + None, + ) + }) + }) + .await + .unwrap() + .unwrap(); + + executor.run_until_parked(); + + // Client B lists worktrees — should now have main + two created + let worktrees = cx_b + .update(|cx| repo_b.update(cx, |repository, _| repository.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 3); + + let feature_worktree = worktrees + .iter() + .find(|worktree| worktree.ref_name.as_ref() == "refs/heads/feature-branch") + .expect("should find feature-branch worktree"); + assert_eq!( + feature_worktree.path, + worktree_directory.join("feature-branch") + ); + + let bugfix_worktree = worktrees + .iter() + .find(|worktree| 
worktree.ref_name.as_ref() == "refs/heads/bugfix-branch") + .expect("should find bugfix-branch worktree"); + assert_eq!( + bugfix_worktree.path, + worktree_directory.join("bugfix-branch") + ); + assert_eq!(bugfix_worktree.sha.as_ref(), "fake-sha"); +} diff --git a/crates/collab/tests/integration/remote_editing_collaboration_tests.rs b/crates/collab/tests/integration/remote_editing_collaboration_tests.rs index 4556c740ec74f6fb1bc8a2c760812376dae6b4a8..6825c468e783ee8d3a2a6107a031accfc108abd0 100644 --- a/crates/collab/tests/integration/remote_editing_collaboration_tests.rs +++ b/crates/collab/tests/integration/remote_editing_collaboration_tests.rs @@ -33,7 +33,7 @@ use settings::{ SettingsStore, }; use std::{ - path::Path, + path::{Path, PathBuf}, sync::{ Arc, atomic::{AtomicUsize, Ordering}, @@ -396,6 +396,130 @@ async fn test_ssh_collaboration_git_branches( }); } +#[gpui::test] +async fn test_ssh_collaboration_git_worktrees( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + server_cx: &mut TestAppContext, +) { + cx_a.set_name("a"); + cx_b.set_name("b"); + server_cx.set_name("server"); + + cx_a.update(|cx| { + release_channel::init(semver::Version::new(0, 0, 0), cx); + }); + server_cx.update(|cx| { + release_channel::init(semver::Version::new(0, 0, 0), cx); + }); + + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)]) + .await; + + let (opts, server_ssh, _) = RemoteClient::fake_server(cx_a, server_cx); + let remote_fs = FakeFs::new(server_cx.executor()); + remote_fs + .insert_tree("/project", json!({ ".git": {}, "file.txt": "content" })) + .await; + + server_cx.update(HeadlessProject::init); + let languages = Arc::new(LanguageRegistry::new(server_cx.executor())); + let headless_project = server_cx.new(|cx| { + 
HeadlessProject::new( + HeadlessAppState { + session: server_ssh, + fs: remote_fs.clone(), + http_client: Arc::new(BlockedHttpClient), + node_runtime: NodeRuntime::unavailable(), + languages, + extension_host_proxy: Arc::new(ExtensionHostProxy::new()), + startup_time: std::time::Instant::now(), + }, + false, + cx, + ) + }); + + let client_ssh = RemoteClient::connect_mock(opts, cx_a).await; + let (project_a, _) = client_a + .build_ssh_project("/project", client_ssh, false, cx_a) + .await; + + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + executor.run_until_parked(); + + let repo_b = cx_b.update(|cx| project_b.read(cx).active_repository(cx).unwrap()); + + let worktrees = cx_b + .update(|cx| repo_b.update(cx, |repo, _| repo.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 1); + + let worktree_directory = PathBuf::from("/project"); + cx_b.update(|cx| { + repo_b.update(cx, |repo, _| { + repo.create_worktree( + "feature-branch".to_string(), + worktree_directory.clone(), + Some("abc123".to_string()), + ) + }) + }) + .await + .unwrap() + .unwrap(); + + executor.run_until_parked(); + + let worktrees = cx_b + .update(|cx| repo_b.update(cx, |repo, _| repo.worktrees())) + .await + .unwrap() + .unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[1].path, worktree_directory.join("feature-branch")); + assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch"); + assert_eq!(worktrees[1].sha.as_ref(), "abc123"); + + let server_worktrees = { + let server_repo = server_cx.update(|cx| { + headless_project.update(cx, |headless_project, cx| { + headless_project + .git_store + .read(cx) + .repositories() + .values() + .next() + .unwrap() + .clone() + }) + }); + server_cx + .update(|cx| server_repo.update(cx, |repo, _| 
repo.worktrees())) + .await + .unwrap() + .unwrap() + }; + assert_eq!(server_worktrees.len(), 2); + assert_eq!( + server_worktrees[1].path, + worktree_directory.join("feature-branch") + ); +} + #[gpui::test] async fn test_ssh_collaboration_formatting_with_prettier( executor: BackgroundExecutor, diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml index 6355524e4f328df0ca7fcf24c1df0557676ba6a6..04cae2dd2ad18f85a7c2ed663c1c3482febb22d3 100644 --- a/crates/fs/Cargo.toml +++ b/crates/fs/Cargo.toml @@ -58,4 +58,4 @@ gpui = { workspace = true, features = ["test-support"] } git = { workspace = true, features = ["test-support"] } [features] -test-support = ["gpui/test-support", "git/test-support"] +test-support = ["gpui/test-support", "git/test-support", "util/test-support"] diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 12cd67cdae1a250d07468047617c8cc7a52737fa..99295c69d45427c799e3d850d605f63d3950ee57 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -406,7 +406,31 @@ impl GitRepository for FakeGitRepository { } fn worktrees(&self) -> BoxFuture<'_, Result>> { - self.with_state_async(false, |state| Ok(state.worktrees.clone())) + let dot_git_path = self.dot_git_path.clone(); + self.with_state_async(false, move |state| { + let work_dir = dot_git_path + .parent() + .map(PathBuf::from) + .unwrap_or(dot_git_path); + let head_sha = state + .refs + .get("HEAD") + .cloned() + .unwrap_or_else(|| "0000000".to_string()); + let branch_ref = state + .current_branch_name + .as_ref() + .map(|name| format!("refs/heads/{name}")) + .unwrap_or_else(|| "refs/heads/main".to_string()); + let main_worktree = Worktree { + path: work_dir, + ref_name: branch_ref.into(), + sha: head_sha.into(), + }; + let mut all = vec![main_worktree]; + all.extend(state.worktrees.iter().cloned()); + Ok(all) + }) } fn create_worktree( @@ -1012,145 +1036,3 @@ impl GitRepository for FakeGitRepository { anyhow::bail!("commit_data_reader not 
supported for FakeGitRepository") } } - -#[cfg(test)] -mod tests { - use super::*; - use crate::{FakeFs, Fs}; - use gpui::TestAppContext; - use serde_json::json; - use std::path::Path; - - #[gpui::test] - async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) { - let worktree_dir_settings = &["../worktrees", ".git/zed-worktrees", "my-worktrees/"]; - - for worktree_dir_setting in worktree_dir_settings { - let fs = FakeFs::new(cx.executor()); - fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"})) - .await; - let repo = fs - .open_repo(Path::new("/project/.git"), None) - .expect("should open fake repo"); - - // Initially no worktrees - let worktrees = repo.worktrees().await.unwrap(); - assert!(worktrees.is_empty()); - - let expected_dir = git::repository::resolve_worktree_directory( - Path::new("/project"), - worktree_dir_setting, - ); - - // Create a worktree - repo.create_worktree( - "feature-branch".to_string(), - expected_dir.clone(), - Some("abc123".to_string()), - ) - .await - .unwrap(); - - // List worktrees — should have one - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 1); - assert_eq!( - worktrees[0].path, - expected_dir.join("feature-branch"), - "failed for worktree_directory setting: {worktree_dir_setting:?}" - ); - assert_eq!(worktrees[0].ref_name.as_ref(), "refs/heads/feature-branch"); - assert_eq!(worktrees[0].sha.as_ref(), "abc123"); - - // Directory should exist in FakeFs after create - assert!( - fs.is_dir(&expected_dir.join("feature-branch")).await, - "worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}" - ); - - // Create a second worktree (without explicit commit) - repo.create_worktree("bugfix-branch".to_string(), expected_dir.clone(), None) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 2); - assert!( - fs.is_dir(&expected_dir.join("bugfix-branch")).await, - "second worktree directory should be 
created in FakeFs for setting {worktree_dir_setting:?}" - ); - - // Rename the first worktree - repo.rename_worktree( - expected_dir.join("feature-branch"), - expected_dir.join("renamed-branch"), - ) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 2); - assert!( - worktrees - .iter() - .any(|w| w.path == expected_dir.join("renamed-branch")), - "renamed worktree should exist at new path for setting {worktree_dir_setting:?}" - ); - assert!( - worktrees - .iter() - .all(|w| w.path != expected_dir.join("feature-branch")), - "old path should no longer exist for setting {worktree_dir_setting:?}" - ); - - // Directory should be moved in FakeFs after rename - assert!( - !fs.is_dir(&expected_dir.join("feature-branch")).await, - "old worktree directory should not exist after rename for setting {worktree_dir_setting:?}" - ); - assert!( - fs.is_dir(&expected_dir.join("renamed-branch")).await, - "new worktree directory should exist after rename for setting {worktree_dir_setting:?}" - ); - - // Rename a nonexistent worktree should fail - let result = repo - .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere")) - .await; - assert!(result.is_err()); - - // Remove a worktree - repo.remove_worktree(expected_dir.join("renamed-branch"), false) - .await - .unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert_eq!(worktrees.len(), 1); - assert_eq!(worktrees[0].path, expected_dir.join("bugfix-branch")); - - // Directory should be removed from FakeFs after remove - assert!( - !fs.is_dir(&expected_dir.join("renamed-branch")).await, - "worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" - ); - - // Remove a nonexistent worktree should fail - let result = repo - .remove_worktree(PathBuf::from("/nonexistent"), false) - .await; - assert!(result.is_err()); - - // Remove the last worktree - repo.remove_worktree(expected_dir.join("bugfix-branch"), false) - .await - 
.unwrap(); - - let worktrees = repo.worktrees().await.unwrap(); - assert!(worktrees.is_empty()); - assert!( - !fs.is_dir(&expected_dir.join("bugfix-branch")).await, - "last worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" - ); - } - } -} diff --git a/crates/fs/tests/integration/fake_git_repo.rs b/crates/fs/tests/integration/fake_git_repo.rs index 36dfcaf168b4f0190c5c49bf4798fac7bc9bd37b..bae7f2fc94dd5161793f85f64cc0a1448a187134 100644 --- a/crates/fs/tests/integration/fake_git_repo.rs +++ b/crates/fs/tests/integration/fake_git_repo.rs @@ -1,9 +1,146 @@ use fs::{FakeFs, Fs}; -use gpui::BackgroundExecutor; +use gpui::{BackgroundExecutor, TestAppContext}; use serde_json::json; -use std::path::Path; +use std::path::{Path, PathBuf}; use util::path; +#[gpui::test] +async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) { + let worktree_dir_settings = &["../worktrees", ".git/zed-worktrees", "my-worktrees/"]; + + for worktree_dir_setting in worktree_dir_settings { + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/project", json!({".git": {}, "file.txt": "content"})) + .await; + let repo = fs + .open_repo(Path::new("/project/.git"), None) + .expect("should open fake repo"); + + // Initially only the main worktree exists + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + + let expected_dir = git::repository::resolve_worktree_directory( + Path::new("/project"), + worktree_dir_setting, + ); + + // Create a worktree + repo.create_worktree( + "feature-branch".to_string(), + expected_dir.clone(), + Some("abc123".to_string()), + ) + .await + .unwrap(); + + // List worktrees — should have main + one created + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + assert_eq!( + worktrees[1].path, + expected_dir.join("feature-branch"), + "failed 
for worktree_directory setting: {worktree_dir_setting:?}" + ); + assert_eq!(worktrees[1].ref_name.as_ref(), "refs/heads/feature-branch"); + assert_eq!(worktrees[1].sha.as_ref(), "abc123"); + + // Directory should exist in FakeFs after create + assert!( + fs.is_dir(&expected_dir.join("feature-branch")).await, + "worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}" + ); + + // Create a second worktree (without explicit commit) + repo.create_worktree("bugfix-branch".to_string(), expected_dir.clone(), None) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 3); + assert!( + fs.is_dir(&expected_dir.join("bugfix-branch")).await, + "second worktree directory should be created in FakeFs for setting {worktree_dir_setting:?}" + ); + + // Rename the first worktree + repo.rename_worktree( + expected_dir.join("feature-branch"), + expected_dir.join("renamed-branch"), + ) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 3); + assert!( + worktrees + .iter() + .any(|w| w.path == expected_dir.join("renamed-branch")), + "renamed worktree should exist at new path for setting {worktree_dir_setting:?}" + ); + assert!( + worktrees + .iter() + .all(|w| w.path != expected_dir.join("feature-branch")), + "old path should no longer exist for setting {worktree_dir_setting:?}" + ); + + // Directory should be moved in FakeFs after rename + assert!( + !fs.is_dir(&expected_dir.join("feature-branch")).await, + "old worktree directory should not exist after rename for setting {worktree_dir_setting:?}" + ); + assert!( + fs.is_dir(&expected_dir.join("renamed-branch")).await, + "new worktree directory should exist after rename for setting {worktree_dir_setting:?}" + ); + + // Rename a nonexistent worktree should fail + let result = repo + .rename_worktree(PathBuf::from("/nonexistent"), PathBuf::from("/somewhere")) + .await; + assert!(result.is_err()); + + // 
Remove a worktree + repo.remove_worktree(expected_dir.join("renamed-branch"), false) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 2); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + assert_eq!(worktrees[1].path, expected_dir.join("bugfix-branch")); + + // Directory should be removed from FakeFs after remove + assert!( + !fs.is_dir(&expected_dir.join("renamed-branch")).await, + "worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" + ); + + // Remove a nonexistent worktree should fail + let result = repo + .remove_worktree(PathBuf::from("/nonexistent"), false) + .await; + assert!(result.is_err()); + + // Remove the last worktree + repo.remove_worktree(expected_dir.join("bugfix-branch"), false) + .await + .unwrap(); + + let worktrees = repo.worktrees().await.unwrap(); + assert_eq!(worktrees.len(), 1); + assert_eq!(worktrees[0].path, PathBuf::from("/project")); + assert!( + !fs.is_dir(&expected_dir.join("bugfix-branch")).await, + "last worktree directory should be removed from FakeFs for setting {worktree_dir_setting:?}" + ); + } +} + #[gpui::test] async fn test_checkpoints(executor: BackgroundExecutor) { let fs = FakeFs::new(executor); diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index bd07555d05b759a33080b9ae9f166145c3d26d14..6dba1400dffe1fd00844dd7241f39f48a7a759a6 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -303,6 +303,7 @@ impl Branch { pub struct Worktree { pub path: PathBuf, pub ref_name: SharedString, + // todo(git_worktree) This type should be a Oid pub sha: SharedString, } @@ -340,6 +341,8 @@ pub fn parse_worktrees_from_str>(raw_worktrees: T) -> Vec Date: Tue, 3 Mar 2026 21:09:26 +0200 Subject: [PATCH 276/548] ep: Predict by querying Baseten directly (#50626) This can be used like `ep predict --provider baseten:V0131GitMergeMarkersPrefix`. 
Since it doesn't require load_project, it can be used with captured requests. Release Notes: - N/A --- .../edit_prediction_cli/src/format_prompt.rs | 20 ++-- crates/edit_prediction_cli/src/main.rs | 9 ++ crates/edit_prediction_cli/src/predict.rs | 110 +++++++++++++++++- 3 files changed, 128 insertions(+), 11 deletions(-) diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index ecacd963023d7d113ea5ad77b61fd1d88306fc95..bee79ae8160eeb815a3739b53a5441f6063fb622 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -53,18 +53,22 @@ pub async fn run_format_prompt( let prompt = format_zeta_prompt(prompt_inputs, zeta_format); let prefill = zeta_prompt::get_prefill(prompt_inputs, zeta_format); - let (expected_patch, expected_cursor_offset) = example + let expected_output = example .spec .expected_patches_with_cursor_positions() .into_iter() .next() - .context("expected patches is empty")?; - let expected_output = zeta2_output_for_patch( - prompt_inputs, - &expected_patch, - expected_cursor_offset, - zeta_format, - )?; + .and_then(|(expected_patch, expected_cursor_offset)| { + zeta2_output_for_patch( + prompt_inputs, + &expected_patch, + expected_cursor_offset, + zeta_format, + ) + .ok() + }) + .unwrap_or_default(); + let rejected_output = example.spec.rejected_patch.as_ref().and_then(|patch| { zeta2_output_for_patch(prompt_inputs, patch, None, zeta_format).ok() }); diff --git a/crates/edit_prediction_cli/src/main.rs b/crates/edit_prediction_cli/src/main.rs index 207a69328fb07277c39463c0c6a460862c95fe42..8bb4b2a8e2f50d448fc314a70e2fc94cfa2c3d71 100644 --- a/crates/edit_prediction_cli/src/main.rs +++ b/crates/edit_prediction_cli/src/main.rs @@ -358,6 +358,7 @@ enum PredictionProvider { Mercury, Zeta1, Zeta2(ZetaFormat), + Baseten(ZetaFormat), Teacher(TeacherBackend), TeacherNonBatching(TeacherBackend), Repair, @@ -376,6 +377,7 @@ impl std::fmt::Display 
for PredictionProvider { PredictionProvider::Mercury => write!(f, "mercury"), PredictionProvider::Zeta1 => write!(f, "zeta1"), PredictionProvider::Zeta2(format) => write!(f, "zeta2:{format}"), + PredictionProvider::Baseten(format) => write!(f, "baseten:{format}"), PredictionProvider::Teacher(backend) => write!(f, "teacher:{backend}"), PredictionProvider::TeacherNonBatching(backend) => { write!(f, "teacher-non-batching:{backend}") @@ -415,6 +417,13 @@ impl std::str::FromStr for PredictionProvider { Ok(PredictionProvider::TeacherNonBatching(backend)) } "repair" => Ok(PredictionProvider::Repair), + "baseten" => { + let format = arg + .map(ZetaFormat::parse) + .transpose()? + .unwrap_or(ZetaFormat::default()); + Ok(PredictionProvider::Baseten(format)) + } _ => { anyhow::bail!( "unknown provider `{provider}`. Valid options: sweep, mercury, zeta1, zeta2, zeta2:, teacher, teacher:, teacher-non-batching, repair\n\ diff --git a/crates/edit_prediction_cli/src/predict.rs b/crates/edit_prediction_cli/src/predict.rs index 02ba24b8a4f2627b9542254e3d118981737f8318..8f537dc0817a9cb0b4fd74348ae5e43d4f63beb9 100644 --- a/crates/edit_prediction_cli/src/predict.rs +++ b/crates/edit_prediction_cli/src/predict.rs @@ -6,14 +6,18 @@ use crate::{ headless::EpAppState, load_project::run_load_project, openai_client::OpenAiClient, + parse_output::parse_prediction_output, paths::{LATEST_EXAMPLE_RUN_DIR, RUN_DIR}, - progress::{ExampleProgress, InfoStyle, Step}, + progress::{ExampleProgress, InfoStyle, Step, StepProgress}, retrieve_context::run_context_retrieval, }; use anyhow::Context as _; +use cloud_llm_client::predict_edits_v3::{RawCompletionRequest, RawCompletionResponse}; use edit_prediction::{DebugEvent, EditPredictionStore, Zeta2RawConfig}; -use futures::{FutureExt as _, StreamExt as _, future::Shared}; +use futures::{AsyncReadExt as _, FutureExt as _, StreamExt as _, future::Shared}; use gpui::{AppContext as _, AsyncApp, Task}; +use http_client::{AsyncBody, HttpClient, Method}; +use 
reqwest_client::ReqwestClient; use std::{ fs, sync::{ @@ -79,6 +83,22 @@ pub async fn run_prediction( .await; } + if let PredictionProvider::Baseten(format) = provider { + run_format_prompt( + example, + &FormatPromptArgs { + provider: PredictionProvider::Zeta2(format), + }, + app_state.clone(), + example_progress, + cx, + ) + .await?; + + let step_progress = example_progress.start(Step::Predict); + return predict_baseten(example, format, &step_progress).await; + } + run_load_project(example, app_state.clone(), example_progress, cx.clone()).await?; run_context_retrieval(example, app_state.clone(), example_progress, cx.clone()).await?; @@ -116,7 +136,8 @@ pub async fn run_prediction( PredictionProvider::Mercury => edit_prediction::EditPredictionModel::Mercury, PredictionProvider::Teacher(..) | PredictionProvider::TeacherNonBatching(..) - | PredictionProvider::Repair => { + | PredictionProvider::Repair + | PredictionProvider::Baseten(_) => { unreachable!() } }; @@ -480,6 +501,89 @@ async fn predict_openai( Ok(()) } +pub async fn predict_baseten( + example: &mut Example, + format: ZetaFormat, + step_progress: &StepProgress, +) -> anyhow::Result<()> { + let model_id = + std::env::var("ZED_ZETA_MODEL").context("ZED_ZETA_MODEL environment variable required")?; + + let api_key = + std::env::var("BASETEN_API_KEY").context("BASETEN_API_KEY environment variable not set")?; + + let prompt = example.prompt.as_ref().context("Prompt is required")?; + let prompt_text = prompt.input.clone(); + let prefill = prompt.prefill.clone().unwrap_or_default(); + + step_progress.set_substatus("running prediction via baseten"); + + let environment: String = <&'static str>::from(&format).to_lowercase(); + let url = format!( + "https://model-{model_id}.api.baseten.co/environments/{environment}/sync/v1/completions" + ); + + let request_body = RawCompletionRequest { + model: model_id, + prompt: prompt_text.clone(), + max_tokens: Some(2048), + temperature: Some(0.), + stop: vec![], + environment: 
None, + }; + + let body_bytes = + serde_json::to_vec(&request_body).context("Failed to serialize request body")?; + + let http_client: Arc = Arc::new(ReqwestClient::new()); + let request = http_client::Request::builder() + .method(Method::POST) + .uri(&url) + .header("Content-Type", "application/json") + .header("Authorization", format!("Api-Key {api_key}")) + .body(AsyncBody::from(body_bytes))?; + + let mut response = http_client.send(request).await?; + let status = response.status(); + + let mut body = String::new(); + response + .body_mut() + .read_to_string(&mut body) + .await + .context("Failed to read Baseten response body")?; + + if !status.is_success() { + anyhow::bail!("Baseten API returned {status}: {body}"); + } + + let completion: RawCompletionResponse = + serde_json::from_str(&body).context("Failed to parse Baseten response")?; + + let actual_output = completion + .choices + .into_iter() + .next() + .map(|choice| choice.text) + .unwrap_or_default(); + + let actual_output = format!("{prefill}{actual_output}"); + + let (actual_patch, actual_cursor) = + parse_prediction_output(example, &actual_output, PredictionProvider::Zeta2(format))?; + + let prediction = ExamplePrediction { + actual_patch: Some(actual_patch), + actual_output, + actual_cursor, + error: None, + provider: PredictionProvider::Baseten(format), + }; + + example.predictions.push(prediction); + Ok(()) +} + pub async fn sync_batches(provider: Option<&PredictionProvider>) -> anyhow::Result<()> { match provider { Some(PredictionProvider::Teacher(backend)) => match backend { From 3d3a66dc9811be2afd2c3690f68e5aef498bc01f Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Tue, 3 Mar 2026 20:46:08 +0100 Subject: [PATCH 277/548] collab: Fix unable to rejoin shared project after leaving a call (#50630) When a downstream project was disconnected from the host (e.g. 
the guest left the call), `disconnected_from_host_internal` did not clear `client_subscriptions`. These subscriptions hold entries in the `Client`'s entity subscription map, so a subsequent `join_remote_project` with the same project ID would fail with "already subscribed to entity". The fix adds `self.client_subscriptions.clear()` to `disconnected_from_host_internal`, matching what `unshare_internal` already does for the host side. Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects Release Notes: - collab: Fix unable to rejoin project bug ("already subscribed to entity") --- crates/collab/tests/integration/git_tests.rs | 1 - .../tests/integration/integration_tests.rs | 86 +++++++++++++++++++ crates/project/src/project.rs | 6 ++ 3 files changed, 92 insertions(+), 1 deletion(-) diff --git a/crates/collab/tests/integration/git_tests.rs b/crates/collab/tests/integration/git_tests.rs index 6792eb92484d34f3085287b57f48a5761e760c92..6e50e41bade5f5dfdf124f5a6d659e81fc2ce0f6 100644 --- a/crates/collab/tests/integration/git_tests.rs +++ b/crates/collab/tests/integration/git_tests.rs @@ -9,7 +9,6 @@ use serde_json::json; use util::{path, rel_path::rel_path}; use workspace::{MultiWorkspace, Workspace}; -// use crate::TestServer; #[gpui::test] diff --git a/crates/collab/tests/integration/integration_tests.rs b/crates/collab/tests/integration/integration_tests.rs index c26f20c1e294326f275dbfda1d2d41603719cd3e..3bad9c82c26392a935f67efc578b5d293b2cab3d 100644 --- a/crates/collab/tests/integration/integration_tests.rs +++ b/crates/collab/tests/integration/integration_tests.rs @@ -7205,3 +7205,89 @@ async fn test_remote_git_branches( assert_eq!(host_branch.name(), "totally-new-branch"); } + +#[gpui::test] +async fn test_guest_can_rejoin_shared_project_after_leaving_call( + executor: BackgroundExecutor, + 
cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + client_a + .fs() + .insert_tree( + path!("/project"), + json!({ + "file.txt": "hello\n", + }), + ) + .await; + + let (project_a, _worktree_id) = client_a.build_local_project(path!("/project"), cx_a).await; + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + + let _project_b = client_b.join_remote_project(project_id, cx_b).await; + executor.run_until_parked(); + + // third client joins call to prevent room from being torn down + let _project_c = client_c.join_remote_project(project_id, cx_c).await; + executor.run_until_parked(); + + let active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(cx_b, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + let user_id_b = client_b.current_user_id(cx_b).to_proto(); + let active_call_a = cx_a.read(ActiveCall::global); + active_call_a + .update(cx_a, |call, cx| call.invite(user_id_b, None, cx)) + .await + .unwrap(); + executor.run_until_parked(); + let active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + executor.run_until_parked(); + + let _project_b2 = client_b.join_remote_project(project_id, cx_b).await; + executor.run_until_parked(); + + project_a.read_with(cx_a, |project, _| { + let guest_count = project + .collaborators() + .values() + .filter(|c| !c.is_host) + .count(); + + assert_eq!( + guest_count, 2, + "host should have 
exactly one guest collaborator after rejoin" + ); + }); + + _project_b.read_with(cx_b, |project, _| { + assert_eq!( + project.client_subscriptions().len(), + 0, + "We should clear all host subscriptions after leaving the project" + ); + }) +} diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 9e37802213dfb8df5cf63af5648044ae8ec65ecb..756f095511a9688678df013458710e69d720c52e 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -1942,6 +1942,11 @@ impl Project { } } + #[cfg(feature = "test-support")] + pub fn client_subscriptions(&self) -> &Vec { + &self.client_subscriptions + } + #[cfg(feature = "test-support")] pub async fn example( root_paths: impl IntoIterator, @@ -2741,6 +2746,7 @@ impl Project { } = &mut self.client_state { *sharing_has_stopped = true; + self.client_subscriptions.clear(); self.collaborators.clear(); self.worktree_store.update(cx, |store, cx| { store.disconnected_from_host(cx); From a5a1977e985fbabcac2aacd7786f460df28c7eba Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Tue, 3 Mar 2026 14:28:48 -0600 Subject: [PATCH 278/548] ep: API keys for OpenAI compatible (#50615) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Added support for providing an API key to OpenAI-compatible edit prediction providers --- crates/edit_prediction/src/edit_prediction.rs | 1 + crates/edit_prediction/src/fim.rs | 11 +- .../edit_prediction/src/open_ai_compatible.rs | 133 ++++++++++++++++++ crates/edit_prediction/src/zeta.rs | 78 ++-------- .../src/edit_prediction_button.rs | 8 +- .../pages/edit_prediction_provider_setup.rs | 115 ++++++++------- 
.../zed/src/zed/edit_prediction_registry.rs | 5 +- 7 files changed, 230 insertions(+), 121 deletions(-) create mode 100644 crates/edit_prediction/src/open_ai_compatible.rs diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index e6e3a9abdf83deb785cd56d358b065973682b8cc..74988d65933b3bbbc2507077a74dfeb94089ab63 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -69,6 +69,7 @@ pub mod sweep_ai; pub mod udiff; mod capture_example; +pub mod open_ai_compatible; mod zed_edit_prediction_delegate; pub mod zeta; diff --git a/crates/edit_prediction/src/fim.rs b/crates/edit_prediction/src/fim.rs index 66f2e58a3b01b4fbf49b11864db4daec6b4dc1c2..d3e18f73acc665eec28d725530d11297cf4d69ea 100644 --- a/crates/edit_prediction/src/fim.rs +++ b/crates/edit_prediction/src/fim.rs @@ -1,6 +1,7 @@ use crate::{ - EditPredictionId, EditPredictionModelInput, cursor_excerpt, prediction::EditPredictionResult, - zeta, + EditPredictionId, EditPredictionModelInput, cursor_excerpt, + open_ai_compatible::{self, load_open_ai_compatible_api_key_if_needed}, + prediction::EditPredictionResult, }; use anyhow::{Context as _, Result, anyhow}; use gpui::{App, AppContext as _, Entity, Task}; @@ -58,6 +59,8 @@ pub fn request_prediction( return Task::ready(Err(anyhow!("Unsupported edit prediction provider for FIM"))); }; + let api_key = load_open_ai_compatible_api_key_if_needed(provider, cx); + let result = cx.background_spawn(async move { let (excerpt_range, _) = cursor_excerpt::editable_and_context_ranges_for_cursor_position( cursor_point, @@ -90,12 +93,14 @@ pub fn request_prediction( let stop_tokens = get_fim_stop_tokens(); let max_tokens = settings.max_output_tokens; - let (response_text, request_id) = zeta::send_custom_server_request( + + let (response_text, request_id) = open_ai_compatible::send_custom_server_request( provider, &settings, prompt, max_tokens, stop_tokens, + api_key, 
&http_client, ) .await?; diff --git a/crates/edit_prediction/src/open_ai_compatible.rs b/crates/edit_prediction/src/open_ai_compatible.rs new file mode 100644 index 0000000000000000000000000000000000000000..ca378ba1fd0bc9bdbb3e85c7610e1b94c1be388f --- /dev/null +++ b/crates/edit_prediction/src/open_ai_compatible.rs @@ -0,0 +1,133 @@ +use anyhow::{Context as _, Result}; +use cloud_llm_client::predict_edits_v3::{RawCompletionRequest, RawCompletionResponse}; +use futures::AsyncReadExt as _; +use gpui::{App, AppContext as _, Entity, Global, SharedString, Task, http_client}; +use language::language_settings::{OpenAiCompatibleEditPredictionSettings, all_language_settings}; +use language_model::{ApiKeyState, EnvVar, env_var}; +use std::sync::Arc; + +pub fn open_ai_compatible_api_url(cx: &App) -> SharedString { + all_language_settings(None, cx) + .edit_predictions + .open_ai_compatible_api + .as_ref() + .map(|settings| settings.api_url.clone()) + .unwrap_or_default() + .into() +} + +pub const OPEN_AI_COMPATIBLE_CREDENTIALS_USERNAME: &str = "openai-compatible-api-token"; +pub static OPEN_AI_COMPATIBLE_TOKEN_ENV_VAR: std::sync::LazyLock = + env_var!("ZED_OPEN_AI_COMPATIBLE_EDIT_PREDICTION_API_KEY"); + +struct GlobalOpenAiCompatibleApiKey(Entity); + +impl Global for GlobalOpenAiCompatibleApiKey {} + +pub fn open_ai_compatible_api_token(cx: &mut App) -> Entity { + if let Some(global) = cx.try_global::() { + return global.0.clone(); + } + + let entity = cx.new(|cx| { + ApiKeyState::new( + open_ai_compatible_api_url(cx), + OPEN_AI_COMPATIBLE_TOKEN_ENV_VAR.clone(), + ) + }); + cx.set_global(GlobalOpenAiCompatibleApiKey(entity.clone())); + entity +} + +pub fn load_open_ai_compatible_api_token( + cx: &mut App, +) -> Task> { + let api_url = open_ai_compatible_api_url(cx); + open_ai_compatible_api_token(cx).update(cx, |key_state, cx| { + key_state.load_if_needed(api_url, |s| s, cx) + }) +} + +pub fn load_open_ai_compatible_api_key_if_needed( + provider: 
settings::EditPredictionProvider, + cx: &mut App, +) -> Option> { + if provider != settings::EditPredictionProvider::OpenAiCompatibleApi { + return None; + } + _ = load_open_ai_compatible_api_token(cx); + let url = open_ai_compatible_api_url(cx); + return open_ai_compatible_api_token(cx).read(cx).key(&url); +} + +pub(crate) async fn send_custom_server_request( + provider: settings::EditPredictionProvider, + settings: &OpenAiCompatibleEditPredictionSettings, + prompt: String, + max_tokens: u32, + stop_tokens: Vec, + api_key: Option>, + http_client: &Arc, +) -> Result<(String, String)> { + match provider { + settings::EditPredictionProvider::Ollama => { + let response = crate::ollama::make_request( + settings.clone(), + prompt, + stop_tokens, + http_client.clone(), + ) + .await?; + Ok((response.response, response.created_at)) + } + _ => { + let request = RawCompletionRequest { + model: settings.model.clone(), + prompt, + max_tokens: Some(max_tokens), + temperature: None, + stop: stop_tokens + .into_iter() + .map(std::borrow::Cow::Owned) + .collect(), + environment: None, + }; + + let request_body = serde_json::to_string(&request)?; + let mut http_request_builder = http_client::Request::builder() + .method(http_client::Method::POST) + .uri(settings.api_url.as_ref()) + .header("Content-Type", "application/json"); + + if let Some(api_key) = api_key { + http_request_builder = + http_request_builder.header("Authorization", format!("Bearer {}", api_key)); + } + + let http_request = + http_request_builder.body(http_client::AsyncBody::from(request_body))?; + + let mut response = http_client.send(http_request).await?; + let status = response.status(); + + if !status.is_success() { + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + anyhow::bail!("custom server error: {} - {}", status, body); + } + + let mut body = String::new(); + response.body_mut().read_to_string(&mut body).await?; + + let parsed: RawCompletionResponse = + 
serde_json::from_str(&body).context("Failed to parse completion response")?; + let text = parsed + .choices + .into_iter() + .next() + .map(|choice| choice.text) + .unwrap_or_default(); + Ok((text, parsed.id)) + } + } +} diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index f6a786572736908556535b9131c1cf7814a6126f..789ff6c0d7fcc269baf30b5e0fb0e849bc865859 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -2,15 +2,14 @@ use crate::cursor_excerpt::compute_excerpt_ranges; use crate::prediction::EditPredictionResult; use crate::{ CurrentEditPrediction, DebugEvent, EditPredictionFinishedDebugEvent, EditPredictionId, - EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, ollama, + EditPredictionModelInput, EditPredictionStartedDebugEvent, EditPredictionStore, }; -use anyhow::{Context as _, Result}; -use cloud_llm_client::predict_edits_v3::{RawCompletionRequest, RawCompletionResponse}; +use anyhow::Result; +use cloud_llm_client::predict_edits_v3::RawCompletionRequest; use cloud_llm_client::{AcceptEditPredictionBody, EditPredictionRejectReason}; use edit_prediction_types::PredictedCursorPosition; -use futures::AsyncReadExt as _; -use gpui::{App, AppContext as _, Task, http_client, prelude::*}; -use language::language_settings::{OpenAiCompatibleEditPredictionSettings, all_language_settings}; +use gpui::{App, AppContext as _, Task, prelude::*}; +use language::language_settings::all_language_settings; use language::{BufferSnapshot, ToOffset as _, ToPoint, text_diff}; use release_channel::AppVersion; use settings::EditPredictionPromptFormat; @@ -25,6 +24,10 @@ use zeta_prompt::{ zeta1::{self, EDITABLE_REGION_END_MARKER}, }; +use crate::open_ai_compatible::{ + load_open_ai_compatible_api_key_if_needed, send_custom_server_request, +}; + pub fn request_prediction_with_zeta( store: &mut EditPredictionStore, EditPredictionModelInput { @@ -56,6 +59,7 @@ pub fn 
request_prediction_with_zeta( let buffer_snapshotted_at = Instant::now(); let raw_config = store.zeta2_raw_config().cloned(); let preferred_experiment = store.preferred_experiment().map(|s| s.to_owned()); + let open_ai_compatible_api_key = load_open_ai_compatible_api_key_if_needed(provider, cx); let excerpt_path: Arc = snapshot .file() @@ -131,6 +135,7 @@ pub fn request_prediction_with_zeta( prompt, max_tokens, stop_tokens, + open_ai_compatible_api_key.clone(), &http_client, ) .await?; @@ -157,6 +162,7 @@ pub fn request_prediction_with_zeta( prompt, max_tokens, vec![], + open_ai_compatible_api_key.clone(), &http_client, ) .await?; @@ -400,66 +406,6 @@ pub fn zeta2_prompt_input( (full_context_offset_range, prompt_input) } -pub(crate) async fn send_custom_server_request( - provider: settings::EditPredictionProvider, - settings: &OpenAiCompatibleEditPredictionSettings, - prompt: String, - max_tokens: u32, - stop_tokens: Vec, - http_client: &Arc, -) -> Result<(String, String)> { - match provider { - settings::EditPredictionProvider::Ollama => { - let response = - ollama::make_request(settings.clone(), prompt, stop_tokens, http_client.clone()) - .await?; - Ok((response.response, response.created_at)) - } - _ => { - let request = RawCompletionRequest { - model: settings.model.clone(), - prompt, - max_tokens: Some(max_tokens), - temperature: None, - stop: stop_tokens - .into_iter() - .map(std::borrow::Cow::Owned) - .collect(), - environment: None, - }; - - let request_body = serde_json::to_string(&request)?; - let http_request = http_client::Request::builder() - .method(http_client::Method::POST) - .uri(settings.api_url.as_ref()) - .header("Content-Type", "application/json") - .body(http_client::AsyncBody::from(request_body))?; - - let mut response = http_client.send(http_request).await?; - let status = response.status(); - - if !status.is_success() { - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - anyhow::bail!("custom server 
error: {} - {}", status, body); - } - - let mut body = String::new(); - response.body_mut().read_to_string(&mut body).await?; - - let parsed: RawCompletionResponse = - serde_json::from_str(&body).context("Failed to parse completion response")?; - let text = parsed - .choices - .into_iter() - .next() - .map(|choice| choice.text) - .unwrap_or_default(); - Ok((text, parsed.id)) - } - } -} - pub(crate) fn edit_prediction_accepted( store: &EditPredictionStore, current_prediction: CurrentEditPrediction, diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index 6339c7d6cd9fa1cc40101cc1bf14650a6904b3c7..743256970f486b474405e7f034f18501505cb825 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -539,9 +539,15 @@ impl EditPredictionButton { edit_prediction::ollama::ensure_authenticated(cx); let sweep_api_token_task = edit_prediction::sweep_ai::load_sweep_api_token(cx); let mercury_api_token_task = edit_prediction::mercury::load_mercury_api_token(cx); + let open_ai_compatible_api_token_task = + edit_prediction::open_ai_compatible::load_open_ai_compatible_api_token(cx); cx.spawn(async move |this, cx| { - _ = futures::join!(sweep_api_token_task, mercury_api_token_task); + _ = futures::join!( + sweep_api_token_task, + mercury_api_token_task, + open_ai_compatible_api_token_task + ); this.update(cx, |_, cx| { cx.notify(); }) diff --git a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs index 338fe4de14f1f7e9060fafe865253f09f0bdc481..32c4bee84bd1f72263ed28bcd44d7e6349c4b24c 100644 --- a/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs +++ b/crates/settings_ui/src/pages/edit_prediction_provider_setup.rs @@ -2,6 +2,7 @@ use codestral::{CODESTRAL_API_URL, codestral_api_key_state, codestral_api_url}; use edit_prediction::{ ApiKeyState, 
mercury::{MERCURY_CREDENTIALS_URL, mercury_api_token}, + open_ai_compatible::{open_ai_compatible_api_token, open_ai_compatible_api_url}, sweep_ai::{SWEEP_CREDENTIALS_URL, sweep_api_token}, }; use edit_prediction_ui::{get_available_providers, set_completion_provider}; @@ -33,7 +34,9 @@ pub(crate) fn render_edit_prediction_setup_page( render_api_key_provider( IconName::Inception, "Mercury", - "https://platform.inceptionlabs.ai/dashboard/api-keys".into(), + ApiKeyDocs::Link { + dashboard_url: "https://platform.inceptionlabs.ai/dashboard/api-keys".into(), + }, mercury_api_token(cx), |_cx| MERCURY_CREDENTIALS_URL, None, @@ -46,7 +49,9 @@ pub(crate) fn render_edit_prediction_setup_page( render_api_key_provider( IconName::SweepAi, "Sweep", - "https://app.sweep.dev/".into(), + ApiKeyDocs::Link { + dashboard_url: "https://app.sweep.dev/".into(), + }, sweep_api_token(cx), |_cx| SWEEP_CREDENTIALS_URL, Some( @@ -68,7 +73,9 @@ pub(crate) fn render_edit_prediction_setup_page( render_api_key_provider( IconName::AiMistral, "Codestral", - "https://console.mistral.ai/codestral".into(), + ApiKeyDocs::Link { + dashboard_url: "https://console.mistral.ai/codestral".into(), + }, codestral_api_key_state(cx), |cx| codestral_api_url(cx), Some( @@ -87,7 +94,31 @@ pub(crate) fn render_edit_prediction_setup_page( .into_any_element(), ), Some(render_ollama_provider(settings_window, window, cx).into_any_element()), - Some(render_open_ai_compatible_provider(settings_window, window, cx).into_any_element()), + Some( + render_api_key_provider( + IconName::AiOpenAiCompat, + "OpenAI Compatible API", + ApiKeyDocs::Custom { + message: "Set an API key here. It will be sent as Authorization: Bearer {key}." 
+ .into(), + }, + open_ai_compatible_api_token(cx), + |cx| open_ai_compatible_api_url(cx), + Some( + settings_window + .render_sub_page_items_section( + open_ai_compatible_settings().iter().enumerate(), + true, + window, + cx, + ) + .into_any_element(), + ), + window, + cx, + ) + .into_any_element(), + ), ]; div() @@ -162,10 +193,15 @@ fn render_provider_dropdown(window: &mut Window, cx: &mut App) -> AnyElement { .into_any_element() } +enum ApiKeyDocs { + Link { dashboard_url: SharedString }, + Custom { message: SharedString }, +} + fn render_api_key_provider( icon: IconName, title: &'static str, - link: SharedString, + docs: ApiKeyDocs, api_key_state: Entity, current_url: fn(&mut App) -> SharedString, additional_fields: Option, @@ -209,25 +245,32 @@ fn render_api_key_provider( .icon(icon) .no_padding(true); let button_link_label = format!("{} dashboard", title); - let description = h_flex() - .min_w_0() - .gap_0p5() - .child( - Label::new("Visit the") + let description = match docs { + ApiKeyDocs::Custom { message } => h_flex().min_w_0().gap_0p5().child( + Label::new(message) .size(LabelSize::Small) .color(Color::Muted), - ) - .child( - ButtonLink::new(button_link_label, link) - .no_icon(true) - .label_size(LabelSize::Small) - .label_color(Color::Muted), - ) - .child( - Label::new("to generate an API key.") - .size(LabelSize::Small) - .color(Color::Muted), - ); + ), + ApiKeyDocs::Link { dashboard_url } => h_flex() + .min_w_0() + .gap_0p5() + .child( + Label::new("Visit the") + .size(LabelSize::Small) + .color(Color::Muted), + ) + .child( + ButtonLink::new(button_link_label, dashboard_url) + .no_icon(true) + .label_size(LabelSize::Small) + .label_color(Color::Muted), + ) + .child( + Label::new("to generate an API key.") + .size(LabelSize::Small) + .color(Color::Muted), + ), + }; let configured_card_label = if is_from_env_var { "API Key Set in Environment Variable" } else { @@ -484,34 +527,6 @@ fn ollama_settings() -> Box<[SettingsPageItem]> { ]) } -fn 
render_open_ai_compatible_provider( - settings_window: &SettingsWindow, - window: &mut Window, - cx: &mut Context, -) -> impl IntoElement { - let open_ai_compatible_settings = open_ai_compatible_settings(); - let additional_fields = settings_window - .render_sub_page_items_section( - open_ai_compatible_settings.iter().enumerate(), - true, - window, - cx, - ) - .into_any_element(); - - v_flex() - .id("open-ai-compatible") - .min_w_0() - .pt_8() - .gap_1p5() - .child( - SettingsSectionHeader::new("OpenAI Compatible API") - .icon(IconName::AiOpenAiCompat) - .no_padding(true), - ) - .child(div().px_neg_8().child(additional_fields)) -} - fn open_ai_compatible_settings() -> Box<[SettingsPageItem]> { Box::new([ SettingsPageItem::SettingItem(SettingItem { diff --git a/crates/zed/src/zed/edit_prediction_registry.rs b/crates/zed/src/zed/edit_prediction_registry.rs index 67b0d26c88cf0bd254a776834de09fb89d6ea195..39eee233e02a782e2379849247448c8f8c1ea71a 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -154,7 +154,10 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option Date: Tue, 3 Mar 2026 15:11:32 -0600 Subject: [PATCH 279/548] zeta2: Hashlines prompt format (#50623) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/edit_prediction/src/zeta.rs | 33 +- .../edit_prediction_cli/src/format_prompt.rs | 22 +- .../edit_prediction_cli/src/parse_output.rs | 43 +- crates/zeta_prompt/src/zeta_prompt.rs | 1778 ++++++++++++++++- 4 files changed, 1712 insertions(+), 164 deletions(-) diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 789ff6c0d7fcc269baf30b5e0fb0e849bc865859..f038d2a4ca1929faee2a02391534539b5b63e2d0 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -15,12 +15,10 @@ use release_channel::AppVersion; use settings::EditPredictionPromptFormat; use text::{Anchor, Bias}; -use std::env; -use std::ops::Range; -use std::{path::Path, sync::Arc, time::Instant}; +use std::{env, ops::Range, path::Path, sync::Arc, time::Instant}; use zeta_prompt::{ CURSOR_MARKER, ZetaFormat, clean_zeta2_model_output, format_zeta_prompt, get_prefill, - prompt_input_contains_special_tokens, + output_with_context_for_format, prompt_input_contains_special_tokens, zeta1::{self, EDITABLE_REGION_END_MARKER}, }; @@ -246,6 +244,25 @@ pub fn request_prediction_with_zeta( return Ok((Some((request_id, None, model_version)), usage)); }; + let editable_range_in_buffer = editable_range_in_excerpt.start + + full_context_offset_range.start + ..editable_range_in_excerpt.end + full_context_offset_range.start; + + let mut old_text = snapshot + .text_for_range(editable_range_in_buffer.clone()) + .collect::(); + + // For the hashline format, the model may return <|set|>/<|insert|> + // edit commands instead of a full replacement. Apply them against + // the original editable region to produce the full replacement text. + // This must happen before cursor marker stripping because the cursor + // marker is embedded inside edit command content. + if let Some(rewritten_output) = + output_with_context_for_format(zeta_version, &old_text, &output_text)? 
+ { + output_text = rewritten_output; + } + // Client-side cursor marker processing (applies to both raw and v3 responses) let cursor_offset_in_output = output_text.find(CURSOR_MARKER); if let Some(offset) = cursor_offset_in_output { @@ -265,14 +282,6 @@ pub fn request_prediction_with_zeta( .ok(); } - let editable_range_in_buffer = editable_range_in_excerpt.start - + full_context_offset_range.start - ..editable_range_in_excerpt.end + full_context_offset_range.start; - - let mut old_text = snapshot - .text_for_range(editable_range_in_buffer.clone()) - .collect::(); - if !output_text.is_empty() && !output_text.ends_with('\n') { output_text.push('\n'); } diff --git a/crates/edit_prediction_cli/src/format_prompt.rs b/crates/edit_prediction_cli/src/format_prompt.rs index bee79ae8160eeb815a3739b53a5441f6063fb622..f36eaf2799166d6fbd2b7b212003a1a0644b82c4 100644 --- a/crates/edit_prediction_cli/src/format_prompt.rs +++ b/crates/edit_prediction_cli/src/format_prompt.rs @@ -12,7 +12,8 @@ use similar::DiffableStr; use std::ops::Range; use std::sync::Arc; use zeta_prompt::{ - ZetaFormat, excerpt_range_for_format, format_zeta_prompt, resolve_cursor_region, + ZetaFormat, encode_patch_as_output_for_format, excerpt_range_for_format, format_zeta_prompt, + output_end_marker_for_format, resolve_cursor_region, }; pub async fn run_format_prompt( @@ -101,6 +102,12 @@ pub fn zeta2_output_for_patch( old_editable_region.push('\n'); } + if let Some(encoded_output) = + encode_patch_as_output_for_format(version, &old_editable_region, patch, cursor_offset)? 
+ { + return Ok(encoded_output); + } + let (mut result, first_hunk_offset) = udiff::apply_diff_to_string_with_hunk_offset(patch, &old_editable_region).with_context( || { @@ -120,16 +127,11 @@ pub fn zeta2_output_for_patch( result.insert_str(offset, zeta_prompt::CURSOR_MARKER); } - match version { - ZetaFormat::V0120GitMergeMarkers - | ZetaFormat::V0131GitMergeMarkersPrefix - | ZetaFormat::V0211SeedCoder => { - if !result.ends_with('\n') { - result.push('\n'); - } - result.push_str(zeta_prompt::v0120_git_merge_markers::END_MARKER); + if let Some(end_marker) = output_end_marker_for_format(version) { + if !result.ends_with('\n') { + result.push('\n'); } - _ => (), + result.push_str(end_marker); } Ok(result) diff --git a/crates/edit_prediction_cli/src/parse_output.rs b/crates/edit_prediction_cli/src/parse_output.rs index 4b8af44785c1781de772f569c012ee64eee48aad..2c066b8b32b3eaab54ad6e3b3bcb0796ff27f950 100644 --- a/crates/edit_prediction_cli/src/parse_output.rs +++ b/crates/edit_prediction_cli/src/parse_output.rs @@ -6,7 +6,11 @@ use crate::{ }; use anyhow::{Context as _, Result}; use edit_prediction::example_spec::encode_cursor_in_patch; -use zeta_prompt::{CURSOR_MARKER, ZetaFormat}; +use zeta_prompt::{ + CURSOR_MARKER, ZetaFormat, clean_extracted_region_for_format, + current_region_markers_for_format, output_end_marker_for_format, + output_with_context_for_format, +}; pub fn run_parse_output(example: &mut Example) -> Result<()> { example @@ -51,22 +55,7 @@ pub fn parse_prediction_output( } fn extract_zeta2_current_region(prompt: &str, format: ZetaFormat) -> Result { - let (current_marker, end_marker) = match format { - ZetaFormat::V0112MiddleAtEnd => ("<|fim_middle|>current\n", "<|fim_middle|>updated"), - ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion => { - ("<|fim_middle|>current\n", "<|fim_suffix|>") - } - ZetaFormat::V0120GitMergeMarkers - | ZetaFormat::V0131GitMergeMarkersPrefix - | ZetaFormat::V0211Prefill => ( - 
zeta_prompt::v0120_git_merge_markers::START_MARKER, - zeta_prompt::v0120_git_merge_markers::SEPARATOR, - ), - ZetaFormat::V0211SeedCoder => ( - zeta_prompt::seed_coder::START_MARKER, - zeta_prompt::seed_coder::SEPARATOR, - ), - }; + let (current_marker, end_marker) = current_region_markers_for_format(format); let start = prompt.find(current_marker).with_context(|| { format!( @@ -82,8 +71,7 @@ fn extract_zeta2_current_region(prompt: &str, format: ZetaFormat) -> Result { - zeta_prompt::v0131_git_merge_markers_prefix::END_MARKER - } - ZetaFormat::V0120GitMergeMarkers => zeta_prompt::v0120_git_merge_markers::END_MARKER, - ZetaFormat::V0112MiddleAtEnd - | ZetaFormat::V0113Ordered - | ZetaFormat::V0114180EditableRegion => "", - ZetaFormat::V0211SeedCoder => zeta_prompt::seed_coder::END_MARKER, - }; - if !suffix.is_empty() { + if let Some(marker) = output_end_marker_for_format(format) { new_text = new_text - .strip_suffix(suffix) + .strip_suffix(marker) .unwrap_or(&new_text) .to_string(); } diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index 0cd37a455397334933dbfa2464c2dbcb72bba456..2ec12e8bebb4a868c0784e2fe52541a1de580555 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -86,6 +86,7 @@ pub enum ZetaFormat { V0131GitMergeMarkersPrefix, V0211Prefill, V0211SeedCoder, + v0226Hashline, } impl std::fmt::Display for ZetaFormat { @@ -122,25 +123,6 @@ impl ZetaFormat { .collect::>() .concat() } - - pub fn special_tokens(&self) -> &'static [&'static str] { - match self { - ZetaFormat::V0112MiddleAtEnd - | ZetaFormat::V0113Ordered - | ZetaFormat::V0114180EditableRegion => &[ - "<|fim_prefix|>", - "<|fim_suffix|>", - "<|fim_middle|>", - "<|file_sep|>", - CURSOR_MARKER, - ], - ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::special_tokens(), - ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill => { - v0131_git_merge_markers_prefix::special_tokens() - } - 
ZetaFormat::V0211SeedCoder => seed_coder::special_tokens(), - } - } } #[derive(Clone, Debug, PartialEq, Hash, Serialize, Deserialize)] @@ -212,33 +194,29 @@ pub struct RelatedExcerpt { } pub fn prompt_input_contains_special_tokens(input: &ZetaPromptInput, format: ZetaFormat) -> bool { - format - .special_tokens() + special_tokens_for_format(format) .iter() .any(|token| input.cursor_excerpt.contains(token)) } pub fn format_zeta_prompt(input: &ZetaPromptInput, format: ZetaFormat) -> String { - format_zeta_prompt_with_budget(input, format, MAX_PROMPT_TOKENS) + format_prompt_with_budget_for_format(input, format, MAX_PROMPT_TOKENS) } -/// Post-processes model output for the given zeta format by stripping format-specific suffixes. -pub fn clean_zeta2_model_output(output: &str, format: ZetaFormat) -> &str { +pub fn special_tokens_for_format(format: ZetaFormat) -> &'static [&'static str] { match format { - ZetaFormat::V0120GitMergeMarkers => output - .strip_suffix(v0120_git_merge_markers::END_MARKER) - .unwrap_or(output), - ZetaFormat::V0131GitMergeMarkersPrefix => output - .strip_suffix(v0131_git_merge_markers_prefix::END_MARKER) - .unwrap_or(output), - ZetaFormat::V0211SeedCoder => output - .strip_suffix(seed_coder::END_MARKER) - .unwrap_or(output), - _ => output, + ZetaFormat::V0112MiddleAtEnd => v0112_middle_at_end::special_tokens(), + ZetaFormat::V0113Ordered => v0113_ordered::special_tokens(), + ZetaFormat::V0114180EditableRegion => v0114180_editable_region::special_tokens(), + ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::special_tokens(), + ZetaFormat::V0131GitMergeMarkersPrefix => v0131_git_merge_markers_prefix::special_tokens(), + ZetaFormat::V0211Prefill => v0211_prefill::special_tokens(), + ZetaFormat::V0211SeedCoder => seed_coder::special_tokens(), + ZetaFormat::v0226Hashline => hashline::special_tokens(), } } -pub fn excerpt_range_for_format( +pub fn excerpt_ranges_for_format( format: ZetaFormat, ranges: &ExcerptRanges, ) -> (Range, Range) { @@ 
-247,129 +225,257 @@ pub fn excerpt_range_for_format( ranges.editable_150.clone(), ranges.editable_150_context_350.clone(), ), - ZetaFormat::V0114180EditableRegion - | ZetaFormat::V0120GitMergeMarkers + ZetaFormat::V0114180EditableRegion => ( + ranges.editable_180.clone(), + ranges.editable_180_context_350.clone(), + ), + ZetaFormat::V0120GitMergeMarkers | ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill - | ZetaFormat::V0211SeedCoder => ( + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline => ( ranges.editable_350.clone(), ranges.editable_350_context_150.clone(), ), } } -pub fn resolve_cursor_region( - input: &ZetaPromptInput, - format: ZetaFormat, -) -> (&str, Range, usize) { - let (editable_range, context_range) = excerpt_range_for_format(format, &input.excerpt_ranges); - let context_start = context_range.start; - let context_text = &input.cursor_excerpt[context_range]; - let adjusted_editable = - (editable_range.start - context_start)..(editable_range.end - context_start); - let adjusted_cursor = input.cursor_offset_in_excerpt - context_start; - - (context_text, adjusted_editable, adjusted_cursor) -} - -fn format_zeta_prompt_with_budget( - input: &ZetaPromptInput, +pub fn write_cursor_excerpt_section_for_format( format: ZetaFormat, - max_tokens: usize, -) -> String { - let (context, editable_range, cursor_offset) = resolve_cursor_region(input, format); - let path = &*input.cursor_path; - - let mut cursor_section = String::new(); + prompt: &mut String, + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, +) { match format { - ZetaFormat::V0112MiddleAtEnd => { - v0112_middle_at_end::write_cursor_excerpt_section( - &mut cursor_section, - path, - context, - &editable_range, - cursor_offset, - ); - } + ZetaFormat::V0112MiddleAtEnd => v0112_middle_at_end::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ), ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion 
=> { v0113_ordered::write_cursor_excerpt_section( - &mut cursor_section, + prompt, path, context, - &editable_range, + editable_range, cursor_offset, ) } ZetaFormat::V0120GitMergeMarkers => v0120_git_merge_markers::write_cursor_excerpt_section( - &mut cursor_section, + prompt, path, context, - &editable_range, + editable_range, cursor_offset, ), ZetaFormat::V0131GitMergeMarkersPrefix | ZetaFormat::V0211Prefill => { v0131_git_merge_markers_prefix::write_cursor_excerpt_section( - &mut cursor_section, + prompt, path, context, - &editable_range, + editable_range, cursor_offset, ) } - ZetaFormat::V0211SeedCoder => { - return seed_coder::format_prompt_with_budget( + ZetaFormat::V0211SeedCoder => seed_coder::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ), + ZetaFormat::v0226Hashline => hashline::write_cursor_excerpt_section( + prompt, + path, + context, + editable_range, + cursor_offset, + ), + } +} + +pub fn format_prompt_with_budget_for_format( + input: &ZetaPromptInput, + format: ZetaFormat, + max_tokens: usize, +) -> String { + let (context, editable_range, cursor_offset) = resolve_cursor_region(input, format); + let path = &*input.cursor_path; + + match format { + ZetaFormat::V0211SeedCoder => seed_coder::format_prompt_with_budget( + path, + context, + &editable_range, + cursor_offset, + &input.events, + &input.related_files, + max_tokens, + ), + _ => { + let mut cursor_section = String::new(); + write_cursor_excerpt_section_for_format( + format, + &mut cursor_section, path, context, &editable_range, cursor_offset, + ); + + let cursor_tokens = estimate_tokens(cursor_section.len()); + let budget_after_cursor = max_tokens.saturating_sub(cursor_tokens); + + let edit_history_section = format_edit_history_within_budget( &input.events, + "<|file_sep|>", + "edit history", + budget_after_cursor, + ); + let edit_history_tokens = estimate_tokens(edit_history_section.len()); + let budget_after_edit_history = 
budget_after_cursor.saturating_sub(edit_history_tokens); + + let related_files_section = format_related_files_within_budget( &input.related_files, - max_tokens, + "<|file_sep|>", + "", + budget_after_edit_history, ); + + let mut prompt = String::new(); + prompt.push_str(&related_files_section); + prompt.push_str(&edit_history_section); + prompt.push_str(&cursor_section); + prompt } } - - let cursor_tokens = estimate_tokens(cursor_section.len()); - let budget_after_cursor = max_tokens.saturating_sub(cursor_tokens); - - let edit_history_section = format_edit_history_within_budget( - &input.events, - "<|file_sep|>", - "edit history", - budget_after_cursor, - ); - let edit_history_tokens = estimate_tokens(edit_history_section.len()); - let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens); - - let related_files_section = format_related_files_within_budget( - &input.related_files, - "<|file_sep|>", - "", - budget_after_edit_history, - ); - - let mut prompt = String::new(); - prompt.push_str(&related_files_section); - prompt.push_str(&edit_history_section); - prompt.push_str(&cursor_section); - prompt } -pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String { +pub fn get_prefill_for_format( + format: ZetaFormat, + context: &str, + editable_range: &Range, +) -> String { match format { + ZetaFormat::V0211Prefill => v0211_prefill::get_prefill(context, editable_range), ZetaFormat::V0112MiddleAtEnd | ZetaFormat::V0113Ordered | ZetaFormat::V0114180EditableRegion | ZetaFormat::V0120GitMergeMarkers | ZetaFormat::V0131GitMergeMarkersPrefix - | ZetaFormat::V0211SeedCoder => String::new(), - ZetaFormat::V0211Prefill => { - let (context, editable_range, _) = resolve_cursor_region(input, format); - v0211_prefill::get_prefill(context, &editable_range) + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline => String::new(), + } +} + +pub fn output_end_marker_for_format(format: ZetaFormat) -> Option<&'static str> { + match 
format { + ZetaFormat::V0120GitMergeMarkers => Some(v0120_git_merge_markers::END_MARKER), + ZetaFormat::V0131GitMergeMarkersPrefix => Some(v0131_git_merge_markers_prefix::END_MARKER), + ZetaFormat::V0211Prefill => Some(v0131_git_merge_markers_prefix::END_MARKER), + ZetaFormat::V0211SeedCoder => Some(seed_coder::END_MARKER), + ZetaFormat::V0112MiddleAtEnd + | ZetaFormat::V0113Ordered + | ZetaFormat::V0114180EditableRegion + | ZetaFormat::v0226Hashline => None, + } +} + +pub fn current_region_markers_for_format(format: ZetaFormat) -> (&'static str, &'static str) { + match format { + ZetaFormat::V0112MiddleAtEnd => ("<|fim_middle|>current\n", "<|fim_middle|>updated"), + ZetaFormat::V0113Ordered + | ZetaFormat::V0114180EditableRegion + | ZetaFormat::v0226Hashline => ("<|fim_middle|>current\n", "<|fim_suffix|>"), + ZetaFormat::V0120GitMergeMarkers + | ZetaFormat::V0131GitMergeMarkersPrefix + | ZetaFormat::V0211Prefill => ( + v0120_git_merge_markers::START_MARKER, + v0120_git_merge_markers::SEPARATOR, + ), + ZetaFormat::V0211SeedCoder => (seed_coder::START_MARKER, seed_coder::SEPARATOR), + } +} + +pub fn clean_extracted_region_for_format(format: ZetaFormat, region: &str) -> String { + match format { + ZetaFormat::v0226Hashline => hashline::strip_hashline_prefixes(region), + _ => region.to_string(), + } +} + +pub fn encode_patch_as_output_for_format( + format: ZetaFormat, + old_editable_region: &str, + patch: &str, + cursor_offset: Option, +) -> Result> { + match format { + ZetaFormat::v0226Hashline => { + hashline::patch_to_edit_commands(old_editable_region, patch, cursor_offset).map(Some) + } + _ => Ok(None), + } +} + +pub fn output_with_context_for_format( + format: ZetaFormat, + old_editable_region: &str, + output: &str, +) -> Result> { + match format { + ZetaFormat::v0226Hashline => { + if hashline::output_has_edit_commands(output) { + Ok(Some(hashline::apply_edit_commands( + old_editable_region, + output, + ))) + } else { + Ok(None) + } } + _ => Ok(None), } } +/// 
Post-processes model output for the given zeta format by stripping format-specific suffixes. +pub fn clean_zeta2_model_output(output: &str, format: ZetaFormat) -> &str { + match output_end_marker_for_format(format) { + Some(marker) => output.strip_suffix(marker).unwrap_or(output), + None => output, + } +} + +pub fn excerpt_range_for_format( + format: ZetaFormat, + ranges: &ExcerptRanges, +) -> (Range, Range) { + excerpt_ranges_for_format(format, ranges) +} + +pub fn resolve_cursor_region( + input: &ZetaPromptInput, + format: ZetaFormat, +) -> (&str, Range, usize) { + let (editable_range, context_range) = excerpt_range_for_format(format, &input.excerpt_ranges); + let context_start = context_range.start; + let context_text = &input.cursor_excerpt[context_range]; + let adjusted_editable = + (editable_range.start - context_start)..(editable_range.end - context_start); + let adjusted_cursor = input.cursor_offset_in_excerpt - context_start; + + (context_text, adjusted_editable, adjusted_cursor) +} + +pub fn get_prefill(input: &ZetaPromptInput, format: ZetaFormat) -> String { + let (context, editable_range, _) = resolve_cursor_region(input, format); + get_prefill_for_format(format, context, &editable_range) +} + fn format_edit_history_within_budget( events: &[Arc], file_marker: &str, @@ -533,6 +639,16 @@ pub fn write_related_files( mod v0112_middle_at_end { use super::*; + pub fn special_tokens() -> &'static [&'static str] { + &[ + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + "<|file_sep|>", + CURSOR_MARKER, + ] + } + pub fn write_cursor_excerpt_section( prompt: &mut String, path: &Path, @@ -567,6 +683,16 @@ mod v0112_middle_at_end { mod v0113_ordered { use super::*; + pub fn special_tokens() -> &'static [&'static str] { + &[ + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + "<|file_sep|>", + CURSOR_MARKER, + ] + } + pub fn write_cursor_excerpt_section( prompt: &mut String, path: &Path, @@ -601,6 +727,14 @@ mod v0113_ordered { } } +mod 
v0114180_editable_region { + use super::*; + + pub fn special_tokens() -> &'static [&'static str] { + v0113_ordered::special_tokens() + } +} + pub mod v0120_git_merge_markers { //! A prompt that uses git-style merge conflict markers to represent the editable region. //! @@ -752,6 +886,10 @@ pub mod v0131_git_merge_markers_prefix { pub mod v0211_prefill { use super::*; + pub fn special_tokens() -> &'static [&'static str] { + v0131_git_merge_markers_prefix::special_tokens() + } + pub fn get_prefill(context: &str, editable_range: &Range) -> String { let editable_region = &context[editable_range.start..editable_range.end]; @@ -783,6 +921,1413 @@ pub mod v0211_prefill { } } +pub mod hashline { + + use std::fmt::Display; + + pub const END_MARKER: &str = "<|fim_middle|>updated"; + pub const START_MARKER: &str = "<|fim_middle|>current"; + + use super::*; + + const SET_COMMAND_MARKER: &str = "<|set|>"; + const INSERT_COMMAND_MARKER: &str = "<|insert|>"; + + pub fn special_tokens() -> &'static [&'static str] { + return &[ + SET_COMMAND_MARKER, + "<|set_range|>", + INSERT_COMMAND_MARKER, + CURSOR_MARKER, + "<|file_sep|>", + "<|fim_prefix|>", + "<|fim_suffix|>", + "<|fim_middle|>", + ]; + } + + /// A parsed line reference like `3:c3` (line index 3 with hash 0xc3). + #[derive(Debug, Clone, PartialEq, Eq)] + struct LineRef { + index: usize, + hash: u8, + } + + impl Display for LineRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}:{:02x}", self.index, self.hash) + } + } + + pub fn hash_line(line: &[u8]) -> u8 { + let mut h: u8 = 0; + for &byte in line { + h = h.wrapping_add(byte); + } + return h; + } + + /// Write the hashline-encoded editable region into `out`. Each line of + /// `editable_text` is prefixed with `{line_index}:{hash}|` and the cursor + /// marker is inserted at `cursor_offset_in_editable` (byte offset relative + /// to the start of `editable_text`). 
+ pub fn write_hashline_editable_region( + out: &mut String, + editable_text: &str, + cursor_offset_in_editable: usize, + ) { + let mut offset = 0; + for (i, line) in editable_text.lines().enumerate() { + let (head, cursor, tail) = if cursor_offset_in_editable > offset + && cursor_offset_in_editable < offset + line.len() + { + ( + &line[..cursor_offset_in_editable - offset], + CURSOR_MARKER, + &line[cursor_offset_in_editable - offset..], + ) + } else { + (line, "", "") + }; + write!( + out, + "\n{}|{head}{cursor}{tail}", + LineRef { + index: i, + hash: hash_line(line.as_bytes()) + } + ) + .unwrap(); + offset += line.len() + 1; + } + } + + pub fn write_cursor_excerpt_section( + prompt: &mut String, + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, + ) { + let path_str = path.to_string_lossy(); + write!(prompt, "<|file_sep|>{}\n", path_str).ok(); + + prompt.push_str("<|fim_prefix|>\n"); + prompt.push_str(&context[..editable_range.start]); + prompt.push_str(START_MARKER); + + let cursor_offset_in_editable = cursor_offset.saturating_sub(editable_range.start); + let editable_region = &context[editable_range.clone()]; + write_hashline_editable_region(prompt, editable_region, cursor_offset_in_editable); + + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + + prompt.push_str("<|fim_suffix|>\n"); + prompt.push_str(&context[editable_range.end..]); + if !prompt.ends_with('\n') { + prompt.push('\n'); + } + + prompt.push_str(END_MARKER); + } + + /// A single edit command parsed from the model output. + #[derive(Debug)] + enum EditCommand<'a> { + /// Replace a range of lines (inclusive on both ends). Single-line set is + /// represented by `start == end`. + Set { + start: LineRef, + end: LineRef, + content: &'a str, + }, + /// Insert new lines after the given line, or before the first line if + /// `after` is `None`. + Insert { + after: Option, + content: &'a str, + }, + } + + /// Parse a line reference like `3:c3` into a `LineRef`. 
+ fn parse_line_ref(s: &str) -> Option { + let (idx_str, hash_str) = s.split_once(':')?; + let index = idx_str.parse::().ok()?; + let hash = u8::from_str_radix(hash_str, 16).ok()?; + Some(LineRef { index, hash }) + } + + /// Parse the model output into a list of `EditCommand`s. + fn parse_edit_commands(model_output: &str) -> Vec> { + let mut commands = Vec::new(); + let mut offset = 0usize; + + while offset < model_output.len() { + let next_nl = model_output[offset..] + .find('\n') + .map(|i| offset + i) + .unwrap_or(model_output.len()); + let line = &model_output[offset..next_nl]; + let line_end = if next_nl < model_output.len() { + next_nl + 1 + } else { + next_nl + }; + + let trimmed = line.trim(); + let (is_set, specifier) = if let Some(spec) = trimmed.strip_prefix(SET_COMMAND_MARKER) { + (true, spec) + } else if let Some(spec) = trimmed.strip_prefix(INSERT_COMMAND_MARKER) { + (false, spec) + } else { + offset = line_end; + continue; + }; + + let mut content_end = line_end; + let mut scan = line_end; + + while scan < model_output.len() { + let body_nl = model_output[scan..] 
+ .find('\n') + .map(|i| scan + i) + .unwrap_or(model_output.len()); + let body_line = &model_output[scan..body_nl]; + if body_line.trim().starts_with(SET_COMMAND_MARKER) + || body_line.trim().starts_with(INSERT_COMMAND_MARKER) + { + break; + } + scan = if body_nl < model_output.len() { + body_nl + 1 + } else { + body_nl + }; + content_end = scan; + } + + let content = &model_output[line_end..content_end]; + + if is_set { + if let Some((start_str, end_str)) = specifier.split_once('-') { + if let (Some(start), Some(end)) = + (parse_line_ref(start_str), parse_line_ref(end_str)) + { + commands.push(EditCommand::Set { + start, + end, + content, + }); + } + } else if let Some(target) = parse_line_ref(specifier) { + commands.push(EditCommand::Set { + start: target.clone(), + end: target, + content, + }); + } + } else { + let after = parse_line_ref(specifier); + commands.push(EditCommand::Insert { after, content }); + } + + offset = scan; + } + + commands + } + + /// Returns `true` if the model output contains `<|set|>` or `<|insert|>` commands + /// (as opposed to being a plain full-replacement output). + /// Strip the `{line_num}:{hash}|` prefixes from each line of a hashline-encoded + /// editable region, returning the plain text content. + pub fn strip_hashline_prefixes(region: &str) -> String { + let mut decoded: String = region + .lines() + .map(|line| line.find('|').map_or(line, |pos| &line[pos + 1..])) + .collect::>() + .join("\n"); + if region.ends_with('\n') { + decoded.push('\n'); + } + decoded + } + + pub fn output_has_edit_commands(model_output: &str) -> bool { + model_output.contains(SET_COMMAND_MARKER) || model_output.contains(INSERT_COMMAND_MARKER) + } + + /// Apply `<|set|>` and `<|insert|>` edit commands from the model output to the + /// original editable region text. + /// + /// `editable_region` is the original text of the editable region (without hash + /// prefixes). `model_output` is the raw model response containing edit commands. 
+ /// + /// Returns the full replacement text for the editable region. + pub fn apply_edit_commands(editable_region: &str, model_output: &str) -> String { + let original_lines: Vec<&str> = editable_region.lines().collect(); + let old_hashes: Vec = original_lines + .iter() + .map(|line| hash_line(line.as_bytes())) + .collect(); + + let commands = parse_edit_commands(model_output); + + // For set operations: indexed by start line → Some((end line index, content)) + // For insert operations: indexed by line index → vec of content to insert after + // Insert-before-first is tracked separately. + let mut set_ops: Vec> = vec![None; original_lines.len()]; + let mut insert_before_first: Vec<&str> = Vec::new(); + let mut insert_after: Vec> = vec![Vec::new(); original_lines.len()]; + + for command in &commands { + match command { + EditCommand::Set { + start, + end, + content, + } => { + if start.index < old_hashes.len() + && end.index < old_hashes.len() + && start.index <= end.index + && old_hashes[start.index] == start.hash + && old_hashes[end.index] == end.hash + { + set_ops[start.index] = Some((end.index, *content)); + } + } + EditCommand::Insert { after, content } => match after { + None => insert_before_first.push(*content), + Some(line_ref) => { + if line_ref.index < old_hashes.len() + && old_hashes[line_ref.index] == line_ref.hash + { + insert_after[line_ref.index].push(*content); + } + } + }, + } + } + + let mut result = String::new(); + + // Emit any insertions before the first line + for content in &insert_before_first { + result.push_str(content); + if !content.ends_with('\n') { + result.push('\n'); + } + } + + let mut i = 0; + while i < original_lines.len() { + if let Some((end_index, replacement)) = set_ops[i].as_ref() { + // Replace lines i..=end_index with the replacement content + result.push_str(replacement); + if !replacement.is_empty() && !replacement.ends_with('\n') { + result.push('\n'); + } + // Emit any insertions after the end of this set range + if 
*end_index < insert_after.len() { + for content in &insert_after[*end_index] { + result.push_str(content); + if !content.ends_with('\n') { + result.push('\n'); + } + } + } + i = end_index + 1; + } else { + // Keep the original line + result.push_str(original_lines[i]); + result.push('\n'); + // Emit any insertions after this line + for content in &insert_after[i] { + result.push_str(content); + if !content.ends_with('\n') { + result.push('\n'); + } + } + i += 1; + } + } + + // Preserve trailing newline behavior: if the original ended with a + // newline the result already has one; if it didn't, trim the extra one + // we added. + if !editable_region.ends_with('\n') && result.ends_with('\n') { + result.pop(); + } + + result + } + + /// Convert a unified diff patch into hashline edit commands. + /// + /// Parses the unified diff `patch` directly to determine which lines of + /// `old_text` are deleted/replaced and what new lines are added, then emits + /// `<|set|>` and `<|insert|>` edit commands referencing old lines by their + /// `{index}:{hash}` identifiers. + /// + /// `cursor_offset` is an optional byte offset into the first hunk's new + /// text (context + additions) where the cursor marker should be placed. + pub fn patch_to_edit_commands( + old_text: &str, + patch: &str, + cursor_offset: Option, + ) -> Result { + let old_lines: Vec<&str> = old_text.lines().collect(); + let old_hashes: Vec = old_lines + .iter() + .map(|line| hash_line(line.as_bytes())) + .collect(); + + let mut result = String::new(); + let mut first_hunk = true; + + struct Hunk<'a> { + line_range: Range, + new_text_lines: Vec<&'a str>, + cursor_line_offset_in_new_text: Option<(usize, usize)>, + } + + // Parse the patch line by line. We only care about hunk headers, + // context, deletions, and additions. + let mut old_line_index: usize = 0; + let mut current_hunk: Option = None; + // Byte offset tracking within the hunk's new text for cursor placement. 
+ let mut new_text_byte_offset: usize = 0; + // The line index of the last old line seen before/in the current hunk + // (used for insert-after reference). + let mut last_old_line_before_hunk: Option = None; + + fn flush_hunk( + hunk: Hunk, + last_old_line: Option, + result: &mut String, + old_hashes: &[u8], + ) { + if hunk.line_range.is_empty() { + // Pure insertion — reference the old line to insert after when in bounds. + if let Some(after) = last_old_line + && let Some(&hash) = old_hashes.get(after) + { + write!( + result, + "{INSERT_COMMAND_MARKER}{}\n", + LineRef { index: after, hash } + ) + .unwrap(); + } else { + result.push_str(INSERT_COMMAND_MARKER); + result.push('\n'); + } + } else { + let start = hunk.line_range.start; + let end_exclusive = hunk.line_range.end; + let deleted_line_count = end_exclusive.saturating_sub(start); + + if deleted_line_count == 1 { + if let Some(&hash) = old_hashes.get(start) { + write!( + result, + "{SET_COMMAND_MARKER}{}\n", + LineRef { index: start, hash } + ) + .unwrap(); + } else { + result.push_str(SET_COMMAND_MARKER); + result.push('\n'); + } + } else { + let end_inclusive = end_exclusive - 1; + match ( + old_hashes.get(start).copied(), + old_hashes.get(end_inclusive).copied(), + ) { + (Some(start_hash), Some(end_hash)) => { + write!( + result, + "{SET_COMMAND_MARKER}{}-{}\n", + LineRef { + index: start, + hash: start_hash + }, + LineRef { + index: end_inclusive, + hash: end_hash + } + ) + .unwrap(); + } + _ => { + result.push_str(SET_COMMAND_MARKER); + result.push('\n'); + } + } + } + } + for (line_offset, line) in hunk.new_text_lines.iter().enumerate() { + if let Some((cursor_line_offset, char_offset)) = hunk.cursor_line_offset_in_new_text + && line_offset == cursor_line_offset + { + result.push_str(&line[..char_offset]); + result.push_str(CURSOR_MARKER); + result.push_str(&line[char_offset..]); + continue; + } + + result.push_str(line); + } + } + + for raw_line in patch.split_inclusive('\n') { + if 
raw_line.starts_with("@@") { + // Flush any pending change hunk from a previous patch hunk. + if let Some(hunk) = current_hunk.take() { + flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes); + } + + // Parse hunk header: @@ -old_start[,old_count] +new_start[,new_count] @@ + // We intentionally do not trust old_start as a direct local index into `old_text`, + // because some patches are produced against a larger file region and carry + // non-local line numbers. We keep indexing local by advancing from parsed patch lines. + if first_hunk { + new_text_byte_offset = 0; + first_hunk = false; + } + continue; + } + + if raw_line.starts_with("---") || raw_line.starts_with("+++") { + continue; + } + if raw_line.starts_with("\\ No newline") { + continue; + } + + if raw_line.starts_with('-') { + // Extend or start a change hunk with this deleted old line. + match &mut current_hunk { + Some(Hunk { + line_range: range, .. + }) => range.end = old_line_index + 1, + None => { + current_hunk = Some(Hunk { + line_range: old_line_index..old_line_index + 1, + new_text_lines: Vec::new(), + cursor_line_offset_in_new_text: None, + }); + } + } + old_line_index += 1; + } else if let Some(added_content) = raw_line.strip_prefix('+') { + // Place cursor marker if cursor_offset falls within this line. 
+ let mut cursor_line_offset = None; + if let Some(cursor_off) = cursor_offset + && (first_hunk + || cursor_off >= new_text_byte_offset + && cursor_off <= new_text_byte_offset + added_content.len()) + { + let line_offset = added_content.floor_char_boundary( + cursor_off + .saturating_sub(new_text_byte_offset) + .min(added_content.len()), + ); + cursor_line_offset = Some(line_offset); + } + + new_text_byte_offset += added_content.len(); + + let hunk = current_hunk.get_or_insert(Hunk { + line_range: old_line_index..old_line_index, + new_text_lines: vec![], + cursor_line_offset_in_new_text: None, + }); + hunk.new_text_lines.push(added_content); + hunk.cursor_line_offset_in_new_text = cursor_line_offset + .map(|offset_in_line| (hunk.new_text_lines.len() - 1, offset_in_line)); + } else { + // Context line (starts with ' ' or is empty). + if let Some(hunk) = current_hunk.take() { + flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes); + } + last_old_line_before_hunk = Some(old_line_index); + old_line_index += 1; + let content = raw_line.strip_prefix(' ').unwrap_or(raw_line); + new_text_byte_offset += content.len(); + } + } + + // Flush final group. + if let Some(hunk) = current_hunk.take() { + flush_hunk(hunk, last_old_line_before_hunk, &mut result, &old_hashes); + } + + // Trim a single trailing newline. + if result.ends_with('\n') { + result.pop(); + } + + Ok(result) + } + + #[cfg(test)] + mod tests { + use super::*; + use indoc::indoc; + + #[test] + fn test_format_cursor_region() { + struct Case { + name: &'static str, + context: &'static str, + editable_range: Range, + cursor_offset: usize, + expected: &'static str, + } + + let cases = [ + Case { + name: "basic_cursor_placement", + context: "hello world\n", + editable_range: 0..12, + cursor_offset: 5, + expected: indoc! 
{" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:5c|hello<|user_cursor|> world + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "multiline_cursor_on_second_line", + context: "aaa\nbbb\nccc\n", + editable_range: 0..12, + cursor_offset: 5, // byte 5 → 1 byte into "bbb" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:23|aaa + 1:26|b<|user_cursor|>bb + 2:29|ccc + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "no_trailing_newline_in_context", + context: "line1\nline2", + editable_range: 0..11, + cursor_offset: 3, + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:d9|lin<|user_cursor|>e1 + 1:da|line2 + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "leading_newline_in_editable_region", + context: "\nabc\n", + editable_range: 0..5, + cursor_offset: 2, // byte 2 = 'a' in "abc" (after leading \n) + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:00| + 1:26|a<|user_cursor|>bc + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "with_suffix", + context: "abc\ndef", + editable_range: 0..4, // editable region = "abc\n", suffix = "def" + cursor_offset: 2, + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:26|ab<|user_cursor|>c + <|fim_suffix|> + def + <|fim_middle|>updated"}, + }, + Case { + name: "unicode_two_byte_chars", + context: "héllo\n", + editable_range: 0..7, + cursor_offset: 3, // byte 3 = after "hé" (h=1 byte, é=2 bytes), before "llo" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:1b|hé<|user_cursor|>llo + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "unicode_three_byte_chars", + context: "日本語\n", + editable_range: 0..10, + cursor_offset: 6, // byte 6 = after "日本" (3+3 bytes), before "語" + expected: indoc! 
{" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:80|日本<|user_cursor|>語 + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "unicode_four_byte_chars", + context: "a🌍b\n", + editable_range: 0..7, + cursor_offset: 5, // byte 5 = after "a🌍" (1+4 bytes), before "b" + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:6b|a🌍<|user_cursor|>b + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "cursor_at_start_of_region_not_placed", + context: "abc\n", + editable_range: 0..4, + cursor_offset: 0, // cursor_offset(0) > offset(0) is false → cursor not placed + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:26|abc + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "cursor_at_end_of_line_not_placed", + context: "abc\ndef\n", + editable_range: 0..8, + cursor_offset: 3, // byte 3 = the \n after "abc" → falls between lines, not placed + expected: indoc! {" + <|file_sep|>test.rs + <|fim_prefix|> + <|fim_middle|>current + 0:26|abc + 1:2f|def + <|fim_suffix|> + <|fim_middle|>updated"}, + }, + Case { + name: "cursor_offset_relative_to_context_not_editable_region", + // cursor_offset is relative to `context`, so when editable_range.start > 0, + // write_cursor_excerpt_section must subtract it before comparing against + // per-line offsets within the editable region. + context: "pre\naaa\nbbb\nsuf\n", + editable_range: 4..12, // editable region = "aaa\nbbb\n" + cursor_offset: 9, // byte 9 in context = second 'b' in "bbb" + expected: indoc! 
{" + <|file_sep|>test.rs + <|fim_prefix|> + pre + <|fim_middle|>current + 0:23|aaa + 1:26|b<|user_cursor|>bb + <|fim_suffix|> + suf + <|fim_middle|>updated"}, + }, + ]; + + for case in &cases { + let mut prompt = String::new(); + hashline::write_cursor_excerpt_section( + &mut prompt, + Path::new("test.rs"), + case.context, + &case.editable_range, + case.cursor_offset, + ); + assert_eq!(prompt, case.expected, "failed case: {}", case.name); + } + } + + #[test] + fn test_apply_edit_commands() { + struct Case { + name: &'static str, + original: &'static str, + model_output: &'static str, + expected: &'static str, + } + + let cases = vec![ + Case { + name: "set_single_line", + original: indoc! {" + let mut total = 0; + for product in products { + total += ; + } + total + "}, + model_output: indoc! {" + <|set|>2:87 + total += product.price; + "}, + expected: indoc! {" + let mut total = 0; + for product in products { + total += product.price; + } + total + "}, + }, + Case { + name: "set_range", + original: indoc! {" + fn foo() { + let x = 1; + let y = 2; + let z = 3; + } + "}, + model_output: indoc! {" + <|set|>1:46-3:4a + let sum = 6; + "}, + expected: indoc! {" + fn foo() { + let sum = 6; + } + "}, + }, + Case { + name: "insert_after_line", + original: indoc! {" + fn main() { + let x = 1; + } + "}, + model_output: indoc! {" + <|insert|>1:46 + let y = 2; + "}, + expected: indoc! {" + fn main() { + let x = 1; + let y = 2; + } + "}, + }, + Case { + name: "insert_before_first", + original: indoc! {" + let x = 1; + let y = 2; + "}, + model_output: indoc! {" + <|insert|> + use std::io; + "}, + expected: indoc! {" + use std::io; + let x = 1; + let y = 2; + "}, + }, + Case { + name: "set_with_cursor_marker", + original: indoc! {" + fn main() { + println!(); + } + "}, + model_output: indoc! {" + <|set|>1:34 + eprintln!(\"<|user_cursor|>\"); + "}, + expected: indoc! 
{" + fn main() { + eprintln!(\"<|user_cursor|>\"); + } + "}, + }, + Case { + name: "multiple_set_commands", + original: indoc! {" + aaa + bbb + ccc + ddd + "}, + model_output: indoc! {" + <|set|>0:23 + AAA + <|set|>2:29 + CCC + "}, + expected: indoc! {" + AAA + bbb + CCC + ddd + "}, + }, + Case { + name: "set_range_multiline_replacement", + original: indoc! {" + fn handle_submit() { + } + + fn handle_keystroke() { + "}, + model_output: indoc! {" + <|set|>0:3f-1:7d + fn handle_submit(modal_state: &mut ModalState) { + <|user_cursor|> + } + "}, + expected: indoc! {" + fn handle_submit(modal_state: &mut ModalState) { + <|user_cursor|> + } + + fn handle_keystroke() { + "}, + }, + Case { + name: "no_edit_commands_returns_original", + original: indoc! {" + hello + world + "}, + model_output: "some random text with no commands", + expected: indoc! {" + hello + world + "}, + }, + Case { + name: "wrong_hash_set_ignored", + original: indoc! {" + aaa + bbb + "}, + model_output: indoc! {" + <|set|>0:ff + ZZZ + "}, + expected: indoc! {" + aaa + bbb + "}, + }, + Case { + name: "insert_and_set_combined", + original: indoc! {" + alpha + beta + gamma + "}, + model_output: indoc! {" + <|set|>0:06 + ALPHA + <|insert|>1:9c + beta_extra + "}, + expected: indoc! {" + ALPHA + beta + beta_extra + gamma + "}, + }, + Case { + name: "no_trailing_newline_preserved", + original: "hello\nworld", + model_output: indoc! {" + <|set|>0:14 + HELLO + "}, + expected: "HELLO\nworld", + }, + Case { + name: "set_range_hash_mismatch_in_end_bound", + original: indoc! {" + one + two + three + "}, + model_output: indoc! {" + <|set|>0:42-2:ff + ONE_TWO_THREE + "}, + expected: indoc! {" + one + two + three + "}, + }, + Case { + name: "set_range_start_greater_than_end_ignored", + original: indoc! {" + a + b + c + "}, + model_output: indoc! {" + <|set|>2:63-1:62 + X + "}, + expected: indoc! {" + a + b + c + "}, + }, + Case { + name: "insert_out_of_bounds_ignored", + original: indoc! 
{" + x + y + "}, + model_output: indoc! {" + <|insert|>99:aa + z + "}, + expected: indoc! {" + x + y + "}, + }, + Case { + name: "set_out_of_bounds_ignored", + original: indoc! {" + x + y + "}, + model_output: indoc! {" + <|set|>99:aa + z + "}, + expected: indoc! {" + x + y + "}, + }, + Case { + name: "malformed_set_command_ignored", + original: indoc! {" + alpha + beta + "}, + model_output: indoc! {" + <|set|>not-a-line-ref + UPDATED + "}, + expected: indoc! {" + alpha + beta + "}, + }, + Case { + name: "malformed_insert_hash_treated_as_before_first", + original: indoc! {" + alpha + beta + "}, + model_output: indoc! {" + <|insert|>1:nothex + preamble + "}, + expected: indoc! {" + preamble + alpha + beta + "}, + }, + Case { + name: "set_then_insert_same_target_orders_insert_after_replacement", + original: indoc! {" + cat + dog + "}, + model_output: indoc! {" + <|set|>0:38 + CAT + <|insert|>0:38 + TAIL + "}, + expected: indoc! {" + CAT + TAIL + dog + "}, + }, + Case { + name: "overlapping_set_ranges_last_wins", + original: indoc! {" + a + b + c + d + "}, + model_output: indoc! {" + <|set|>0:61-2:63 + FIRST + <|set|>1:62-3:64 + SECOND + "}, + expected: indoc! {" + FIRST + d + "}, + }, + Case { + name: "insert_before_first_and_after_line", + original: indoc! {" + a + b + "}, + model_output: indoc! {" + <|insert|> + HEAD + <|insert|>0:61 + MID + "}, + expected: indoc! 
{" + HEAD + a + MID + b + "}, + }, + ]; + + for case in &cases { + let result = hashline::apply_edit_commands(case.original, &case.model_output); + assert_eq!(result, case.expected, "failed case: {}", case.name); + } + } + + #[test] + fn test_output_has_edit_commands() { + assert!(hashline::output_has_edit_commands(&format!( + "{}0:ab\nnew", + SET_COMMAND_MARKER + ))); + assert!(hashline::output_has_edit_commands(&format!( + "{}0:ab\nnew", + INSERT_COMMAND_MARKER + ))); + assert!(hashline::output_has_edit_commands(&format!( + "some text\n{}1:cd\nstuff", + SET_COMMAND_MARKER + ))); + assert!(!hashline::output_has_edit_commands("just plain text")); + assert!(!hashline::output_has_edit_commands("NO_EDITS")); + } + + // ---- hashline::patch_to_edit_commands round-trip tests ---- + + #[test] + fn test_patch_to_edit_commands() { + struct Case { + name: &'static str, + old: &'static str, + patch: &'static str, + expected_new: &'static str, + } + + let cases = [ + Case { + name: "single_line_replacement", + old: indoc! {" + let mut total = 0; + for product in products { + total += ; + } + total + "}, + patch: indoc! {" + @@ -1,5 +1,5 @@ + let mut total = 0; + for product in products { + - total += ; + + total += product.price; + } + total + "}, + expected_new: indoc! {" + let mut total = 0; + for product in products { + total += product.price; + } + total + "}, + }, + Case { + name: "multiline_replacement", + old: indoc! {" + fn foo() { + let x = 1; + let y = 2; + let z = 3; + } + "}, + patch: indoc! {" + @@ -1,5 +1,3 @@ + fn foo() { + - let x = 1; + - let y = 2; + - let z = 3; + + let sum = 1 + 2 + 3; + } + "}, + expected_new: indoc! {" + fn foo() { + let sum = 1 + 2 + 3; + } + "}, + }, + Case { + name: "insertion", + old: indoc! {" + fn main() { + let x = 1; + } + "}, + patch: indoc! {" + @@ -1,3 +1,4 @@ + fn main() { + let x = 1; + + let y = 2; + } + "}, + expected_new: indoc! 
{" + fn main() { + let x = 1; + let y = 2; + } + "}, + }, + Case { + name: "insertion_before_first", + old: indoc! {" + let x = 1; + let y = 2; + "}, + patch: indoc! {" + @@ -1,2 +1,3 @@ + +use std::io; + let x = 1; + let y = 2; + "}, + expected_new: indoc! {" + use std::io; + let x = 1; + let y = 2; + "}, + }, + Case { + name: "deletion", + old: indoc! {" + aaa + bbb + ccc + ddd + "}, + patch: indoc! {" + @@ -1,4 +1,2 @@ + aaa + -bbb + -ccc + ddd + "}, + expected_new: indoc! {" + aaa + ddd + "}, + }, + Case { + name: "multiple_changes", + old: indoc! {" + alpha + beta + gamma + delta + epsilon + "}, + patch: indoc! {" + @@ -1,5 +1,5 @@ + -alpha + +ALPHA + beta + gamma + -delta + +DELTA + epsilon + "}, + expected_new: indoc! {" + ALPHA + beta + gamma + DELTA + epsilon + "}, + }, + Case { + name: "replace_with_insertion", + old: indoc! {r#" + fn handle() { + modal_state.close(); + modal_state.dismiss(); + "#}, + patch: indoc! {r#" + @@ -1,3 +1,4 @@ + fn handle() { + modal_state.close(); + + eprintln!(""); + modal_state.dismiss(); + "#}, + expected_new: indoc! {r#" + fn handle() { + modal_state.close(); + eprintln!(""); + modal_state.dismiss(); + "#}, + }, + Case { + name: "complete_replacement", + old: indoc! {" + aaa + bbb + ccc + "}, + patch: indoc! {" + @@ -1,3 +1,3 @@ + -aaa + -bbb + -ccc + +xxx + +yyy + +zzz + "}, + expected_new: indoc! {" + xxx + yyy + zzz + "}, + }, + Case { + name: "add_function_body", + old: indoc! {" + fn foo() { + modal_state.dismiss(); + } + + fn + + fn handle_keystroke() { + "}, + patch: indoc! {" + @@ -1,6 +1,8 @@ + fn foo() { + modal_state.dismiss(); + } + + -fn + +fn handle_submit() { + + todo() + +} + + fn handle_keystroke() { + "}, + expected_new: indoc! {" + fn foo() { + modal_state.dismiss(); + } + + fn handle_submit() { + todo() + } + + fn handle_keystroke() { + "}, + }, + Case { + name: "with_cursor_offset", + old: indoc! {r#" + fn main() { + println!(); + } + "#}, + patch: indoc! 
{r#" + @@ -1,3 +1,3 @@ + fn main() { + - println!(); + + eprintln!(""); + } + "#}, + expected_new: indoc! {r#" + fn main() { + eprintln!("<|user_cursor|>"); + } + "#}, + }, + Case { + name: "non_local_hunk_header_pure_insertion_repro", + old: indoc! {" + aaa + bbb + "}, + patch: indoc! {" + @@ -20,2 +20,3 @@ + aaa + +xxx + bbb + "}, + expected_new: indoc! {" + aaa + xxx + bbb + "}, + }, + ]; + + for case in &cases { + // The cursor_offset for patch_to_edit_commands is relative to + // the first hunk's new text (context + additions). We compute + // it by finding where the marker sits in the expected output + // (which mirrors the new text of the hunk). + let cursor_offset = case.expected_new.find(CURSOR_MARKER); + + let commands = + hashline::patch_to_edit_commands(case.old, case.patch, cursor_offset) + .unwrap_or_else(|e| panic!("failed case {}: {e}", case.name)); + + assert!( + hashline::output_has_edit_commands(&commands), + "case {}: expected edit commands, got: {commands:?}", + case.name, + ); + + let applied = hashline::apply_edit_commands(case.old, &commands); + assert_eq!(applied, case.expected_new, "case {}", case.name); + } + } + } +} + pub mod seed_coder { //! Seed-Coder prompt format using SPM (Suffix-Prefix-Middle) FIM mode. //! 
@@ -847,6 +2392,17 @@ pub mod seed_coder { ] } + pub fn write_cursor_excerpt_section( + prompt: &mut String, + path: &Path, + context: &str, + editable_range: &Range, + cursor_offset: usize, + ) { + let section = build_cursor_prefix_section(path, context, editable_range, cursor_offset); + prompt.push_str(§ion); + } + pub fn format_prompt_with_budget( path: &Path, context: &str, @@ -1186,7 +2742,7 @@ mod tests { } fn format_with_budget(input: &ZetaPromptInput, max_tokens: usize) -> String { - format_zeta_prompt_with_budget(input, ZetaFormat::V0114180EditableRegion, max_tokens) + format_prompt_with_budget_for_format(input, ZetaFormat::V0114180EditableRegion, max_tokens) } #[test] @@ -1551,11 +3107,11 @@ mod tests { } fn format_seed_coder(input: &ZetaPromptInput) -> String { - format_zeta_prompt_with_budget(input, ZetaFormat::V0211SeedCoder, 10000) + format_prompt_with_budget_for_format(input, ZetaFormat::V0211SeedCoder, 10000) } fn format_seed_coder_with_budget(input: &ZetaPromptInput, max_tokens: usize) -> String { - format_zeta_prompt_with_budget(input, ZetaFormat::V0211SeedCoder, max_tokens) + format_prompt_with_budget_for_format(input, ZetaFormat::V0211SeedCoder, max_tokens) } #[test] From 2772db8dcc3c27bc63a09e5db9f4c60340e91436 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Tue, 3 Mar 2026 13:21:52 -0800 Subject: [PATCH 280/548] Remove zeta2 feature flag (#50618) Release Notes: - N/A --- crates/edit_prediction/src/edit_prediction.rs | 7 +--- .../src/edit_prediction_button.rs | 20 ++-------- crates/settings_content/src/language.rs | 7 +--- .../zed/src/zed/edit_prediction_registry.rs | 38 ++----------------- 4 files changed, 9 insertions(+), 63 deletions(-) diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 74988d65933b3bbbc2507077a74dfeb94089ab63..33c3ea1e56648c73682e06f685f91f54344200d6 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs 
@@ -108,13 +108,8 @@ const EDIT_PREDICTION_SETTLED_EVENT: &str = "Edit Prediction Settled"; const EDIT_PREDICTION_SETTLED_TTL: Duration = Duration::from_secs(60 * 5); const EDIT_PREDICTION_SETTLED_QUIESCENCE: Duration = Duration::from_secs(10); -pub struct Zeta2FeatureFlag; pub struct EditPredictionJumpsFeatureFlag; -impl FeatureFlag for Zeta2FeatureFlag { - const NAME: &'static str = "zeta2"; -} - impl FeatureFlag for EditPredictionJumpsFeatureFlag { const NAME: &'static str = "edit_prediction_jumps"; } @@ -2109,7 +2104,7 @@ impl EditPredictionStore { active_buffer.clone(), position, trigger, - cx.has_flag::(), + cx.has_flag::(), cx, ) } diff --git a/crates/edit_prediction_ui/src/edit_prediction_button.rs b/crates/edit_prediction_ui/src/edit_prediction_button.rs index 743256970f486b474405e7f034f18501505cb825..b00a229164d480d38312ca97cac31a23010f8b69 100644 --- a/crates/edit_prediction_ui/src/edit_prediction_button.rs +++ b/crates/edit_prediction_ui/src/edit_prediction_button.rs @@ -3,7 +3,7 @@ use client::{Client, UserStore, zed_urls}; use cloud_llm_client::UsageLimit; use codestral::{self, CodestralEditPredictionDelegate}; use copilot::Status; -use edit_prediction::{EditPredictionStore, Zeta2FeatureFlag}; +use edit_prediction::EditPredictionStore; use edit_prediction_types::EditPredictionDelegateHandle; use editor::{ Editor, MultiBufferOffset, SelectionEffects, actions::ShowEditPrediction, scroll::Autoscroll, @@ -22,9 +22,7 @@ use language::{ }; use project::{DisableAiSettings, Project}; use regex::Regex; -use settings::{ - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, Settings, SettingsStore, update_settings_file, -}; +use settings::{Settings, SettingsStore, update_settings_file}; use std::{ rc::Rc, sync::{Arc, LazyLock}, @@ -776,13 +774,7 @@ impl EditPredictionButton { menu = menu.separator().header("Privacy"); - if matches!( - provider, - EditPredictionProvider::Zed - | EditPredictionProvider::Experimental( - 
EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - ) - ) { + if matches!(provider, EditPredictionProvider::Zed) { if let Some(provider) = &self.edit_prediction_provider { let data_collection = provider.data_collection_state(cx); @@ -1405,12 +1397,6 @@ pub fn get_available_providers(cx: &mut App) -> Vec { providers.push(EditPredictionProvider::Zed); - if cx.has_flag::() { - providers.push(EditPredictionProvider::Experimental( - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - )); - } - if let Some(app_state) = workspace::AppState::global(cx).upgrade() && copilot::GlobalCopilotAuth::try_get_or_init(app_state, cx) .is_some_and(|copilot| copilot.0.read(cx).is_authenticated()) diff --git a/crates/settings_content/src/language.rs b/crates/settings_content/src/language.rs index d429f53824fd0f4f0a5810bce01b05badcfb9a51..a8d68fea99c024830ee45c66ec5d7d641aa4c250 100644 --- a/crates/settings_content/src/language.rs +++ b/crates/settings_content/src/language.rs @@ -90,7 +90,7 @@ pub enum EditPredictionProvider { Experimental(&'static str), } -pub const EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME: &str = "zeta2"; +const EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME: &str = "zeta2"; impl<'de> Deserialize<'de> for EditPredictionProvider { fn deserialize(deserializer: D) -> Result @@ -157,10 +157,7 @@ impl EditPredictionProvider { EditPredictionProvider::Codestral => Some("Codestral"), EditPredictionProvider::Sweep => Some("Sweep"), EditPredictionProvider::Mercury => Some("Mercury"), - EditPredictionProvider::Experimental( - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, - ) => Some("Zeta2"), - EditPredictionProvider::None | EditPredictionProvider::Experimental(_) => None, + EditPredictionProvider::Experimental(_) | EditPredictionProvider::None => None, EditPredictionProvider::Ollama => Some("Ollama"), EditPredictionProvider::OpenAiCompatibleApi => Some("OpenAI-Compatible API"), } diff --git a/crates/zed/src/zed/edit_prediction_registry.rs 
b/crates/zed/src/zed/edit_prediction_registry.rs index 39eee233e02a782e2379849247448c8f8c1ea71a..9f05c5795e6f16cab231df8a5586106ed25b03ee 100644 --- a/crates/zed/src/zed/edit_prediction_registry.rs +++ b/crates/zed/src/zed/edit_prediction_registry.rs @@ -2,15 +2,12 @@ use client::{Client, UserStore}; use codestral::{CodestralEditPredictionDelegate, load_codestral_api_key}; use collections::HashMap; use copilot::CopilotEditPredictionDelegate; -use edit_prediction::{EditPredictionModel, ZedEditPredictionDelegate, Zeta2FeatureFlag}; +use edit_prediction::{EditPredictionModel, ZedEditPredictionDelegate}; use editor::Editor; -use feature_flags::FeatureFlagAppExt; use gpui::{AnyWindowHandle, App, AppContext as _, Context, Entity, WeakEntity}; use language::language_settings::{EditPredictionProvider, all_language_settings}; -use settings::{ - EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME, EditPredictionPromptFormat, SettingsStore, -}; +use settings::{EditPredictionPromptFormat, SettingsStore}; use std::{cell::RefCell, rc::Rc, sync::Arc}; use ui::Window; @@ -81,9 +78,6 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { .detach(); cx.observe_global::({ - let editors = editors.clone(); - let client = client.clone(); - let user_store = user_store.clone(); let mut previous_config = edit_prediction_provider_config_for_settings(cx); move |cx| { let new_provider_config = edit_prediction_provider_config_for_settings(cx); @@ -107,24 +101,6 @@ pub fn init(client: Arc, user_store: Entity, cx: &mut App) { } }) .detach(); - - cx.observe_flag::({ - let mut previous_config = edit_prediction_provider_config_for_settings(cx); - move |_is_enabled, cx| { - let new_provider_config = edit_prediction_provider_config_for_settings(cx); - if new_provider_config != previous_config { - previous_config = new_provider_config; - assign_edit_prediction_providers( - &editors, - new_provider_config, - &client, - user_store.clone(), - cx, - ); - } - } - }) - .detach(); } fn 
edit_prediction_provider_config_for_settings(cx: &App) -> Option { @@ -171,15 +147,7 @@ fn edit_prediction_provider_config_for_settings(cx: &App) -> Option Some(EditPredictionProviderConfig::Zed( EditPredictionModel::Mercury, )), - EditPredictionProvider::Experimental(name) => { - if name == EXPERIMENTAL_ZETA2_EDIT_PREDICTION_PROVIDER_NAME - && cx.has_flag::() - { - Some(EditPredictionProviderConfig::Zed(EditPredictionModel::Zeta)) - } else { - None - } - } + EditPredictionProvider::Experimental(_) => None, } } From 4dd42a0f77b11d0bed2a072919bcd9180b9a577c Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Tue, 3 Mar 2026 22:32:11 +0100 Subject: [PATCH 281/548] agent: Fix subagent error display (#50638) Since we were no longer just returning a string, we need to update the content in both success and error modes to get a nice rendering experience. Release Notes: - N/A --- crates/agent/src/tools/spawn_agent_tool.rs | 49 ++++++++++++++-------- 1 file changed, 31 insertions(+), 18 deletions(-) diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index b75c41775258db49577024dca3eb1770937e52e8..162de68b86115056e9579d22a8623d675245cc91 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -161,29 +161,42 @@ impl AgentTool for SpawnAgentTool { Ok((subagent, session_info)) })?; - match subagent.send(input.message, cx).await { - Ok(output) => { - session_info.message_end_index = - cx.update(|cx| Some(subagent.num_entries(cx).saturating_sub(1))); - event_stream.update_fields_with_meta( - acp::ToolCallUpdateFields::new().content(vec![output.clone().into()]), - Some(acp::Meta::from_iter([( - SUBAGENT_SESSION_INFO_META_KEY.into(), - serde_json::json!(&session_info), - )])), - ); + let send_result = subagent.send(input.message, cx).await; + + session_info.message_end_index = + cx.update(|cx| Some(subagent.num_entries(cx).saturating_sub(1))); + + let meta = 
Some(acp::Meta::from_iter([( + SUBAGENT_SESSION_INFO_META_KEY.into(), + serde_json::json!(&session_info), + )])); + + let (output, result) = match send_result { + Ok(output) => ( + output.clone(), Ok(SpawnAgentToolOutput::Success { session_id: session_info.session_id.clone(), session_info, output, - }) + }), + ), + Err(e) => { + let error = e.to_string(); + ( + error.clone(), + Err(SpawnAgentToolOutput::Error { + session_id: Some(session_info.session_id.clone()), + error, + session_info: Some(session_info), + }), + ) } - Err(e) => Err(SpawnAgentToolOutput::Error { - session_id: Some(session_info.session_id.clone()), - error: e.to_string(), - session_info: Some(session_info), - }), - } + }; + event_stream.update_fields_with_meta( + acp::ToolCallUpdateFields::new().content(vec![output.into()]), + meta, + ); + result }) } From c1cbcb612dc24499adfa7a0219436ecd3c8aaf91 Mon Sep 17 00:00:00 2001 From: John Tur Date: Tue, 3 Mar 2026 16:47:57 -0500 Subject: [PATCH 282/548] Fix handling of `surface.configure` on Linux (#50640) Closes #50574 Release Notes: - Fixed Zed not being responsive on some Linux configurations Co-authored-by: Conrad Irwin --- crates/gpui_wgpu/src/wgpu_renderer.rs | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/crates/gpui_wgpu/src/wgpu_renderer.rs b/crates/gpui_wgpu/src/wgpu_renderer.rs index 5beeef6ad1238f25db7c50f739053e138b2e1295..2fd83b7b065e7ce4fe0ba9ec017f39264a33bee3 100644 --- a/crates/gpui_wgpu/src/wgpu_renderer.rs +++ b/crates/gpui_wgpu/src/wgpu_renderer.rs @@ -98,7 +98,6 @@ pub struct WgpuRenderer { queue: Arc, surface: wgpu::Surface<'static>, surface_config: wgpu::SurfaceConfiguration, - surface_configured: bool, pipelines: WgpuPipelines, bind_group_layouts: WgpuBindGroupLayouts, atlas: Arc, @@ -381,7 +380,6 @@ impl WgpuRenderer { queue, surface, surface_config, - surface_configured: true, pipelines, bind_group_layouts, atlas, @@ -875,9 +873,7 @@ impl WgpuRenderer { self.surface_config.width = 
clamped_width.max(1); self.surface_config.height = clamped_height.max(1); - if self.surface_configured { - self.surface.configure(&self.device, &self.surface_config); - } + self.surface.configure(&self.device, &self.surface_config); // Invalidate intermediate textures - they will be lazily recreated // in draw() after we confirm the surface is healthy. This avoids @@ -928,9 +924,7 @@ impl WgpuRenderer { if new_alpha_mode != self.surface_config.alpha_mode { self.surface_config.alpha_mode = new_alpha_mode; - if self.surface_configured { - self.surface.configure(&self.device, &self.surface_config); - } + self.surface.configure(&self.device, &self.surface_config); self.pipelines = Self::create_pipelines( &self.device, &self.bind_group_layouts, @@ -991,7 +985,7 @@ impl WgpuRenderer { let frame = match self.surface.get_current_texture() { Ok(frame) => frame, Err(wgpu::SurfaceError::Lost | wgpu::SurfaceError::Outdated) => { - self.surface_configured = false; + self.surface.configure(&self.device, &self.surface_config); return; } Err(e) => { From 832782f6b333a89074d55f04ecbecd75467fc48a Mon Sep 17 00:00:00 2001 From: Eric Holk Date: Tue, 3 Mar 2026 14:20:12 -0800 Subject: [PATCH 283/548] Persist token count and scroll position across agent restarts (#50620) Release Notes: - Token counts and scroll position are restored when loading a previous agent thread --- crates/acp_thread/src/acp_thread.rs | 11 ++++ crates/agent/src/agent.rs | 39 +++++++++++- crates/agent/src/db.rs | 60 +++++++++++++++++++ crates/agent/src/thread.rs | 20 +++++++ crates/agent/src/thread_store.rs | 1 + crates/agent_ui/src/connection_view.rs | 4 ++ .../src/connection_view/thread_view.rs | 56 +++++++++++++---- 7 files changed, 178 insertions(+), 13 deletions(-) diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index f57ce1f4d188e260624bd90187a21890379fe6b6..1b9271918884dc020986577926d9578e3a6f049c 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ 
b/crates/acp_thread/src/acp_thread.rs @@ -972,6 +972,8 @@ pub struct AcpThread { had_error: bool, /// The user's unsent prompt text, persisted so it can be restored when reloading the thread. draft_prompt: Option>, + /// The initial scroll position for the thread view, set during session registration. + ui_scroll_position: Option, } impl From<&AcpThread> for ActionLogTelemetry { @@ -1210,6 +1212,7 @@ impl AcpThread { pending_terminal_exit: HashMap::default(), had_error: false, draft_prompt: None, + ui_scroll_position: None, } } @@ -1229,6 +1232,14 @@ impl AcpThread { self.draft_prompt = prompt; } + pub fn ui_scroll_position(&self) -> Option { + self.ui_scroll_position + } + + pub fn set_ui_scroll_position(&mut self, position: Option) { + self.ui_scroll_position = position; + } + pub fn connection(&self) -> &Rc { &self.connection } diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 7cf9416840a6bd2870327c9c68135857c01f7c9b..5421538ca736028a4ea7290c09ef81036e055b81 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -352,6 +352,8 @@ impl NativeAgent { let parent_session_id = thread.parent_thread_id(); let title = thread.title(); let draft_prompt = thread.draft_prompt().map(Vec::from); + let scroll_position = thread.ui_scroll_position(); + let token_usage = thread.latest_token_usage(); let project = thread.project.clone(); let action_log = thread.action_log.clone(); let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone(); @@ -367,6 +369,8 @@ impl NativeAgent { cx, ); acp_thread.set_draft_prompt(draft_prompt); + acp_thread.set_ui_scroll_position(scroll_position); + acp_thread.update_token_usage(token_usage, cx); acp_thread }); @@ -1917,7 +1921,9 @@ mod internal_tests { use gpui::TestAppContext; use indoc::formatdoc; use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider}; - use language_model::{LanguageModelProviderId, LanguageModelProviderName}; + use language_model::{ + 
LanguageModelCompletionEvent, LanguageModelProviderId, LanguageModelProviderName, + }; use serde_json::json; use settings::SettingsStore; use util::{path, rel_path::rel_path}; @@ -2549,6 +2555,13 @@ mod internal_tests { cx.run_until_parked(); model.send_last_completion_stream_text_chunk("Lorem."); + model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate( + language_model::TokenUsage { + input_tokens: 150, + output_tokens: 75, + ..Default::default() + }, + )); model.end_last_completion_stream(); cx.run_until_parked(); summary_model @@ -2587,6 +2600,12 @@ mod internal_tests { acp_thread.update(cx, |thread, _cx| { thread.set_draft_prompt(Some(draft_blocks.clone())); }); + thread.update(cx, |thread, _cx| { + thread.set_ui_scroll_position(Some(gpui::ListOffset { + item_ix: 5, + offset_in_item: gpui::px(12.5), + })); + }); thread.update(cx, |_thread, cx| cx.notify()); cx.run_until_parked(); @@ -2632,6 +2651,24 @@ mod internal_tests { acp_thread.read_with(cx, |thread, _| { assert_eq!(thread.draft_prompt(), Some(draft_blocks.as_slice())); }); + + // Ensure token usage survived the round-trip. + acp_thread.read_with(cx, |thread, _| { + let usage = thread + .token_usage() + .expect("token usage should be restored after reload"); + assert_eq!(usage.input_tokens, 150); + assert_eq!(usage.output_tokens, 75); + }); + + // Ensure scroll position survived the round-trip. 
+ acp_thread.read_with(cx, |thread, _| { + let scroll = thread + .ui_scroll_position() + .expect("scroll position should be restored after reload"); + assert_eq!(scroll.item_ix, 5); + assert_eq!(scroll.offset_in_item, gpui::px(12.5)); + }); } fn thread_entries( diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index 3a7af37cac85065d8853fbb5332093ef3fd20592..10ecb643b9a17dd6b02b47a416c526a662d12632 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -66,6 +66,14 @@ pub struct DbThread { pub thinking_effort: Option, #[serde(default)] pub draft_prompt: Option>, + #[serde(default)] + pub ui_scroll_position: Option, +} + +#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] +pub struct SerializedScrollPosition { + pub item_ix: usize, + pub offset_in_item: f32, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -108,6 +116,7 @@ impl SharedThread { thinking_enabled: false, thinking_effort: None, draft_prompt: None, + ui_scroll_position: None, } } @@ -286,6 +295,7 @@ impl DbThread { thinking_enabled: false, thinking_effort: None, draft_prompt: None, + ui_scroll_position: None, }) } } @@ -637,6 +647,7 @@ mod tests { thinking_enabled: false, thinking_effort: None, draft_prompt: None, + ui_scroll_position: None, } } @@ -841,4 +852,53 @@ mod tests { assert_eq!(threads.len(), 1); assert!(threads[0].folder_paths.is_empty()); } + + #[test] + fn test_scroll_position_defaults_to_none() { + let json = r#"{ + "title": "Old Thread", + "messages": [], + "updated_at": "2024-01-01T00:00:00Z" + }"#; + + let db_thread: DbThread = serde_json::from_str(json).expect("Failed to deserialize"); + + assert!( + db_thread.ui_scroll_position.is_none(), + "Legacy threads without scroll_position field should default to None" + ); + } + + #[gpui::test] + async fn test_scroll_position_roundtrips_through_save_load(cx: &mut TestAppContext) { + let database = ThreadsDatabase::new(cx.executor()).unwrap(); + + let thread_id = session_id("thread-with-scroll"); + + let 
mut thread = make_thread( + "Thread With Scroll", + Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), + ); + thread.ui_scroll_position = Some(SerializedScrollPosition { + item_ix: 42, + offset_in_item: 13.5, + }); + + database + .save_thread(thread_id.clone(), thread, PathList::default()) + .await + .unwrap(); + + let loaded = database + .load_thread(thread_id) + .await + .unwrap() + .expect("thread should exist"); + + let scroll = loaded + .ui_scroll_position + .expect("scroll_position should be restored"); + assert_eq!(scroll.item_ix, 42); + assert!((scroll.offset_in_item - 13.5).abs() < f32::EPSILON); + } } diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index c57bd1e99b9ae4fd1a93214e2a5d5937d1ab0274..99d77456e3822ae12c65c0a419ceea18f13f41e8 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -901,6 +901,7 @@ pub struct Thread { subagent_context: Option, /// The user's unsent prompt text, persisted so it can be restored when reloading the thread. 
draft_prompt: Option>, + ui_scroll_position: Option, /// Weak references to running subagent threads for cancellation propagation running_subagents: Vec>, } @@ -1017,6 +1018,7 @@ impl Thread { imported: false, subagent_context: None, draft_prompt: None, + ui_scroll_position: None, running_subagents: Vec::new(), } } @@ -1233,6 +1235,10 @@ impl Thread { imported: db_thread.imported, subagent_context: db_thread.subagent_context, draft_prompt: db_thread.draft_prompt, + ui_scroll_position: db_thread.ui_scroll_position.map(|sp| gpui::ListOffset { + item_ix: sp.item_ix, + offset_in_item: gpui::px(sp.offset_in_item), + }), running_subagents: Vec::new(), } } @@ -1258,6 +1264,12 @@ impl Thread { thinking_enabled: self.thinking_enabled, thinking_effort: self.thinking_effort.clone(), draft_prompt: self.draft_prompt.clone(), + ui_scroll_position: self.ui_scroll_position.map(|lo| { + crate::db::SerializedScrollPosition { + item_ix: lo.item_ix, + offset_in_item: lo.offset_in_item.as_f32(), + } + }), }; cx.background_spawn(async move { @@ -1307,6 +1319,14 @@ impl Thread { self.draft_prompt = prompt; } + pub fn ui_scroll_position(&self) -> Option { + self.ui_scroll_position + } + + pub fn set_ui_scroll_position(&mut self, position: Option) { + self.ui_scroll_position = position; + } + pub fn model(&self) -> Option<&Arc> { self.model.as_ref() } diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index f944377e489a88ac0fa6dbb802edf9702e86f5f2..e26820ddacc3132d42946de3b27d25f4424fae02 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -146,6 +146,7 @@ mod tests { thinking_enabled: false, thinking_effort: None, draft_prompt: None, + ui_scroll_position: None, } } diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index 835ff611288c2bf6867a885ed2be8c6a66679cdb..07e34ccd56f0bd867135fe62894a5a3ff388c85e 100644 --- a/crates/agent_ui/src/connection_view.rs +++ 
b/crates/agent_ui/src/connection_view.rs @@ -845,6 +845,10 @@ impl ConnectionView { ); }); + if let Some(scroll_position) = thread.read(cx).ui_scroll_position() { + list_state.scroll_to(scroll_position); + } + AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx); let connection = thread.read(cx).connection().clone(); diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 8ce4da360664774342c4167f7c8dfbce914b647e..4b0d1686a2dafd2b9975a9109dd56dcf0b3faa00 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -248,7 +248,8 @@ pub struct ThreadView { pub resumed_without_history: bool, pub resume_thread_metadata: Option, pub _cancel_task: Option>, - _draft_save_task: Option>, + _save_task: Option>, + _draft_resolve_task: Option>, pub skip_queue_processing_count: usize, pub user_interrupted_generation: bool, pub can_fast_track_queue: bool, @@ -396,7 +397,7 @@ impl ThreadView { } else { Some(editor.update(cx, |editor, cx| editor.draft_contents(cx))) }; - this._draft_save_task = Some(cx.spawn(async move |this, cx| { + this._draft_resolve_task = Some(cx.spawn(async move |this, cx| { let draft = if let Some(task) = draft_contents_task { let blocks = task.await.ok().filter(|b| !b.is_empty()); blocks @@ -407,15 +408,7 @@ impl ThreadView { this.thread.update(cx, |thread, _cx| { thread.set_draft_prompt(draft); }); - }) - .ok(); - cx.background_executor() - .timer(SERIALIZATION_THROTTLE_TIME) - .await; - this.update(cx, |this, cx| { - if let Some(thread) = this.as_native_thread(cx) { - thread.update(cx, |_thread, cx| cx.notify()); - } + this.schedule_save(cx); }) .ok(); })); @@ -471,7 +464,8 @@ impl ThreadView { is_loading_contents: false, new_server_version_available: None, _cancel_task: None, - _draft_save_task: None, + _save_task: None, + _draft_resolve_task: None, skip_queue_processing_count: 0, 
user_interrupted_generation: false, can_fast_track_queue: false, @@ -487,12 +481,50 @@ impl ThreadView { _history_subscription: history_subscription, show_codex_windows_warning, }; + let list_state_for_scroll = this.list_state.clone(); + let thread_view = cx.entity().downgrade(); + this.list_state + .set_scroll_handler(move |_event, _window, cx| { + let list_state = list_state_for_scroll.clone(); + let thread_view = thread_view.clone(); + // N.B. We must defer because the scroll handler is called while the + // ListState's RefCell is mutably borrowed. Reading logical_scroll_top() + // directly would panic from a double borrow. + cx.defer(move |cx| { + let scroll_top = list_state.logical_scroll_top(); + let _ = thread_view.update(cx, |this, cx| { + if let Some(thread) = this.as_native_thread(cx) { + thread.update(cx, |thread, _cx| { + thread.set_ui_scroll_position(Some(scroll_top)); + }); + } + this.schedule_save(cx); + }); + }); + }); + if should_auto_submit { this.send(window, cx); } this } + /// Schedule a throttled save of the thread state (draft prompt, scroll position, etc.). + /// Multiple calls within `SERIALIZATION_THROTTLE_TIME` are coalesced into a single save. 
+ fn schedule_save(&mut self, cx: &mut Context) { + self._save_task = Some(cx.spawn(async move |this, cx| { + cx.background_executor() + .timer(SERIALIZATION_THROTTLE_TIME) + .await; + this.update(cx, |this, cx| { + if let Some(thread) = this.as_native_thread(cx) { + thread.update(cx, |_thread, cx| cx.notify()); + } + }) + .ok(); + })); + } + pub fn handle_message_editor_event( &mut self, _editor: &Entity, From 7f3dee85c0fa19853ff64e1064de40db686dd49d Mon Sep 17 00:00:00 2001 From: John Tur Date: Tue, 3 Mar 2026 17:31:53 -0500 Subject: [PATCH 284/548] Fix OpenGL initialization on Intel HD 4000 (#50646) Release Notes: - Fixed Zed failing to initialize OpenGL on certain Linux devices --- Cargo.lock | 67 ++++++++++++++++++++++++++++++++++-------------------- Cargo.toml | 2 +- 2 files changed, 43 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dcecec352bf1426fb76956f04224c66b04143627..fee9c5d0cc3aad4ac76e478362981efb760da2f2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7598,7 +7598,7 @@ dependencies = [ "mach2 0.5.0", "media", "metal", - "naga", + "naga 28.0.0", "num_cpus", "objc", "objc2", @@ -10702,6 +10702,30 @@ name = "naga" version = "28.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "618f667225063219ddfc61251087db8a9aec3c3f0950c916b614e403486f1135" +dependencies = [ + "arrayvec", + "bit-set", + "bitflags 2.10.0", + "cfg-if", + "cfg_aliases 0.2.1", + "codespan-reporting 0.12.0", + "half", + "hashbrown 0.16.1", + "hexf-parse", + "indexmap", + "libm", + "log", + "num-traits", + "once_cell", + "rustc-hash 1.1.0", + "thiserror 2.0.17", + "unicode-ident", +] + +[[package]] +name = "naga" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" dependencies = [ "arrayvec", "bit-set", @@ -19890,9 +19914,8 @@ checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" [[package]] name = "wgpu" 
-version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9cb534d5ffd109c7d1135f34cdae29e60eab94855a625dcfe1705f8bc7ad79f" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" dependencies = [ "arrayvec", "bitflags 2.10.0", @@ -19903,7 +19926,7 @@ dependencies = [ "hashbrown 0.16.1", "js-sys", "log", - "naga", + "naga 28.0.1", "parking_lot", "portable-atomic", "profiling", @@ -19920,9 +19943,8 @@ dependencies = [ [[package]] name = "wgpu-core" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bb4c8b5db5f00e56f1f08869d870a0dff7c8bc7ebc01091fec140b0cf0211a9" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" dependencies = [ "arrayvec", "bit-set", @@ -19934,7 +19956,7 @@ dependencies = [ "hashbrown 0.16.1", "indexmap", "log", - "naga", + "naga 28.0.1", "once_cell", "parking_lot", "portable-atomic", @@ -19952,36 +19974,32 @@ dependencies = [ [[package]] name = "wgpu-core-deps-apple" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87b7b696b918f337c486bf93142454080a32a37832ba8a31e4f48221890047da" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" dependencies = [ "wgpu-hal", ] [[package]] name = "wgpu-core-deps-emscripten" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34b251c331f84feac147de3c4aa3aa45112622a95dd7ee1b74384fa0458dbd79" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" dependencies = [ "wgpu-hal", ] [[package]] name = 
"wgpu-core-deps-windows-linux-android" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68ca976e72b2c9964eb243e281f6ce7f14a514e409920920dcda12ae40febaae" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" dependencies = [ "wgpu-hal", ] [[package]] name = "wgpu-hal" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "293080d77fdd14d6b08a67c5487dfddbf874534bb7921526db56a7b75d7e3bef" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" dependencies = [ "android_system_properties", "arrayvec", @@ -20004,7 +20022,7 @@ dependencies = [ "libloading", "log", "metal", - "naga", + "naga 28.0.1", "ndk-sys", "objc", "once_cell", @@ -20027,9 +20045,8 @@ dependencies = [ [[package]] name = "wgpu-types" -version = "28.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e18308757e594ed2cd27dddbb16a139c42a683819d32a2e0b1b0167552f5840c" +version = "28.0.1" +source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" dependencies = [ "bitflags 2.10.0", "bytemuck", diff --git a/Cargo.toml b/Cargo.toml index 35180020a8d70d83c113172051d12a85f33c55ca..cc5ff3054161ec2d0651aeac6ff4dc673251c414 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -770,7 +770,7 @@ wax = "0.7" which = "6.0.0" wasm-bindgen = "0.2.113" web-time = "1.1.0" -wgpu = "28.0" +wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" } windows-core = "0.61" yawc = "0.2.5" zeroize = "1.8" From 6a38c5c0a0fe23ec7d6bc80834660d61d7f9255b Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Tue, 3 Mar 2026 15:32:57 -0700 Subject: [PATCH 285/548] Fix panic in remote 
workspaces (#50647) Fixes ZED-4JD Release Notes: - Fix a panic when opening the remote server modal --- crates/recent_projects/src/remote_servers.rs | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index 6c0ce4b18854320fda8e72f291800049b07cec1a..a94f7b1d57eaef8657fb0d448480f84c97ce7e70 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -1161,12 +1161,11 @@ impl RemoteServerProjects { workspace.toggle_modal(window, cx, |window, cx| { RemoteConnectionModal::new(&connection_options, Vec::new(), window, cx) }); - let prompt = workspace - .active_modal::(cx) - .unwrap() - .read(cx) - .prompt - .clone(); + // can be None if another copy of this modal opened in the meantime + let Some(modal) = workspace.active_modal::(cx) else { + return; + }; + let prompt = modal.read(cx).prompt.clone(); let connect = connect( ConnectionIdentifier::setup(), From 9c9337a8021f74511625517c3f4fa021106609eb Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Tue, 3 Mar 2026 16:09:01 -0800 Subject: [PATCH 286/548] Add cmd-y binding for agent::Keep in agent diff review (#50656) Release Notes: - Added `cmd-y` keybinding for accepting changes in the agent diff review, matching the git diff review shortcut. 
--- assets/keymaps/default-linux.json | 2 ++ assets/keymaps/default-macos.json | 2 ++ assets/keymaps/default-windows.json | 2 ++ 3 files changed, 6 insertions(+) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 9b8f2d337b1f1073bca818cf0b9c66773a3ce4e9..87e76829966b501df4139d4942de604c4fc42d65 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -204,6 +204,7 @@ { "context": "Editor && editor_agent_diff", "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -214,6 +215,7 @@ { "context": "AgentDiff", "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 410c13687fbe0c19fbcb4c155ebba36dd068354c..ccb3a7fa9116b0771dda94e75e467c4572cdaf2c 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -242,6 +242,7 @@ "context": "AgentDiff", "use_key_equivalents": true, "bindings": { + "cmd-y": "agent::Keep", "cmd-alt-y": "agent::Keep", "cmd-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -252,6 +253,7 @@ "context": "Editor && editor_agent_diff", "use_key_equivalents": true, "bindings": { + "cmd-y": "agent::Keep", "cmd-alt-y": "agent::Keep", "cmd-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 19f75f858cd45192c4cf30dd6bd0799046c26268..251c3d6541a611737027900e659a94271ed36526 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -203,6 +203,7 @@ "context": "Editor && editor_agent_diff", "use_key_equivalents": true, "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", @@ -214,6 +215,7 @@ 
"context": "AgentDiff", "use_key_equivalents": true, "bindings": { + "alt-y": "agent::Keep", "ctrl-alt-y": "agent::Keep", "ctrl-alt-z": "agent::Reject", "shift-alt-y": "agent::KeepAll", From 9a6046cc57cf00c1276c05c7998b6e74bc27ea53 Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Wed, 4 Mar 2026 02:18:52 -0500 Subject: [PATCH 287/548] Change miniprofiler file extension to `.miniprof.json` (#50429) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The main intention behind this change is to support uploading these files to GitHub. `.miniprof` is not a supported extension by GitHub, but `.json` is. The only “downside” to this change is that the cleanup process will have to look for `.miniprof` files AND `.miniprof.json` files. Maybe we can remove that change at a later date? Ref: https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/attaching-files Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Changed miniprofiler file extension to `.miniprof.json` --- crates/miniprofiler_ui/src/miniprofiler_ui.rs | 2 +- crates/zed/src/reliability.rs | 6 +++--- docs/src/performance.md | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/crates/miniprofiler_ui/src/miniprofiler_ui.rs b/crates/miniprofiler_ui/src/miniprofiler_ui.rs index 1f95dc3d230e7c50b4960560a96c9007fd77aab8..12b2bce77b5866e885483a847d40647f525207e6 100644 --- a/crates/miniprofiler_ui/src/miniprofiler_ui.rs +++ b/crates/miniprofiler_ui/src/miniprofiler_ui.rs @@ -544,7 +544,7 @@ impl Render for ProfilerWindow { let path = cx.prompt_for_new_path( &active_path, - 
Some("performance_profile.miniprof"), + Some("performance_profile.miniprof.json"), ); cx.background_spawn(async move { diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index b291b9c8493db75e20282c8c9bc5a3750fb5e705..d8dc1c4f8fe412b5e8eeb6b09e482a9ed243aaa3 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -144,7 +144,7 @@ fn cleanup_old_hang_traces() { entry .path() .extension() - .is_some_and(|ext| ext == "miniprof") + .is_some_and(|ext| ext == "json" || ext == "miniprof") }) .collect(); @@ -175,7 +175,7 @@ fn save_hang_trace( .collect::>(); let trace_path = paths::hang_traces_dir().join(&format!( - "hang-{}.miniprof", + "hang-{}.miniprof.json", hang_time.format("%Y-%m-%d_%H-%M-%S") )); @@ -193,7 +193,7 @@ fn save_hang_trace( entry .path() .extension() - .is_some_and(|ext| ext == "miniprof") + .is_some_and(|ext| ext == "json" || ext == "miniprof") }) .collect(); diff --git a/docs/src/performance.md b/docs/src/performance.md index 09abecdeffe4e268413a73b189ef301511b1a20e..e974d63f8816b68d30a1c06d7cbbc083f8564327 100644 --- a/docs/src/performance.md +++ b/docs/src/performance.md @@ -78,7 +78,7 @@ Download the importer - `cd import && mkdir build && cd build` - Run cmake to generate build files: `cmake -G Ninja -DCMAKE_BUILD_TYPE=Release ..` - Build the importer: `ninja` -- Run the importer on the trace file: `./tracy-import-miniprofiler /path/to/trace.miniprof /path/to/output.tracy` +- Run the importer on the trace file: `./tracy-import-miniprofiler /path/to/trace.miniprof.json /path/to/output.tracy` - Open the trace in tracy: - If you're on windows download the v0.12.2 version from the releases on the upstream repo - If you're on other platforms open it on the website: https://tracy.nereid.pl/ (the version might mismatch so your luck might vary, we need to host our own ideally..) @@ -87,7 +87,7 @@ Download the importer - Run the action: `zed open performance profiler` - Hit the save button. 
This opens a save dialog or if that fails to open the trace gets saved in your working directory. -- Convert the profile so it can be imported in tracy using the importer: `./tracy-import-miniprofiler output.tracy` +- Convert the profile so it can be imported in tracy using the importer: `./tracy-import-miniprofiler output.tracy` - Go to hit the 'power button' in the top left and then open saved trace. - Now zoom in to see the tasks and how long they took From cdb34c30c921a1bd480180c9485d4cac28deede2 Mon Sep 17 00:00:00 2001 From: Xin Zhao Date: Wed, 4 Mar 2026 15:31:27 +0800 Subject: [PATCH 288/548] python: Register LSP adapters directly to the LanguageRegistry (#50662) The purpose of `register_available_lsp_adapter()` is to allow language servers to be reused across multiple languages. Since adapters like `ty`, `pylsp`, and `pyright` are specific to Python, there is no need to register them for other languages. Additionally, registering them directly to the global `LanguageRegistry` results in negligible resource consumption. We can then use the default settings to control the default language server for Python, as referenced here: https://github.com/zed-industries/zed/blob/9c9337a8021f74511625517c3f4fa021106609eb/assets/settings/default.json#L2119-L2130 Additionally, the documentation for Python has been updated to clarify that the `"..."` syntax does not mean "keep the rest at default," but rather "include all other available servers." Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing (no sure how to add test for this) - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/languages/src/lib.rs | 11 +++++++---- docs/src/languages/python.md | 4 ++-- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/crates/languages/src/lib.rs b/crates/languages/src/lib.rs index c31911f372261db47f689d29de9c60c0f9cad56e..4c291b86982a8cb1aa153aa0c036b3d169621339 100644 --- a/crates/languages/src/lib.rs +++ b/crates/languages/src/lib.rs @@ -179,7 +179,13 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime }, LanguageInfo { name: "python", - adapters: vec![basedpyright_lsp_adapter, ruff_lsp_adapter], + adapters: vec![ + basedpyright_lsp_adapter, + ruff_lsp_adapter, + ty_lsp_adapter, + py_lsp_adapter, + python_lsp_adapter, + ], context: Some(python_context_provider), toolchain: Some(python_toolchain_provider), manifest_name: Some(SharedString::new_static("pyproject.toml").into()), @@ -281,9 +287,6 @@ pub fn init(languages: Arc, fs: Arc, node: NodeRuntime typescript_lsp_adapter, ); - languages.register_available_lsp_adapter(python_lsp_adapter.name(), python_lsp_adapter); - languages.register_available_lsp_adapter(py_lsp_adapter.name(), py_lsp_adapter); - languages.register_available_lsp_adapter(ty_lsp_adapter.name(), ty_lsp_adapter); // Register Tailwind for the existing languages that should have it by default. // // This can be driven by the `language_servers` setting once we have a way for diff --git a/docs/src/languages/python.md b/docs/src/languages/python.md index d66f52c71cb9295fe9ca94e5890de48cd1275e57..fdeabec5069ed20a9b168ab19129dde0cc6280ba 100644 --- a/docs/src/languages/python.md +++ b/docs/src/languages/python.md @@ -89,8 +89,8 @@ Configure language servers in Settings ({#kb zed::OpenSettings}) under Languages "languages": { "Python": { "language_servers": [ - // Disable basedpyright and enable ty, and otherwise - // use the default configuration. + // Disable basedpyright and enable ty, and include all + // other registered language servers (ruff, pylsp, pyright). "ty", "!basedpyright", "..." 
From e51cd4931c10007f30dd3d3e9351b0a4b99af063 Mon Sep 17 00:00:00 2001 From: Vitaly Slobodin Date: Wed, 4 Mar 2026 08:32:12 +0100 Subject: [PATCH 289/548] doc: Improve documentation for language server `...` expansion (#50672) Hi! The `...` entry in the `language_servers` setting was only explained in a single bullet point, which led users to misconfigure their setup, particularly when overriding defaults that disable certain servers with `!`. Add a detailed explanation of how `...` works and a table of examples using Ruby's real server configuration to illustrate the override behavior. Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- docs/src/configuring-languages.md | 37 +++++++++++++++++++++++++++---- 1 file changed, 33 insertions(+), 4 deletions(-) diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md index 4e9bbce822f2f0d87ac2a8c9617698acd5983243..91775c3df137e38eb0b6b7b333b49d269b2f3a7c 100644 --- a/docs/src/configuring-languages.md +++ b/docs/src/configuring-languages.md @@ -122,11 +122,40 @@ You can specify your preference using the `language_servers` setting: In this example: -- `intelephense` is set as the primary language server -- `phpactor` is disabled (note the `!` prefix) -- `...` expands to the rest of the language servers that are registered for PHP +- `intelephense` is set as the primary language server. +- `phpactor` and `phptools` are disabled (note the `!` prefix). +- `"..."` expands to the rest of the language servers registered for PHP that are not already listed. 
-This configuration allows you to tailor the language server setup to your specific needs, ensuring that you get the most suitable functionality for your development workflow. +The `"..."` entry acts as a wildcard that includes any registered language server you haven't explicitly mentioned. Servers you list by name keep their position, and `"..."` fills in the remaining ones at that point in the list. Servers prefixed with `!` are excluded entirely. This means that if a new language server extension is installed or a new server is registered for a language, `"..."` will automatically include it. If you want full control over which servers are enabled, omit `"..."` — only the servers you list by name will be used. + +#### Examples + +Suppose you're working with Ruby. The default configuration is: + +```json [settings] +{ + "language_servers": [ + "solargraph", + "!ruby-lsp", + "!rubocop", + "!sorbet", + "!steep", + "!kanayago", + "..." + ] +} +``` + +When you override `language_servers` in your settings, your list **replaces** the default entirely. This means default-disabled servers like `kanayago` will be re-enabled by `"..."` unless you explicitly disable them again. + +| Configuration | Result | +| ------------------------------------------------- | ------------------------------------------------------------------ | +| `["..."]` | `solargraph`, `ruby-lsp`, `rubocop`, `sorbet`, `steep`, `kanayago` | +| `["ruby-lsp", "..."]` | `ruby-lsp`, `solargraph`, `rubocop`, `sorbet`, `steep`, `kanayago` | +| `["ruby-lsp", "!solargraph", "!kanayago", "..."]` | `ruby-lsp`, `rubocop`, `sorbet`, `steep` | +| `["ruby-lsp", "solargraph"]` | `ruby-lsp`, `solargraph` | + +> Note: In the first example, `"..."` includes `kanayago` even though it is disabled by default. The override replaced the default list, so the `"!kanayago"` entry is no longer present. To keep it disabled, you must include `"!kanayago"` in your configuration. 
### Toolchains From c0fa025bc9ccce8eb538f2d06a74f4ab4a1205b0 Mon Sep 17 00:00:00 2001 From: Casper van Elteren Date: Wed, 4 Mar 2026 17:33:40 +1000 Subject: [PATCH 290/548] repl: Fix image scaling (#48435) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Continues #47114 Release Notes: - Fixed REPL output width clamping to apply to the content area so images don’t get clipped by controls --------- Co-authored-by: MrSubidubi --- crates/repl/src/notebook/cell.rs | 95 ++++++------------- crates/repl/src/outputs.rs | 37 ++++---- crates/repl/src/outputs/image.rs | 8 +- crates/repl/src/outputs/plain.rs | 2 +- crates/repl/src/repl_settings.rs | 6 -- .../settings_content/src/settings_content.rs | 5 - 6 files changed, 55 insertions(+), 98 deletions(-) diff --git a/crates/repl/src/notebook/cell.rs b/crates/repl/src/notebook/cell.rs index d66261698b722cfcd0f547e09d84cf83a0d2b1a6..200424742aff113d637fe9aca30999c0f95e79a5 100644 --- a/crates/repl/src/notebook/cell.rs +++ b/crates/repl/src/notebook/cell.rs @@ -1,13 +1,11 @@ -#![allow(unused, dead_code)] use std::sync::Arc; use std::time::{Duration, Instant}; use editor::{Editor, EditorMode, MultiBuffer, SizingBehavior}; use futures::future::Shared; use gpui::{ - App, Entity, EventEmitter, Focusable, Hsla, InteractiveElement, KeyContext, - RetainAllImageCache, StatefulInteractiveElement, Task, TextStyleRefinement, image_cache, - prelude::*, + App, Entity, EventEmitter, Focusable, Hsla, InteractiveElement, RetainAllImageCache, + StatefulInteractiveElement, Task, TextStyleRefinement, prelude::*, }; use language::{Buffer, Language, LanguageRegistry}; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; @@ -236,7 +234,7 @@ pub trait RenderableCell: Render { fn source(&self) -> &String; fn selected(&self) -> bool; fn set_selected(&mut self, selected: bool) -> &mut Self; - fn selected_bg_color(&self, window: &mut Window, cx: &mut Context) -> Hsla { + fn selected_bg_color(&self, _window: 
&mut Window, cx: &mut Context) -> Hsla { if self.selected() { let mut color = cx.theme().colors().element_hover; color.fade_out(0.5); @@ -253,7 +251,7 @@ pub trait RenderableCell: Render { fn cell_position_spacer( &self, is_first: bool, - window: &mut Window, + _window: &mut Window, cx: &mut Context, ) -> Option { let cell_position = self.cell_position(); @@ -328,7 +326,6 @@ pub struct MarkdownCell { editing: bool, selected: bool, cell_position: Option, - languages: Arc, _editor_subscription: gpui::Subscription, } @@ -386,7 +383,6 @@ impl MarkdownCell { let markdown = cx.new(|cx| Markdown::new(source.clone().into(), None, None, cx)); - let cell_id = id.clone(); let editor_subscription = cx.subscribe(&editor, move |this, _editor, event, cx| match event { editor::EditorEvent::Blurred => { @@ -410,7 +406,6 @@ impl MarkdownCell { editing: start_editing, selected: false, cell_position: None, - languages, _editor_subscription: editor_subscription, } } @@ -461,8 +456,6 @@ impl MarkdownCell { .unwrap_or_default(); self.source = source.clone(); - let languages = self.languages.clone(); - self.markdown.update(cx, |markdown, cx| { markdown.reset(source.into(), cx); }); @@ -606,7 +599,7 @@ pub struct CodeCell { outputs: Vec, selected: bool, cell_position: Option, - language_task: Task<()>, + _language_task: Task<()>, execution_start_time: Option, execution_duration: Option, is_executing: bool, @@ -670,10 +663,10 @@ impl CodeCell { outputs: Vec::new(), selected: false, cell_position: None, - language_task, execution_start_time: None, execution_duration: None, is_executing: false, + _language_task: language_task, } } @@ -748,10 +741,10 @@ impl CodeCell { outputs, selected: false, cell_position: None, - language_task, execution_start_time: None, execution_duration: None, is_executing: false, + _language_task: language_task, } } @@ -879,15 +872,7 @@ impl CodeCell { cx.notify(); } - fn output_control(&self) -> Option { - if self.has_outputs() { - Some(CellControlType::ClearCell) - 
} else { - None - } - } - - pub fn gutter_output(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + pub fn gutter_output(&self, _window: &mut Window, cx: &mut Context) -> impl IntoElement { let is_selected = self.selected(); div() @@ -948,7 +933,7 @@ impl RenderableCell for CodeCell { &self.source } - fn control(&self, window: &mut Window, cx: &mut Context) -> Option { + fn control(&self, _window: &mut Window, cx: &mut Context) -> Option { let control_type = if self.has_outputs() { CellControlType::RerunCell } else { @@ -1038,8 +1023,7 @@ impl RenderableCell for CodeCell { } impl RunnableCell for CodeCell { - fn run(&mut self, window: &mut Window, cx: &mut Context) { - println!("Running code cell: {}", self.id); + fn run(&mut self, _window: &mut Window, cx: &mut Context) { cx.emit(CellEvent::Run(self.id.clone())); } @@ -1062,11 +1046,8 @@ impl Render for CodeCell { } else { None }; - let output_max_width = plain::max_width_for_columns( - ReplSettings::get_global(cx).output_max_width_columns, - window, - cx, - ); + let output_max_width = + plain::max_width_for_columns(ReplSettings::get_global(cx).max_columns, window, cx); // get the language from the editor's buffer let language_name = self .editor @@ -1198,41 +1179,23 @@ impl Render for CodeCell { }, ) // output at bottom - .child(div().w_full().children(self.outputs.iter().map( - |output| { - let content = match output { - Output::Plain { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Markdown { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Stream { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Image { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Message(message) => Some( - div() - .child(message.clone()) - .into_any_element(), - ), - Output::Table { content, .. } => { - Some(content.clone().into_any_element()) - } - Output::Json { content, .. 
} => { - Some(content.clone().into_any_element()) - } - Output::ErrorOutput(error_view) => { - error_view.render(window, cx) - } - Output::ClearOutputWaitMarker => None, - }; - - div().children(content) - }, - ))), + .child( + div() + .id(( + ElementId::from(self.id.to_string()), + "output-scroll", + )) + .w_full() + .when_some(output_max_width, |div, max_width| { + div.max_w(max_width).overflow_x_scroll() + }) + .when_some(output_max_height, |div, max_height| { + div.max_h(max_height).overflow_y_scroll() + }) + .children(self.outputs.iter().map(|output| { + div().children(output.content(window, cx)) + })), + ), ), ), ) diff --git a/crates/repl/src/outputs.rs b/crates/repl/src/outputs.rs index 8be8c57cceee84435a6d99ba5c611d24c563bec3..f6d2bc4d3173ce64700b7b5ac45301df0fe0ab53 100644 --- a/crates/repl/src/outputs.rs +++ b/crates/repl/src/outputs.rs @@ -253,18 +253,8 @@ impl Output { ) } - pub fn render( - &self, - workspace: WeakEntity, - window: &mut Window, - cx: &mut Context, - ) -> impl IntoElement + use<> { - let max_width = plain::max_width_for_columns( - ReplSettings::get_global(cx).output_max_width_columns, - window, - cx, - ); - let content = match self { + pub fn content(&self, window: &mut Window, cx: &mut App) -> Option { + match self { Self::Plain { content, .. } => Some(content.clone().into_any_element()), Self::Markdown { content, .. } => Some(content.clone().into_any_element()), Self::Stream { content, .. } => Some(content.clone().into_any_element()), @@ -274,21 +264,36 @@ impl Output { Self::Json { content, .. } => Some(content.clone().into_any_element()), Self::ErrorOutput(error_view) => error_view.render(window, cx), Self::ClearOutputWaitMarker => None, - }; + } + } - let needs_horizontal_scroll = matches!(self, Self::Table { .. } | Self::Image { .. 
}); + pub fn render( + &self, + workspace: WeakEntity, + window: &mut Window, + cx: &mut Context, + ) -> impl IntoElement + use<> { + let max_width = + plain::max_width_for_columns(ReplSettings::get_global(cx).max_columns, window, cx); + let content = self.content(window, cx); + + let needs_horizontal_scroll = matches!(self, Self::Table { .. }); h_flex() .id("output-content") .w_full() - .when_some(max_width, |this, max_w| this.max_w(max_w)) - .overflow_x_scroll() + .when_else( + needs_horizontal_scroll, + |this| this.overflow_x_scroll(), + |this| this.overflow_x_hidden(), + ) .items_start() .child( div() .when(!needs_horizontal_scroll, |el| { el.flex_1().w_full().overflow_x_hidden() }) + .when_some(max_width, |el, max_width| el.max_w(max_width)) .children(content), ) .children(match self { diff --git a/crates/repl/src/outputs/image.rs b/crates/repl/src/outputs/image.rs index 9d1ffa3d2065281cd69e67b2faf960c9aa690bcb..e5444be3d779c9541fcadd55b9255d3e25da0cba 100644 --- a/crates/repl/src/outputs/image.rs +++ b/crates/repl/src/outputs/image.rs @@ -3,10 +3,10 @@ use base64::{ Engine as _, alphabet, engine::{DecodePaddingMode, GeneralPurpose, GeneralPurposeConfig}, }; -use gpui::{App, ClipboardItem, Image, ImageFormat, RenderImage, Window, img}; +use gpui::{App, ClipboardItem, Image, ImageFormat, Pixels, RenderImage, Window, img}; use settings::Settings as _; use std::sync::Arc; -use ui::{IntoElement, Styled, div, prelude::*}; +use ui::{IntoElement, Styled, prelude::*}; use crate::outputs::{OutputContent, plain}; use crate::repl_settings::ReplSettings; @@ -113,7 +113,7 @@ impl Render for ImageView { let settings = ReplSettings::get_global(cx); let line_height = window.line_height(); - let max_width = plain::max_width_for_columns(settings.output_max_width_columns, window, cx); + let max_width = plain::max_width_for_columns(settings.max_columns, window, cx); let max_height = if settings.output_max_height_lines > 0 { Some(line_height * settings.output_max_height_lines as 
f32) @@ -125,7 +125,7 @@ impl Render for ImageView { let image = self.image.clone(); - div().h(height).w(width).child(img(image)) + img(image).w(width).h(height) } } diff --git a/crates/repl/src/outputs/plain.rs b/crates/repl/src/outputs/plain.rs index 0db2f811fb9ca3b82114db23826e37fe699bd3a0..71e2624f8ad7b0172a86793d5d81b38339b04f36 100644 --- a/crates/repl/src/outputs/plain.rs +++ b/crates/repl/src/outputs/plain.rs @@ -22,7 +22,7 @@ use alacritty_terminal::{ term::Config, vte::ansi::Processor, }; -use gpui::{Bounds, ClipboardItem, Entity, FontStyle, TextStyle, WhiteSpace, canvas, size}; +use gpui::{Bounds, ClipboardItem, Entity, FontStyle, Pixels, TextStyle, WhiteSpace, canvas, size}; use language::Buffer; use settings::Settings as _; use terminal::terminal_settings::TerminalSettings; diff --git a/crates/repl/src/repl_settings.rs b/crates/repl/src/repl_settings.rs index 302164a5b360157edceff1b1f2e18f6c6fd7a50b..5fd7623bb71e6446b8cacd6029108e481efc8680 100644 --- a/crates/repl/src/repl_settings.rs +++ b/crates/repl/src/repl_settings.rs @@ -27,11 +27,6 @@ pub struct ReplSettings { /// /// Default: 0 pub output_max_height_lines: usize, - /// Maximum number of columns of output to display before scaling images. - /// Set to 0 to disable output width limits. 
- /// - /// Default: 0 - pub output_max_width_columns: usize, } impl Settings for ReplSettings { @@ -44,7 +39,6 @@ impl Settings for ReplSettings { inline_output: repl.inline_output.unwrap_or(true), inline_output_max_length: repl.inline_output_max_length.unwrap_or(50), output_max_height_lines: repl.output_max_height_lines.unwrap_or(0), - output_max_width_columns: repl.output_max_width_columns.unwrap_or(0), } } } diff --git a/crates/settings_content/src/settings_content.rs b/crates/settings_content/src/settings_content.rs index f94c6a0b98d7fa23686dc1c89012e3b1fe476c70..5a4e87c384d802f3de4c96c07f65cf163c3a6d1a 100644 --- a/crates/settings_content/src/settings_content.rs +++ b/crates/settings_content/src/settings_content.rs @@ -1148,11 +1148,6 @@ pub struct ReplSettingsContent { /// /// Default: 0 pub output_max_height_lines: Option, - /// Maximum number of columns of output to display before scaling images. - /// Set to 0 to disable output width limits. - /// - /// Default: 0 - pub output_max_width_columns: Option, } /// Settings for configuring the which-key popup behaviour. From 152d3eafcaf4655ac65e2a25e65cc5ee0545db3f Mon Sep 17 00:00:00 2001 From: Sarthak Mishra Date: Wed, 4 Mar 2026 13:07:50 +0530 Subject: [PATCH 291/548] project_panel: Fix Reveal in File Manager for WSL projects (#50610) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #46767 ## Summary The "Reveal in File Manager" action was shown in the context menu for WSL projects (guarded by `is_via_wsl_with_host_interop`), but the action handler in `Render` was only registered when `project.is_local()` — which returns `false` for WSL. Dispatching the action without a handler caused a crash. Adds the same `is_via_wsl_with_host_interop(cx)` check to the handler registration. 
## Testing - Ran `cargo test -p project_panel` — 78 passed, 0 failed - Manual testing: connected to WSL Ubuntu, right-clicked a file in the project panel, used "Reveal in File Manager" — Windows Explorer opened correctly without crashing Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed a crash when using "Reveal in File Manager" on files in WSL projects (#46767). --- crates/project_panel/src/project_panel.rs | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 7f746a6ccd7efec2b73354992c593433b0b6f281..0dd19dddde7ab947cfe85a1fd9d96ad7b2d6f23d 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -6457,11 +6457,14 @@ impl Render for ProjectPanel { el.on_action(cx.listener(Self::trash)) }) }) - .when(project.is_local(), |el| { - el.on_action(cx.listener(Self::reveal_in_finder)) - .on_action(cx.listener(Self::open_system)) - .on_action(cx.listener(Self::open_in_terminal)) - }) + .when( + project.is_local() || project.is_via_wsl_with_host_interop(cx), + |el| { + el.on_action(cx.listener(Self::reveal_in_finder)) + .on_action(cx.listener(Self::open_system)) + .on_action(cx.listener(Self::open_in_terminal)) + }, + ) .when(project.is_via_remote_server(), |el| { el.on_action(cx.listener(Self::open_in_terminal)) .on_action(cx.listener(Self::download_from_remote)) From f023109a107d1fb50f9519c492fa7868d7be41ad Mon Sep 17 00:00:00 2001 From: john Date: Wed, 4 Mar 2026 03:31:51 -0500 Subject: [PATCH 292/548] docs: Fix incorrect IAM terminology under Bedrock section (#50546) IAM users 
cannot be assumed; only IAM roles can be. Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- docs/src/ai/llm-providers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index 3a32bd96e73d9df427897798681f203c4ceb2273..a4a6274af10d1aea20ed27160704136d9f0eb586 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -88,7 +88,7 @@ With that done, choose one of the three authentication methods: While it's possible to configure through the Agent Panel settings UI by entering your AWS access key and secret directly, we recommend using named profiles instead for better security practices. To do this: -1. Create an IAM User that you can assume in the [IAM Console](https://us-east-1.console.aws.amazon.com/iam/home?region=us-east-1#/users). +1. Create an IAM User in the [IAM Console](https://us-east-1.console.aws.amazon.com/iam/home?region=us-east-1#/users). 2. Create security credentials for that User, save them and keep them secure. 3. Open the Agent Configuration with (`agent: open settings`) and go to the Amazon Bedrock section 4. Copy the credentials from Step 2 into the respective **Access Key ID**, **Secret Access Key**, and **Region** fields. 
From f0abcd89957027d66aba37764523f5143a1a1c34 Mon Sep 17 00:00:00 2001 From: John Tur Date: Wed, 4 Mar 2026 03:55:53 -0500 Subject: [PATCH 293/548] More fixes for OpenGL initialization on Intel HD 4000 (#50680) Release Notes: - N/A --- Cargo.lock | 16 ++++++++-------- Cargo.toml | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fee9c5d0cc3aad4ac76e478362981efb760da2f2..d79134c6145d3a6644f780097f7dd8f69eeae295 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10725,7 +10725,7 @@ dependencies = [ [[package]] name = "naga" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" +source = "git+https://github.com/zed-industries/wgpu?rev=6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d#6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d" dependencies = [ "arrayvec", "bit-set", @@ -19915,7 +19915,7 @@ checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" [[package]] name = "wgpu" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" +source = "git+https://github.com/zed-industries/wgpu?rev=6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d#6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d" dependencies = [ "arrayvec", "bitflags 2.10.0", @@ -19944,7 +19944,7 @@ dependencies = [ [[package]] name = "wgpu-core" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" +source = "git+https://github.com/zed-industries/wgpu?rev=6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d#6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d" dependencies = [ "arrayvec", "bit-set", @@ -19975,7 +19975,7 @@ dependencies = [ [[package]] name = "wgpu-core-deps-apple" version = "28.0.1" -source = 
"git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" +source = "git+https://github.com/zed-industries/wgpu?rev=6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d#6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d" dependencies = [ "wgpu-hal", ] @@ -19983,7 +19983,7 @@ dependencies = [ [[package]] name = "wgpu-core-deps-emscripten" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" +source = "git+https://github.com/zed-industries/wgpu?rev=6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d#6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d" dependencies = [ "wgpu-hal", ] @@ -19991,7 +19991,7 @@ dependencies = [ [[package]] name = "wgpu-core-deps-windows-linux-android" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" +source = "git+https://github.com/zed-industries/wgpu?rev=6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d#6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d" dependencies = [ "wgpu-hal", ] @@ -19999,7 +19999,7 @@ dependencies = [ [[package]] name = "wgpu-hal" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" +source = "git+https://github.com/zed-industries/wgpu?rev=6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d#6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d" dependencies = [ "android_system_properties", "arrayvec", @@ -20046,7 +20046,7 @@ dependencies = [ [[package]] name = "wgpu-types" version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2#e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" +source = "git+https://github.com/zed-industries/wgpu?rev=6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d#6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d" dependencies = [ "bitflags 2.10.0", 
"bytemuck", diff --git a/Cargo.toml b/Cargo.toml index cc5ff3054161ec2d0651aeac6ff4dc673251c414..15d39992804b5ed7ad99fadd46e350b1357b17d1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -770,7 +770,7 @@ wax = "0.7" which = "6.0.0" wasm-bindgen = "0.2.113" web-time = "1.1.0" -wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "e0f83a6cedc5e0b97da1ebe2d638ad103672e0a2" } +wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "6e0c2546d99dad72ce6ffb5b04349e6a4ce96e6d" } windows-core = "0.61" yawc = "0.2.5" zeroize = "1.8" From 4668dbc83a773049f48f58728b5ca631fc47e5f3 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 4 Mar 2026 10:38:42 +0100 Subject: [PATCH 294/548] agent: Allow for expanding the subagent thread when permissions are requested (#50684) Previously, there was no way to view the full thread context Release Notes: - N/A Co-authored-by: Bennet Bo Fenner Co-authored-by: MrSubidubi --- .../src/connection_view/thread_view.rs | 56 +++++++++---------- 1 file changed, 27 insertions(+), 29 deletions(-) diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 4b0d1686a2dafd2b9975a9109dd56dcf0b3faa00..8a1a7d2ea5b0f01ba559e83051861b9d6324985f 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -6768,6 +6768,31 @@ impl ThreadView { .read(cx) .pending_tool_call(thread.read(cx).session_id(), cx); + let session_id = thread.read(cx).session_id().clone(); + + let fullscreen_toggle = h_flex() + .id(entry_ix) + .py_1() + .w_full() + .justify_center() + .border_t_1() + .when(is_failed, |this| this.border_dashed()) + .border_color(self.tool_card_border_color(cx)) + .hover(|s| s.bg(cx.theme().colors().element_hover)) + .child( + Icon::new(IconName::Maximize) + .color(Color::Muted) + .size(IconSize::Small), + ) + .tooltip(Tooltip::text("Make Subagent Full Screen")) + .on_click(cx.listener(move |this, _event, window, cx| { + 
this.server_view + .update(cx, |this, cx| { + this.navigate_to_session(session_id.clone(), window, cx); + }) + .ok(); + })); + if is_running && let Some((_, subagent_tool_call_id, _)) = pending_tool_call { if let Some((entry_ix, tool_call)) = thread.read(cx).tool_call(&subagent_tool_call_id) @@ -6782,11 +6807,11 @@ impl ThreadView { window, cx, )) + .child(fullscreen_toggle) } else { this } } else { - let session_id = thread.read(cx).session_id().clone(); this.when(is_expanded, |this| { this.child(self.render_subagent_expanded_content( thread_view, @@ -6803,34 +6828,7 @@ impl ThreadView { .title(message), ) }) - .child( - h_flex() - .id(entry_ix) - .py_1() - .w_full() - .justify_center() - .border_t_1() - .when(is_failed, |this| this.border_dashed()) - .border_color(self.tool_card_border_color(cx)) - .hover(|s| s.bg(cx.theme().colors().element_hover)) - .child( - Icon::new(IconName::Maximize) - .color(Color::Muted) - .size(IconSize::Small), - ) - .tooltip(Tooltip::text("Make Subagent Full Screen")) - .on_click(cx.listener(move |this, _event, window, cx| { - this.server_view - .update(cx, |this, cx| { - this.navigate_to_session( - session_id.clone(), - window, - cx, - ); - }) - .ok(); - })), - ) + .child(fullscreen_toggle) }) } }) From 007e3ec527949f25ef8b42f5b8a42136d20aba72 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 4 Mar 2026 11:24:24 +0100 Subject: [PATCH 295/548] docs: Update docs for the subagent tool (#50689) Adds the actual tool name so people can turn it off if they want. 
Release Notes: - N/A Co-authored-by: Bennet Bo Fenner Co-authored-by: MrSubidubi --- docs/src/ai/tools.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/ai/tools.md b/docs/src/ai/tools.md index 66f0af571d70fb8db7add2bd89139bf788369de6..faafc76b164f7f786c91c212bf51960f24a6bb0a 100644 --- a/docs/src/ai/tools.md +++ b/docs/src/ai/tools.md @@ -91,6 +91,6 @@ Executes shell commands and returns the combined output, creating a new shell pr ## Other Tools -### `subagent` +### `spawn_agent` -Spawns a subagent with its own context window to perform a delegated task. Useful for running parallel investigations, completing self-contained tasks, or performing research where only the outcome matters. Each subagent has access to the same tools as the parent agent. +Spawns a subagent with its own context window to perform a delegated task. Each subagent has access to the same tools as the parent agent. From de107768b10f05f30d2df508547f63410d71b7e8 Mon Sep 17 00:00:00 2001 From: moleium Date: Wed, 4 Mar 2026 13:30:33 +0300 Subject: [PATCH 296/548] Add .cppm (C++20 module interface) to C++ file extensions (#50667) `.cppm` is the widely used extension for C++20 module interface units, supported by MSVC, Clang, and GCC. Currently Zed doesn't recognize it as C++, so users get no syntax highlighting or LSP support. 
Changes: `crates/languages/src/cpp/config.toml`: add cppm to path_suffixes `crates/theme/src/icon_theme.rs`: add cppm to the C++ icon matcher https://github.com/search?q=path%3A*.cppm&type=code Release Notes: - N/A --- crates/languages/src/cpp/config.toml | 2 +- crates/theme/src/icon_theme.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/languages/src/cpp/config.toml b/crates/languages/src/cpp/config.toml index 10c36a6ded1e1f3a1204d1e15af47fee78b8e049..e2608a8ce5f17cb648e4f86dc27da60ed8bdd2ae 100644 --- a/crates/languages/src/cpp/config.toml +++ b/crates/languages/src/cpp/config.toml @@ -1,6 +1,6 @@ name = "C++" grammar = "cpp" -path_suffixes = ["cc", "hh", "cpp", "h", "hpp", "cxx", "hxx", "c++", "h++", "ipp", "inl", "ino", "ixx", "cu", "cuh", "C", "H"] +path_suffixes = ["cc", "hh", "cpp", "cppm", "h", "hpp", "cxx", "hxx", "c++", "h++", "ipp", "inl", "ino", "ixx", "cu", "cuh", "C", "H"] line_comments = ["// ", "/// ", "//! "] first_line_pattern = '^//.*-\*-\s*C\+\+\s*-\*-' decrease_indent_patterns = [ diff --git a/crates/theme/src/icon_theme.rs b/crates/theme/src/icon_theme.rs index 7c2d603281ec50c1daa6f21e1dc3487bfc394a67..121ff9d7d4fbd841315b89e631606c7e67bc5cde 100644 --- a/crates/theme/src/icon_theme.rs +++ b/crates/theme/src/icon_theme.rs @@ -89,7 +89,7 @@ const FILE_SUFFIXES_BY_ICON_KEY: &[(&str, &[&str])] = &[ ( "cpp", &[ - "c++", "h++", "cc", "cpp", "cxx", "hh", "hpp", "hxx", "inl", "ixx", + "c++", "h++", "cc", "cpp", "cppm", "cxx", "hh", "hpp", "hxx", "inl", "ixx", ], ), ("crystal", &["cr", "ecr"]), From 90ddd58c356c84f1467eb9874be944db192e46c7 Mon Sep 17 00:00:00 2001 From: Ben Brandt Date: Wed, 4 Mar 2026 11:31:29 +0100 Subject: [PATCH 297/548] agent: Move file_read_times logic to ActionLog instead of Thread (#50688) Since the read times always correspond to an action log call anyway, we can let the action log track this internally, and we don't have to provide a reference to the Thread in as many tools. 
Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner Co-authored-by: MrSubidubi --- Cargo.lock | 1 + crates/action_log/Cargo.toml | 1 + crates/action_log/src/action_log.rs | 288 +++++++++++++++++- .../agent/src/tests/edit_file_thread_test.rs | 2 +- crates/agent/src/thread.rs | 9 +- crates/agent/src/tools/edit_file_tool.rs | 65 ++-- crates/agent/src/tools/read_file_tool.rs | 211 ++----------- .../src/tools/streaming_edit_file_tool.rs | 59 ++-- .../remote_server/src/remote_editing_tests.rs | 24 +- crates/zed/src/visual_test_runner.rs | 25 +- 10 files changed, 349 insertions(+), 336 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d79134c6145d3a6644f780097f7dd8f69eeae295..4e4d86b947be1f68d03b225d4a62747659c99bf8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -76,6 +76,7 @@ dependencies = [ "clock", "collections", "ctor", + "fs", "futures 0.3.31", "gpui", "indoc", diff --git a/crates/action_log/Cargo.toml b/crates/action_log/Cargo.toml index 8488df691e40ea3bcfc04f4f6f74964fba7863dd..b1a1bf824fb770b8378e596fd0c799a7cf98b13d 100644 --- a/crates/action_log/Cargo.toml +++ b/crates/action_log/Cargo.toml @@ -20,6 +20,7 @@ buffer_diff.workspace = true log.workspace = true clock.workspace = true collections.workspace = true +fs.workspace = true futures.workspace = true gpui.workspace = true language.workspace = true diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 5f8a639c0559c10546fc5640dc240aeba9dde487..5679f3c58fe52057f7a4a0faa24d5b5db2b5e497 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -1,14 +1,20 @@ use anyhow::{Context as _, Result}; use buffer_diff::BufferDiff; use clock; -use collections::BTreeMap; +use collections::{BTreeMap, HashMap}; +use fs::MTime; use futures::{FutureExt, StreamExt, channel::mpsc}; use gpui::{ App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity, }; use language::{Anchor, Buffer, BufferEvent, Point, 
ToOffset, ToPoint}; use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle}; -use std::{cmp, ops::Range, sync::Arc}; +use std::{ + cmp, + ops::Range, + path::{Path, PathBuf}, + sync::Arc, +}; use text::{Edit, Patch, Rope}; use util::{RangeExt, ResultExt as _}; @@ -54,6 +60,8 @@ pub struct ActionLog { linked_action_log: Option>, /// Stores undo information for the most recent reject operation last_reject_undo: Option, + /// Tracks the last time files were read by the agent, to detect external modifications + file_read_times: HashMap, } impl ActionLog { @@ -64,6 +72,7 @@ impl ActionLog { project, linked_action_log: None, last_reject_undo: None, + file_read_times: HashMap::default(), } } @@ -76,6 +85,32 @@ impl ActionLog { &self.project } + pub fn file_read_time(&self, path: &Path) -> Option { + self.file_read_times.get(path).copied() + } + + fn update_file_read_time(&mut self, buffer: &Entity, cx: &App) { + let buffer = buffer.read(cx); + if let Some(file) = buffer.file() { + if let Some(local_file) = file.as_local() { + if let Some(mtime) = file.disk_state().mtime() { + let abs_path = local_file.abs_path(cx); + self.file_read_times.insert(abs_path, mtime); + } + } + } + } + + fn remove_file_read_time(&mut self, buffer: &Entity, cx: &App) { + let buffer = buffer.read(cx); + if let Some(file) = buffer.file() { + if let Some(local_file) = file.as_local() { + let abs_path = local_file.abs_path(cx); + self.file_read_times.remove(&abs_path); + } + } + } + fn track_buffer_internal( &mut self, buffer: Entity, @@ -506,24 +541,69 @@ impl ActionLog { /// Track a buffer as read by agent, so we can notify the model about user edits. 
pub fn buffer_read(&mut self, buffer: Entity, cx: &mut Context) { - if let Some(linked_action_log) = &mut self.linked_action_log { - linked_action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + self.buffer_read_impl(buffer, true, cx); + } + + fn buffer_read_impl( + &mut self, + buffer: Entity, + record_file_read_time: bool, + cx: &mut Context, + ) { + if let Some(linked_action_log) = &self.linked_action_log { + // We don't want to share read times since the other agent hasn't read it necessarily + linked_action_log.update(cx, |log, cx| { + log.buffer_read_impl(buffer.clone(), false, cx); + }); + } + if record_file_read_time { + self.update_file_read_time(&buffer, cx); } self.track_buffer_internal(buffer, false, cx); } /// Mark a buffer as created by agent, so we can refresh it in the context pub fn buffer_created(&mut self, buffer: Entity, cx: &mut Context) { - if let Some(linked_action_log) = &mut self.linked_action_log { - linked_action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + self.buffer_created_impl(buffer, true, cx); + } + + fn buffer_created_impl( + &mut self, + buffer: Entity, + record_file_read_time: bool, + cx: &mut Context, + ) { + if let Some(linked_action_log) = &self.linked_action_log { + // We don't want to share read times since the other agent hasn't read it necessarily + linked_action_log.update(cx, |log, cx| { + log.buffer_created_impl(buffer.clone(), false, cx); + }); + } + if record_file_read_time { + self.update_file_read_time(&buffer, cx); } self.track_buffer_internal(buffer, true, cx); } /// Mark a buffer as edited by agent, so we can refresh it in the context pub fn buffer_edited(&mut self, buffer: Entity, cx: &mut Context) { - if let Some(linked_action_log) = &mut self.linked_action_log { - linked_action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + self.buffer_edited_impl(buffer, true, cx); + } + + fn buffer_edited_impl( + &mut self, + buffer: Entity, + 
record_file_read_time: bool, + cx: &mut Context, + ) { + if let Some(linked_action_log) = &self.linked_action_log { + // We don't want to share read times since the other agent hasn't read it necessarily + linked_action_log.update(cx, |log, cx| { + log.buffer_edited_impl(buffer.clone(), false, cx); + }); + } + if record_file_read_time { + self.update_file_read_time(&buffer, cx); } let new_version = buffer.read(cx).version(); let tracked_buffer = self.track_buffer_internal(buffer, false, cx); @@ -536,6 +616,8 @@ impl ActionLog { } pub fn will_delete_buffer(&mut self, buffer: Entity, cx: &mut Context) { + // Ok to propagate file read time removal to linked action log + self.remove_file_read_time(&buffer, cx); let has_linked_action_log = self.linked_action_log.is_some(); let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx); match tracked_buffer.status { @@ -2976,6 +3058,196 @@ mod tests { ); } + #[gpui::test] + async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be None before buffer_read" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should be recorded after buffer_read" + ); + } + + #[gpui::test] + 
async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be None before buffer_edited" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should be recorded after buffer_edited" + ); + } + + #[gpui::test] + async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "existing content"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be None before buffer_created" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + }); + + assert!( + action_log.read_with(cx, 
|log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should be recorded after buffer_created" + ); + } + + #[gpui::test] + async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let action_log = cx.new(|_| ActionLog::new(project.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + let abs_path = PathBuf::from(path!("/dir/file")); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "file_read_time should exist after buffer_read" + ); + + cx.update(|cx| { + action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx)); + }); + assert!( + action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "file_read_time should be removed after will_delete_buffer" + ); + } + + #[gpui::test] + async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree(path!("/dir"), json!({"file": "hello world"})) + .await; + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + let parent_log = cx.new(|_| ActionLog::new(project.clone())); + let child_log = + cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone())); + + let file_path = project + .read_with(cx, |project, cx| project.find_project_path("dir/file", cx)) + .unwrap(); + let buffer = project + .update(cx, |project, cx| project.open_buffer(file_path, cx)) + .await + .unwrap(); + + 
let abs_path = PathBuf::from(path!("/dir/file")); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + }); + assert!( + child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()), + "child should record file_read_time on buffer_read" + ); + assert!( + parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "parent should NOT get file_read_time from child's buffer_read" + ); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx)); + }); + assert!( + parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "parent should NOT get file_read_time from child's buffer_edited" + ); + + cx.update(|cx| { + child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx)); + }); + assert!( + parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()), + "parent should NOT get file_read_time from child's buffer_created" + ); + } + #[derive(Debug, PartialEq)] struct HunkStatus { range: Range, diff --git a/crates/agent/src/tests/edit_file_thread_test.rs b/crates/agent/src/tests/edit_file_thread_test.rs index 069bf0349299e6f4952f673cbf7607e52d48d9c5..3beb5cb0d51abc55fbf3cf0849ced248a9d1fa5c 100644 --- a/crates/agent/src/tests/edit_file_thread_test.rs +++ b/crates/agent/src/tests/edit_file_thread_test.rs @@ -50,9 +50,9 @@ async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) { // Add just the tools we need for this test let language_registry = project.read(cx).languages().clone(); thread.add_tool(crate::ReadFileTool::new( - cx.weak_entity(), project.clone(), thread.action_log().clone(), + true, )); thread.add_tool(crate::EditFileTool::new( project.clone(), diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 99d77456e3822ae12c65c0a419ceea18f13f41e8..616ae414d4d51a384a18460e8339fd07770fa6b9 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -893,8 +893,6 @@ pub 
struct Thread { pub(crate) prompt_capabilities_rx: watch::Receiver, pub(crate) project: Entity, pub(crate) action_log: Entity, - /// Tracks the last time files were read by the agent, to detect external modifications - pub(crate) file_read_times: HashMap, /// True if this thread was imported from a shared thread and can be synced. imported: bool, /// If this is a subagent thread, contains context about the parent @@ -1014,7 +1012,6 @@ impl Thread { prompt_capabilities_rx, project, action_log, - file_read_times: HashMap::default(), imported: false, subagent_context: None, draft_prompt: None, @@ -1231,7 +1228,6 @@ impl Thread { updated_at: db_thread.updated_at, prompt_capabilities_tx, prompt_capabilities_rx, - file_read_times: HashMap::default(), imported: db_thread.imported, subagent_context: db_thread.subagent_context, draft_prompt: db_thread.draft_prompt, @@ -1436,6 +1432,9 @@ impl Thread { environment: Rc, cx: &mut Context, ) { + // Only update the agent location for the root thread, not for subagents. 
+ let update_agent_location = self.parent_thread_id().is_none(); + let language_registry = self.project.read(cx).languages().clone(); self.add_tool(CopyPathTool::new(self.project.clone())); self.add_tool(CreateDirectoryTool::new(self.project.clone())); @@ -1463,9 +1462,9 @@ impl Thread { self.add_tool(NowTool); self.add_tool(OpenTool::new(self.project.clone())); self.add_tool(ReadFileTool::new( - cx.weak_entity(), self.project.clone(), self.action_log.clone(), + update_agent_location, )); self.add_tool(SaveFileTool::new(self.project.clone())); self.add_tool(RestoreFileFromDiskTool::new(self.project.clone())); diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index d8c380eba326d089b848563cca04557e903ba0f4..29b08ac09db4417123403fd3915b8575791b2a4e 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -305,13 +305,13 @@ impl AgentTool for EditFileTool { // Check if the file has been modified since the agent last read it if let Some(abs_path) = abs_path.as_ref() { - let (last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.update(cx, |thread, cx| { - let last_read = thread.file_read_times.get(abs_path).copied(); + let last_read_mtime = action_log.read_with(cx, |log, _| log.file_read_time(abs_path)); + let (current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.read_with(cx, |thread, cx| { let current = buffer.read(cx).file().and_then(|file| file.disk_state().mtime()); let dirty = buffer.read(cx).is_dirty(); let has_save = thread.has_tool(SaveFileTool::NAME); let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); - (last_read, current, dirty, has_save, has_restore) + (current, dirty, has_save, has_restore) })?; // Check for unsaved changes first - these indicate modifications we don't know about @@ -470,17 +470,6 @@ impl AgentTool for EditFileTool { log.buffer_edited(buffer.clone(), cx); }); - // Update the 
recorded read time after a successful edit so consecutive edits work - if let Some(abs_path) = abs_path.as_ref() { - if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - self.thread.update(cx, |thread, _| { - thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime); - })?; - } - } - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let (new_text, unified_diff) = cx .background_spawn({ @@ -2212,14 +2201,18 @@ mod tests { let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); // Initially, file_read_times should be empty - let is_empty = thread.read_with(cx, |thread, _| thread.file_read_times.is_empty()); + let is_empty = action_log.read_with(cx, |action_log, _| { + action_log + .file_read_time(path!("/root/test.txt").as_ref()) + .is_none() + }); assert!(is_empty, "file_read_times should start empty"); // Create read tool let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), project.clone(), - action_log, + action_log.clone(), + true, )); // Read the file to record the read time @@ -2238,12 +2231,9 @@ mod tests { .unwrap(); // Verify that file_read_times now contains an entry for the file - let has_entry = thread.read_with(cx, |thread, _| { - thread.file_read_times.len() == 1 - && thread - .file_read_times - .keys() - .any(|path| path.ends_with("test.txt")) + let has_entry = action_log.read_with(cx, |log, _| { + log.file_read_time(path!("/root/test.txt").as_ref()) + .is_some() }); assert!( has_entry, @@ -2265,11 +2255,14 @@ mod tests { .await .unwrap(); - // Should still have exactly one entry - let has_one_entry = thread.read_with(cx, |thread, _| thread.file_read_times.len() == 1); + // Should still have an entry after re-reading + let has_entry = action_log.read_with(cx, |log, _| { + log.file_read_time(path!("/root/test.txt").as_ref()) + .is_some() + }); assert!( - has_one_entry, - "file_read_times should still have 
one entry after re-reading" + has_entry, + "file_read_times should still have an entry after re-reading" ); } @@ -2309,11 +2302,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), @@ -2423,11 +2412,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), @@ -2534,11 +2519,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(EditFileTool::new( project.clone(), thread.downgrade(), diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs index 8cfc16ddf6174a190ffe7cc11921dc204b05b79d..f7a75bc63a1c461b65c3a2e6f74f2c70e0ca15f6 100644 --- a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -2,7 +2,7 @@ use action_log::ActionLog; use agent_client_protocol::{self as acp, ToolCallUpdateFields}; use anyhow::{Context as _, Result, anyhow}; use 
futures::FutureExt as _; -use gpui::{App, Entity, SharedString, Task, WeakEntity}; +use gpui::{App, Entity, SharedString, Task}; use indoc::formatdoc; use language::Point; use language_model::{LanguageModelImage, LanguageModelToolResultContent}; @@ -21,7 +21,7 @@ use super::tool_permissions::{ ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots, resolve_project_path, }; -use crate::{AgentTool, Thread, ToolCallEventStream, ToolInput, outline}; +use crate::{AgentTool, ToolCallEventStream, ToolInput, outline}; /// Reads the content of the given file in the project. /// @@ -56,21 +56,21 @@ pub struct ReadFileToolInput { } pub struct ReadFileTool { - thread: WeakEntity, project: Entity, action_log: Entity, + update_agent_location: bool, } impl ReadFileTool { pub fn new( - thread: WeakEntity, project: Entity, action_log: Entity, + update_agent_location: bool, ) -> Self { Self { - thread, project, action_log, + update_agent_location, } } } @@ -119,7 +119,6 @@ impl AgentTool for ReadFileTool { cx: &mut App, ) -> Task> { let project = self.project.clone(); - let thread = self.thread.clone(); let action_log = self.action_log.clone(); cx.spawn(async move |cx| { let input = input @@ -257,20 +256,6 @@ impl AgentTool for ReadFileTool { return Err(tool_content_err(format!("{file_path} not found"))); } - // Record the file read time and mtime - if let Some(mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - thread - .update(cx, |thread, _| { - thread.file_read_times.insert(abs_path.to_path_buf(), mtime); - }) - .ok(); - } - - - let update_agent_location = self.thread.read_with(cx, |thread, _cx| !thread.is_subagent()).unwrap_or_default(); - let mut anchor = None; // Check if specific line ranges are provided @@ -330,7 +315,7 @@ impl AgentTool for ReadFileTool { }; project.update(cx, |project, cx| { - if update_agent_location { + if self.update_agent_location { project.set_agent_location( 
Some(AgentLocation { buffer: buffer.downgrade(), @@ -362,13 +347,10 @@ impl AgentTool for ReadFileTool { #[cfg(test)] mod test { use super::*; - use crate::{ContextServerRegistry, Templates, Thread}; use agent_client_protocol as acp; use fs::Fs as _; use gpui::{AppContext, TestAppContext, UpdateGlobal as _}; - use language_model::fake_provider::FakeLanguageModel; use project::{FakeFs, Project}; - use prompt_store::ProjectContext; use serde_json::json; use settings::SettingsStore; use std::path::PathBuf; @@ -383,20 +365,7 @@ mod test { fs.insert_tree(path!("/root"), json!({})).await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let (event_stream, _) = ToolCallEventStream::test(); let result = cx @@ -429,20 +398,7 @@ mod test { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); 
let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -476,20 +432,7 @@ mod test { let language_registry = project.read_with(cx, |project, _| project.languages().clone()); language_registry.add(language::rust_lang()); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -569,20 +512,7 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let result = cx .update(|cx| { let input = ReadFileToolInput { @@ -614,20 +544,7 @@ mod test { .await; let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - 
Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); // start_line of 0 should be treated as 1 let result = cx @@ -757,20 +674,7 @@ mod test { let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); // Reading a file outside the project worktree should fail let result = cx @@ -965,20 +869,7 @@ mod test { let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let tool = Arc::new(ReadFileTool::new(project, action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let read_task = cx.update(|cx| { @@ -1084,24 +975,7 @@ mod test { .await; let action_log = cx.new(|_| 
ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log.clone(), - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log.clone(), true)); // Test reading allowed files in worktree1 let result = cx @@ -1288,24 +1162,7 @@ mod test { cx.executor().run_until_parked(); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { @@ -1364,24 +1221,7 @@ mod test { cx.executor().run_until_parked(); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + 
let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let task = cx.update(|cx| { @@ -1444,24 +1284,7 @@ mod test { cx.executor().run_until_parked(); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let tool = Arc::new(ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true)); let (event_stream, mut event_rx) = ToolCallEventStream::test(); let result = cx diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index 7e023d7d7e00c2eb13ea78467776816b13151796..62b96d569f34d65889abee6be803674dfa42e709 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -483,7 +483,12 @@ impl EditSession { .await .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; - ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; + let action_log = tool + .thread + .read_with(cx, |thread, _cx| thread.action_log().clone()) + .ok(); + + ensure_buffer_saved(&buffer, &abs_path, tool, action_log.as_ref(), cx)?; let diff = cx.new(|cx| Diff::new(buffer.clone(), cx)); event_stream.update_diff(diff.clone()); @@ -495,13 +500,9 @@ impl EditSession { } }) as Box); - tool.thread - .update(cx, |thread, cx| { - thread - .action_log() - .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)) - }) - .ok(); + if let Some(action_log) = &action_log { + action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); + } 
let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let old_text = cx @@ -637,18 +638,6 @@ impl EditSession { log.buffer_edited(buffer.clone(), cx); }); - if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| { - buffer.file().and_then(|file| file.disk_state().mtime()) - }) { - tool.thread - .update(cx, |thread, _| { - thread - .file_read_times - .insert(abs_path.to_path_buf(), new_mtime); - }) - .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; - } - let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let (new_text, unified_diff) = cx .background_spawn({ @@ -1018,10 +1007,12 @@ fn ensure_buffer_saved( buffer: &Entity, abs_path: &PathBuf, tool: &StreamingEditFileTool, + action_log: Option<&Entity>, cx: &mut AsyncApp, ) -> Result<(), StreamingEditFileToolOutput> { - let check_result = tool.thread.update(cx, |thread, cx| { - let last_read = thread.file_read_times.get(abs_path).copied(); + let last_read_mtime = + action_log.and_then(|log| log.read_with(cx, |log, _| log.file_read_time(abs_path))); + let check_result = tool.thread.read_with(cx, |thread, cx| { let current = buffer .read(cx) .file() @@ -1029,12 +1020,10 @@ fn ensure_buffer_saved( let dirty = buffer.read(cx).is_dirty(); let has_save = thread.has_tool(SaveFileTool::NAME); let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME); - (last_read, current, dirty, has_save, has_restore) + (current, dirty, has_save, has_restore) }); - let Ok((last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool)) = - check_result - else { + let Ok((current_mtime, is_dirty, has_save_tool, has_restore_tool)) = check_result else { return Ok(()); }; @@ -4006,11 +3995,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - 
action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(StreamingEditFileTool::new( project.clone(), thread.downgrade(), @@ -4112,11 +4097,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(StreamingEditFileTool::new( project.clone(), thread.downgrade(), @@ -4225,11 +4206,7 @@ mod tests { let languages = project.read_with(cx, |project, _| project.languages().clone()); let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone()); - let read_tool = Arc::new(crate::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true)); let edit_tool = Arc::new(StreamingEditFileTool::new( project.clone(), thread.downgrade(), diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 778f7292d2a032df6995169852deeecee6fa76a7..9b9fe9948ace530d7e55d2843952ca5c9efb3749 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -2,15 +2,12 @@ /// The tests in this file assume that server_cx is running on Windows too. /// We neead to find a way to test Windows-Non-Windows interactions. 
use crate::headless_project::HeadlessProject; -use agent::{ - AgentTool, ReadFileTool, ReadFileToolInput, Templates, Thread, ToolCallEventStream, ToolInput, -}; +use agent::{AgentTool, ReadFileTool, ReadFileToolInput, ToolCallEventStream, ToolInput}; use client::{Client, UserStore}; use clock::FakeSystemClock; use collections::{HashMap, HashSet}; use git::repository::DiffType; -use language_model::{LanguageModelToolResultContent, fake_provider::FakeLanguageModel}; -use prompt_store::ProjectContext; +use language_model::LanguageModelToolResultContent; use extension::ExtensionHostProxy; use fs::{FakeFs, Fs}; @@ -2065,27 +2062,12 @@ async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mu let action_log = cx.new(|_| action_log::ActionLog::new(project.clone())); - // Create a minimal thread for the ReadFileTool - let context_server_registry = - cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - let thread = cx.new(|cx| { - Thread::new( - project.clone(), - cx.new(|_cx| ProjectContext::default()), - context_server_registry, - Templates::new(), - Some(model), - cx, - ) - }); - let input = ReadFileToolInput { path: "project/b.txt".into(), start_line: None, end_line: None, }; - let read_tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log)); + let read_tool = Arc::new(ReadFileTool::new(project, action_log, true)); let (event_stream, _) = ToolCallEventStream::test(); let exists_result = cx.update(|cx| { diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs index df673f0b4869af8fa55b0e83af10553df8afb4d8..8f005fa68b6accb5cf5686157bbb065e33bb1b0c 100644 --- a/crates/zed/src/visual_test_runner.rs +++ b/crates/zed/src/visual_test_runner.rs @@ -2032,32 +2032,9 @@ fn run_agent_thread_view_test( // Create the necessary entities for the ReadFileTool let action_log = cx.update(|cx| cx.new(|_| 
action_log::ActionLog::new(project.clone()))); - let context_server_registry = cx.update(|cx| { - cx.new(|cx| agent::ContextServerRegistry::new(project.read(cx).context_server_store(), cx)) - }); - let fake_model = Arc::new(language_model::fake_provider::FakeLanguageModel::default()); - let project_context = cx.update(|cx| cx.new(|_| prompt_store::ProjectContext::default())); - - // Create the agent Thread - let thread = cx.update(|cx| { - cx.new(|cx| { - agent::Thread::new( - project.clone(), - project_context, - context_server_registry, - agent::Templates::new(), - Some(fake_model), - cx, - ) - }) - }); // Create the ReadFileTool - let tool = Arc::new(agent::ReadFileTool::new( - thread.downgrade(), - project.clone(), - action_log, - )); + let tool = Arc::new(agent::ReadFileTool::new(project.clone(), action_log, true)); // Create a test event stream to capture tool output let (event_stream, mut event_receiver) = agent::ToolCallEventStream::test(); From 932981fca14824cfbacafc900ca9bbe08c1b5a96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=80=E1=B4=8D=E1=B4=9B=E1=B4=8F=E1=B4=80=E1=B4=87?= =?UTF-8?q?=CA=80?= Date: Wed, 4 Mar 2026 19:59:08 +0800 Subject: [PATCH 298/548] editor: Prevent underlines from appearing in minimap (#48510) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit I noticed that the minimap seems to render underlines with the same thickness as the main editor, which looks a bit off. This becomes much more noticeable when enabling `semantic_token_rules` (due to the increased number of underlines): ```json "global_lsp_settings": { "semantic_token_rules": [ { "token_modifiers": ["mutable"], "underline": true, }, ], } ``` Looking at the existing code, I found that diagnostic underlines already check `editor_style.show_underlines` to ensure they are only displayed in the main editor. 
To maintain consistency, I applied the same filtering logic to `chunk_highlight` so that these underlines are no longer rendered in the minimap. Before: CleanShot 2026-02-06 at 02 28 31@2x After: CleanShot 2026-02-06 at 02 31 36@2x Release Notes: - N/A --- crates/editor/src/display_map.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index b666557b90a3c1181404d8f09b1d50ff9f8402a9..658239db9a575d4d13c2a6f7877e20fcd6e47673 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1924,6 +1924,9 @@ impl DisplaySnapshot { color } }), + underline: chunk_highlight + .underline + .filter(|_| editor_style.show_underlines), ..chunk_highlight } }); From 0b58d3493612a15bac33665713c522ab6e139041 Mon Sep 17 00:00:00 2001 From: Dino Date: Wed, 4 Mar 2026 12:11:40 +0000 Subject: [PATCH 299/548] editor: Refactor excerpts removed event handling (#50695) Refactor the changes introduced in https://github.com/zed-industries/zed/pull/50525, in order to remove the `DisplayMap.clear_folded_buffer` method and update the editor's handling of `multi_buffer::Event::ExcerptsRemoved` to actually call `DisplayMap.unfold_buffers`, which correctly updates the `BlockMap` using its `BlockMapWriter`, ensuring that the block map is synced.
Before you mark this PR as ready for review, make sure that you have: - [X] Added a solid test coverage and/or screenshots from doing manual testing - [X] Done a self-review taking into account security and performance aspects - [X] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/editor/src/display_map.rs | 5 ----- crates/editor/src/editor.rs | 6 +++++- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs index 658239db9a575d4d13c2a6f7877e20fcd6e47673..00a48a9ab3d249850b9749d64267d8274e7eaa79 100644 --- a/crates/editor/src/display_map.rs +++ b/crates/editor/src/display_map.rs @@ -1006,11 +1006,6 @@ impl DisplayMap { &self.block_map.folded_buffers } - #[instrument(skip_all)] - pub(super) fn clear_folded_buffer(&mut self, buffer_id: language::BufferId) { - self.block_map.folded_buffers.remove(&buffer_id); - } - #[instrument(skip_all)] pub fn insert_creases( &mut self, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 5504305f86eb95dee000cec4099e366bbf86ffef..0d1238da21695738e4f6cedc54e172ad456c9bd6 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -24147,9 +24147,13 @@ impl Editor { self.display_map.update(cx, |display_map, cx| { display_map.invalidate_semantic_highlights(*buffer_id); display_map.clear_lsp_folding_ranges(*buffer_id, cx); - display_map.clear_folded_buffer(*buffer_id); }); } + + self.display_map.update(cx, |display_map, cx| { + display_map.unfold_buffers(removed_buffer_ids.iter().copied(), cx); + }); + jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); cx.emit(EditorEvent::ExcerptsRemoved { ids: ids.clone(), From d5137d76c1f8b24f075768a8f7e247efed62a938 Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Wed, 4 Mar 2026 13:19:13 +0100 Subject: 
[PATCH 300/548] git: Add trusted worktree support to git integrations (#50649) This PR cleans up the git command spawning by wrapping everything in GitBinary instead to follow a builder/factory pattern. It also extends trusted workspace support to git commands. I also added a `clippy.toml` configuration to our git crate that warns against using `Command` struct to spawn git commands instead of going through `GitBinary`. This should help us maintain the factory pattern in the future Before you mark this PR as ready for review, make sure that you have: - [x] Added solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects Release Notes: - git: Add trusted workspace support for Zed's git integration --- crates/fs/src/fake_git_repo.rs | 12 +- crates/fs/src/fs.rs | 1 + crates/git/clippy.toml | 28 + crates/git/src/blame.rs | 24 +- crates/git/src/commit.rs | 17 +- crates/git/src/repository.rs | 661 +++++++++--------- crates/project/src/git_store.rs | 61 +- crates/project/tests/integration/git_store.rs | 205 ++++++ 8 files changed, 644 insertions(+), 365 deletions(-) create mode 100644 crates/git/clippy.toml diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 99295c69d45427c799e3d850d605f63d3950ee57..06ebea9157f97a0323297cd3ae142c4b306fe4ef 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -20,7 +20,7 @@ use ignore::gitignore::GitignoreBuilder; use parking_lot::Mutex; use rope::Rope; use smol::{channel::Sender, future::FutureExt as _}; -use std::{path::PathBuf, sync::Arc}; +use std::{path::PathBuf, sync::Arc, sync::atomic::AtomicBool}; use text::LineEnding; use util::{paths::PathStyle, rel_path::RelPath}; @@ -32,6 +32,7 @@ pub struct FakeGitRepository { pub(crate) dot_git_path: PathBuf, pub(crate) repository_dir_path: PathBuf, pub(crate) common_dir_path: PathBuf, + pub(crate) is_trusted: Arc, } #[derive(Debug, Clone)] @@ 
-1035,4 +1036,13 @@ impl GitRepository for FakeGitRepository { fn commit_data_reader(&self) -> Result { anyhow::bail!("commit_data_reader not supported for FakeGitRepository") } + + fn set_trusted(&self, trusted: bool) { + self.is_trusted + .store(trusted, std::sync::atomic::Ordering::Release); + } + + fn is_trusted(&self) -> bool { + self.is_trusted.load(std::sync::atomic::Ordering::Acquire) + } } diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 2db9e48a2e77bdb3e49fce0b16ea9b67ffaacbc0..0fde444171042eda859edcac7915c456ab91e265 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -2776,6 +2776,7 @@ impl Fs for FakeFs { repository_dir_path: repository_dir_path.to_owned(), common_dir_path: common_dir_path.to_owned(), checkpoints: Arc::default(), + is_trusted: Arc::default(), }) as _ }, ) diff --git a/crates/git/clippy.toml b/crates/git/clippy.toml new file mode 100644 index 0000000000000000000000000000000000000000..fb3926840493fd5981c1861e7cea96bd54b9647f --- /dev/null +++ b/crates/git/clippy.toml @@ -0,0 +1,28 @@ +allow-private-module-inception = true +avoid-breaking-exported-api = false +ignore-interior-mutability = [ + # Suppresses clippy::mutable_key_type, which is a false positive as the Eq + # and Hash impls do not use fields with interior mutability. 
+ "agent_ui::context::AgentContextKey" +] +disallowed-methods = [ + { path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" }, + { path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" }, + { path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" }, + { path = "std::process::Command::stdin", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdin" }, + { path = "std::process::Command::stdout", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stdout" }, + { path = "std::process::Command::stderr", reason = "`smol::process::Command::from()` does not preserve stdio configuration", replacement = "smol::process::Command::stderr" }, + { path = "smol::Timer::after", reason = "smol::Timer introduces non-determinism in tests", replacement = "gpui::BackgroundExecutor::timer" }, + { path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." }, + { path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." }, + { path = "cocoa::foundation::NSString::alloc", reason = "NSString must be autoreleased to avoid memory leaks. Use `ns_string()` helper instead." 
}, + { path = "smol::process::Command::new", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" }, + { path = "util::command::new_command", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" }, + { path = "util::command::Command::new", reason = "Git commands must go through `GitBinary::build_command` to ensure security flags like `-c core.fsmonitor=false` are always applied.", replacement = "GitBinary::build_command" }, +] +disallowed-types = [ + # { path = "std::collections::HashMap", replacement = "collections::HashMap" }, + # { path = "std::collections::HashSet", replacement = "collections::HashSet" }, + # { path = "indexmap::IndexSet", replacement = "collections::IndexSet" }, + # { path = "indexmap::IndexMap", replacement = "collections::IndexMap" }, +] \ No newline at end of file diff --git a/crates/git/src/blame.rs b/crates/git/src/blame.rs index 9dc184bf2ac253c8bc24f6203f13d6654ac2b64b..c44aea74051bb7c190a091703d6c60807fc4e27e 100644 --- a/crates/git/src/blame.rs +++ b/crates/git/src/blame.rs @@ -1,11 +1,11 @@ use crate::Oid; use crate::commit::get_messages; -use crate::repository::RepoPath; +use crate::repository::{GitBinary, RepoPath}; use anyhow::{Context as _, Result}; use collections::{HashMap, HashSet}; use futures::AsyncWriteExt; use serde::{Deserialize, Serialize}; -use std::{ops::Range, path::Path}; +use std::ops::Range; use text::{LineEnding, Rope}; use time::OffsetDateTime; use time::UtcOffset; @@ -21,15 +21,13 @@ pub struct Blame { } impl Blame { - pub async fn for_path( - git_binary: &Path, - working_directory: &Path, + pub(crate) async fn for_path( + git: &GitBinary, path: &RepoPath, content: &Rope, line_ending: LineEnding, ) -> Result { - let output = - run_git_blame(git_binary, 
working_directory, path, content, line_ending).await?; + let output = run_git_blame(git, path, content, line_ending).await?; let mut entries = parse_git_blame(&output)?; entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start)); @@ -40,7 +38,7 @@ impl Blame { } let shas = unique_shas.into_iter().collect::>(); - let messages = get_messages(working_directory, &shas) + let messages = get_messages(git, &shas) .await .context("failed to get commit messages")?; @@ -52,8 +50,7 @@ const GIT_BLAME_NO_COMMIT_ERROR: &str = "fatal: no such ref: HEAD"; const GIT_BLAME_NO_PATH: &str = "fatal: no such path"; async fn run_git_blame( - git_binary: &Path, - working_directory: &Path, + git: &GitBinary, path: &RepoPath, contents: &Rope, line_ending: LineEnding, @@ -61,12 +58,7 @@ async fn run_git_blame( let mut child = { let span = ztracing::debug_span!("spawning git-blame command", path = path.as_unix_str()); let _enter = span.enter(); - util::command::new_command(git_binary) - .current_dir(working_directory) - .arg("blame") - .arg("--incremental") - .arg("--contents") - .arg("-") + git.build_command(["blame", "--incremental", "--contents", "-"]) .arg(path.as_unix_str()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) diff --git a/crates/git/src/commit.rs b/crates/git/src/commit.rs index 3f3526afc4ba8fa146592684a6d3acfc44ce7e73..46e050ce155fc049a670fdfa26101eb729b34352 100644 --- a/crates/git/src/commit.rs +++ b/crates/git/src/commit.rs @@ -1,11 +1,11 @@ use crate::{ BuildCommitPermalinkParams, GitHostingProviderRegistry, GitRemote, Oid, parse_git_remote_url, - status::StatusCode, + repository::GitBinary, status::StatusCode, }; use anyhow::{Context as _, Result}; use collections::HashMap; use gpui::SharedString; -use std::{path::Path, sync::Arc}; +use std::sync::Arc; #[derive(Clone, Debug, Default)] pub struct ParsedCommitMessage { @@ -48,7 +48,7 @@ impl ParsedCommitMessage { } } -pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result> { +pub(crate) async 
fn get_messages(git: &GitBinary, shas: &[Oid]) -> Result> { if shas.is_empty() { return Ok(HashMap::default()); } @@ -63,12 +63,12 @@ pub async fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result Result>()) } -async fn get_messages_impl(working_directory: &Path, shas: &[Oid]) -> Result> { +async fn get_messages_impl(git: &GitBinary, shas: &[Oid]) -> Result> { const MARKER: &str = ""; - let output = util::command::new_command("git") - .current_dir(working_directory) - .arg("show") + let output = git + .build_command(["show"]) .arg("-s") .arg(format!("--format=%B{}", MARKER)) .args(shas.iter().map(ToString::to_string)) diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 6dba1400dffe1fd00844dd7241f39f48a7a759a6..f5a856325cc80071f2c8ef500e7b07aa24035f59 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -21,6 +21,7 @@ use text::LineEnding; use std::collections::HashSet; use std::ffi::{OsStr, OsString}; +use std::sync::atomic::AtomicBool; use std::process::ExitStatus; use std::str::FromStr; @@ -961,6 +962,9 @@ pub trait GitRepository: Send + Sync { ) -> BoxFuture<'_, Result<()>>; fn commit_data_reader(&self) -> Result; + + fn set_trusted(&self, trusted: bool); + fn is_trusted(&self) -> bool; } pub enum DiffType { @@ -987,6 +991,7 @@ pub struct RealGitRepository { pub any_git_binary_path: PathBuf, any_git_binary_help_output: Arc>>, executor: BackgroundExecutor, + is_trusted: Arc, } impl RealGitRepository { @@ -1005,6 +1010,7 @@ impl RealGitRepository { any_git_binary_path, executor, any_git_binary_help_output: Arc::new(Mutex::new(None)), + is_trusted: Arc::new(AtomicBool::new(false)), }) } @@ -1016,20 +1022,24 @@ impl RealGitRepository { .map(Path::to_path_buf) } + fn git_binary(&self) -> Result { + Ok(GitBinary::new( + self.any_git_binary_path.clone(), + self.working_directory() + .with_context(|| "Can't run git commands without a working directory")?, + self.executor.clone(), + self.is_trusted(), + 
)) + } + async fn any_git_binary_help_output(&self) -> SharedString { if let Some(output) = self.any_git_binary_help_output.lock().clone() { return output; } - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); let output: SharedString = self .executor - .spawn(async move { - GitBinary::new(git_binary_path, working_directory?, executor) - .run(["help", "-a"]) - .await - }) + .spawn(async move { git_binary?.run(["help", "-a"]).await }) .await .unwrap_or_default() .into(); @@ -1072,6 +1082,7 @@ pub async fn get_git_committer(cx: &AsyncApp) -> GitCommitter { git_binary_path.unwrap_or(PathBuf::from("git")), paths::home_dir().clone(), cx.background_executor().clone(), + true, ); cx.background_spawn(async move { @@ -1103,14 +1114,12 @@ impl GitRepository for RealGitRepository { } fn show(&self, commit: String) -> BoxFuture<'_, Result> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(git_binary_path) - .current_dir(&working_directory) - .args([ + let git = git_binary?; + let output = git + .build_command([ "--no-optional-locks", "show", "--no-patch", @@ -1141,15 +1150,14 @@ impl GitRepository for RealGitRepository { } fn load_commit(&self, commit: String, cx: AsyncApp) -> BoxFuture<'_, Result> { - let Some(working_directory) = self.repository.lock().workdir().map(ToOwned::to_owned) - else { + if self.repository.lock().workdir().is_none() { return future::ready(Err(anyhow!("no working directory"))).boxed(); - }; - let git_binary_path = self.any_git_binary_path.clone(); + } + let git_binary = self.git_binary(); cx.background_spawn(async move { - let show_output = util::command::new_command(&git_binary_path) - 
.current_dir(&working_directory) - .args([ + let git = git_binary?; + let show_output = git + .build_command([ "--no-optional-locks", "show", "--format=", @@ -1170,9 +1178,8 @@ impl GitRepository for RealGitRepository { let changes = parse_git_diff_name_status(&show_stdout); let parent_sha = format!("{}^", commit); - let mut cat_file_process = util::command::new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"]) + let mut cat_file_process = git + .build_command(["--no-optional-locks", "cat-file", "--batch=%(objectsize)"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) @@ -1279,18 +1286,17 @@ impl GitRepository for RealGitRepository { mode: ResetMode, env: Arc>, ) -> BoxFuture<'_, Result<()>> { + let git_binary = self.git_binary(); async move { - let working_directory = self.working_directory(); - let mode_flag = match mode { ResetMode::Mixed => "--mixed", ResetMode::Soft => "--soft", }; - let output = new_command(&self.any_git_binary_path) + let git = git_binary?; + let output = git + .build_command(["reset", mode_flag, &commit]) .envs(env.iter()) - .current_dir(&working_directory?) - .args(["reset", mode_flag, &commit]) .output() .await?; anyhow::ensure!( @@ -1309,17 +1315,16 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); async move { if paths.is_empty() { return Ok(()); } - let output = new_command(&git_binary_path) - .current_dir(&working_directory?) 
+ let git = git_binary?; + let output = git + .build_command(["checkout", &commit, "--"]) .envs(env.iter()) - .args(["checkout", &commit, "--"]) .args(paths.iter().map(|path| path.as_unix_str())) .output() .await?; @@ -1414,18 +1419,16 @@ impl GitRepository for RealGitRepository { env: Arc>, is_executable: bool, ) -> BoxFuture<'_, anyhow::Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let mode = if is_executable { "100755" } else { "100644" }; if let Some(content) = content { - let mut child = new_command(&git_binary_path) - .current_dir(&working_directory) + let mut child = git + .build_command(["hash-object", "-w", "--stdin"]) .envs(env.iter()) - .args(["hash-object", "-w", "--stdin"]) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .spawn()?; @@ -1438,10 +1441,9 @@ impl GitRepository for RealGitRepository { log::debug!("indexing SHA: {sha}, path {path:?}"); - let output = new_command(&git_binary_path) - .current_dir(&working_directory) + let output = git + .build_command(["update-index", "--add", "--cacheinfo", mode, sha]) .envs(env.iter()) - .args(["update-index", "--add", "--cacheinfo", mode, sha]) .arg(path.as_unix_str()) .output() .await?; @@ -1453,10 +1455,9 @@ impl GitRepository for RealGitRepository { ); } else { log::debug!("removing path {path:?} from the index"); - let output = new_command(&git_binary_path) - .current_dir(&working_directory) + let output = git + .build_command(["update-index", "--force-remove"]) .envs(env.iter()) - .args(["update-index", "--force-remove"]) .arg(path.as_unix_str()) .output() .await?; @@ -1485,14 +1486,12 @@ impl GitRepository for RealGitRepository { } fn revparse_batch(&self, revs: Vec) -> BoxFuture<'_, Result>>> { - let working_directory = self.working_directory(); - let git_binary_path = 
self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let mut process = new_command(&git_binary_path) - .current_dir(&working_directory) - .args([ + let git = git_binary?; + let mut process = git + .build_command([ "--no-optional-locks", "cat-file", "--batch-check=%(objectname)", @@ -1545,19 +1544,14 @@ impl GitRepository for RealGitRepository { } fn status(&self, path_prefixes: &[RepoPath]) -> Task> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = match self.working_directory() { - Ok(working_directory) => working_directory, + let git = match self.git_binary() { + Ok(git) => git, Err(e) => return Task::ready(Err(e)), }; let args = git_status_args(path_prefixes); log::debug!("Checking for git status in {path_prefixes:?}"); self.executor.spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(args).output().await?; if output.status.success() { let stdout = String::from_utf8_lossy(&output.stdout); stdout.parse() @@ -1569,9 +1563,8 @@ impl GitRepository for RealGitRepository { } fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = match self.working_directory() { - Ok(working_directory) => working_directory, + let git = match self.git_binary() { + Ok(git) => git, Err(e) => return Task::ready(Err(e)).boxed(), }; @@ -1596,11 +1589,7 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(args).output().await?; if output.status.success() { let stdout = String::from_utf8_lossy(&output.stdout); stdout.parse() @@ -1613,13 +1602,12 @@ impl GitRepository for 
RealGitRepository { } fn stash_entries(&self) -> BoxFuture<'_, Result> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let output = new_command(&git_binary_path) - .current_dir(working_directory?) - .args(&["stash", "list", "--pretty=format:%gd%x00%H%x00%ct%x00%s"]) + let git = git_binary?; + let output = git + .build_command(&["stash", "list", "--pretty=format:%gd%x00%H%x00%ct%x00%s"]) .output() .await?; if output.status.success() { @@ -1634,8 +1622,7 @@ impl GitRepository for RealGitRepository { } fn branches(&self) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { let fields = [ @@ -1657,12 +1644,8 @@ impl GitRepository for RealGitRepository { "--format", &fields, ]; - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(args) - .output() - .await?; + let git = git_binary?; + let output = git.build_command(args).output().await?; anyhow::ensure!( output.status.success(), @@ -1676,11 +1659,7 @@ impl GitRepository for RealGitRepository { if branches.is_empty() { let args = vec!["symbolic-ref", "--quiet", "HEAD"]; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(args).output().await?; // git symbolic-ref returns a non-0 exit code if HEAD points // to something other than a branch @@ -1702,13 +1681,12 @@ impl GitRepository for RealGitRepository { } fn worktrees(&self) -> BoxFuture<'_, Result>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let output = 
new_command(&git_binary_path) - .current_dir(working_directory?) - .args(&["--no-optional-locks", "worktree", "list", "--porcelain"]) + let git = git_binary?; + let output = git + .build_command(&["--no-optional-locks", "worktree", "list", "--porcelain"]) .output() .await?; if output.status.success() { @@ -1728,8 +1706,7 @@ impl GitRepository for RealGitRepository { directory: PathBuf, from_commit: Option, ) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); let final_path = directory.join(&name); let mut args = vec![ OsString::from("--no-optional-locks"), @@ -1749,11 +1726,8 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { std::fs::create_dir_all(final_path.parent().unwrap_or(&final_path))?; - let output = new_command(&git_binary_path) - .current_dir(working_directory?) - .args(args) - .output() - .await?; + let git = git_binary?; + let output = git.build_command(args).output().await?; if output.status.success() { Ok(()) } else { @@ -1765,9 +1739,7 @@ impl GitRepository for RealGitRepository { } fn remove_worktree(&self, path: PathBuf, force: bool) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { @@ -1781,18 +1753,14 @@ impl GitRepository for RealGitRepository { } args.push("--".into()); args.push(path.as_os_str().into()); - GitBinary::new(git_binary_path, working_directory?, executor) - .run(args) - .await?; + git_binary?.run(args).await?; anyhow::Ok(()) }) .boxed() } fn rename_worktree(&self, old_path: PathBuf, new_path: PathBuf) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = 
self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { @@ -1804,9 +1772,7 @@ impl GitRepository for RealGitRepository { old_path.as_os_str().into(), new_path.as_os_str().into(), ]; - GitBinary::new(git_binary_path, working_directory?, executor) - .run(args) - .await?; + git_binary?.run(args).await?; anyhow::Ok(()) }) .boxed() @@ -1814,9 +1780,7 @@ impl GitRepository for RealGitRepository { fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { let repo = self.repository.clone(); - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); let branch = self.executor.spawn(async move { let repo = repo.lock(); let branch = if let Ok(branch) = repo.find_branch(&name, BranchType::Local) { @@ -1851,9 +1815,7 @@ impl GitRepository for RealGitRepository { self.executor .spawn(async move { let branch = branch.await?; - GitBinary::new(git_binary_path, working_directory?, executor) - .run(&["checkout", &branch]) - .await?; + git_binary?.run(&["checkout", &branch]).await?; anyhow::Ok(()) }) .boxed() @@ -1864,9 +1826,7 @@ impl GitRepository for RealGitRepository { name: String, base_branch: Option, ) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { @@ -1877,22 +1837,18 @@ impl GitRepository for RealGitRepository { args.push(&base_branch_str); } - GitBinary::new(git_binary_path, working_directory?, executor) - .run(&args) - .await?; + git_binary?.run(&args).await?; anyhow::Ok(()) }) .boxed() } fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let 
executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - GitBinary::new(git_binary_path, working_directory?, executor) + git_binary? .run(&["branch", "-m", &branch, &new_name]) .await?; anyhow::Ok(()) @@ -1901,15 +1857,11 @@ impl GitRepository for RealGitRepository { } fn delete_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - GitBinary::new(git_binary_path, working_directory?, executor) - .run(&["branch", "-d", &name]) - .await?; + git_binary?.run(&["branch", "-d", &name]).await?; anyhow::Ok(()) }) .boxed() @@ -1921,20 +1873,11 @@ impl GitRepository for RealGitRepository { content: Rope, line_ending: LineEnding, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); + let git = self.git_binary(); - executor + self.executor .spawn(async move { - crate::blame::Blame::for_path( - &git_binary_path, - &working_directory?, - &path, - &content, - line_ending, - ) - .await + crate::blame::Blame::for_path(&git?, &path, &content, line_ending).await }) .boxed() } @@ -1949,11 +1892,10 @@ impl GitRepository for RealGitRepository { skip: usize, limit: Option, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; // Use a unique delimiter with a hardcoded UUID to separate commits // This essentially eliminates any chance of encountering the delimiter in actual commit data let commit_delimiter = @@ -1981,9 +1923,8 @@ impl GitRepository for 
RealGitRepository { args.push("--"); - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(&args) + let output = git + .build_command(&args) .arg(path.as_unix_str()) .output() .await?; @@ -2028,30 +1969,17 @@ impl GitRepository for RealGitRepository { } fn diff(&self, diff: DiffType) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let output = match diff { DiffType::HeadToIndex => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--staged"]) - .output() - .await? - } - DiffType::HeadToWorktree => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff"]) - .output() - .await? + git.build_command(["diff", "--staged"]).output().await? } + DiffType::HeadToWorktree => git.build_command(["diff"]).output().await?, DiffType::MergeBase { base_ref } => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--merge-base", base_ref.as_ref()]) + git.build_command(["diff", "--merge-base", base_ref.as_ref()]) .output() .await? } @@ -2071,38 +1999,29 @@ impl GitRepository for RealGitRepository { &self, diff: DiffType, ) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let output = match diff { DiffType::HeadToIndex => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--numstat", "--staged"]) + git.build_command(["diff", "--numstat", "--staged"]) .output() .await? 
} DiffType::HeadToWorktree => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["diff", "--numstat"]) - .output() - .await? + git.build_command(["diff", "--numstat"]).output().await? } DiffType::MergeBase { base_ref } => { - new_command(&git_binary_path) - .current_dir(&working_directory) - .args([ - "diff", - "--numstat", - "--merge-base", - base_ref.as_ref(), - "HEAD", - ]) - .output() - .await? + git.build_command([ + "diff", + "--numstat", + "--merge-base", + base_ref.as_ref(), + "HEAD", + ]) + .output() + .await? } }; @@ -2123,15 +2042,14 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { if !paths.is_empty() { - let output = new_command(&git_binary_path) - .current_dir(&working_directory?) + let git = git_binary?; + let output = git + .build_command(["update-index", "--add", "--remove", "--"]) .envs(env.iter()) - .args(["update-index", "--add", "--remove", "--"]) .args(paths.iter().map(|p| p.as_unix_str())) .output() .await?; @@ -2151,16 +2069,15 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { if !paths.is_empty() { - let output = new_command(&git_binary_path) - .current_dir(&working_directory?) 
+ let git = git_binary?; + let output = git + .build_command(["reset", "--quiet", "--"]) .envs(env.iter()) - .args(["reset", "--quiet", "--"]) .args(paths.iter().map(|p| p.as_std_path())) .output() .await?; @@ -2181,19 +2098,16 @@ impl GitRepository for RealGitRepository { paths: Vec, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(&git_binary_path); - cmd.current_dir(&working_directory?) + let git = git_binary?; + let output = git + .build_command(["stash", "push", "--quiet", "--include-untracked"]) .envs(env.iter()) - .args(["stash", "push", "--quiet"]) - .arg("--include-untracked"); - - cmd.args(paths.iter().map(|p| p.as_unix_str())); - - let output = cmd.output().await?; + .args(paths.iter().map(|p| p.as_unix_str())) + .output() + .await?; anyhow::ensure!( output.status.success(), @@ -2210,20 +2124,15 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(git_binary_path); + let git = git_binary?; let mut args = vec!["stash".to_string(), "pop".to_string()]; if let Some(index) = index { args.push(format!("stash@{{{}}}", index)); } - cmd.current_dir(&working_directory?) 
- .envs(env.iter()) - .args(args); - - let output = cmd.output().await?; + let output = git.build_command(&args).envs(env.iter()).output().await?; anyhow::ensure!( output.status.success(), @@ -2240,20 +2149,15 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(git_binary_path); + let git = git_binary?; let mut args = vec!["stash".to_string(), "apply".to_string()]; if let Some(index) = index { args.push(format!("stash@{{{}}}", index)); } - cmd.current_dir(&working_directory?) - .envs(env.iter()) - .args(args); - - let output = cmd.output().await?; + let output = git.build_command(&args).envs(env.iter()).output().await?; anyhow::ensure!( output.status.success(), @@ -2270,20 +2174,15 @@ impl GitRepository for RealGitRepository { index: Option, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let mut cmd = new_command(git_binary_path); + let git = git_binary?; let mut args = vec!["stash".to_string(), "drop".to_string()]; if let Some(index) = index { args.push(format!("stash@{{{}}}", index)); } - cmd.current_dir(&working_directory?) 
- .envs(env.iter()) - .args(args); - - let output = cmd.output().await?; + let output = git.build_command(&args).envs(env.iter()).output().await?; anyhow::ensure!( output.status.success(), @@ -2303,16 +2202,14 @@ impl GitRepository for RealGitRepository { ask_pass: AskPassDelegate, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); let executor = self.executor.clone(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. async move { - let mut cmd = new_command(git_binary_path); - cmd.current_dir(&working_directory?) - .envs(env.iter()) - .args(["commit", "--quiet", "-m"]) + let git = git_binary?; + let mut cmd = git.build_command(["commit", "--quiet", "-m"]); + cmd.envs(env.iter()) .arg(&message.to_string()) .arg("--cleanup=strip") .arg("--no-verify") @@ -2351,16 +2248,21 @@ impl GitRepository for RealGitRepository { let working_directory = self.working_directory(); let executor = cx.background_executor().clone(); let git_binary_path = self.system_git_binary_path.clone(); + let is_trusted = self.is_trusted(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. 
async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't push")?; let working_directory = working_directory?; - let mut command = new_command(git_binary_path); + let git = GitBinary::new( + git_binary_path, + working_directory, + executor.clone(), + is_trusted, + ); + let mut command = git.build_command(["push"]); command .envs(env.iter()) - .current_dir(&working_directory) - .args(["push"]) .args(options.map(|option| match option { PushOptions::SetUpstream => "--set-upstream", PushOptions::Force => "--force-with-lease", @@ -2388,15 +2290,20 @@ impl GitRepository for RealGitRepository { let working_directory = self.working_directory(); let executor = cx.background_executor().clone(); let git_binary_path = self.system_git_binary_path.clone(); + let is_trusted = self.is_trusted(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't pull")?; - let mut command = new_command(git_binary_path); - command - .envs(env.iter()) - .current_dir(&working_directory?) - .arg("pull"); + let working_directory = working_directory?; + let git = GitBinary::new( + git_binary_path, + working_directory, + executor.clone(), + is_trusted, + ); + let mut command = git.build_command(["pull"]); + command.envs(env.iter()); if rebase { command.arg("--rebase"); @@ -2424,15 +2331,21 @@ impl GitRepository for RealGitRepository { let remote_name = format!("{}", fetch_options); let git_binary_path = self.system_git_binary_path.clone(); let executor = cx.background_executor().clone(); + let is_trusted = self.is_trusted(); // Note: Do not spawn this command on the background thread, it might pop open the credential helper // which we want to block on. 
async move { let git_binary_path = git_binary_path.context("git not found on $PATH, can't fetch")?; - let mut command = new_command(git_binary_path); + let working_directory = working_directory?; + let git = GitBinary::new( + git_binary_path, + working_directory, + executor.clone(), + is_trusted, + ); + let mut command = git.build_command(["fetch", &remote_name]); command .envs(env.iter()) - .current_dir(&working_directory?) - .args(["fetch", &remote_name]) .stdout(Stdio::piped()) .stderr(Stdio::piped()); @@ -2442,14 +2355,12 @@ impl GitRepository for RealGitRepository { } fn get_push_remote(&self, branch: String) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["rev-parse", "--abbrev-ref"]) + let git = git_binary?; + let output = git + .build_command(["rev-parse", "--abbrev-ref"]) .arg(format!("{branch}@{{push}}")) .output() .await?; @@ -2469,14 +2380,12 @@ impl GitRepository for RealGitRepository { } fn get_branch_remote(&self, branch: String) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["config", "--get"]) + let git = git_binary?; + let output = git + .build_command(["config", "--get"]) .arg(format!("branch.{branch}.remote")) .output() .await?; @@ -2493,16 +2402,11 @@ impl GitRepository for RealGitRepository { } fn get_all_remotes(&self) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + 
let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(["remote", "-v"]) - .output() - .await?; + let git = git_binary?; + let output = git.build_command(["remote", "-v"]).output().await?; anyhow::ensure!( output.status.success(), @@ -2551,17 +2455,12 @@ impl GitRepository for RealGitRepository { } fn check_for_pushed_commit(&self) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; + let git = git_binary?; let git_cmd = async |args: &[&str]| -> Result { - let output = new_command(&git_binary_path) - .current_dir(&working_directory) - .args(args) - .output() - .await?; + let output = git.build_command(args).output().await?; anyhow::ensure!( output.status.success(), String::from_utf8_lossy(&output.stderr).to_string() @@ -2610,14 +2509,10 @@ impl GitRepository for RealGitRepository { } fn checkpoint(&self) -> BoxFuture<'static, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let mut git = GitBinary::new(git_binary_path, working_directory.clone(), executor) - .envs(checkpoint_author_envs()); + let mut git = git_binary?.envs(checkpoint_author_envs()); git.with_temp_index(async |git| { let head_sha = git.run(&["rev-parse", "HEAD"]).await.ok(); let mut excludes = exclude_files(git).await?; @@ -2643,15 +2538,10 @@ impl GitRepository for RealGitRepository { } fn restore_checkpoint(&self, checkpoint: GitRepositoryCheckpoint) -> BoxFuture<'_, Result<()>> { - let working_directory = 
self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; git.run(&[ "restore", "--source", @@ -2682,14 +2572,10 @@ impl GitRepository for RealGitRepository { left: GitRepositoryCheckpoint, right: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; let result = git .run(&[ "diff-tree", @@ -2720,14 +2606,10 @@ impl GitRepository for RealGitRepository { base_checkpoint: GitRepositoryCheckpoint, target_checkpoint: GitRepositoryCheckpoint, ) -> BoxFuture<'_, Result> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; git.run(&[ "diff", "--find-renames", @@ -2744,14 +2626,10 @@ impl GitRepository for RealGitRepository { &self, include_remote_name: bool, ) -> BoxFuture<'_, Result>> { - let working_directory = self.working_directory(); - let git_binary_path = self.any_git_binary_path.clone(); - - let executor = self.executor.clone(); + let git_binary = self.git_binary(); self.executor .spawn(async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, 
executor); + let git = git_binary?; let strip_prefix = if include_remote_name { "refs/remotes/" @@ -2801,15 +2679,19 @@ impl GitRepository for RealGitRepository { hook: RunHook, env: Arc>, ) -> BoxFuture<'_, Result<()>> { - let working_directory = self.working_directory(); + let git_binary = self.git_binary(); let repository = self.repository.clone(); - let git_binary_path = self.any_git_binary_path.clone(); - let executor = self.executor.clone(); let help_output = self.any_git_binary_help_output(); // Note: Do not spawn these commands on the background thread, as this causes some git hooks to hang. async move { - let working_directory = working_directory?; + let git = git_binary?; + + if !git.is_trusted { + bail!("Can't run git commit hooks in restrictive workspace"); + } + + let working_directory = git.working_directory.clone(); if !help_output .await .lines() @@ -2817,6 +2699,7 @@ impl GitRepository for RealGitRepository { { let hook_abs_path = repository.lock().path().join("hooks").join(hook.as_str()); if hook_abs_path.is_file() { + #[allow(clippy::disallowed_methods)] let output = new_command(&hook_abs_path) .envs(env.iter()) .current_dir(&working_directory) @@ -2836,8 +2719,7 @@ impl GitRepository for RealGitRepository { return Ok(()); } - let git = GitBinary::new(git_binary_path, working_directory, executor) - .envs(HashMap::clone(&env)); + let git = git.envs(HashMap::clone(&env)); git.run(&["hook", "run", "--ignore-missing", hook.as_str()]) .await?; Ok(()) @@ -2851,13 +2733,10 @@ impl GitRepository for RealGitRepository { log_order: LogOrder, request_tx: Sender>>, ) -> BoxFuture<'_, Result<()>> { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self.working_directory(); - let executor = self.executor.clone(); + let git_binary = self.git_binary(); async move { - let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = git_binary?; let mut command = 
git.build_command([ "log", @@ -2911,19 +2790,12 @@ impl GitRepository for RealGitRepository { } fn commit_data_reader(&self) -> Result { - let git_binary_path = self.any_git_binary_path.clone(); - let working_directory = self - .working_directory() - .map_err(|_| anyhow!("no working directory"))?; - let executor = self.executor.clone(); + let git_binary = self.git_binary()?; let (request_tx, request_rx) = smol::channel::bounded::(64); let task = self.executor.spawn(async move { - if let Err(error) = - run_commit_data_reader(git_binary_path, working_directory, executor, request_rx) - .await - { + if let Err(error) = run_commit_data_reader(git_binary, request_rx).await { log::error!("commit data reader failed: {error:?}"); } }); @@ -2933,15 +2805,21 @@ impl GitRepository for RealGitRepository { _task: task, }) } + + fn set_trusted(&self, trusted: bool) { + self.is_trusted + .store(trusted, std::sync::atomic::Ordering::Release); + } + + fn is_trusted(&self) -> bool { + self.is_trusted.load(std::sync::atomic::Ordering::Acquire) + } } async fn run_commit_data_reader( - git_binary_path: PathBuf, - working_directory: PathBuf, - executor: BackgroundExecutor, + git: GitBinary, request_rx: smol::channel::Receiver, ) -> Result<()> { - let git = GitBinary::new(git_binary_path, working_directory, executor); let mut process = git .build_command(["--no-optional-locks", "cat-file", "--batch"]) .stdin(Stdio::piped()) @@ -3117,19 +2995,21 @@ async fn exclude_files(git: &GitBinary) -> Result { Ok(excludes) } -struct GitBinary { +pub(crate) struct GitBinary { git_binary_path: PathBuf, working_directory: PathBuf, executor: BackgroundExecutor, index_file_path: Option, envs: HashMap, + is_trusted: bool, } impl GitBinary { - fn new( + pub(crate) fn new( git_binary_path: PathBuf, working_directory: PathBuf, executor: BackgroundExecutor, + is_trusted: bool, ) -> Self { Self { git_binary_path, @@ -3137,6 +3017,7 @@ impl GitBinary { executor, index_file_path: None, envs: HashMap::default(), + 
is_trusted, } } @@ -3241,12 +3122,20 @@ impl GitBinary { Ok(String::from_utf8(output.stdout)?) } - fn build_command(&self, args: impl IntoIterator) -> util::command::Command + #[allow(clippy::disallowed_methods)] + pub(crate) fn build_command( + &self, + args: impl IntoIterator, + ) -> util::command::Command where S: AsRef, { let mut command = new_command(&self.git_binary_path); command.current_dir(&self.working_directory); + command.args(["-c", "core.fsmonitor=false"]); + if !self.is_trusted { + command.args(["-c", "core.hooksPath=/dev/null"]); + } command.args(args); if let Some(index_file_path) = self.index_file_path.as_ref() { command.env("GIT_INDEX_FILE", index_file_path); @@ -3506,6 +3395,102 @@ mod tests { } } + #[gpui::test] + async fn test_build_command_untrusted_includes_both_safety_args(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + let dir = tempfile::tempdir().unwrap(); + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + false, + ); + let output = git + .build_command(["version"]) + .output() + .await + .expect("git version should succeed"); + assert!(output.status.success()); + + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + false, + ); + let output = git + .build_command(["config", "--get", "core.fsmonitor"]) + .output() + .await + .expect("git config should run"); + let stdout = String::from_utf8_lossy(&output.stdout); + assert_eq!( + stdout.trim(), + "false", + "fsmonitor should be disabled for untrusted repos" + ); + + git2::Repository::init(dir.path()).unwrap(); + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + false, + ); + let output = git + .build_command(["config", "--get", "core.hooksPath"]) + .output() + .await + .expect("git config should run"); + let stdout = String::from_utf8_lossy(&output.stdout); + assert_eq!( + stdout.trim(), + "/dev/null", + "hooksPath should be /dev/null 
for untrusted repos" + ); + } + + #[gpui::test] + async fn test_build_command_trusted_only_disables_fsmonitor(cx: &mut TestAppContext) { + cx.executor().allow_parking(); + let dir = tempfile::tempdir().unwrap(); + git2::Repository::init(dir.path()).unwrap(); + + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + true, + ); + let output = git + .build_command(["config", "--get", "core.fsmonitor"]) + .output() + .await + .expect("git config should run"); + let stdout = String::from_utf8_lossy(&output.stdout); + assert_eq!( + stdout.trim(), + "false", + "fsmonitor should be disabled even for trusted repos" + ); + + let git = GitBinary::new( + PathBuf::from("git"), + dir.path().to_path_buf(), + cx.executor(), + true, + ); + let output = git + .build_command(["config", "--get", "core.hooksPath"]) + .output() + .await + .expect("git config should run"); + assert!( + !output.status.success(), + "hooksPath should NOT be overridden for trusted repos" + ); + } + #[gpui::test] async fn test_checkpoint_basic(cx: &mut TestAppContext) { disable_git_global_config(); @@ -4398,7 +4383,7 @@ mod tests { .spawn(async move { let git_binary_path = git_binary_path.clone(); let working_directory = working_directory?; - let git = GitBinary::new(git_binary_path, working_directory, executor); + let git = GitBinary::new(git_binary_path, working_directory, executor, true); git.run(&["gc", "--prune"]).await?; Ok(()) }) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 487e7f5f9699382ce4930141f7a0c7c50a1d23b8..b03c7d69ab05daf94254a9d47cb2ae23da3043d1 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -6,6 +6,9 @@ pub mod pending_op; use crate::{ ProjectEnvironment, ProjectItem, ProjectPath, buffer_store::{BufferStore, BufferStoreEvent}, + trusted_worktrees::{ + PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore, + }, worktree_store::{WorktreeStore, 
WorktreeStoreEvent}, }; use anyhow::{Context as _, Result, anyhow, bail}; @@ -354,6 +357,7 @@ impl LocalRepositoryState { dot_git_abs_path: Arc, project_environment: WeakEntity, fs: Arc, + is_trusted: bool, cx: &mut AsyncApp, ) -> anyhow::Result { let environment = project_environment @@ -381,6 +385,7 @@ impl LocalRepositoryState { } }) .await?; + backend.set_trusted(is_trusted); Ok(LocalRepositoryState { backend, environment: Arc::new(environment), @@ -495,11 +500,15 @@ impl GitStore { state: GitStoreState, cx: &mut Context, ) -> Self { - let _subscriptions = vec![ + let mut _subscriptions = vec![ cx.subscribe(&worktree_store, Self::on_worktree_store_event), cx.subscribe(&buffer_store, Self::on_buffer_store_event), ]; + if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) { + _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event)); + } + GitStore { state, buffer_store, @@ -1517,6 +1526,13 @@ impl GitStore { let original_repo_abs_path: Arc = git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into(); let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release)); + let is_trusted = TrustedWorktrees::try_get_global(cx) + .map(|trusted_worktrees| { + trusted_worktrees.update(cx, |trusted_worktrees, cx| { + trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx) + }) + }) + .unwrap_or(false); let git_store = cx.weak_entity(); let repo = cx.new(|cx| { let mut repo = Repository::local( @@ -1526,6 +1542,7 @@ impl GitStore { dot_git_abs_path.clone(), project_environment.downgrade(), fs.clone(), + is_trusted, git_store, cx, ); @@ -1566,6 +1583,39 @@ impl GitStore { } } + fn on_trusted_worktrees_event( + &mut self, + _: Entity, + event: &TrustedWorktreesEvent, + cx: &mut Context, + ) { + if !matches!(self.state, GitStoreState::Local { .. 
}) { + return; + } + + let (is_trusted, event_paths) = match event { + TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths), + TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths), + }; + + for (repo_id, worktree_ids) in &self.worktree_ids { + if worktree_ids + .iter() + .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id))) + { + if let Some(repo) = self.repositories.get(repo_id) { + let repository_state = repo.read(cx).repository_state.clone(); + cx.background_spawn(async move { + if let Ok(RepositoryState::Local(state)) = repository_state.await { + state.backend.set_trusted(is_trusted); + } + }) + .detach(); + } + } + } + } + fn on_buffer_store_event( &mut self, _: Entity, @@ -3763,6 +3813,13 @@ impl MergeDetails { } impl Repository { + pub fn is_trusted(&self) -> bool { + match self.repository_state.peek() { + Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(), + _ => false, + } + } + pub fn snapshot(&self) -> RepositorySnapshot { self.snapshot.clone() } @@ -3788,6 +3845,7 @@ impl Repository { dot_git_abs_path: Arc, project_environment: WeakEntity, fs: Arc, + is_trusted: bool, git_store: WeakEntity, cx: &mut Context, ) -> Self { @@ -3804,6 +3862,7 @@ impl Repository { dot_git_abs_path, project_environment, fs, + is_trusted, cx, ) .await diff --git a/crates/project/tests/integration/git_store.rs b/crates/project/tests/integration/git_store.rs index 88614cec68b542b3d08de11cfe0c5f3781d6b379..82e92bc4f1cfb606fb09d5efd5d341ed2951c067 100644 --- a/crates/project/tests/integration/git_store.rs +++ b/crates/project/tests/integration/git_store.rs @@ -1293,3 +1293,208 @@ mod git_worktrees { use crate::Project; } + +mod trust_tests { + use collections::HashSet; + use fs::FakeFs; + use gpui::TestAppContext; + use project::trusted_worktrees::*; + + use serde_json::json; + use settings::SettingsStore; + use util::path; + + use crate::Project; + + fn init_test(cx: &mut TestAppContext) 
{ + zlog::init_test(); + + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + } + + #[gpui::test] + async fn test_repository_defaults_to_untrusted_without_trust_system(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "hello", + }), + ) + .await; + + // Create project without trust system — repos should default to untrusted. + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + repository.read_with(cx, |repo, _| { + assert!( + !repo.is_trusted(), + "repository should default to untrusted when no trust system is initialized" + ); + }); + } + + #[gpui::test] + async fn test_multiple_repos_trust_with_single_worktree(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "hello", + "sub": { + ".git": {}, + "b.txt": "world", + }, + }), + ) + .await; + + cx.update(|cx| { + init(DbTrustedPaths::default(), cx); + }); + + let project = + Project::test_with_worktree_trust(fs.clone(), [path!("/project").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let worktree_store = project.read_with(cx, |project, _| project.worktree_store()); + let worktree_id = worktree_store.read_with(cx, |store, cx| { + store.worktrees().next().unwrap().read(cx).id() + }); + + let repos = project.read_with(cx, |project, cx| { + project + .repositories(cx) + .values() + .cloned() + .collect::>() + }); + assert_eq!(repos.len(), 2, "should have two repositories"); + for repo in &repos { + repo.read_with(cx, |repo, _| { + assert!( + !repo.is_trusted(), + "all repos should be untrusted initially" + ); + }); + } + + 
let trusted_worktrees = cx + .update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should be set")); + trusted_worktrees.update(cx, |store, cx| { + store.trust( + &worktree_store, + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + for repo in &repos { + repo.read_with(cx, |repo, _| { + assert!( + repo.is_trusted(), + "all repos should be trusted after worktree is trusted" + ); + }); + } + } + + #[gpui::test] + async fn test_repository_trust_restrict_trust_cycle(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/project"), + json!({ + ".git": {}, + "a.txt": "hello", + }), + ) + .await; + + cx.update(|cx| { + project::trusted_worktrees::init(DbTrustedPaths::default(), cx); + }); + + let project = + Project::test_with_worktree_trust(fs.clone(), [path!("/project").as_ref()], cx).await; + cx.executor().run_until_parked(); + + let worktree_store = project.read_with(cx, |project, _| project.worktree_store()); + let worktree_id = worktree_store.read_with(cx, |store, cx| { + store.worktrees().next().unwrap().read(cx).id() + }); + + let repository = project.read_with(cx, |project, cx| { + project.repositories(cx).values().next().unwrap().clone() + }); + + repository.read_with(cx, |repo, _| { + assert!(!repo.is_trusted(), "repository should start untrusted"); + }); + + let trusted_worktrees = cx + .update(|cx| TrustedWorktrees::try_get_global(cx).expect("trust global should be set")); + + trusted_worktrees.update(cx, |store, cx| { + store.trust( + &worktree_store, + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + repository.read_with(cx, |repo, _| { + assert!( + repo.is_trusted(), + "repository should be trusted after worktree is trusted" + ); + }); + + trusted_worktrees.update(cx, |store, cx| { + store.restrict( + worktree_store.downgrade(), + 
HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + repository.read_with(cx, |repo, _| { + assert!( + !repo.is_trusted(), + "repository should be untrusted after worktree is restricted" + ); + }); + + trusted_worktrees.update(cx, |store, cx| { + store.trust( + &worktree_store, + HashSet::from_iter([PathTrust::Worktree(worktree_id)]), + cx, + ); + }); + cx.executor().run_until_parked(); + + repository.read_with(cx, |repo, _| { + assert!( + repo.is_trusted(), + "repository should be trusted again after second trust" + ); + }); + } +} From 5641ccf250c7140559416342d8ecf59bdd4aabee Mon Sep 17 00:00:00 2001 From: Gaauwe Rombouts Date: Wed, 4 Mar 2026 15:38:31 +0100 Subject: [PATCH 301/548] docs: Add consent banner (#50302) Adds a consent banner, similar to the one on zed.dev Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- .github/workflows/deploy_cloudflare.yml | 1 + crates/docs_preprocessor/src/main.rs | 2 + docs/.prettierignore | 3 + docs/README.md | 16 ++ docs/book.toml | 4 +- docs/theme/analytics.js | 93 ++++++++ docs/theme/c15t@2.0.0-rc.3.js | 1 + docs/theme/consent-banner.css | 292 ++++++++++++++++++++++++ docs/theme/index.hbs | 102 +++++++-- typos.toml | 2 + 10 files changed, 497 insertions(+), 19 deletions(-) create mode 100644 docs/theme/analytics.js create mode 100644 docs/theme/c15t@2.0.0-rc.3.js create mode 100644 docs/theme/consent-banner.css diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml index cb0dfc2187a06cf62255b049b7e5fe74b10c505a..37f23b20d2825e9f3d26c456903962a10c2d0081 100644 --- 
a/.github/workflows/deploy_cloudflare.yml +++ b/.github/workflows/deploy_cloudflare.yml @@ -26,6 +26,7 @@ jobs: CC: clang CXX: clang++ DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }} + DOCS_CONSENT_IO_INSTANCE: ${{ secrets.DOCS_CONSENT_IO_INSTANCE }} - name: Deploy Docs uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3 diff --git a/crates/docs_preprocessor/src/main.rs b/crates/docs_preprocessor/src/main.rs index 6ef599542a5b2f511915d7435af192162a5dbd3b..43efbeea0b0310cf70cd9bdb560b1b0d2b0c14ef 100644 --- a/crates/docs_preprocessor/src/main.rs +++ b/crates/docs_preprocessor/src/main.rs @@ -578,6 +578,7 @@ fn handle_postprocessing() -> Result<()> { .expect("Default title not a string") .to_string(); let amplitude_key = std::env::var("DOCS_AMPLITUDE_API_KEY").unwrap_or_default(); + let consent_io_instance = std::env::var("DOCS_CONSENT_IO_INSTANCE").unwrap_or_default(); output.insert("html".to_string(), zed_html); mdbook::Renderer::render(&mdbook::renderer::HtmlHandlebars::new(), &ctx)?; @@ -647,6 +648,7 @@ fn handle_postprocessing() -> Result<()> { zlog::trace!(logger => "Updating {:?}", pretty_path(&file, &root_dir)); let contents = contents.replace("#description#", meta_description); let contents = contents.replace("#amplitude_key#", &litude_key); + let contents = contents.replace("#consent_io_instance#", &consent_io_instance); let contents = title_regex() .replace(&contents, |_: ®ex::Captures| { format!("{}", meta_title) diff --git a/docs/.prettierignore b/docs/.prettierignore index a52439689a83a1c2e834918c39441186b47120e5..c742ed4b6859f32219cecbac9f722db8a6929710 100644 --- a/docs/.prettierignore +++ b/docs/.prettierignore @@ -1,2 +1,5 @@ # Handlebars partials are not supported by Prettier. 
*.hbs + +# Automatically generated +theme/c15t@*.js diff --git a/docs/README.md b/docs/README.md index e1649f4bc99e1668352a46ee2071dcfe1775f4a7..a0f9bbd5c628f41d291880239ca555ea7ec0e3ea 100644 --- a/docs/README.md +++ b/docs/README.md @@ -64,6 +64,22 @@ This will render a human-readable version of the action name, e.g., "zed: open s Templates are functions that modify the source of the docs pages (usually with a regex match and replace). You can see how the actions and keybindings are templated in `crates/docs_preprocessor/src/main.rs` for reference on how to create new templates. +## Consent Banner + +We pre-bundle the `c15t` package because the docs pipeline does not include a JS bundler. If you need to update `c15t` and rebuild the bundle, use: + +``` +mkdir c15t-bundle && cd c15t-bundle +npm init -y +npm install c15t@ esbuild +echo "import { getOrCreateConsentRuntime } from 'c15t'; window.c15t = { getOrCreateConsentRuntime };" > entry.js +npx esbuild entry.js --bundle --format=iife --minify --outfile=c15t@.js +cp c15t@.js ../theme/c15t@.js +cd .. && rm -rf c15t-bundle +``` + +Replace `` with the new version of `c15t` you are installing. Then update `book.toml` to reference the new bundle filename. 
+ ### References - Template Trait: `crates/docs_preprocessor/src/templates.rs` diff --git a/docs/book.toml b/docs/book.toml index 86fa447f581fba88ff7df53bb51e08440585a9dc..3269003a1d37ede19ec18b62809a928a08764d2f 100644 --- a/docs/book.toml +++ b/docs/book.toml @@ -23,8 +23,8 @@ default-description = "Learn how to use and customize Zed, the fast, collaborati default-title = "Zed Code Editor Documentation" no-section-label = true preferred-dark-theme = "dark" -additional-css = ["theme/page-toc.css", "theme/plugins.css", "theme/highlight.css"] -additional-js = ["theme/page-toc.js", "theme/plugins.js"] +additional-css = ["theme/page-toc.css", "theme/plugins.css", "theme/highlight.css", "theme/consent-banner.css"] +additional-js = ["theme/page-toc.js", "theme/plugins.js", "theme/c15t@2.0.0-rc.3.js", "theme/analytics.js"] [output.zed-html.print] enable = false diff --git a/docs/theme/analytics.js b/docs/theme/analytics.js new file mode 100644 index 0000000000000000000000000000000000000000..6e9df27f30fc6d38ba6fb322f9888fda089bb20c --- /dev/null +++ b/docs/theme/analytics.js @@ -0,0 +1,93 @@ +const amplitudeKey = document.querySelector( + 'meta[name="amplitude-key"]', +)?.content; +const consentInstance = document.querySelector( + 'meta[name="consent-io-instance"]', +)?.content; + +document.addEventListener("DOMContentLoaded", () => { + if (!consentInstance || consentInstance.length === 0) return; + const { getOrCreateConsentRuntime } = window.c15t; + + const { consentStore } = getOrCreateConsentRuntime({ + mode: "c15t", + backendURL: consentInstance, + consentCategories: ["necessary", "measurement", "marketing"], + storageConfig: { + crossSubdomain: true, + }, + scripts: [ + { + id: "amplitude", + src: `https://cdn.amplitude.com/script/${amplitudeKey}.js`, + category: "measurement", + onLoad: () => { + window.amplitude.init(amplitudeKey, { + fetchRemoteConfig: true, + autocapture: true, + }); + }, + }, + ], + }); + + let previousActiveUI = 
consentStore.getState().activeUI; + const banner = document.getElementById("c15t-banner"); + const configureSection = document.getElementById("c15t-configure-section"); + const configureBtn = document.getElementById("c15t-configure-btn"); + const measurementToggle = document.getElementById("c15t-toggle-measurement"); + const marketingToggle = document.getElementById("c15t-toggle-marketing"); + + const toggleConfigureMode = () => { + const currentConsents = consentStore.getState().consents; + measurementToggle.checked = currentConsents + ? (currentConsents.measurement ?? false) + : false; + marketingToggle.checked = currentConsents + ? (currentConsents.marketing ?? false) + : false; + configureSection.style.display = "flex"; + configureBtn.innerHTML = "Save"; + configureBtn.className = "c15t-button secondary"; + configureBtn.title = ""; + }; + + consentStore.subscribe((state) => { + const hideBanner = + state.activeUI === "none" || + (state.activeUI === "banner" && state.mode === "opt-out"); + banner.style.display = hideBanner ? 
"none" : "block"; + + if (state.activeUI === "dialog" && previousActiveUI !== "dialog") { + toggleConfigureMode(); + } + + previousActiveUI = state.activeUI; + }); + + configureBtn.addEventListener("click", () => { + if (consentStore.getState().activeUI === "dialog") { + consentStore + .getState() + .setConsent("measurement", measurementToggle.checked); + consentStore.getState().setConsent("marketing", marketingToggle.checked); + consentStore.getState().saveConsents("custom"); + } else { + consentStore.getState().setActiveUI("dialog"); + } + }); + + document.getElementById("c15t-accept").addEventListener("click", () => { + consentStore.getState().saveConsents("all"); + }); + + document.getElementById("c15t-decline").addEventListener("click", () => { + consentStore.getState().saveConsents("necessary"); + }); + + document + .getElementById("c15t-manage-consent-btn") + .addEventListener("click", () => { + consentStore.getState().setActiveUI("dialog"); + }); +}); diff --git a/docs/theme/c15t@2.0.0-rc.3.js b/docs/theme/c15t@2.0.0-rc.3.js new file mode 100644 index 0000000000000000000000000000000000000000..5e4a38c12b605062bd8e7e77809d03e3aa11ff74 --- /dev/null +++ b/docs/theme/c15t@2.0.0-rc.3.js @@ -0,0 +1 @@ +(()=>{var ni=Object.defineProperty;var P=(n,e)=>()=>(n&&(e=n(n=0)),e);var Nt=(n,e)=>{for(var t in e)ni(n,t,{get:e[t],enumerable:!0})};var G,$t=P(()=>{G=class extends Error{constructor(e){super(e),this.name="DecodingError"}}});var q,Kt=P(()=>{q=class extends Error{constructor(e){super(e),this.name="EncodingError"}}});var oe,Yt=P(()=>{oe=class extends Error{constructor(e){super(e),this.name="GVLError"}}});var W,Wt=P(()=>{W=class extends Error{constructor(e,t,i=""){super(`invalid value ${t} passed for ${e} ${i}`),this.name="TCModelError"}}});var J=P(()=>{$t();Kt();Yt();Wt()});var pe,rt=P(()=>{J();pe=class{static DICT="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";static REVERSE_DICT=new 
Map([["A",0],["B",1],["C",2],["D",3],["E",4],["F",5],["G",6],["H",7],["I",8],["J",9],["K",10],["L",11],["M",12],["N",13],["O",14],["P",15],["Q",16],["R",17],["S",18],["T",19],["U",20],["V",21],["W",22],["X",23],["Y",24],["Z",25],["a",26],["b",27],["c",28],["d",29],["e",30],["f",31],["g",32],["h",33],["i",34],["j",35],["k",36],["l",37],["m",38],["n",39],["o",40],["p",41],["q",42],["r",43],["s",44],["t",45],["u",46],["v",47],["w",48],["x",49],["y",50],["z",51],["0",52],["1",53],["2",54],["3",55],["4",56],["5",57],["6",58],["7",59],["8",60],["9",61],["-",62],["_",63]]);static BASIS=6;static LCM=24;static encode(e){if(!/^[0-1]+$/.test(e))throw new q("Invalid bitField");let t=e.length%this.LCM;e+=t?"0".repeat(this.LCM-t):"";let i="";for(let s=0;s{Ee=class n{static langSet=new Set(["AR","BG","BS","CA","CS","CY","DA","DE","EL","EN","ES","ET","EU","FI","FR","GL","HE","HI","HR","HU","ID","IS","IT","JA","KA","KO","LT","LV","MK","MS","MT","NL","NO","PL","PT-BR","PT-PT","RO","RU","SK","SL","SQ","SR-LATN","SR-CYRL","SV","SW","TH","TL","TR","UK","VI","ZH","ZH-HANT"]);has(e){return n.langSet.has(e)}parseLanguage(e){e=e.toUpperCase();let t=e.split("-")[0];if(e.length>=2&&t.length==2){if(n.langSet.has(e))return e;if(n.langSet.has(t))return t;let i=t+"-"+t;if(n.langSet.has(i))return i;for(let s of n.langSet)if(s.indexOf(e)!==-1||s.indexOf(t)!==-1)return s}throw new Error(`unsupported language ${e}`)}forEach(e){n.langSet.forEach(e)}get size(){return n.langSet.size}}});var v,ot=P(()=>{v=class{static cmpId="cmpId";static cmpVersion="cmpVersion";static consentLanguage="consentLanguage";static consentScreen="consentScreen";static created="created";static supportOOB="supportOOB";static isServiceSpecific="isServiceSpecific";static lastUpdated="lastUpdated";static numCustomPurposes="numCustomPurposes";static policyVersion="policyVersion";static publisherCountryCode="publisherCountryCode";static publisherCustomConsents="publisherCustomConsents";static 
publisherCustomLegitimateInterests="publisherCustomLegitimateInterests";static publisherLegitimateInterests="publisherLegitimateInterests";static publisherConsents="publisherConsents";static publisherRestrictions="publisherRestrictions";static purposeConsents="purposeConsents";static purposeLegitimateInterests="purposeLegitimateInterests";static purposeOneTreatment="purposeOneTreatment";static specialFeatureOptins="specialFeatureOptins";static useNonStandardTexts="useNonStandardTexts";static vendorConsents="vendorConsents";static vendorLegitimateInterests="vendorLegitimateInterests";static vendorListVersion="vendorListVersion";static vendorsAllowed="vendorsAllowed";static vendorsDisclosed="vendorsDisclosed";static version="version"}});var Zt=P(()=>{});var Qt=P(()=>{});var te,fe=P(()=>{te=class{clone(){let e=new this.constructor;return Object.keys(this).forEach(i=>{let s=this.deepClone(this[i]);s!==void 0&&(e[i]=s)}),e}deepClone(e){let t=typeof e;if(t==="number"||t==="string"||t==="boolean")return e;if(e!==null&&t==="object"){if(typeof e.clone=="function")return e.clone();if(e instanceof Date)return new Date(e.getTime());if(e[Symbol.iterator]!==void 0){let i=[];for(let s of e)i.push(this.deepClone(s));return e instanceof Array?i:new e.constructor(i)}else{let i={};for(let s in e)e.hasOwnProperty(s)&&(i[s]=this.deepClone(e[s]));return i}}}}});var Z,Ge=P(()=>{(function(n){n[n.NOT_ALLOWED=0]="NOT_ALLOWED",n[n.REQUIRE_CONSENT=1]="REQUIRE_CONSENT",n[n.REQUIRE_LI=2]="REQUIRE_LI"})(Z||(Z={}))});var ae,at=P(()=>{fe();J();Ge();ae=class n extends te{static hashSeparator="-";purposeId_;restrictionType;constructor(e,t){super(),e!==void 0&&(this.purposeId=e),t!==void 0&&(this.restrictionType=t)}static unHash(e){let t=e.split(this.hashSeparator),i=new n;if(t.length!==2)throw new W("hash",e);return i.purposeId=parseInt(t[0],10),i.restrictionType=parseInt(t[1],10),i}get hash(){if(!this.isValid())throw new Error("cannot hash invalid 
PurposeRestriction");return`${this.purposeId}${n.hashSeparator}${this.restrictionType}`}get purposeId(){return this.purposeId_}set purposeId(e){this.purposeId_=e}isValid(){return Number.isInteger(this.purposeId)&&this.purposeId>0&&(this.restrictionType===Z.NOT_ALLOWED||this.restrictionType===Z.REQUIRE_CONSENT||this.restrictionType===Z.REQUIRE_LI)}isSameAs(e){return this.purposeId===e.purposeId&&this.restrictionType===e.restrictionType}}});var he,Xt=P(()=>{at();Ge();fe();he=class extends te{bitLength=0;map=new Map;gvl_;has(e){return this.map.has(e)}isOkToHave(e,t,i){let s=!0;if(this.gvl?.vendors){let r=this.gvl.vendors[i];if(r)if(e===Z.NOT_ALLOWED)s=r.legIntPurposes.includes(t)||r.purposes.includes(t);else if(r.flexiblePurposes.length)switch(e){case Z.REQUIRE_CONSENT:s=r.flexiblePurposes.includes(t)&&r.legIntPurposes.includes(t);break;case Z.REQUIRE_LI:s=r.flexiblePurposes.includes(t)&&r.purposes.includes(t);break}else s=!1;else s=!1}return s}add(e,t){if(this.isOkToHave(t.restrictionType,t.purposeId,e)){let i=t.hash;this.has(i)||(this.map.set(i,new Set),this.bitLength=0),this.map.get(i).add(e)}}restrictPurposeToLegalBasis(e){let t=Array.from(this.gvl.vendorIds),i=e.hash,s=t[t.length-1],r=[...Array(s).keys()].map(o=>o+1);if(!this.has(i))this.map.set(i,new Set(r)),this.bitLength=0;else for(let o=1;o<=s;o++)this.map.get(i).add(o)}getVendors(e){let t=[];if(e){let i=e.hash;this.has(i)&&(t=Array.from(this.map.get(i)))}else{let i=new Set;this.map.forEach(s=>{s.forEach(r=>{i.add(r)})}),t=Array.from(i)}return t.sort((i,s)=>i-s)}getRestrictionType(e,t){let i;return this.getRestrictions(e).forEach(s=>{s.purposeId===t&&(i===void 0||i>s.restrictionType)&&(i=s.restrictionType)}),i}vendorHasRestriction(e,t){let i=!1,s=this.getRestrictions(e);for(let r=0;r{e=Math.max(Array.from(t)[t.size-1],e)}),e}getRestrictions(e){let t=[];return this.map.forEach((i,s)=>{e?i.has(e)&&t.push(ae.unHash(s)):t.push(ae.unHash(s))}),t}getPurposes(){let e=new Set;return 
this.map.forEach((t,i)=>{e.add(ae.unHash(i).purposeId)}),Array.from(e)}remove(e,t){let i=t.hash,s=this.map.get(i);s&&(s.delete(e),s.size==0&&(this.map.delete(i),this.bitLength=0))}set gvl(e){this.gvl_||(this.gvl_=e,this.map.forEach((t,i)=>{let s=ae.unHash(i);Array.from(t).forEach(o=>{this.isOkToHave(s.restrictionType,s.purposeId,o)||t.delete(o)})}))}get gvl(){return this.gvl_}isEmpty(){return this.map.size===0}get numRestrictions(){return this.map.size}}});var ct,en=P(()=>{(function(n){n.COOKIE="cookie",n.WEB="web",n.APP="app"})(ct||(ct={}))});var tn=P(()=>{});var N,lt=P(()=>{(function(n){n.CORE="core",n.VENDORS_DISCLOSED="vendorsDisclosed",n.VENDORS_ALLOWED="vendorsAllowed",n.PUBLISHER_TC="publisherTC"})(N||(N={}))});var ke,nn=P(()=>{lt();ke=class{static ID_TO_KEY=[N.CORE,N.VENDORS_DISCLOSED,N.VENDORS_ALLOWED,N.PUBLISHER_TC];static KEY_TO_ID={[N.CORE]:0,[N.VENDORS_DISCLOSED]:1,[N.VENDORS_ALLOWED]:2,[N.PUBLISHER_TC]:3}}});var H,sn=P(()=>{fe();J();H=class extends te{bitLength=0;maxId_=0;set_=new Set;*[Symbol.iterator](){for(let e=1;e<=this.maxId;e++)yield[e,this.has(e)]}values(){return this.set_.values()}get maxId(){return this.maxId_}has(e){return this.set_.has(e)}unset(e){Array.isArray(e)?e.forEach(t=>this.unset(t)):typeof e=="object"?this.unset(Object.keys(e).map(t=>Number(t))):(this.set_.delete(Number(e)),this.bitLength=0,e===this.maxId&&(this.maxId_=0,this.set_.forEach(t=>{this.maxId_=Math.max(this.maxId,t)})))}isIntMap(e){let t=typeof e=="object";return t=t&&Object.keys(e).every(i=>{let s=Number.isInteger(parseInt(i,10));return s=s&&this.isValidNumber(e[i].id),s=s&&e[i].name!==void 0,s}),t}isValidNumber(e){return parseInt(e,10)>0}isSet(e){let t=!1;return e instanceof Set&&(t=Array.from(e).every(this.isValidNumber)),t}set(e){if(Array.isArray(e))e.forEach(t=>this.set(t));else if(this.isSet(e))this.set(Array.from(e));else if(this.isIntMap(e))this.set(Object.keys(e).map(t=>Number(t)));else 
if(this.isValidNumber(e))this.set_.add(e),this.maxId_=Math.max(this.maxId,e),this.bitLength=0;else throw new W("set()",e,"must be positive integer array, positive integer, Set, or IntMap")}empty(){this.set_=new Set,this.maxId_=0}forEach(e){for(let t=1;t<=this.maxId;t++)e(this.has(t),t)}get size(){return this.set_.size}setAll(e){this.set(e)}unsetAll(e){this.unset(e)}}});var rn=P(()=>{});var on=P(()=>{});var an=P(()=>{});var cn=P(()=>{});var ln=P(()=>{});var un=P(()=>{});var dn=P(()=>{});var pn=P(()=>{});var gn=P(()=>{});var mn=P(()=>{});var fn=P(()=>{});var hn=P(()=>{rn();on();an();cn();ln();un();dn();pn();gn();mn();fn()});var Q=P(()=>{Jt();ot();Zt();Qt();at();Xt();en();tn();Ge();lt();nn();sn();hn()});var S,He=P(()=>{Q();S=class{static[v.cmpId]=12;static[v.cmpVersion]=12;static[v.consentLanguage]=12;static[v.consentScreen]=6;static[v.created]=36;static[v.isServiceSpecific]=1;static[v.lastUpdated]=36;static[v.policyVersion]=6;static[v.publisherCountryCode]=12;static[v.publisherLegitimateInterests]=24;static[v.publisherConsents]=24;static[v.purposeConsents]=24;static[v.purposeLegitimateInterests]=24;static[v.purposeOneTreatment]=1;static[v.specialFeatureOptins]=12;static[v.useNonStandardTexts]=1;static[v.vendorListVersion]=12;static[v.version]=6;static anyBoolean=1;static encodingType=1;static maxId=16;static numCustomPurposes=6;static numEntries=12;static numRestrictions=12;static purposeId=6;static restrictionType=2;static segmentType=3;static singleOrRange=1;static vendorId=16}});var kn=P(()=>{});var $,je=P(()=>{$=class{static encode(e){return String(Number(e))}static decode(e){return e==="1"}}});var L,ge=P(()=>{J();L=class{static encode(e,t){let i;if(typeof e=="string"&&(e=parseInt(e,10)),i=e.toString(2),i.length>t||e<0)throw new q(`${e} too large to encode into ${t}`);return i.length{ge();J();Se=class{static encode(e,t){return L.encode(Math.round(e.getTime()/100),t)}static decode(e,t){if(t!==e.length)throw new G("invalid bit length");let i=new Date;return 
i.setTime(L.decode(e,t)*100),i}}});var ne,qe=P(()=>{je();J();Q();ne=class{static encode(e,t){let i="";for(let s=1;s<=t;s++)i+=$.encode(e.has(s));return i}static decode(e,t){if(e.length!==t)throw new G("bitfield encoding length mismatch");let i=new H;for(let s=1;s<=t;s++)$.decode(e[s-1])&&i.set(s);return i.bitLength=e.length,i}}});var Ae,dt=P(()=>{ge();J();Ae=class{static encode(e,t){e=e.toUpperCase();let i=65,s=e.charCodeAt(0)-i,r=e.charCodeAt(1)-i;if(s<0||s>25||r<0||r>25)throw new q(`invalid language code: ${e}`);if(t%2===1)throw new q(`numBits must be even, ${t} is not valid`);t=t/2;let o=L.encode(s,t),a=L.encode(r,t);return o+a}static decode(e,t){let i;if(t===e.length&&!(e.length%2)){let r=e.length/2,o=L.decode(e.slice(0,r),r)+65,a=L.decode(e.slice(r),r)+65;i=String.fromCharCode(o)+String.fromCharCode(a)}else throw new G("invalid bit length for language");return i}}});var Ve,pt=P(()=>{He();je();J();ge();Q();Ve=class{static encode(e){let t=L.encode(e.numRestrictions,S.numRestrictions);if(!e.isEmpty()){let i=(s,r)=>{for(let o=s+1;o<=r;o++)if(e.gvl.vendorIds.has(o))return o;return s};e.getRestrictions().forEach(s=>{t+=L.encode(s.purposeId,S.purposeId),t+=L.encode(s.restrictionType,S.restrictionType);let r=e.getVendors(s),o=r.length,a=0,c=0,u="";for(let p=0;pi(d,r[o-1])){let l=d!==c;u+=$.encode(l),u+=L.encode(c,S.vendorId),l&&(u+=L.encode(d,S.vendorId)),c=0}}t+=L.encode(a,S.numEntries),t+=u})}return t}static decode(e){let t=0,i=new he,s=L.decode(e.substr(t,S.numRestrictions),S.numRestrictions);t+=S.numRestrictions;for(let r=0;r{(function(n){n[n.FIELD=0]="FIELD",n[n.RANGE=1]="RANGE"})(ve||(ve={}))});var ce,mt=P(()=>{Q();$e();ge();je();qe();gt();J();ce=class{static encode(e){let t=[],i=[],s=L.encode(e.maxId,S.maxId),r="",o,a=S.maxId+S.encodingType,c=a+e.maxId,u=S.vendorId*2+S.singleOrRange+S.numEntries,p=a+S.numEntries;return e.forEach((d,l)=>{r+=$.encode(d),o=e.maxId>u&&p{let 
r=s.length===1;i+=$.encode(!r),i+=L.encode(s[0],S.vendorId),r||(i+=L.encode(s[1],S.vendorId))}),i}}});function Ke(){return{[v.version]:L,[v.created]:Se,[v.lastUpdated]:Se,[v.cmpId]:L,[v.cmpVersion]:L,[v.consentScreen]:L,[v.consentLanguage]:Ae,[v.vendorListVersion]:L,[v.policyVersion]:L,[v.isServiceSpecific]:$,[v.useNonStandardTexts]:$,[v.specialFeatureOptins]:ne,[v.purposeConsents]:ne,[v.purposeLegitimateInterests]:ne,[v.purposeOneTreatment]:$,[v.publisherCountryCode]:Ae,[v.vendorConsents]:ce,[v.vendorLegitimateInterests]:ce,[v.publisherRestrictions]:Ve,segmentType:L,[v.vendorsDisclosed]:ce,[v.vendorsAllowed]:ce,[v.publisherConsents]:ne,[v.publisherLegitimateInterests]:ne,[v.numCustomPurposes]:L,[v.publisherCustomConsents]:ne,[v.publisherCustomLegitimateInterests]:ne}}var vn=P(()=>{Q();je();ut();qe();ge();dt();pt();mt()});var ft=P(()=>{je();ut();vn();qe();ge();dt();pt();gt();mt()});var xe,yn=P(()=>{Q();xe=class{1={[N.CORE]:[v.version,v.created,v.lastUpdated,v.cmpId,v.cmpVersion,v.consentScreen,v.consentLanguage,v.vendorListVersion,v.purposeConsents,v.vendorConsents]};2={[N.CORE]:[v.version,v.created,v.lastUpdated,v.cmpId,v.cmpVersion,v.consentScreen,v.consentLanguage,v.vendorListVersion,v.policyVersion,v.isServiceSpecific,v.useNonStandardTexts,v.specialFeatureOptins,v.purposeConsents,v.purposeLegitimateInterests,v.purposeOneTreatment,v.publisherCountryCode,v.vendorConsents,v.vendorLegitimateInterests,v.publisherRestrictions],[N.VENDORS_DISCLOSED]:[v.vendorsDisclosed],[N.PUBLISHER_TC]:[v.publisherConsents,v.publisherLegitimateInterests,v.numCustomPurposes,v.publisherCustomConsents,v.publisherCustomLegitimateInterests],[N.VENDORS_ALLOWED]:[v.vendorsAllowed]}}});var Fe,bn=P(()=>{Q();Fe=class{1=[N.CORE];2=[N.CORE];constructor(e,t){if(e.version===2)if(e.isServiceSpecific)this[2].push(N.VENDORS_DISCLOSED),this[2].push(N.PUBLISHER_TC);else{let 
i=!!(t&&t.isForVendors);(!i||e[v.supportOOB]===!0)&&this[2].push(N.VENDORS_DISCLOSED),i&&(e[v.supportOOB]&&e[v.vendorsAllowed].size>0&&this[2].push(N.VENDORS_ALLOWED),this[2].push(N.PUBLISHER_TC))}}}});var wn=P(()=>{});var ht=P(()=>{yn();bn();wn()});var ze,Cn=P(()=>{rt();He();ft();ht();J();ot();Q();ze=class{static fieldSequence=new xe;static encode(e,t){let i;try{i=this.fieldSequence[String(e.version)][t]}catch{throw new q(`Unable to encode version: ${e.version}, segment: ${t}`)}let s="";t!==N.CORE&&(s=L.encode(ke.KEY_TO_ID[t],S.segmentType));let r=Ke();return i.forEach(o=>{let a=e[o],c=r[o],u=S[o];u===void 0&&this.isPublisherCustom(o)&&(u=Number(e[v.numCustomPurposes]));try{s+=c.encode(a,u)}catch(p){throw new q(`Error encoding ${t}->${o}: ${p.message}`)}}),pe.encode(s)}static decode(e,t,i){let s=pe.decode(e),r=0;i===N.CORE&&(t.version=L.decode(s.substr(r,S[v.version]),S[v.version])),i!==N.CORE&&(r+=S.segmentType);let o=this.fieldSequence[String(t.version)][i],a=Ke();return o.forEach(c=>{let u=a[c],p=S[c];if(p===void 0&&this.isPublisherCustom(c)&&(p=Number(t[v.numCustomPurposes])),p!==0){let d=s.substr(r,p);if(u===ce?t[c]=u.decode(d,t.version):t[c]=u.decode(d,p),Number.isInteger(p))r+=p;else if(Number.isInteger(t[c].bitLength))r+=t[c].bitLength;else throw new G(c)}}),t}static isPublisherCustom(e){return e.indexOf("publisherCustom")===0}}});var Be,In=P(()=>{J();Q();Be=class{static processor=[e=>e,(e,t)=>{e.publisherRestrictions.gvl=t,e.purposeLegitimateInterests.unset([1,3,4,5,6]);let i=new Map;return i.set("legIntPurposes",e.vendorLegitimateInterests),i.set("purposes",e.vendorConsents),i.forEach((s,r)=>{s.forEach((o,a)=>{if(o){let c=t.vendors[a];if(!c||c.deletedDate)s.unset(a);else if(c[r].length===0)if(r==="legIntPurposes"&&c.purposes.length===0&&c.legIntPurposes.length===0&&c.specialPurposes.length>0)s.set(a);else if(r==="legIntPurposes"&&c.purposes.length>0&&c.legIntPurposes.length===0&&c.specialPurposes.length>0)s.set(a);else 
if(e.isServiceSpecific)if(c.flexiblePurposes.length===0)s.unset(a);else{let u=e.publisherRestrictions.getRestrictions(a),p=!1;for(let d=0,l=u.length;d0&&t?.version<=this.processor.length?e.version=t.version:e.version=this.processor.length;let s=e.version-1;if(!this.processor[s])throw new q(`Invalid version: ${e.version}`);return this.processor[s](e,i)}}});var $e=P(()=>{rt();He();kn();Cn();In();ft();ht()});var De,kt=P(()=>{De=class{static absCall(e,t,i,s){return new Promise((r,o)=>{let a=new XMLHttpRequest,c=()=>{if(a.readyState==XMLHttpRequest.DONE)if(a.status>=200&&a.status<300){let l=a.response;if(typeof l=="string")try{l=JSON.parse(l)}catch{}r(l)}else o(new Error(`HTTP Status: ${a.status} response type: ${a.responseType}`))},u=()=>{o(new Error("error"))},p=()=>{o(new Error("aborted"))},d=()=>{o(new Error("Timeout "+s+"ms "+e))};a.withCredentials=i,a.addEventListener("load",c),a.addEventListener("error",u),a.addEventListener("abort",p),t===null?a.open("GET",e,!0):a.open("POST",e,!0),a.responseType="json",a.timeout=s,a.ontimeout=d,a.send(t)})}static post(e,t,i=!1,s=0){return this.absCall(e,JSON.stringify(t),i,s)}static fetch(e,t=!1,i=0){return this.absCall(e,null,t,i)}}});var ye,vt=P(()=>{fe();J();kt();Q();ye=class n extends te{static LANGUAGE_CACHE=new Map;static CACHE=new Map;static LATEST_CACHE_KEY=0;static DEFAULT_LANGUAGE="EN";static consentLanguages=new Ee;static baseUrl_;static set baseUrl(e){if(/^https?:\/\/vendorlist\.consensu\.org\//.test(e))throw new oe("Invalid baseUrl! 
You may not pull directly from vendorlist.consensu.org and must provide your own cache");e.length>0&&e[e.length-1]!=="/"&&(e+="/"),this.baseUrl_=e}static get baseUrl(){return this.baseUrl_}static latestFilename="vendor-list.json";static versionedFilename="archives/vendor-list-v[VERSION].json";static languageFilename="purposes-[LANG].json";readyPromise;gvlSpecificationVersion;vendorListVersion;tcfPolicyVersion;lastUpdated;purposes;specialPurposes;features;specialFeatures;isReady_=!1;vendors_;vendorIds;fullVendorList;byPurposeVendorMap;bySpecialPurposeVendorMap;byFeatureVendorMap;bySpecialFeatureVendorMap;stacks;dataCategories;lang_;cacheLang_;isLatest=!1;constructor(e,t){super();let i=n.baseUrl,s=t?.language;if(s)try{s=n.consentLanguages.parseLanguage(s)}catch(r){throw new oe("Error during parsing the language: "+r.message)}if(this.lang_=s||n.DEFAULT_LANGUAGE,this.cacheLang_=s||n.DEFAULT_LANGUAGE,this.isVendorList(e))this.populate(e),this.readyPromise=Promise.resolve();else{if(!i)throw new oe("must specify GVL.baseUrl before loading GVL json");if(e>0){let r=e;n.CACHE.has(r)?(this.populate(n.CACHE.get(r)),this.readyPromise=Promise.resolve()):(i+=n.versionedFilename.replace("[VERSION]",String(r)),this.readyPromise=this.fetchJson(i))}else n.CACHE.has(n.LATEST_CACHE_KEY)?(this.populate(n.CACHE.get(n.LATEST_CACHE_KEY)),this.readyPromise=Promise.resolve()):(this.isLatest=!0,this.readyPromise=this.fetchJson(i+n.latestFilename))}}static emptyLanguageCache(e){let t=!1;return e==null&&n.LANGUAGE_CACHE.size>0?(n.LANGUAGE_CACHE=new Map,t=!0):typeof e=="string"&&this.consentLanguages.has(e.toUpperCase())&&(n.LANGUAGE_CACHE.delete(e.toUpperCase()),t=!0),t}static emptyCache(e){let t=!1;return Number.isInteger(e)&&e>=0?(n.CACHE.delete(e),t=!0):e===void 0&&(n.CACHE=new 
Map,t=!0),t}cacheLanguage(){n.LANGUAGE_CACHE.has(this.cacheLang_)||n.LANGUAGE_CACHE.set(this.cacheLang_,{purposes:this.purposes,specialPurposes:this.specialPurposes,features:this.features,specialFeatures:this.specialFeatures,stacks:this.stacks,dataCategories:this.dataCategories})}async fetchJson(e){try{this.populate(await De.fetch(e))}catch(t){throw new oe(t.message)}}getJson(){return{gvlSpecificationVersion:this.gvlSpecificationVersion,vendorListVersion:this.vendorListVersion,tcfPolicyVersion:this.tcfPolicyVersion,lastUpdated:this.lastUpdated,purposes:this.clonePurposes(),specialPurposes:this.cloneSpecialPurposes(),features:this.cloneFeatures(),specialFeatures:this.cloneSpecialFeatures(),stacks:this.cloneStacks(),...this.dataCategories?{dataCategories:this.cloneDataCategories()}:{},vendors:this.cloneVendors()}}cloneSpecialFeatures(){let e={};for(let t of Object.keys(this.specialFeatures))e[t]=n.cloneFeature(this.specialFeatures[t]);return e}cloneFeatures(){let e={};for(let t of Object.keys(this.features))e[t]=n.cloneFeature(this.features[t]);return e}cloneStacks(){let e={};for(let t of Object.keys(this.stacks))e[t]=n.cloneStack(this.stacks[t]);return e}cloneDataCategories(){let e={};for(let t of Object.keys(this.dataCategories))e[t]=n.cloneDataCategory(this.dataCategories[t]);return e}cloneSpecialPurposes(){let e={};for(let t of Object.keys(this.specialPurposes))e[t]=n.clonePurpose(this.specialPurposes[t]);return e}clonePurposes(){let e={};for(let t of Object.keys(this.purposes))e[t]=n.clonePurpose(this.purposes[t]);return e}static clonePurpose(e){return{id:e.id,name:e.name,description:e.description,...e.descriptionLegal?{descriptionLegal:e.descriptionLegal}:{},...e.illustrations?{illustrations:Array.from(e.illustrations)}:{}}}static cloneFeature(e){return{id:e.id,name:e.name,description:e.description,...e.descriptionLegal?{descriptionLegal:e.descriptionLegal}:{},...e.illustrations?{illustrations:Array.from(e.illustrations)}:{}}}static 
cloneDataCategory(e){return{id:e.id,name:e.name,description:e.description}}static cloneStack(e){return{id:e.id,name:e.name,description:e.description,purposes:Array.from(e.purposes),specialFeatures:Array.from(e.specialFeatures)}}static cloneDataRetention(e){return{...typeof e.stdRetention=="number"?{stdRetention:e.stdRetention}:{},purposes:{...e.purposes},specialPurposes:{...e.specialPurposes}}}static cloneVendorUrls(e){return e.map(t=>({langId:t.langId,privacy:t.privacy,...t.legIntClaim?{legIntClaim:t.legIntClaim}:{}}))}static cloneVendor(e){return{id:e.id,name:e.name,purposes:Array.from(e.purposes),legIntPurposes:Array.from(e.legIntPurposes),flexiblePurposes:Array.from(e.flexiblePurposes),specialPurposes:Array.from(e.specialPurposes),features:Array.from(e.features),specialFeatures:Array.from(e.specialFeatures),...e.overflow?{overflow:{httpGetLimit:e.overflow.httpGetLimit}}:{},...typeof e.cookieMaxAgeSeconds=="number"||e.cookieMaxAgeSeconds===null?{cookieMaxAgeSeconds:e.cookieMaxAgeSeconds}:{},...e.usesCookies!==void 0?{usesCookies:e.usesCookies}:{},...e.policyUrl?{policyUrl:e.policyUrl}:{},...e.cookieRefresh!==void 0?{cookieRefresh:e.cookieRefresh}:{},...e.usesNonCookieAccess!==void 0?{usesNonCookieAccess:e.usesNonCookieAccess}:{},...e.dataRetention?{dataRetention:this.cloneDataRetention(e.dataRetention)}:{},...e.urls?{urls:this.cloneVendorUrls(e.urls)}:{},...e.dataDeclaration?{dataDeclaration:Array.from(e.dataDeclaration)}:{},...e.deviceStorageDisclosureUrl?{deviceStorageDisclosureUrl:e.deviceStorageDisclosureUrl}:{},...e.deletedDate?{deletedDate:e.deletedDate}:{}}}cloneVendors(){let e={};for(let t of Object.keys(this.fullVendorList))e[t]=n.cloneVendor(this.fullVendorList[t]);return e}async changeLanguage(e){let t=e;try{t=n.consentLanguages.parseLanguage(e)}catch(s){throw new oe("Error during parsing the language: "+s.message)}let 
i=e.toUpperCase();if(!(t.toLowerCase()===n.DEFAULT_LANGUAGE.toLowerCase()&&!n.LANGUAGE_CACHE.has(i))&&t!==this.lang_)if(this.lang_=t,n.LANGUAGE_CACHE.has(i)){let s=n.LANGUAGE_CACHE.get(i);for(let r in s)s.hasOwnProperty(r)&&(this[r]=s[r])}else{let s=n.baseUrl+n.languageFilename.replace("[LANG]",this.lang_.toLowerCase());try{await this.fetchJson(s),this.cacheLang_=i,this.cacheLanguage()}catch(r){throw new oe("unable to load language: "+r.message)}}}get language(){return this.lang_}isVendorList(e){return e!==void 0&&e.vendors!==void 0}populate(e){this.purposes=e.purposes,this.specialPurposes=e.specialPurposes,this.features=e.features,this.specialFeatures=e.specialFeatures,this.stacks=e.stacks,this.dataCategories=e.dataCategories,this.isVendorList(e)&&(this.gvlSpecificationVersion=e.gvlSpecificationVersion,this.tcfPolicyVersion=e.tcfPolicyVersion,this.vendorListVersion=e.vendorListVersion,this.lastUpdated=e.lastUpdated,typeof this.lastUpdated=="string"&&(this.lastUpdated=new Date(this.lastUpdated)),this.vendors_=e.vendors,this.fullVendorList=e.vendors,this.mapVendors(),this.isReady_=!0,this.isLatest&&n.CACHE.set(n.LATEST_CACHE_KEY,this.getJson()),n.CACHE.has(this.vendorListVersion)||n.CACHE.set(this.vendorListVersion,this.getJson())),this.cacheLanguage()}mapVendors(e){this.byPurposeVendorMap={},this.bySpecialPurposeVendorMap={},this.byFeatureVendorMap={},this.bySpecialFeatureVendorMap={},Object.keys(this.purposes).forEach(t=>{this.byPurposeVendorMap[t]={legInt:new Set,consent:new Set,flexible:new Set}}),Object.keys(this.specialPurposes).forEach(t=>{this.bySpecialPurposeVendorMap[t]=new Set}),Object.keys(this.features).forEach(t=>{this.byFeatureVendorMap[t]=new Set}),Object.keys(this.specialFeatures).forEach(t=>{this.bySpecialFeatureVendorMap[t]=new Set}),Array.isArray(e)||(e=Object.keys(this.fullVendorList).map(t=>+t)),this.vendorIds=new Set(e),this.vendors_=e.reduce((t,i)=>{let s=this.vendors_[String(i)];return s&&s.deletedDate===void 
0&&(s.purposes.forEach(r=>{this.byPurposeVendorMap[String(r)].consent.add(i)}),s.specialPurposes.forEach(r=>{this.bySpecialPurposeVendorMap[String(r)].add(i)}),s.legIntPurposes.forEach(r=>{this.byPurposeVendorMap[String(r)].legInt.add(i)}),s.flexiblePurposes&&s.flexiblePurposes.forEach(r=>{this.byPurposeVendorMap[String(r)].flexible.add(i)}),s.features.forEach(r=>{this.byFeatureVendorMap[String(r)].add(i)}),s.specialFeatures.forEach(r=>{this.bySpecialFeatureVendorMap[String(r)].add(i)}),t[i]=s),t},{})}getFilteredVendors(e,t,i,s){let r=e.charAt(0).toUpperCase()+e.slice(1),o,a={};return e==="purpose"&&i?o=this["by"+r+"VendorMap"][String(t)][i]:o=this["by"+(s?"Special":"")+r+"VendorMap"][String(t)],o.forEach(c=>{a[String(c)]=this.vendors[String(c)]}),a}getVendorsWithConsentPurpose(e){return this.getFilteredVendors("purpose",e,"consent")}getVendorsWithLegIntPurpose(e){return this.getFilteredVendors("purpose",e,"legInt")}getVendorsWithFlexiblePurpose(e){return this.getFilteredVendors("purpose",e,"flexible")}getVendorsWithSpecialPurpose(e){return this.getFilteredVendors("purpose",e,void 0,!0)}getVendorsWithFeature(e){return this.getFilteredVendors("feature",e)}getVendorsWithSpecialFeature(e){return this.getFilteredVendors("feature",e,void 0,!0)}get vendors(){return this.vendors_}narrowVendorsTo(e){this.mapVendors(e)}get isReady(){return this.isReady_}clone(){let e=new n(this.getJson());return this.lang_!==n.DEFAULT_LANGUAGE&&e.changeLanguage(this.lang_),e}static isInstanceOf(e){return typeof e=="object"&&typeof e.narrowVendorsTo=="function"}}});var Ne,yt=P(()=>{fe();J();vt();Q();Ne=class extends te{static consentLanguages=ye.consentLanguages;isServiceSpecific_=!0;supportOOB_=!1;useNonStandardTexts_=!1;purposeOneTreatment_=!1;publisherCountryCode_="AA";version_=2;consentScreen_=0;policyVersion_=5;consentLanguage_="EN";cmpId_=0;cmpVersion_=0;vendorListVersion_=0;numCustomPurposes_=0;gvl_;created;lastUpdated;specialFeatureOptins=new H;purposeConsents=new 
H;purposeLegitimateInterests=new H;publisherConsents=new H;publisherLegitimateInterests=new H;publisherCustomConsents=new H;publisherCustomLegitimateInterests=new H;customPurposes;vendorConsents=new H;vendorLegitimateInterests=new H;vendorsDisclosed=new H;vendorsAllowed=new H;publisherRestrictions=new he;constructor(e){super(),e&&(this.gvl=e),this.updated()}set gvl(e){ye.isInstanceOf(e)||(e=new ye(e)),this.gvl_=e,this.publisherRestrictions.gvl=e}get gvl(){return this.gvl_}set cmpId(e){if(e=Number(e),Number.isInteger(e)&&e>1)this.cmpId_=e;else throw new W("cmpId",e)}get cmpId(){return this.cmpId_}set cmpVersion(e){if(e=Number(e),Number.isInteger(e)&&e>-1)this.cmpVersion_=e;else throw new W("cmpVersion",e)}get cmpVersion(){return this.cmpVersion_}set consentScreen(e){if(e=Number(e),Number.isInteger(e)&&e>-1)this.consentScreen_=e;else throw new W("consentScreen",e)}get consentScreen(){return this.consentScreen_}set consentLanguage(e){this.consentLanguage_=e}get consentLanguage(){return this.consentLanguage_}set publisherCountryCode(e){if(/^([A-z]){2}$/.test(e))this.publisherCountryCode_=e.toUpperCase();else throw new W("publisherCountryCode",e)}get publisherCountryCode(){return this.publisherCountryCode_}set vendorListVersion(e){if(e=Number(e)>>0,e<0)throw new W("vendorListVersion",e);this.vendorListVersion_=e}get vendorListVersion(){return this.gvl?this.gvl.vendorListVersion:this.vendorListVersion_}set policyVersion(e){if(this.policyVersion_=parseInt(e,10),this.policyVersion_<0)throw new W("policyVersion",e)}get policyVersion(){return this.gvl?this.gvl.tcfPolicyVersion:this.policyVersion_}set version(e){this.version_=parseInt(e,10)}get version(){return this.version_}set isServiceSpecific(e){this.isServiceSpecific_=e}get isServiceSpecific(){return this.isServiceSpecific_}set useNonStandardTexts(e){this.useNonStandardTexts_=e}get useNonStandardTexts(){return this.useNonStandardTexts_}set supportOOB(e){this.supportOOB_=e}get supportOOB(){return this.supportOOB_}set 
purposeOneTreatment(e){this.purposeOneTreatment_=e}get purposeOneTreatment(){return this.purposeOneTreatment_}setAllVendorConsents(){this.vendorConsents.set(this.gvl.vendors)}unsetAllVendorConsents(){this.vendorConsents.empty()}setAllVendorsDisclosed(){this.vendorsDisclosed.set(this.gvl.vendors)}unsetAllVendorsDisclosed(){this.vendorsDisclosed.empty()}setAllVendorsAllowed(){this.vendorsAllowed.set(this.gvl.vendors)}unsetAllVendorsAllowed(){this.vendorsAllowed.empty()}setAllVendorLegitimateInterests(){this.vendorLegitimateInterests.set(this.gvl.vendors)}unsetAllVendorLegitimateInterests(){this.vendorLegitimateInterests.empty()}setAllPurposeConsents(){this.purposeConsents.set(this.gvl.purposes)}unsetAllPurposeConsents(){this.purposeConsents.empty()}setAllPurposeLegitimateInterests(){this.purposeLegitimateInterests.set(this.gvl.purposes)}unsetAllPurposeLegitimateInterests(){this.purposeLegitimateInterests.empty()}setAllSpecialFeatureOptins(){this.specialFeatureOptins.set(this.gvl.specialFeatures)}unsetAllSpecialFeatureOptins(){this.specialFeatureOptins.empty()}setAll(){this.setAllVendorConsents(),this.setAllPurposeLegitimateInterests(),this.setAllSpecialFeatureOptins(),this.setAllPurposeConsents(),this.setAllVendorLegitimateInterests()}unsetAll(){this.unsetAllVendorConsents(),this.unsetAllPurposeLegitimateInterests(),this.unsetAllSpecialFeatureOptins(),this.unsetAllPurposeConsents(),this.unsetAllVendorLegitimateInterests()}get numCustomPurposes(){let e=this.numCustomPurposes_;if(typeof this.customPurposes=="object"){let t=Object.keys(this.customPurposes).sort((i,s)=>Number(i)-Number(s));e=parseInt(t.pop(),10)}return e}set numCustomPurposes(e){if(this.numCustomPurposes_=parseInt(e,10),this.numCustomPurposes_<0)throw new W("numCustomPurposes",e)}updated(){let e=new Date,t=new Date(Date.UTC(e.getUTCFullYear(),e.getUTCMonth(),e.getUTCDate()));this.created=t,this.lastUpdated=t}}});var bt,jn=P(()=>{$e();Q();ge();yt();bt=class{static encode(e,t){let i="",s;return 
e=Be.process(e,t),Array.isArray(t?.segments)?s=t.segments:s=new Fe(e,t)[""+e.version],s.forEach((r,o)=>{let a="";ope,BitLength:()=>S,BooleanEncoder:()=>$,Cloneable:()=>te,ConsentLanguages:()=>Ee,DateEncoder:()=>Se,DecodingError:()=>G,DeviceDisclosureStorageAccessType:()=>ct,EncodingError:()=>q,FieldEncoderMap:()=>Ke,FieldSequence:()=>xe,Fields:()=>v,FixedVectorEncoder:()=>ne,GVL:()=>ye,GVLError:()=>oe,IntEncoder:()=>L,Json:()=>De,LangEncoder:()=>Ae,PurposeRestriction:()=>ae,PurposeRestrictionVector:()=>he,PurposeRestrictionVectorEncoder:()=>Ve,RestrictionType:()=>Z,Segment:()=>N,SegmentEncoder:()=>ze,SegmentIDs:()=>ke,SegmentSequence:()=>Fe,SemanticPreEncoder:()=>Be,TCModel:()=>Ne,TCModelError:()=>W,TCString:()=>bt,Vector:()=>H,VectorEncodingType:()=>ve,VendorVectorEncoder:()=>ce});var An=P(()=>{$e();J();Q();fe();vt();kt();yt();jn()});var st={};Nt(st,{baseTranslations:()=>Bi,deepMergeTranslations:()=>Rt,detectBrowserLanguage:()=>Gt,enTranslations:()=>it,mergeTranslationConfigs:()=>Ut,parseAcceptLanguage:()=>Mt,prepareTranslationConfig:()=>Ni,selectLanguage:()=>Di});var ii={common:{acceptAll:"\u041F\u0440\u0438\u0435\u043C\u0438 \u0432\u0441\u0438\u0447\u043A\u0438",rejectAll:"\u041E\u0442\u0445\u0432\u044A\u0440\u043B\u0438 \u0432\u0441\u0438\u0447\u043A\u0438",customize:"\u041F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u0439",save:"\u0417\u0430\u043F\u0430\u0437\u0438 \u043D\u0430\u0441\u0442\u0440\u043E\u0439\u043A\u0438\u0442\u0435"},cookieBanner:{title:"\u0426\u0435\u043D\u0438\u043C \u0432\u0430\u0448\u0430\u0442\u0430 \u043F\u043E\u0432\u0435\u0440\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442",description:"\u0422\u043E\u0437\u0438 \u0441\u0430\u0439\u0442 \u0438\u0437\u043F\u043E\u043B\u0437\u0432\u0430 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438, \u0437\u0430 \u0434\u0430 \u043F\u043E\u0434\u043E\u0431\u0440\u0438 \u0432\u0430\u0448\u0435\u0442\u043E 
\u043F\u043E\u0442\u0440\u0435\u0431\u0438\u0442\u0435\u043B\u0441\u043A\u043E \u0438\u0437\u0436\u0438\u0432\u044F\u0432\u0430\u043D\u0435, \u0434\u0430 \u0430\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430 \u0442\u0440\u0430\u0444\u0438\u043A\u0430 \u043D\u0430 \u0441\u0430\u0439\u0442\u0430 \u0438 \u0434\u0430 \u043F\u043E\u043A\u0430\u0437\u0432\u0430 \u043F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u043D\u043E \u0441\u044A\u0434\u044A\u0440\u0436\u0430\u043D\u0438\u0435."},consentManagerDialog:{title:"\u041D\u0430\u0441\u0442\u0440\u043E\u0439\u043A\u0438 \u0437\u0430 \u043F\u043E\u0432\u0435\u0440\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442",description:"\u041F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u0439\u0442\u0435 \u0432\u0430\u0448\u0438\u0442\u0435 \u043D\u0430\u0441\u0442\u0440\u043E\u0439\u043A\u0438 \u0437\u0430 \u043F\u043E\u0432\u0435\u0440\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442 \u0442\u0443\u043A. 
\u041C\u043E\u0436\u0435\u0442\u0435 \u0434\u0430 \u0438\u0437\u0431\u0435\u0440\u0435\u0442\u0435 \u043A\u043E\u0438 \u0432\u0438\u0434\u043E\u0432\u0435 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438 \u0438 \u0442\u0435\u0445\u043D\u043E\u043B\u043E\u0433\u0438\u0438 \u0437\u0430 \u043F\u0440\u043E\u0441\u043B\u0435\u0434\u044F\u0432\u0430\u043D\u0435 \u0440\u0430\u0437\u0440\u0435\u0448\u0430\u0432\u0430\u0442\u0435."},consentTypes:{necessary:{title:"\u0421\u0442\u0440\u043E\u0433\u043E \u043D\u0435\u043E\u0431\u0445\u043E\u0434\u0438\u043C\u0438",description:"\u0422\u0435\u0437\u0438 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438 \u0441\u0430 \u043E\u0442 \u0441\u044A\u0449\u0435\u0441\u0442\u0432\u0435\u043D\u043E \u0437\u043D\u0430\u0447\u0435\u043D\u0438\u0435 \u0437\u0430 \u043F\u0440\u0430\u0432\u0438\u043B\u043D\u043E\u0442\u043E \u0444\u0443\u043D\u043A\u0446\u0438\u043E\u043D\u0438\u0440\u0430\u043D\u0435 \u043D\u0430 \u0443\u0435\u0431\u0441\u0430\u0439\u0442\u0430 \u0438 \u043D\u0435 \u043C\u043E\u0433\u0430\u0442 \u0434\u0430 \u0431\u044A\u0434\u0430\u0442 \u0434\u0435\u0430\u043A\u0442\u0438\u0432\u0438\u0440\u0430\u043D\u0438."},functionality:{title:"\u0424\u0443\u043D\u043A\u0446\u0438\u043E\u043D\u0430\u043B\u043D\u043E\u0441\u0442",description:"\u0422\u0435\u0437\u0438 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438 \u043F\u043E\u0437\u0432\u043E\u043B\u044F\u0432\u0430\u0442 \u043F\u043E\u0434\u043E\u0431\u0440\u0435\u043D\u0430 \u0444\u0443\u043D\u043A\u0446\u0438\u043E\u043D\u0430\u043B\u043D\u043E\u0441\u0442 \u0438 \u043F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u043D\u0435 \u043D\u0430 \u0443\u0435\u0431\u0441\u0430\u0439\u0442\u0430."},marketing:{title:"\u041C\u0430\u0440\u043A\u0435\u0442\u0438\u043D\u0433",description:"\u0422\u0435\u0437\u0438 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438 \u0441\u0435 \u0438\u0437\u043F\u043E\u043B\u0437\u0432\u0430\u0442 \u0437\u0430 
\u043F\u043E\u043A\u0430\u0437\u0432\u0430\u043D\u0435 \u043D\u0430 \u043F\u043E\u0434\u0445\u043E\u0434\u044F\u0449\u0438 \u0440\u0435\u043A\u043B\u0430\u043C\u0438 \u0438 \u043F\u0440\u043E\u0441\u043B\u0435\u0434\u044F\u0432\u0430\u043D\u0435 \u043D\u0430 \u0442\u044F\u0445\u043D\u0430\u0442\u0430 \u0435\u0444\u0435\u043A\u0442\u0438\u0432\u043D\u043E\u0441\u0442."},measurement:{title:"\u0410\u043D\u0430\u043B\u0438\u0442\u0438\u043A\u0430",description:"\u0422\u0435\u0437\u0438 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438 \u043D\u0438 \u043F\u043E\u043C\u0430\u0433\u0430\u0442 \u0434\u0430 \u0440\u0430\u0437\u0431\u0435\u0440\u0435\u043C \u043A\u0430\u043A \u043F\u043E\u0441\u0435\u0442\u0438\u0442\u0435\u043B\u0438\u0442\u0435 \u0432\u0437\u0430\u0438\u043C\u043E\u0434\u0435\u0439\u0441\u0442\u0432\u0430\u0442 \u0441 \u0443\u0435\u0431\u0441\u0430\u0439\u0442\u0430 \u0438 \u0434\u0430 \u043F\u043E\u0434\u043E\u0431\u0440\u0438\u043C \u043D\u0435\u0433\u043E\u0432\u0430\u0442\u0430 \u043F\u0440\u043E\u0438\u0437\u0432\u043E\u0434\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442."},experience:{title:"\u041F\u043E\u0442\u0440\u0435\u0431\u0438\u0442\u0435\u043B\u0441\u043A\u043E \u0438\u0437\u0436\u0438\u0432\u044F\u0432\u0430\u043D\u0435",description:"\u0422\u0435\u0437\u0438 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438 \u043D\u0438 \u043F\u043E\u043C\u0430\u0433\u0430\u0442 \u0434\u0430 \u043E\u0441\u0438\u0433\u0443\u0440\u0438\u043C \u043F\u043E-\u0434\u043E\u0431\u0440\u043E \u043F\u043E\u0442\u0440\u0435\u0431\u0438\u0442\u0435\u043B\u0441\u043A\u043E \u0438\u0437\u0436\u0438\u0432\u044F\u0432\u0430\u043D\u0435 \u0438 \u0434\u0430 \u0442\u0435\u0441\u0442\u0432\u0430\u043C\u0435 \u043D\u043E\u0432\u0438 \u0444\u0443\u043D\u043A\u0446\u0438\u0438."}},frame:{title:"\u041F\u0440\u0438\u0435\u043C\u0435\u0442\u0435 \u0441\u044A\u0433\u043B\u0430\u0441\u0438\u0435 \u0437\u0430 {category}, \u0437\u0430 \u0434\u0430 
\u0432\u0438\u0434\u0438\u0442\u0435 \u0442\u043E\u0432\u0430 \u0441\u044A\u0434\u044A\u0440\u0436\u0430\u043D\u0438\u0435.",actionButton:"\u0410\u043A\u0442\u0438\u0432\u0438\u0440\u0430\u0439\u0442\u0435 \u0441\u044A\u0433\u043B\u0430\u0441\u0438\u0435 \u0437\u0430 {category}"},legalLinks:{privacyPolicy:"\u041F\u043E\u043B\u0438\u0442\u0438\u043A\u0430 \u0437\u0430 \u043F\u043E\u0432\u0435\u0440\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442",cookiePolicy:"\u041F\u043E\u043B\u0438\u0442\u0438\u043A\u0430 \u0437\u0430 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438",termsOfService:"\u041E\u0431\u0449\u0438 \u0443\u0441\u043B\u043E\u0432\u0438\u044F"},iab:{banner:{title:"\u041D\u0430\u0441\u0442\u0440\u043E\u0439\u043A\u0438 \u0437\u0430 \u043F\u043E\u0432\u0435\u0440\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442",description:"\u041D\u0438\u0435 \u0438 \u043D\u0430\u0448\u0438\u0442\u0435 {partnerCount} \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440\u0438 \u0441\u044A\u0445\u0440\u0430\u043D\u044F\u0432\u0430\u043C\u0435 \u0438/\u0438\u043B\u0438 \u043E\u0441\u044A\u0449\u0435\u0441\u0442\u0432\u044F\u0432\u0430\u043C\u0435 \u0434\u043E\u0441\u0442\u044A\u043F \u0434\u043E \u0438\u043D\u0444\u043E\u0440\u043C\u0430\u0446\u0438\u044F \u043D\u0430 \u0432\u0430\u0448\u0435\u0442\u043E \u0443\u0441\u0442\u0440\u043E\u0439\u0441\u0442\u0432\u043E \u0438 \u043E\u0431\u0440\u0430\u0431\u043E\u0442\u0432\u0430\u043C\u0435 \u043B\u0438\u0447\u043D\u0438 \u0434\u0430\u043D\u043D\u0438, \u043A\u0430\u0442\u043E \u0443\u043D\u0438\u043A\u0430\u043B\u043D\u0438 \u0438\u0434\u0435\u043D\u0442\u0438\u0444\u0438\u043A\u0430\u0442\u043E\u0440\u0438 \u0438 \u0434\u0430\u043D\u043D\u0438 \u0437\u0430 \u0441\u044A\u0440\u0444\u0438\u0440\u0430\u043D\u0435, \u0437\u0430 \u0442\u043E\u0437\u0438 \u0443\u0435\u0431\u0441\u0430\u0439\u0442, \u0437\u0430 \u0434\u0430:",partnersLink:"{count, plural, one {# \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440} other {# 
\u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440\u0430}}",andMore:"\u0418 \u043E\u0449\u0435 {count, plural, one {# \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440} other {# \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440\u0430}}...",legitimateInterestNotice:"\u041D\u044F\u043A\u043E\u0438 \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440\u0438 \u043F\u0440\u0435\u0442\u0435\u043D\u0434\u0438\u0440\u0430\u0442 \u0437\u0430 \u0437\u0430\u043A\u043E\u043D\u0435\u043D \u0438\u043D\u0442\u0435\u0440\u0435\u0441 \u0434\u0430 \u043E\u0431\u0440\u0430\u0431\u043E\u0442\u0432\u0430\u0442 \u0432\u0430\u0448\u0438\u0442\u0435 \u0434\u0430\u043D\u043D\u0438. \u0418\u043C\u0430\u0442\u0435 \u043F\u0440\u0430\u0432\u043E \u0434\u0430 \u0432\u044A\u0437\u0440\u0430\u0437\u0438\u0442\u0435 \u0441\u0440\u0435\u0449\u0443 \u0442\u0430\u0437\u0438 \u043E\u0431\u0440\u0430\u0431\u043E\u0442\u043A\u0430, \u0434\u0430 \u043F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u0442\u0435 \u0432\u0430\u0448\u0438\u0442\u0435 \u0438\u0437\u0431\u043E\u0440\u0438 \u0438 \u0434\u0430 \u043E\u0442\u0442\u0435\u0433\u043B\u0438\u0442\u0435 \u0441\u044A\u0433\u043B\u0430\u0441\u0438\u0435\u0442\u043E \u0441\u0438 \u043F\u043E \u0432\u0441\u044F\u043A\u043E \u0432\u0440\u0435\u043C\u0435.",scopeServiceSpecific:"\u0412\u0430\u0448\u0435\u0442\u043E \u0441\u044A\u0433\u043B\u0430\u0441\u0438\u0435 \u0432\u0430\u0436\u0438 \u0441\u0430\u043C\u043E \u0437\u0430 \u0442\u043E\u0437\u0438 \u0443\u0435\u0431\u0441\u0430\u0439\u0442 \u0438 \u043D\u044F\u043C\u0430 \u0434\u0430 \u043F\u043E\u0432\u043B\u0438\u044F\u0435 \u043D\u0430 \u0434\u0440\u0443\u0433\u0438 \u0443\u0441\u043B\u0443\u0433\u0438.",scopeGroup:"\u0412\u0430\u0448\u0438\u044F\u0442 \u0438\u0437\u0431\u043E\u0440 \u0441\u0435 \u043F\u0440\u0438\u043B\u0430\u0433\u0430 \u043A\u044A\u043C \u0432\u0441\u0438\u0447\u043A\u0438 \u043D\u0430\u0448\u0438 \u0443\u0435\u0431\u0441\u0430\u0439\u0442\u043E\u0432\u0435 \u0432 
\u0442\u0430\u0437\u0438 \u0433\u0440\u0443\u043F\u0430."},preferenceCenter:{title:"\u041D\u0430\u0441\u0442\u0440\u043E\u0439\u043A\u0438 \u0437\u0430 \u043F\u043E\u0432\u0435\u0440\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442",description:"\u041F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u0439\u0442\u0435 \u0432\u0430\u0448\u0438\u0442\u0435 \u043D\u0430\u0441\u0442\u0440\u043E\u0439\u043A\u0438 \u0437\u0430 \u043F\u043E\u0432\u0435\u0440\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442 \u0442\u0443\u043A. \u041C\u043E\u0436\u0435\u0442\u0435 \u0434\u0430 \u0438\u0437\u0431\u0435\u0440\u0435\u0442\u0435 \u043A\u043E\u0438 \u0432\u0438\u0434\u043E\u0432\u0435 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438 \u0438 \u0442\u0435\u0445\u043D\u043E\u043B\u043E\u0433\u0438\u0438 \u0437\u0430 \u043F\u0440\u043E\u0441\u043B\u0435\u0434\u044F\u0432\u0430\u043D\u0435 \u0440\u0430\u0437\u0440\u0435\u0448\u0430\u0432\u0430\u0442\u0435.",tabs:{purposes:"\u0426\u0435\u043B\u0438",vendors:"\u0414\u043E\u0441\u0442\u0430\u0432\u0447\u0438\u0446\u0438"},purposeItem:{partners:"{count, plural, one {# \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440} other {# \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440\u0430}}",vendorsUseLegitimateInterest:"{count, plural, one {# \u0434\u043E\u0441\u0442\u0430\u0432\u0447\u0438\u043A \u043F\u0440\u0435\u0442\u0435\u043D\u0434\u0438\u0440\u0430} other {# \u0434\u043E\u0441\u0442\u0430\u0432\u0447\u0438\u043A\u0430 \u043F\u0440\u0435\u0442\u0435\u043D\u0434\u0438\u0440\u0430\u0442}} \u0437\u0430 \u0437\u0430\u043A\u043E\u043D\u0435\u043D \u0438\u043D\u0442\u0435\u0440\u0435\u0441",examples:"\u041F\u0440\u0438\u043C\u0435\u0440\u0438",partnersUsingPurpose:"\u041F\u0430\u0440\u0442\u043D\u044C\u043E\u0440\u0438, \u0438\u0437\u043F\u043E\u043B\u0437\u0432\u0430\u0449\u0438 \u0442\u0430\u0437\u0438 \u0446\u0435\u043B",withYourPermission:"\u0421 \u0432\u0430\u0448\u0435\u0442\u043E 
\u0440\u0430\u0437\u0440\u0435\u0448\u0435\u043D\u0438\u0435",legitimateInterest:"\u0417\u0430\u043A\u043E\u043D\u0435\u043D \u0438\u043D\u0442\u0435\u0440\u0435\u0441",objectButton:"\u0412\u044A\u0437\u0440\u0430\u0437\u044F\u0432\u0430\u043C",objected:"\u0412\u044A\u0437\u0440\u0430\u0437\u0435\u043D\u043E",rightToObject:"\u0418\u043C\u0430\u0442\u0435 \u043F\u0440\u0430\u0432\u043E \u0434\u0430 \u0432\u044A\u0437\u0440\u0430\u0437\u0438\u0442\u0435 \u0441\u0440\u0435\u0449\u0443 \u043E\u0431\u0440\u0430\u0431\u043E\u0442\u043A\u0430, \u0431\u0430\u0437\u0438\u0440\u0430\u043D\u0430 \u043D\u0430 \u0437\u0430\u043A\u043E\u043D\u0435\u043D \u0438\u043D\u0442\u0435\u0440\u0435\u0441."},specialPurposes:{title:"\u041E\u0441\u043D\u043E\u0432\u043D\u0438 \u0444\u0443\u043D\u043A\u0446\u0438\u0438 (\u0437\u0430\u0434\u044A\u043B\u0436\u0438\u0442\u0435\u043B\u043D\u0438)",tooltip:"\u0422\u0435 \u0441\u0430 \u043D\u0435\u043E\u0431\u0445\u043E\u0434\u0438\u043C\u0438 \u0437\u0430 \u0444\u0443\u043D\u043A\u0446\u0438\u043E\u043D\u0430\u043B\u043D\u043E\u0441\u0442\u0442\u0430 \u0438 \u0441\u0438\u0433\u0443\u0440\u043D\u043E\u0441\u0442\u0442\u0430 \u043D\u0430 \u0441\u0430\u0439\u0442\u0430. 
\u0421\u044A\u0433\u043B\u0430\u0441\u043D\u043E IAB TCF \u043D\u0435 \u043C\u043E\u0436\u0435\u0442\u0435 \u0434\u0430 \u0432\u044A\u0437\u0440\u0430\u0437\u0438\u0442\u0435 \u0441\u0440\u0435\u0449\u0443 \u0442\u0435\u0437\u0438 \u0441\u043F\u0435\u0446\u0438\u0430\u043B\u043D\u0438 \u0446\u0435\u043B\u0438."},vendorList:{search:"\u0422\u044A\u0440\u0441\u0435\u043D\u0435 \u043D\u0430 \u0434\u043E\u0441\u0442\u0430\u0432\u0447\u0438\u0446\u0438...",showingCount:"{filtered} \u043E\u0442 {total, plural, one {# \u0434\u043E\u0441\u0442\u0430\u0432\u0447\u0438\u043A} other {# \u0434\u043E\u0441\u0442\u0430\u0432\u0447\u0438\u043A\u0430}}",iabVendorsHeading:"\u0420\u0435\u0433\u0438\u0441\u0442\u0440\u0438\u0440\u0430\u043D\u0438 \u0434\u043E\u0441\u0442\u0430\u0432\u0447\u0438\u0446\u0438 \u0432 IAB",iabVendorsNotice:"\u0422\u0435\u0437\u0438 \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440\u0438 \u0441\u0430 \u0440\u0435\u0433\u0438\u0441\u0442\u0440\u0438\u0440\u0430\u043D\u0438 \u0432 IAB Transparency & Consent Framework (TCF), \u0438\u043D\u0434\u0443\u0441\u0442\u0440\u0438\u0430\u043B\u0435\u043D \u0441\u0442\u0430\u043D\u0434\u0430\u0440\u0442 \u0437\u0430 \u0443\u043F\u0440\u0430\u0432\u043B\u0435\u043D\u0438\u0435 \u043D\u0430 \u0441\u044A\u0433\u043B\u0430\u0441\u0438\u0435\u0442\u043E",customVendorsHeading:"\u041F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u043D\u0438 \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440\u0438",customVendorsNotice:"\u0422\u043E\u0432\u0430 \u0441\u0430 \u043F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u043D\u0438 \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440\u0438, \u043A\u043E\u0438\u0442\u043E \u043D\u0435 \u0441\u0430 \u0440\u0435\u0433\u0438\u0441\u0442\u0440\u0438\u0440\u0430\u043D\u0438 \u0432 IAB Transparency & Consent Framework (TCF). 
\u0422\u0435 \u043E\u0431\u0440\u0430\u0431\u043E\u0442\u0432\u0430\u0442 \u0434\u0430\u043D\u043D\u0438 \u0432\u044A\u0437 \u043E\u0441\u043D\u043E\u0432\u0430 \u043D\u0430 \u0432\u0430\u0448\u0435\u0442\u043E \u0441\u044A\u0433\u043B\u0430\u0441\u0438\u0435 \u0438 \u043C\u043E\u0436\u0435 \u0434\u0430 \u0438\u043C\u0430\u0442 \u0440\u0430\u0437\u043B\u0438\u0447\u043D\u0438 \u043F\u0440\u0430\u043A\u0442\u0438\u043A\u0438 \u0437\u0430 \u043F\u043E\u0432\u0435\u0440\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442 \u043E\u0442 \u0440\u0435\u0433\u0438\u0441\u0442\u0440\u0438\u0440\u0430\u043D\u0438\u0442\u0435 \u0432 IAB \u0434\u043E\u0441\u0442\u0430\u0432\u0447\u0438\u0446\u0438.",purposes:"\u0426\u0435\u043B\u0438",specialPurposes:"\u0421\u043F\u0435\u0446\u0438\u0430\u043B\u043D\u0438 \u0446\u0435\u043B\u0438",specialFeatures:"\u0421\u043F\u0435\u0446\u0438\u0430\u043B\u043D\u0438 \u0444\u0443\u043D\u043A\u0446\u0438\u0438",features:"\u0424\u0443\u043D\u043A\u0446\u0438\u0438",dataCategories:"\u041A\u0430\u0442\u0435\u0433\u043E\u0440\u0438\u0438 \u0434\u0430\u043D\u043D\u0438",usesCookies:"\u0418\u0437\u043F\u043E\u043B\u0437\u0432\u0430 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438",nonCookieAccess:"\u0414\u043E\u0441\u0442\u044A\u043F \u0431\u0435\u0437 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0438",maxAge:"\u041C\u0430\u043A\u0441\u0438\u043C\u0430\u043B\u043D\u0430 \u0434\u0430\u0432\u043D\u043E\u0441\u0442: {days} \u0434",retention:"\u0421\u044A\u0445\u0440\u0430\u043D\u0435\u043D\u0438\u0435: {days} \u0434",legitimateInterest:"\u0417\u0430\u043A\u043E\u043D\u0435\u043D \u0438\u043D\u0442\u0435\u0440\u0435\u0441",privacyPolicy:"\u041F\u043E\u043B\u0438\u0442\u0438\u043A\u0430 \u0437\u0430 \u043F\u043E\u0432\u0435\u0440\u0438\u0442\u0435\u043B\u043D\u043E\u0441\u0442",storageDisclosure:"\u0414\u0435\u043A\u043B\u0430\u0440\u0430\u0446\u0438\u044F \u0437\u0430 
\u0441\u044A\u0445\u0440\u0430\u043D\u0435\u043D\u0438\u0435",requiredNotice:"\u041D\u0435\u043E\u0431\u0445\u043E\u0434\u0438\u043C\u043E \u0437\u0430 \u0444\u0443\u043D\u043A\u0446\u0438\u043E\u043D\u0430\u043B\u043D\u043E\u0441\u0442\u0442\u0430 \u043D\u0430 \u0441\u0430\u0439\u0442\u0430, \u043D\u0435 \u043C\u043E\u0436\u0435 \u0434\u0430 \u0431\u044A\u0434\u0435 \u0434\u0435\u0430\u043A\u0442\u0438\u0432\u0438\u0440\u0430\u043D\u043E"},footer:{consentStorage:'\u041F\u0440\u0435\u0434\u043F\u043E\u0447\u0438\u0442\u0430\u043D\u0438\u044F\u0442\u0430 \u0437\u0430 \u0441\u044A\u0433\u043B\u0430\u0441\u0438\u0435 \u0441\u0435 \u0441\u044A\u0445\u0440\u0430\u043D\u044F\u0432\u0430\u0442 \u0432 \u0431\u0438\u0441\u043A\u0432\u0438\u0442\u043A\u0430 \u0441 \u0438\u043C\u0435 "euconsent-v2" \u0437\u0430 13 \u043C\u0435\u0441\u0435\u0446\u0430. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"\u041F\u0440\u0438\u0435\u043C\u0438 \u0432\u0441\u0438\u0447\u043A\u0438",rejectAll:"\u041E\u0442\u0445\u0432\u044A\u0440\u043B\u0438 \u0432\u0441\u0438\u0447\u043A\u0438",customize:"\u041F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u0439",saveSettings:"\u0417\u0430\u043F\u0430\u0437\u0438 \u043D\u0430\u0441\u0442\u0440\u043E\u0439\u043A\u0438\u0442\u0435",loading:"\u0417\u0430\u0440\u0435\u0436\u0434\u0430\u043D\u0435...",showingSelectedVendor:"\u041F\u043E\u043A\u0430\u0437\u0432\u0430\u043D\u0435 \u043D\u0430 \u0438\u0437\u0431\u0440\u0430\u043D \u0434\u043E\u0441\u0442\u0430\u0432\u0447\u0438\u043A",clearSelection:"\u0418\u0437\u0447\u0438\u0441\u0442\u0438",customPartner:"\u041F\u0435\u0440\u0441\u043E\u043D\u0430\u043B\u0438\u0437\u0438\u0440\u0430\u043D \u043F\u0430\u0440\u0442\u043D\u044C\u043E\u0440, \u043D\u0435\u0440\u0435\u0433\u0438\u0441\u0442\u0440\u0438\u0440\u0430\u043D \u0432 IAB"}}},si={common:{acceptAll:"P\u0159ijmout v\u0161e",rejectAll:"Odm\xEDtnout 
v\u0161e",customize:"P\u0159izp\u016Fsobit",save:"Ulo\u017Eit nastaven\xED"},cookieBanner:{title:"V\xE1\u017E\xEDme si va\u0161eho soukrom\xED",description:"Tento web pou\u017E\xEDv\xE1 soubory cookie ke zlep\u0161en\xED va\u0161eho prohl\xED\u017Een\xED, anal\xFDze provozu na webu a zobrazov\xE1n\xED personalizovan\xE9ho obsahu."},consentManagerDialog:{title:"Nastaven\xED soukrom\xED",description:"Zde si m\u016F\u017Eete p\u0159izp\u016Fsobit nastaven\xED soukrom\xED. M\u016F\u017Eete zvolit, kter\xE9 typy soubor\u016F cookie a sledovac\xEDch technologi\xED povol\xEDte."},consentTypes:{necessary:{title:"Nezbytn\u011B nutn\xE9",description:"Tyto soubory cookie jsou nezbytn\xE9 pro spr\xE1vn\xE9 fungov\xE1n\xED webov\xFDch str\xE1nek a nelze je deaktivovat."},functionality:{title:"Funk\u010Dnost",description:"Tyto soubory cookie umo\u017E\u0148uj\xED roz\u0161\xED\u0159enou funk\u010Dnost a personalizaci webov\xFDch str\xE1nek."},marketing:{title:"Marketing",description:"Tyto soubory cookie se pou\u017E\xEDvaj\xED k doru\u010Dov\xE1n\xED relevantn\xEDch reklam a sledov\xE1n\xED jejich \xFA\u010Dinnosti."},measurement:{title:"Analytika",description:"Tyto soubory cookie n\xE1m pom\xE1haj\xED pochopit, jak n\xE1v\u0161t\u011Bvn\xEDci interaguj\xED s webem a zlep\u0161uj\xED jeho v\xFDkon."},experience:{title:"U\u017Eivatelsk\xE1 zku\u0161enost",description:"Tyto soubory cookie n\xE1m pom\xE1haj\xED poskytovat lep\u0161\xED u\u017Eivatelskou zku\u0161enost a testovat nov\xE9 funkce."}},frame:{title:"Pro zobrazen\xED tohoto obsahu p\u0159ijm\u011Bte souhlas s kategori\xED {category}.",actionButton:"Povolit souhlas s kategori\xED {category}"},legalLinks:{privacyPolicy:"Z\xE1sady ochrany osobn\xEDch \xFAdaj\u016F",cookiePolicy:"Z\xE1sady pou\u017E\xEDv\xE1n\xED soubor\u016F cookie",termsOfService:"Podm\xEDnky slu\u017Eby"},iab:{banner:{title:"Nastaven\xED soukrom\xED",description:"My a na\u0161ich {partnerCount} partner\u016F ukl\xE1d\xE1me a/nebo p\u0159istupujeme k 
informac\xEDm na va\u0161em za\u0159\xEDzen\xED a zpracov\xE1v\xE1me osobn\xED \xFAdaje, jako jsou jedine\u010Dn\xE9 identifik\xE1tory a \xFAdaje o prohl\xED\u017Een\xED, pro tento web za \xFA\u010Delem:",partnersLink:"{count, plural, one {# partner} few {# partne\u0159i} other {# partner\u016F}}",andMore:"A dal\u0161\xEDch {count}...",legitimateInterestNotice:"N\u011Bkte\u0159\xED partne\u0159i uplat\u0148uj\xED opr\xE1vn\u011Bn\xFD z\xE1jem na zpracov\xE1n\xED va\u0161ich \xFAdaj\u016F. M\xE1te pr\xE1vo proti tomuto zpracov\xE1n\xED vzn\xE9st n\xE1mitku, p\u0159izp\u016Fsobit sv\xE9 volby a kdykoli odvolat sv\u016Fj souhlas.",scopeServiceSpecific:"V\xE1\u0161 souhlas plat\xED pouze pro tento web a neovlivn\xED jin\xE9 slu\u017Eby.",scopeGroup:"Va\u0161e volba plat\xED pro v\u0161echny na\u0161e weby v t\xE9to skupin\u011B."},preferenceCenter:{title:"Nastaven\xED soukrom\xED",description:"Zde si m\u016F\u017Eete p\u0159izp\u016Fsobit nastaven\xED soukrom\xED. M\u016F\u017Eete zvolit, kter\xE9 typy soubor\u016F cookie a sledovac\xEDch technologi\xED povol\xEDte.",tabs:{purposes:"\xDA\u010Dely",vendors:"Partne\u0159i"},purposeItem:{partners:"{count, plural, one {# partner} few {# partne\u0159i} other {# partner\u016F}}",vendorsUseLegitimateInterest:"{count, plural, one {# partner uplat\u0148uje} few {# partne\u0159i uplat\u0148uj\xED} other {# partner\u016F uplat\u0148uje}} opr\xE1vn\u011Bn\xFD z\xE1jem",examples:"P\u0159\xEDklady",partnersUsingPurpose:"Partne\u0159i vyu\u017E\xEDvaj\xEDc\xED tento \xFA\u010Del",withYourPermission:"S va\u0161\xEDm svolen\xEDm",legitimateInterest:"Opr\xE1vn\u011Bn\xFD z\xE1jem",objectButton:"Vzn\xE9st n\xE1mitku",objected:"N\xE1mitka vznesena",rightToObject:"M\xE1te pr\xE1vo vzn\xE9st n\xE1mitku proti zpracov\xE1n\xED zalo\u017Een\xE9mu na opr\xE1vn\u011Bn\xE9m z\xE1jmu."},specialPurposes:{title:"Z\xE1kladn\xED funkce (povinn\xE9)",tooltip:"Tyto funkce jsou nezbytn\xE9 pro funk\u010Dnost a zabezpe\u010Den\xED webu. 
Podle IAB TCF nem\u016F\u017Eete proti t\u011Bmto zvl\xE1\u0161tn\xEDm \xFA\u010Del\u016Fm vzn\xE9st n\xE1mitku."},vendorList:{search:"Hledat partnery...",showingCount:"{filtered} z {total, plural, one {# partnera} few {# partner\u016F} other {# partner\u016F}}",iabVendorsHeading:"Partne\u0159i registrovan\xED v IAB",iabVendorsNotice:"Tito partne\u0159i jsou registrov\xE1ni v r\xE1mci IAB Transparency & Consent Framework (TCF), co\u017E je pr\u016Fmyslov\xFD standard pro spr\xE1vu souhlasu",customVendorsHeading:"Vlastn\xED partne\u0159i",customVendorsNotice:"Toto jsou vlastn\xED partne\u0159i, kte\u0159\xED nejsou registrov\xE1ni v r\xE1mci IAB Transparency & Consent Framework (TCF). Zpracov\xE1vaj\xED data na z\xE1klad\u011B va\u0161eho souhlasu a mohou m\xEDt odli\u0161n\xE9 postupy ochrany osobn\xEDch \xFAdaj\u016F ne\u017E partne\u0159i registrovan\xED v IAB.",purposes:"\xDA\u010Dely",specialPurposes:"Zvl\xE1\u0161tn\xED \xFA\u010Dely",specialFeatures:"Zvl\xE1\u0161tn\xED funkce",features:"Funkce",dataCategories:"Kategorie dat",usesCookies:"Pou\u017E\xEDv\xE1 cookies",nonCookieAccess:"P\u0159\xEDstup bez cookies",maxAge:"Maxim\xE1ln\xED doba: {days} d",retention:"Uchov\xE1v\xE1n\xED: {days} d",legitimateInterest:"Opr\xE1vn\u011Bn\xFD z\xE1jem",privacyPolicy:"Z\xE1sady ochrany osobn\xEDch \xFAdaj\u016F",storageDisclosure:"Informace o ukl\xE1d\xE1n\xED",requiredNotice:"Vy\u017Eadov\xE1no pro funk\u010Dnost webu, nelze zak\xE1zat"},footer:{consentStorage:'P\u0159edvolby souhlasu jsou ulo\u017Eeny v cookie s n\xE1zvem "euconsent-v2" po dobu 13 m\u011Bs\xEDc\u016F. 
Doba ulo\u017Een\xED m\u016F\u017Ee b\xFDt obnovena, kdy\u017E aktualizujete sv\xE9 p\u0159edvolby.'}},common:{acceptAll:"P\u0159ijmout v\u0161e",rejectAll:"Odm\xEDtnout v\u0161e",customize:"P\u0159izp\u016Fsobit",saveSettings:"Ulo\u017Eit nastaven\xED",loading:"Na\u010D\xEDt\xE1n\xED...",showingSelectedVendor:"Zobrazen\xED vybran\xE9ho partnera",clearSelection:"Vymazat",customPartner:"Vlastn\xED partner neregistrovan\xFD v IAB"}}},ri={common:{acceptAll:"Derbyn pob un",rejectAll:"Gwrthod pob un",customize:"Addasu",save:"Cadw gosodiadau"},cookieBanner:{title:"Rydym yn gwerthfawrogi eich preifatrwydd",description:"Mae'r wefan hon yn defnyddio cwcis i wella eich profiad pori, dadansoddi traffig y wefan, a dangos cynnwys wedi'i bersonoli."},consentManagerDialog:{title:"Gosodiadau preifatrwydd",description:"Addaswch eich gosodiadau preifatrwydd yma. Gallwch ddewis pa fathau o gwcis a thechnolegau tracio rydych yn eu caniat\xE1u."},consentTypes:{necessary:{title:"Cwbl angenrheidiol",description:"Mae'r cwcis hyn yn hanfodol i'r wefan weithredu'n iawn ac ni ellir eu hanalluogi."},functionality:{title:"Swyddogaeth",description:"Mae'r cwcis hyn yn galluogi swyddogaeth a phersonoli gwell o'r wefan."},marketing:{title:"Marchnata",description:"Defnyddir y cwcis hyn i ddarparu hysbysebion perthnasol a thracio eu heffeithiolrwydd."},measurement:{title:"Dadansoddeg",description:"Mae'r cwcis hyn yn ein helpu i ddeall sut mae ymwelwyr yn rhyngweithio \xE2'r wefan a gwella ei pherfformiad."},experience:{title:"Profiad",description:"Mae'r cwcis hyn yn ein helpu i ddarparu profiad defnyddiwr gwell a phrofi nodweddion newydd."}},frame:{title:"Derbyn caniat\xE2d {category} i weld y cynnwys hwn.",actionButton:"Galluogi caniat\xE2d {category}"},legalLinks:{privacyPolicy:"Polisi preifatrwydd",cookiePolicy:"Polisi cwcis",termsOfService:"Telerau gwasanaeth"},iab:{banner:{title:"Gosodiadau preifatrwydd",description:"Rydym ni a\u2019n {partnerCount} partner yn storio a/neu\u2019n cyrchu gwybodaeth ar eich dyfais ac yn 
prosesu data personol, megis dynodwyr unigryw a data pori, ar gyfer y wefan hon, er mwyn:",partnersLink:"{count} partner",andMore:"Ac {count} arall...",legitimateInterestNotice:"Mae rhai partneriaid yn hawlio buddiant cyfreithlon i brosesu eich data. Mae gennych hawl i wrthwynebu\u2019r prosesu hwn, addasu eich dewisiadau, a thynnu eich cydsyniad yn \xF4l unrhyw bryd.",scopeServiceSpecific:"Mae eich caniat\xE2d yn berthnasol i\u2019r wefan hon yn unig ac ni fydd yn effeithio ar wasanaethau eraill.",scopeGroup:"Mae eich dewis yn berthnasol ar draws ein gwefannau yn y gr\u0175p hwn."},preferenceCenter:{title:"Gosodiadau preifatrwydd",description:"Addaswch eich gosodiadau preifatrwydd yma. Gallwch ddewis pa fathau o gwcis a thechnolegau tracio rydych yn eu caniat\xE1u.",tabs:{purposes:"Dibenion",vendors:"Gwerthwyr"},purposeItem:{partners:"{count} partner",vendorsUseLegitimateInterest:"{count} gwerthwr yn hawlio buddiant cyfreithlon",examples:"Enghreifftiau",partnersUsingPurpose:"Partneriaid sy\u2019n Defnyddio\u2019r Diben Hwn",withYourPermission:"Gyda\u2019ch Caniat\xE2d",legitimateInterest:"Buddiant Cyfreithlon",objectButton:"Gwrthwynebu",objected:"Gwrthwynebwyd",rightToObject:"Mae gennych hawl i wrthwynebu prosesu sy\u2019n seiliedig ar fuddiant cyfreithlon."},specialPurposes:{title:"Swyddogaethau Hanfodol (Angenrheidiol)",tooltip:"Mae\u2019r rhain yn angenrheidiol ar gyfer swyddogaethau a diogelwch y wefan. Yn unol ag IAB TCF, ni allwch wrthwynebu\u2019r dibenion arbennig hyn."},vendorList:{search:"Chwilio gwerthwyr...",showingCount:"{filtered} o {total} gwerthwr",iabVendorsHeading:"Gwerthwyr Cofrestredig IAB",iabVendorsNotice:"Mae\u2019r partneriaid hyn wedi\u2019u cofrestru gyda Fframwaith Tryloywder a Chydsyniad (TCF) yr IAB, safon diwydiant ar gyfer rheoli cydsyniad",customVendorsHeading:"Partneriaid Personol",customVendorsNotice:"Partneriaid personol yw\u2019r rhain nad ydynt wedi\u2019u cofrestru gyda Fframwaith Tryloywder a Chydsyniad (TCF) yr IAB. 
Maent yn prosesu data yn seiliedig ar eich cydsyniad ac fe allant fod ag arferion preifatrwydd gwahanol i werthwyr cofrestredig IAB.",purposes:"Dibenion",specialPurposes:"Dibenion Arbennig",specialFeatures:"Nodweddion Arbennig",features:"Nodweddion",dataCategories:"Categor\xEFau Data",usesCookies:"Yn Defnyddio Cwcis",nonCookieAccess:"Mynediad Heb Gwcis",maxAge:"Oed Uchaf: {days}d",retention:"Cadw: {days}d",legitimateInterest:"Buddiant Cyf.",privacyPolicy:"Polisi Preifatrwydd",storageDisclosure:"Datgelu Storio",requiredNotice:"Angenrheidiol ar gyfer swyddogaeth y wefan, ni ellir ei analluogi"},footer:{consentStorage:'Mae dewisiadau cydsyniad yn cael eu storio mewn cwci o\u2019r enw "euconsent-v2" am 13 mis. Gall y cyfnod storio gael ei adnewyddu pan fyddwch yn diweddaru eich dewisiadau.'}},common:{acceptAll:"Derbyn pob un",rejectAll:"Gwrthod pob un",customize:"Addasu",saveSettings:"Cadw gosodiadau",loading:"Wrthi\u2019n llwytho...",showingSelectedVendor:"Yn dangos y gwerthwr a ddewiswyd",clearSelection:"Clirio",customPartner:"Partner personol heb ei gofrestru gyda\u2019r IAB"}}},oi={common:{acceptAll:"Accepter alle",rejectAll:"Afvis alle",customize:"Tilpas",save:"Gem indstillinger"},cookieBanner:{title:"Vi v\xE6rds\xE6tter dit privatliv",description:"Denne side bruger cookies til at forbedre din browsingoplevelse, analysere trafikken p\xE5 siden og vise personligt tilpasset indhold."},consentManagerDialog:{title:"Privatlivsindstillinger",description:"Tilpas dine privatlivsindstillinger her. 
Du kan v\xE6lge, hvilke typer cookies og sporingsteknologier du vil tillade."},consentTypes:{necessary:{title:"Strengt n\xF8dvendige",description:"Disse cookies er essentielle for, at hjemmesiden fungerer korrekt, og de kan ikke deaktiveres."},functionality:{title:"Funktionalitet",description:"Disse cookies muligg\xF8r forbedret funktionalitet og personalisering af hjemmesiden."},marketing:{title:"Markedsf\xF8ring",description:"Disse cookies bruges til at levere relevante annoncer og spore deres effektivitet."},measurement:{title:"Analyse",description:"Disse cookies hj\xE6lper os med at forst\xE5, hvordan bes\xF8gende interagerer med hjemmesiden og forbedre dens ydeevne."},experience:{title:"Oplevelse",description:"Disse cookies hj\xE6lper os med at levere en bedre brugeroplevelse og teste nye funktioner."}},frame:{title:"Accepter {category}-samtykke for at se dette indhold.",actionButton:"Aktiv\xE9r {category}-samtykke"},legalLinks:{privacyPolicy:"Privatlivspolitik",cookiePolicy:"Cookiepolitik",termsOfService:"Servicevilk\xE5r"},iab:{banner:{title:"Privatlivsindstillinger",description:"Vi og vores {partnerCount} partnere gemmer og/eller f\xE5r adgang til oplysninger p\xE5 din enhed og behandler personoplysninger, s\xE5som unikke id'er og browserdata, for dette website, for at:",partnersLink:"{count} partnere",andMore:"Og {count} mere...",legitimateInterestNotice:"Nogle partnere p\xE5ber\xE5ber sig legitim interesse for at behandle dine data. Du har ret til at g\xF8re indsigelse mod denne behandling, tilpasse dine valg og tr\xE6kke dit samtykke tilbage til enhver tid.",scopeServiceSpecific:"Dit samtykke g\xE6lder kun for dette websted og vil ikke p\xE5virke andre tjenester.",scopeGroup:"Dit valg g\xE6lder p\xE5 tv\xE6rs af vores websteder i denne gruppe."},preferenceCenter:{title:"Privatlivsindstillinger",description:"Tilpas dine privatlivsindstillinger her. 
Du kan v\xE6lge, hvilke typer cookies og sporingsteknologier du vil tillade.",tabs:{purposes:"Form\xE5l",vendors:"Leverand\xF8rer"},purposeItem:{partners:"{count} partnere",vendorsUseLegitimateInterest:"{count} leverand\xF8rer p\xE5ber\xE5ber sig legitim interesse",examples:"Eksempler",partnersUsingPurpose:"Partnere, der bruger dette form\xE5l",withYourPermission:"Med dit samtykke",legitimateInterest:"Legitim interesse",objectButton:"G\xF8r indsigelse",objected:"Indsigelse gjort",rightToObject:"Du har ret til at g\xF8re indsigelse mod behandling baseret p\xE5 legitim interesse."},specialPurposes:{title:"N\xF8dvendige funktioner (p\xE5kr\xE6vet)",tooltip:"Disse er n\xF8dvendige for sidens funktionalitet og sikkerhed. If\xF8lge IAB TCF kan du ikke g\xF8re indsigelse mod disse s\xE6rlige form\xE5l."},vendorList:{search:"S\xF8g leverand\xF8rer...",showingCount:"Viser {filtered} af {total} leverand\xF8rer",iabVendorsHeading:"IAB-registrerede leverand\xF8rer",iabVendorsNotice:"Disse partnere er registreret hos IAB Transparency & Consent Framework (TCF), en branchestandard for h\xE5ndtering af samtykke",customVendorsHeading:"Brugerdefinerede partnere",customVendorsNotice:"Disse er tilpassede partnere, som ikke er registreret hos IAB Transparency & Consent Framework (TCF). De behandler data baseret p\xE5 dit samtykke og kan have andre privatlivspraksisser end IAB-registrerede leverand\xF8rer.",purposes:"Form\xE5l",specialPurposes:"S\xE6rlige form\xE5l",specialFeatures:"S\xE6rlige funktioner",features:"Funktioner",dataCategories:"Datakategorier",usesCookies:"Bruger cookies",nonCookieAccess:"Adgang uden cookies",maxAge:"Maks. alder: {days}d",retention:"Opbevaring: {days}d",legitimateInterest:"Legitim interesse",privacyPolicy:"Privatlivspolitik",storageDisclosure:"Oplysning om lagring",requiredNotice:"P\xE5kr\xE6vet for sidens funktionalitet, kan ikke deaktiveres"},footer:{consentStorage:'Samtykkepr\xE6ferencer gemmes i en cookie med navnet "euconsent-v2" i 13 m\xE5neder. 
Lagringsperioden kan blive fornyet, n\xE5r du opdaterer dine pr\xE6ferencer.'}},common:{acceptAll:"Accepter alle",rejectAll:"Afvis alle",customize:"Tilpas",saveSettings:"Gem indstillinger",loading:"Indl\xE6ser...",showingSelectedVendor:"Viser valgt leverand\xF8r",clearSelection:"Ryd",customPartner:"Tilpasset partner, ikke registreret hos IAB"}}},ai={common:{acceptAll:"Alle akzeptieren",rejectAll:"Alle ablehnen",customize:"Anpassen",save:"Einstellungen speichern"},cookieBanner:{title:"Wir respektieren Deine Privatsph\xE4re.",description:"Diese Website verwendet Cookies, um Deine Surf-Erfahrung zu verbessern, den Seitenverkehr zu analysieren und pers\xF6nliche Inhalte anzuzeigen."},consentManagerDialog:{title:"Einstellungen",description:"Passe Deine Datenschutz-Einstellungen hier an. W\xE4hle aus, welche Arten von Cookies und Tracking-Technologien zugelassen werden."},consentTypes:{necessary:{title:"Unbedingt erforderliche Cookies",description:"Diese Cookies sind f\xFCr das reibungslose Funktionieren der Website unerl\xE4sslich und k\xF6nnen nicht deaktiviert werden."},functionality:{title:"Funktionalit\xE4t",description:"Diese Cookies erm\xF6glichen erweiterte Funktionalit\xE4ten und eine Personalisierung der Website."},marketing:{title:"Marketing",description:"Diese Cookies werden verwendet, um relevante Werbung anzuzeigen und ihre Wirksamkeit zu messen."},measurement:{title:"Analyse",description:"Diese Cookies helfen uns zu verstehen, wie Besucher mit der Website interagieren um die Surf-Erfahrung zu verbessern."},experience:{title:"Erfahrung",description:"Diese Cookies helfen uns dabei, ein besseres Nutzerlebnis zu bieten und neue Funktionen zu testen."}},frame:{title:"Akzeptieren Sie {category}, um diesen Inhalt anzuzeigen.",actionButton:"Zustimmung f\xFCr {category} aktivieren"},legalLinks:{privacyPolicy:"Datenschutzerkl\xE4rung",cookiePolicy:"Cookie-Richtlinie",termsOfService:"Nutzungsbedingungen"},iab:{banner:{title:"Datenschutz-Einstellungen",description:"Wir und 
unsere {partnerCount} Partner speichern und/oder greifen auf Informationen auf Deinem Ger\xE4t zu und verarbeiten personenbezogene Daten, wie eindeutige Kennungen und Browsing-Daten, f\xFCr diese Website, um:",partnersLink:"{count} Partner",andMore:"Und {count} weitere...",legitimateInterestNotice:"Einige Partner beanspruchen ein berechtigtes Interesse zur Verarbeitung Deiner Daten. Du hast das Recht, dieser Verarbeitung zu widersprechen, Deine Auswahl anzupassen und Deine Einwilligung jederzeit zu widerrufen.",scopeServiceSpecific:"Deine Einwilligung gilt nur f\xFCr diese Website und hat keinen Einfluss auf andere Dienste.",scopeGroup:"Ihre Auswahl gilt f\xFCr alle unsere Websites in dieser Gruppe."},preferenceCenter:{title:"Datenschutz-Einstellungen",description:"Passe Deine Datenschutz-Einstellungen hier an. W\xE4hle aus, welche Arten von Cookies und Tracking-Technologien zugelassen werden.",tabs:{purposes:"Zwecke",vendors:"Anbieter"},purposeItem:{partners:"{count} Partner",vendorsUseLegitimateInterest:"{count} Anbieter beanspruchen berechtigtes Interesse",examples:"Beispiele",partnersUsingPurpose:"Partner, die diesen Zweck nutzen",withYourPermission:"Mit Deiner Erlaubnis",legitimateInterest:"Berechtigtes Interesse",objectButton:"Widersprechen",objected:"Widersprochen",rightToObject:"Du hast das Recht, der Verarbeitung auf Grundlage berechtigten Interesses zu widersprechen."},specialPurposes:{title:"Wesentliche Funktionen (erforderlich)",tooltip:"Diese sind f\xFCr die Funktionalit\xE4t und Sicherheit der Website erforderlich. 
Gem\xE4\xDF IAB TCF kannst Du diesen besonderen Zwecken nicht widersprechen."},vendorList:{search:"Anbieter suchen...",showingCount:"{filtered} von {total} Anbietern",iabVendorsHeading:"IAB-registrierte Anbieter",iabVendorsNotice:"Diese Partner sind beim IAB Transparency & Consent Framework (TCF) registriert, einem Industriestandard f\xFCr die Verwaltung von Einwilligungen",customVendorsHeading:"Benutzerdefinierte Partner",customVendorsNotice:"Dies sind benutzerdefinierte Partner, die nicht beim IAB Transparency & Consent Framework (TCF) registriert sind. Sie verarbeiten Daten auf Grundlage Ihrer Einwilligung und k\xF6nnen andere Datenschutzpraktiken haben als IAB-registrierte Anbieter.",purposes:"Zwecke",specialPurposes:"Besondere Zwecke",specialFeatures:"Besondere Merkmale",features:"Merkmale",dataCategories:"Datenkategorien",usesCookies:"Verwendet Cookies",nonCookieAccess:"Zugriff ohne Cookies",maxAge:"Max. Alter: {days} Tage",retention:"Aufbewahrung: {days} Tage",legitimateInterest:"Berecht. Interesse",privacyPolicy:"Datenschutzerkl\xE4rung",storageDisclosure:"Speicheroffenlegung",requiredNotice:"Erforderlich f\xFCr die Funktionalit\xE4t der Website, kann nicht deaktiviert werden"},footer:{consentStorage:'Einwilligungspr\xE4ferenzen werden in einem Cookie namens "euconsent-v2" f\xFCr 13 Monate gespeichert. 
Die Speicherdauer kann erneuert werden, wenn Du Deine Einstellungen aktualisierst.'}},common:{acceptAll:"Alle akzeptieren",rejectAll:"Alle ablehnen",customize:"Anpassen",saveSettings:"Einstellungen speichern",loading:"Wird geladen...",showingSelectedVendor:"Ausgew\xE4hlter Anbieter wird angezeigt",clearSelection:"L\xF6schen",customPartner:"Benutzerdefinierter Partner, nicht beim IAB registriert"}}},ci={common:{acceptAll:"\u0391\u03C0\u03BF\u03B4\u03BF\u03C7\u03AE \u03CC\u03BB\u03C9\u03BD",rejectAll:"\u0391\u03C0\u03CC\u03C1\u03C1\u03B9\u03C8\u03B7 \u03CC\u03BB\u03C9\u03BD",customize:"\u03A0\u03C1\u03BF\u03C3\u03B1\u03C1\u03BC\u03BF\u03B3\u03AE",save:"\u0391\u03C0\u03BF\u03B8\u03AE\u03BA\u03B5\u03C5\u03C3\u03B7 \u03C1\u03C5\u03B8\u03BC\u03AF\u03C3\u03B5\u03C9\u03BD"},cookieBanner:{title:"\u0395\u03BA\u03C4\u03B9\u03BC\u03BF\u03CD\u03BC\u03B5 \u03C4\u03BF \u03B1\u03C0\u03CC\u03C1\u03C1\u03B7\u03C4\u03CC \u03C3\u03B1\u03C2",description:"\u0391\u03C5\u03C4\u03CC\u03C2 \u03BF \u03B9\u03C3\u03C4\u03CC\u03C4\u03BF\u03C0\u03BF\u03C2 \u03C7\u03C1\u03B7\u03C3\u03B9\u03BC\u03BF\u03C0\u03BF\u03B9\u03B5\u03AF cookies \u03B3\u03B9\u03B1 \u03C4\u03B7 \u03B2\u03B5\u03BB\u03C4\u03AF\u03C9\u03C3\u03B7 \u03C4\u03B7\u03C2 \u03B5\u03BC\u03C0\u03B5\u03B9\u03C1\u03AF\u03B1\u03C2 \u03C0\u03B5\u03C1\u03B9\u03AE\u03B3\u03B7\u03C3\u03AE\u03C2 \u03C3\u03B1\u03C2, \u03C4\u03B7\u03BD \u03B1\u03BD\u03AC\u03BB\u03C5\u03C3\u03B7 \u03C4\u03B7\u03C2 \u03B5\u03C0\u03B9\u03C3\u03BA\u03B5\u03C8\u03B9\u03BC\u03CC\u03C4\u03B7\u03C4\u03B1\u03C2 \u03C4\u03BF\u03C5 \u03B9\u03C3\u03C4\u03CC\u03C4\u03BF\u03C0\u03BF\u03C5 \u03BA\u03B1\u03B9 \u03C4\u03B7\u03BD \u03C0\u03C1\u03BF\u03B2\u03BF\u03BB\u03AE \u03B5\u03BE\u03B1\u03C4\u03BF\u03BC\u03B9\u03BA\u03B5\u03C5\u03BC\u03AD\u03BD\u03BF\u03C5 \u03C0\u03B5\u03C1\u03B9\u03B5\u03C7\u03BF\u03BC\u03AD\u03BD\u03BF\u03C5."},consentManagerDialog:{title:"\u03A1\u03C5\u03B8\u03BC\u03AF\u03C3\u03B5\u03B9\u03C2 
\u03B1\u03C0\u03BF\u03C1\u03C1\u03AE\u03C4\u03BF\u03C5",description:"\u03A0\u03C1\u03BF\u03C3\u03B1\u03C1\u03BC\u03CC\u03C3\u03C4\u03B5 \u03C4\u03B9\u03C2 \u03C1\u03C5\u03B8\u03BC\u03AF\u03C3\u03B5\u03B9\u03C2 \u03B1\u03C0\u03BF\u03C1\u03C1\u03AE\u03C4\u03BF\u03C5 \u03C3\u03B1\u03C2 \u03B5\u03B4\u03CE. \u039C\u03C0\u03BF\u03C1\u03B5\u03AF\u03C4\u03B5 \u03BD\u03B1 \u03B5\u03C0\u03B9\u03BB\u03AD\u03BE\u03B5\u03C4\u03B5 \u03C0\u03BF\u03B9\u03BF\u03C5\u03C2 \u03C4\u03CD\u03C0\u03BF\u03C5\u03C2 cookies \u03BA\u03B1\u03B9 \u03C4\u03B5\u03C7\u03BD\u03BF\u03BB\u03BF\u03B3\u03B9\u03CE\u03BD \u03C0\u03B1\u03C1\u03B1\u03BA\u03BF\u03BB\u03BF\u03CD\u03B8\u03B7\u03C3\u03B7\u03C2 \u03B5\u03C0\u03B9\u03C4\u03C1\u03AD\u03C0\u03B5\u03C4\u03B5."},consentTypes:{necessary:{title:"\u0391\u03C0\u03BF\u03BB\u03CD\u03C4\u03C9\u03C2 \u03B1\u03C0\u03B1\u03C1\u03B1\u03AF\u03C4\u03B7\u03C4\u03B1",description:"\u0391\u03C5\u03C4\u03AC \u03C4\u03B1 cookies \u03B5\u03AF\u03BD\u03B1\u03B9 \u03B1\u03C0\u03B1\u03C1\u03B1\u03AF\u03C4\u03B7\u03C4\u03B1 \u03B3\u03B9\u03B1 \u03C4\u03B7 \u03C3\u03C9\u03C3\u03C4\u03AE \u03BB\u03B5\u03B9\u03C4\u03BF\u03C5\u03C1\u03B3\u03AF\u03B1 \u03C4\u03BF\u03C5 \u03B9\u03C3\u03C4\u03CC\u03C4\u03BF\u03C0\u03BF\u03C5 \u03BA\u03B1\u03B9 \u03B4\u03B5\u03BD \u03BC\u03C0\u03BF\u03C1\u03BF\u03CD\u03BD \u03BD\u03B1 \u03B1\u03C0\u03B5\u03BD\u03B5\u03C1\u03B3\u03BF\u03C0\u03BF\u03B9\u03B7\u03B8\u03BF\u03CD\u03BD."},functionality:{title:"\u039B\u03B5\u03B9\u03C4\u03BF\u03C5\u03C1\u03B3\u03B9\u03BA\u03CC\u03C4\u03B7\u03C4\u03B1",description:"\u0391\u03C5\u03C4\u03AC \u03C4\u03B1 cookies \u03B5\u03C0\u03B9\u03C4\u03C1\u03AD\u03C0\u03BF\u03C5\u03BD \u03B2\u03B5\u03BB\u03C4\u03B9\u03C9\u03BC\u03AD\u03BD\u03B7 \u03BB\u03B5\u03B9\u03C4\u03BF\u03C5\u03C1\u03B3\u03B9\u03BA\u03CC\u03C4\u03B7\u03C4\u03B1 \u03BA\u03B1\u03B9 \u03B5\u03BE\u03B1\u03C4\u03BF\u03BC\u03AF\u03BA\u03B5\u03C5\u03C3\u03B7 \u03C4\u03BF\u03C5 
\u03B9\u03C3\u03C4\u03CC\u03C4\u03BF\u03C0\u03BF\u03C5."},marketing:{title:"\u039C\u03AC\u03C1\u03BA\u03B5\u03C4\u03B9\u03BD\u03B3\u03BA",description:"\u0391\u03C5\u03C4\u03AC \u03C4\u03B1 cookies \u03C7\u03C1\u03B7\u03C3\u03B9\u03BC\u03BF\u03C0\u03BF\u03B9\u03BF\u03CD\u03BD\u03C4\u03B1\u03B9 \u03B3\u03B9\u03B1 \u03C4\u03B7\u03BD \u03C0\u03C1\u03BF\u03B2\u03BF\u03BB\u03AE \u03C3\u03C7\u03B5\u03C4\u03B9\u03BA\u03CE\u03BD \u03B4\u03B9\u03B1\u03C6\u03B7\u03BC\u03AF\u03C3\u03B5\u03C9\u03BD \u03BA\u03B1\u03B9 \u03C4\u03B7\u03BD \u03C0\u03B1\u03C1\u03B1\u03BA\u03BF\u03BB\u03BF\u03CD\u03B8\u03B7\u03C3\u03B7 \u03C4\u03B7\u03C2 \u03B1\u03C0\u03BF\u03C4\u03B5\u03BB\u03B5\u03C3\u03BC\u03B1\u03C4\u03B9\u03BA\u03CC\u03C4\u03B7\u03C4\u03AC\u03C2 \u03C4\u03BF\u03C5\u03C2."},measurement:{title:"\u0391\u03BD\u03B1\u03BB\u03C5\u03C4\u03B9\u03BA\u03AC \u03C3\u03C4\u03BF\u03B9\u03C7\u03B5\u03AF\u03B1",description:"\u0391\u03C5\u03C4\u03AC \u03C4\u03B1 cookies \u03BC\u03B1\u03C2 \u03B2\u03BF\u03B7\u03B8\u03BF\u03CD\u03BD \u03BD\u03B1 \u03BA\u03B1\u03C4\u03B1\u03BD\u03BF\u03AE\u03C3\u03BF\u03C5\u03BC\u03B5 \u03C0\u03CE\u03C2 \u03B1\u03BB\u03BB\u03B7\u03BB\u03B5\u03C0\u03B9\u03B4\u03C1\u03BF\u03CD\u03BD \u03BF\u03B9 \u03B5\u03C0\u03B9\u03C3\u03BA\u03AD\u03C0\u03C4\u03B5\u03C2 \u03BC\u03B5 \u03C4\u03BF\u03BD \u03B9\u03C3\u03C4\u03CC\u03C4\u03BF\u03C0\u03BF \u03BA\u03B1\u03B9 \u03BD\u03B1 \u03B2\u03B5\u03BB\u03C4\u03B9\u03CE\u03C3\u03BF\u03C5\u03BC\u03B5 \u03C4\u03B7\u03BD \u03B1\u03C0\u03CC\u03B4\u03BF\u03C3\u03AE \u03C4\u03BF\u03C5."},experience:{title:"\u0395\u03BC\u03C0\u03B5\u03B9\u03C1\u03AF\u03B1",description:"\u0391\u03C5\u03C4\u03AC \u03C4\u03B1 cookies \u03BC\u03B1\u03C2 \u03B2\u03BF\u03B7\u03B8\u03BF\u03CD\u03BD \u03BD\u03B1 \u03C0\u03B1\u03C1\u03AD\u03C7\u03BF\u03C5\u03BC\u03B5 \u03BA\u03B1\u03BB\u03CD\u03C4\u03B5\u03C1\u03B7 \u03B5\u03BC\u03C0\u03B5\u03B9\u03C1\u03AF\u03B1 \u03C7\u03C1\u03AE\u03C3\u03C4\u03B7 \u03BA\u03B1\u03B9 \u03BD\u03B1 
\u03B4\u03BF\u03BA\u03B9\u03BC\u03AC\u03B6\u03BF\u03C5\u03BC\u03B5 \u03BD\u03AD\u03B5\u03C2 \u03BB\u03B5\u03B9\u03C4\u03BF\u03C5\u03C1\u03B3\u03AF\u03B5\u03C2."}},frame:{title:"\u0391\u03C0\u03BF\u03B4\u03B5\u03C7\u03C4\u03B5\u03AF\u03C4\u03B5 \u03C4\u03B7 \u03C3\u03C5\u03B3\u03BA\u03B1\u03C4\u03AC\u03B8\u03B5\u03C3\u03B7 {category} \u03B3\u03B9\u03B1 \u03BD\u03B1 \u03B4\u03B5\u03AF\u03C4\u03B5 \u03B1\u03C5\u03C4\u03CC \u03C4\u03BF \u03C0\u03B5\u03C1\u03B9\u03B5\u03C7\u03CC\u03BC\u03B5\u03BD\u03BF.",actionButton:"\u0395\u03BD\u03B5\u03C1\u03B3\u03BF\u03C0\u03BF\u03AF\u03B7\u03C3\u03B7 \u03C3\u03C5\u03B3\u03BA\u03B1\u03C4\u03AC\u03B8\u03B5\u03C3\u03B7\u03C2 {category}"},legalLinks:{privacyPolicy:"\u03A0\u03BF\u03BB\u03B9\u03C4\u03B9\u03BA\u03AE \u03B1\u03C0\u03BF\u03C1\u03C1\u03AE\u03C4\u03BF\u03C5",cookiePolicy:"\u03A0\u03BF\u03BB\u03B9\u03C4\u03B9\u03BA\u03AE cookies",termsOfService:"\u038C\u03C1\u03BF\u03B9 \u03C7\u03C1\u03AE\u03C3\u03B7\u03C2"},iab:{banner:{title:"\u03A1\u03C5\u03B8\u03BC\u03AF\u03C3\u03B5\u03B9\u03C2 \u03B1\u03C0\u03BF\u03C1\u03C1\u03AE\u03C4\u03BF\u03C5",description:"\u0395\u03BC\u03B5\u03AF\u03C2 \u03BA\u03B1\u03B9 \u03BF\u03B9 {partnerCount} \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2 \u03BC\u03B1\u03C2 \u03B1\u03C0\u03BF\u03B8\u03B7\u03BA\u03B5\u03CD\u03BF\u03C5\u03BC\u03B5 \u03AE/\u03BA\u03B1\u03B9 \u03AD\u03C7\u03BF\u03C5\u03BC\u03B5 \u03C0\u03C1\u03CC\u03C3\u03B2\u03B1\u03C3\u03B7 \u03C3\u03B5 \u03C0\u03BB\u03B7\u03C1\u03BF\u03C6\u03BF\u03C1\u03AF\u03B5\u03C2 \u03C3\u03C4\u03B7 \u03C3\u03C5\u03C3\u03BA\u03B5\u03C5\u03AE \u03C3\u03B1\u03C2 \u03BA\u03B1\u03B9 \u03B5\u03C0\u03B5\u03BE\u03B5\u03C1\u03B3\u03B1\u03B6\u03CC\u03BC\u03B1\u03C3\u03C4\u03B5 \u03C0\u03C1\u03BF\u03C3\u03C9\u03C0\u03B9\u03BA\u03AC \u03B4\u03B5\u03B4\u03BF\u03BC\u03AD\u03BD\u03B1, \u03CC\u03C0\u03C9\u03C2 \u03BC\u03BF\u03BD\u03B1\u03B4\u03B9\u03BA\u03AC \u03B1\u03BD\u03B1\u03B3\u03BD\u03C9\u03C1\u03B9\u03C3\u03C4\u03B9\u03BA\u03AC \u03BA\u03B1\u03B9 
\u03B4\u03B5\u03B4\u03BF\u03BC\u03AD\u03BD\u03B1 \u03C0\u03B5\u03C1\u03B9\u03AE\u03B3\u03B7\u03C3\u03B7\u03C2, \u03B3\u03B9\u03B1 \u03B1\u03C5\u03C4\u03CC\u03BD \u03C4\u03BF\u03BD \u03B9\u03C3\u03C4\u03CC\u03C4\u03BF\u03C0\u03BF, \u03B3\u03B9\u03B1 \u03BD\u03B1:",partnersLink:"{count} \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2",andMore:"\u039A\u03B1\u03B9 {count} \u03B1\u03BA\u03CC\u03BC\u03B7...",legitimateInterestNotice:"\u039F\u03C1\u03B9\u03C3\u03BC\u03AD\u03BD\u03BF\u03B9 \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2 \u03B5\u03C0\u03B9\u03BA\u03B1\u03BB\u03BF\u03CD\u03BD\u03C4\u03B1\u03B9 \u03AD\u03BD\u03BD\u03BF\u03BC\u03BF \u03C3\u03C5\u03BC\u03C6\u03AD\u03C1\u03BF\u03BD \u03B3\u03B9\u03B1 \u03C4\u03B7\u03BD \u03B5\u03C0\u03B5\u03BE\u03B5\u03C1\u03B3\u03B1\u03C3\u03AF\u03B1 \u03C4\u03C9\u03BD \u03B4\u03B5\u03B4\u03BF\u03BC\u03AD\u03BD\u03C9\u03BD \u03C3\u03B1\u03C2. \u0388\u03C7\u03B5\u03C4\u03B5 \u03C4\u03BF \u03B4\u03B9\u03BA\u03B1\u03AF\u03C9\u03BC\u03B1 \u03BD\u03B1 \u03B1\u03BD\u03C4\u03B9\u03C4\u03B1\u03C7\u03B8\u03B5\u03AF\u03C4\u03B5 \u03C3\u03B5 \u03B1\u03C5\u03C4\u03AE\u03BD \u03C4\u03B7\u03BD \u03B5\u03C0\u03B5\u03BE\u03B5\u03C1\u03B3\u03B1\u03C3\u03AF\u03B1, \u03BD\u03B1 \u03C0\u03C1\u03BF\u03C3\u03B1\u03C1\u03BC\u03CC\u03C3\u03B5\u03C4\u03B5 \u03C4\u03B9\u03C2 \u03B5\u03C0\u03B9\u03BB\u03BF\u03B3\u03AD\u03C2 \u03C3\u03B1\u03C2 \u03BA\u03B1\u03B9 \u03BD\u03B1 \u03B1\u03BD\u03B1\u03BA\u03B1\u03BB\u03AD\u03C3\u03B5\u03C4\u03B5 \u03C4\u03B7 \u03C3\u03C5\u03B3\u03BA\u03B1\u03C4\u03AC\u03B8\u03B5\u03C3\u03AE \u03C3\u03B1\u03C2 \u03B1\u03BD\u03AC \u03C0\u03AC\u03C3\u03B1 \u03C3\u03C4\u03B9\u03B3\u03BC\u03AE.",scopeServiceSpecific:"\u0397 \u03C3\u03C5\u03B3\u03BA\u03B1\u03C4\u03AC\u03B8\u03B5\u03C3\u03AE \u03C3\u03B1\u03C2 \u03B9\u03C3\u03C7\u03CD\u03B5\u03B9 \u03BC\u03CC\u03BD\u03BF \u03B3\u03B9\u03B1 \u03B1\u03C5\u03C4\u03CC\u03BD \u03C4\u03BF\u03BD \u03B9\u03C3\u03C4\u03CC\u03C4\u03BF\u03C0\u03BF \u03BA\u03B1\u03B9 
\u03B4\u03B5\u03BD \u03B8\u03B1 \u03B5\u03C0\u03B7\u03C1\u03B5\u03AC\u03C3\u03B5\u03B9 \u03AC\u03BB\u03BB\u03B5\u03C2 \u03C5\u03C0\u03B7\u03C1\u03B5\u03C3\u03AF\u03B5\u03C2.",scopeGroup:"\u0397 \u03B5\u03C0\u03B9\u03BB\u03BF\u03B3\u03AE \u03C3\u03B1\u03C2 \u03B9\u03C3\u03C7\u03CD\u03B5\u03B9 \u03B3\u03B9\u03B1 \u03CC\u03BB\u03B5\u03C2 \u03C4\u03B9\u03C2 \u03B9\u03C3\u03C4\u03BF\u03C3\u03B5\u03BB\u03AF\u03B4\u03B5\u03C2 \u03BC\u03B1\u03C2 \u03C3\u03B5 \u03B1\u03C5\u03C4\u03AE \u03C4\u03B7\u03BD \u03BF\u03BC\u03AC\u03B4\u03B1."},preferenceCenter:{title:"\u03A1\u03C5\u03B8\u03BC\u03AF\u03C3\u03B5\u03B9\u03C2 \u03B1\u03C0\u03BF\u03C1\u03C1\u03AE\u03C4\u03BF\u03C5",description:"\u03A0\u03C1\u03BF\u03C3\u03B1\u03C1\u03BC\u03CC\u03C3\u03C4\u03B5 \u03C4\u03B9\u03C2 \u03C1\u03C5\u03B8\u03BC\u03AF\u03C3\u03B5\u03B9\u03C2 \u03B1\u03C0\u03BF\u03C1\u03C1\u03AE\u03C4\u03BF\u03C5 \u03C3\u03B1\u03C2 \u03B5\u03B4\u03CE. \u039C\u03C0\u03BF\u03C1\u03B5\u03AF\u03C4\u03B5 \u03BD\u03B1 \u03B5\u03C0\u03B9\u03BB\u03AD\u03BE\u03B5\u03C4\u03B5 \u03C0\u03BF\u03B9\u03BF\u03C5\u03C2 \u03C4\u03CD\u03C0\u03BF\u03C5\u03C2 cookies \u03BA\u03B1\u03B9 \u03C4\u03B5\u03C7\u03BD\u03BF\u03BB\u03BF\u03B3\u03B9\u03CE\u03BD \u03C0\u03B1\u03C1\u03B1\u03BA\u03BF\u03BB\u03BF\u03CD\u03B8\u03B7\u03C3\u03B7\u03C2 \u03B5\u03C0\u03B9\u03C4\u03C1\u03AD\u03C0\u03B5\u03C4\u03B5.",tabs:{purposes:"\u03A3\u03BA\u03BF\u03C0\u03BF\u03AF",vendors:"\u03A3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2"},purposeItem:{partners:"{count} \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2",vendorsUseLegitimateInterest:"{count} \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2 \u03B5\u03C0\u03B9\u03BA\u03B1\u03BB\u03BF\u03CD\u03BD\u03C4\u03B1\u03B9 \u03AD\u03BD\u03BD\u03BF\u03BC\u03BF \u03C3\u03C5\u03BC\u03C6\u03AD\u03C1\u03BF\u03BD",examples:"\u03A0\u03B1\u03C1\u03B1\u03B4\u03B5\u03AF\u03B3\u03BC\u03B1\u03C4\u03B1",partnersUsingPurpose:"\u03A3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2 
\u03C0\u03BF\u03C5 \u03C7\u03C1\u03B7\u03C3\u03B9\u03BC\u03BF\u03C0\u03BF\u03B9\u03BF\u03CD\u03BD \u03B1\u03C5\u03C4\u03CC\u03BD \u03C4\u03BF\u03BD \u03C3\u03BA\u03BF\u03C0\u03CC",withYourPermission:"\u039C\u03B5 \u03C4\u03B7 \u03C3\u03C5\u03B3\u03BA\u03B1\u03C4\u03AC\u03B8\u03B5\u03C3\u03AE \u03C3\u03B1\u03C2",legitimateInterest:"\u0388\u03BD\u03BD\u03BF\u03BC\u03BF \u03C3\u03C5\u03BC\u03C6\u03AD\u03C1\u03BF\u03BD",objectButton:"\u0391\u03BD\u03C4\u03AF\u03C1\u03C1\u03B7\u03C3\u03B7",objected:"\u0391\u03BD\u03C4\u03B9\u03C4\u03AC\u03C7\u03B8\u03B7\u03BA\u03B5",rightToObject:"\u0388\u03C7\u03B5\u03C4\u03B5 \u03C4\u03BF \u03B4\u03B9\u03BA\u03B1\u03AF\u03C9\u03BC\u03B1 \u03BD\u03B1 \u03B1\u03BD\u03C4\u03B9\u03C4\u03B1\u03C7\u03B8\u03B5\u03AF\u03C4\u03B5 \u03C3\u03C4\u03B7\u03BD \u03B5\u03C0\u03B5\u03BE\u03B5\u03C1\u03B3\u03B1\u03C3\u03AF\u03B1 \u03C0\u03BF\u03C5 \u03B2\u03B1\u03C3\u03AF\u03B6\u03B5\u03C4\u03B1\u03B9 \u03C3\u03B5 \u03AD\u03BD\u03BD\u03BF\u03BC\u03BF \u03C3\u03C5\u03BC\u03C6\u03AD\u03C1\u03BF\u03BD."},specialPurposes:{title:"\u0392\u03B1\u03C3\u03B9\u03BA\u03AD\u03C2 \u03BB\u03B5\u03B9\u03C4\u03BF\u03C5\u03C1\u03B3\u03AF\u03B5\u03C2 (\u03B1\u03C0\u03B1\u03B9\u03C4\u03BF\u03CD\u03BD\u03C4\u03B1\u03B9)",tooltip:"\u0391\u03C5\u03C4\u03AD\u03C2 \u03B5\u03AF\u03BD\u03B1\u03B9 \u03B1\u03C0\u03B1\u03C1\u03B1\u03AF\u03C4\u03B7\u03C4\u03B5\u03C2 \u03B3\u03B9\u03B1 \u03C4\u03B7 \u03BB\u03B5\u03B9\u03C4\u03BF\u03C5\u03C1\u03B3\u03B9\u03BA\u03CC\u03C4\u03B7\u03C4\u03B1 \u03BA\u03B1\u03B9 \u03C4\u03B7\u03BD \u03B1\u03C3\u03C6\u03AC\u03BB\u03B5\u03B9\u03B1 \u03C4\u03BF\u03C5 \u03B9\u03C3\u03C4\u03CC\u03C4\u03BF\u03C0\u03BF\u03C5. 
\u03A3\u03CD\u03BC\u03C6\u03C9\u03BD\u03B1 \u03BC\u03B5 \u03C4\u03BF IAB TCF, \u03B4\u03B5\u03BD \u03BC\u03C0\u03BF\u03C1\u03B5\u03AF\u03C4\u03B5 \u03BD\u03B1 \u03B1\u03BD\u03C4\u03B9\u03C4\u03B1\u03C7\u03B8\u03B5\u03AF\u03C4\u03B5 \u03C3\u03B5 \u03B1\u03C5\u03C4\u03BF\u03CD\u03C2 \u03C4\u03BF\u03C5\u03C2 \u03B5\u03B9\u03B4\u03B9\u03BA\u03BF\u03CD\u03C2 \u03C3\u03BA\u03BF\u03C0\u03BF\u03CD\u03C2."},vendorList:{search:"\u0391\u03BD\u03B1\u03B6\u03AE\u03C4\u03B7\u03C3\u03B7 \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03B1\u03C4\u03CE\u03BD...",showingCount:"{filtered} \u03B1\u03C0\u03CC {total} \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2",iabVendorsHeading:"\u0395\u03B3\u03B3\u03B5\u03B3\u03C1\u03B1\u03BC\u03BC\u03AD\u03BD\u03BF\u03B9 \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2 IAB",iabVendorsNotice:"\u0391\u03C5\u03C4\u03BF\u03AF \u03BF\u03B9 \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2 \u03B5\u03AF\u03BD\u03B1\u03B9 \u03B5\u03B3\u03B3\u03B5\u03B3\u03C1\u03B1\u03BC\u03BC\u03AD\u03BD\u03BF\u03B9 \u03C3\u03C4\u03BF IAB Transparency & Consent Framework (TCF), \u03AD\u03BD\u03B1 \u03B2\u03B9\u03BF\u03BC\u03B7\u03C7\u03B1\u03BD\u03B9\u03BA\u03CC \u03C0\u03C1\u03CC\u03C4\u03C5\u03C0\u03BF \u03B3\u03B9\u03B1 \u03C4\u03B7 \u03B4\u03B9\u03B1\u03C7\u03B5\u03AF\u03C1\u03B9\u03C3\u03B7 \u03C4\u03B7\u03C2 \u03C3\u03C5\u03B3\u03BA\u03B1\u03C4\u03AC\u03B8\u03B5\u03C3\u03B7\u03C2",customVendorsHeading:"\u03A0\u03C1\u03BF\u03C3\u03B1\u03C1\u03BC\u03BF\u03C3\u03BC\u03AD\u03BD\u03BF\u03B9 \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2",customVendorsNotice:"\u0391\u03C5\u03C4\u03BF\u03AF \u03B5\u03AF\u03BD\u03B1\u03B9 \u03C0\u03C1\u03BF\u03C3\u03B1\u03C1\u03BC\u03BF\u03C3\u03BC\u03AD\u03BD\u03BF\u03B9 \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2 \u03C0\u03BF\u03C5 \u03B4\u03B5\u03BD \u03B5\u03AF\u03BD\u03B1\u03B9 \u03B5\u03B3\u03B3\u03B5\u03B3\u03C1\u03B1\u03BC\u03BC\u03AD\u03BD\u03BF\u03B9 \u03C3\u03C4\u03BF IAB 
Transparency & Consent Framework (TCF). \u0395\u03C0\u03B5\u03BE\u03B5\u03C1\u03B3\u03AC\u03B6\u03BF\u03BD\u03C4\u03B1\u03B9 \u03B4\u03B5\u03B4\u03BF\u03BC\u03AD\u03BD\u03B1 \u03BC\u03B5 \u03B2\u03AC\u03C3\u03B7 \u03C4\u03B7 \u03C3\u03C5\u03B3\u03BA\u03B1\u03C4\u03AC\u03B8\u03B5\u03C3\u03AE \u03C3\u03B1\u03C2 \u03BA\u03B1\u03B9 \u03B5\u03BD\u03B4\u03AD\u03C7\u03B5\u03C4\u03B1\u03B9 \u03BD\u03B1 \u03AD\u03C7\u03BF\u03C5\u03BD \u03B4\u03B9\u03B1\u03C6\u03BF\u03C1\u03B5\u03C4\u03B9\u03BA\u03AD\u03C2 \u03C0\u03C1\u03B1\u03BA\u03C4\u03B9\u03BA\u03AD\u03C2 \u03B1\u03C0\u03BF\u03C1\u03C1\u03AE\u03C4\u03BF\u03C5 \u03B1\u03C0\u03CC \u03C4\u03BF\u03C5\u03C2 \u03B5\u03B3\u03B3\u03B5\u03B3\u03C1\u03B1\u03BC\u03BC\u03AD\u03BD\u03BF\u03C5\u03C2 \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B5\u03C2 \u03C4\u03BF\u03C5 IAB.",purposes:"\u03A3\u03BA\u03BF\u03C0\u03BF\u03AF",specialPurposes:"\u0395\u03B9\u03B4\u03B9\u03BA\u03BF\u03AF \u03C3\u03BA\u03BF\u03C0\u03BF\u03AF",specialFeatures:"\u0395\u03B9\u03B4\u03B9\u03BA\u03AC \u03C7\u03B1\u03C1\u03B1\u03BA\u03C4\u03B7\u03C1\u03B9\u03C3\u03C4\u03B9\u03BA\u03AC",features:"\u03A7\u03B1\u03C1\u03B1\u03BA\u03C4\u03B7\u03C1\u03B9\u03C3\u03C4\u03B9\u03BA\u03AC",dataCategories:"\u039A\u03B1\u03C4\u03B7\u03B3\u03BF\u03C1\u03AF\u03B5\u03C2 \u03B4\u03B5\u03B4\u03BF\u03BC\u03AD\u03BD\u03C9\u03BD",usesCookies:"\u03A7\u03C1\u03B7\u03C3\u03B9\u03BC\u03BF\u03C0\u03BF\u03B9\u03B5\u03AF cookies",nonCookieAccess:"\u03A0\u03C1\u03CC\u03C3\u03B2\u03B1\u03C3\u03B7 \u03C7\u03C9\u03C1\u03AF\u03C2 cookies",maxAge:"\u039C\u03AD\u03B3\u03B9\u03C3\u03C4\u03B7 \u03B4\u03B9\u03AC\u03C1\u03BA\u03B5\u03B9\u03B1: {days} \u03B7\u03BC.",retention:"\u0394\u03B9\u03B1\u03C4\u03AE\u03C1\u03B7\u03C3\u03B7: {days} \u03B7\u03BC.",legitimateInterest:"\u0388\u03BD\u03BD\u03BF\u03BC\u03BF \u03C3\u03C5\u03BC\u03C6\u03AD\u03C1\u03BF\u03BD",privacyPolicy:"\u03A0\u03BF\u03BB\u03B9\u03C4\u03B9\u03BA\u03AE 
\u03B1\u03C0\u03BF\u03C1\u03C1\u03AE\u03C4\u03BF\u03C5",storageDisclosure:"\u0393\u03BD\u03C9\u03C3\u03C4\u03BF\u03C0\u03BF\u03AF\u03B7\u03C3\u03B7 \u03B1\u03C0\u03BF\u03B8\u03AE\u03BA\u03B5\u03C5\u03C3\u03B7\u03C2",requiredNotice:"\u0391\u03C0\u03B1\u03B9\u03C4\u03B5\u03AF\u03C4\u03B1\u03B9 \u03B3\u03B9\u03B1 \u03C4\u03B7 \u03BB\u03B5\u03B9\u03C4\u03BF\u03C5\u03C1\u03B3\u03B9\u03BA\u03CC\u03C4\u03B7\u03C4\u03B1 \u03C4\u03BF\u03C5 \u03B9\u03C3\u03C4\u03CC\u03C4\u03BF\u03C0\u03BF\u03C5, \u03B4\u03B5\u03BD \u03BC\u03C0\u03BF\u03C1\u03B5\u03AF \u03BD\u03B1 \u03B1\u03C0\u03B5\u03BD\u03B5\u03C1\u03B3\u03BF\u03C0\u03BF\u03B9\u03B7\u03B8\u03B5\u03AF"},footer:{consentStorage:'\u039F\u03B9 \u03C0\u03C1\u03BF\u03C4\u03B9\u03BC\u03AE\u03C3\u03B5\u03B9\u03C2 \u03C3\u03C5\u03B3\u03BA\u03B1\u03C4\u03AC\u03B8\u03B5\u03C3\u03B7\u03C2 \u03B1\u03C0\u03BF\u03B8\u03B7\u03BA\u03B5\u03CD\u03BF\u03BD\u03C4\u03B1\u03B9 \u03C3\u03B5 cookie \u03BC\u03B5 \u03C4\u03BF \u03CC\u03BD\u03BF\u03BC\u03B1 "euconsent-v2" \u03B3\u03B9\u03B1 13 \u03BC\u03AE\u03BD\u03B5\u03C2. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"\u0391\u03C0\u03BF\u03B4\u03BF\u03C7\u03AE \u03CC\u03BB\u03C9\u03BD",rejectAll:"\u0391\u03C0\u03CC\u03C1\u03C1\u03B9\u03C8\u03B7 \u03CC\u03BB\u03C9\u03BD",customize:"\u03A0\u03C1\u03BF\u03C3\u03B1\u03C1\u03BC\u03BF\u03B3\u03AE",saveSettings:"\u0391\u03C0\u03BF\u03B8\u03AE\u03BA\u03B5\u03C5\u03C3\u03B7 \u03C1\u03C5\u03B8\u03BC\u03AF\u03C3\u03B5\u03C9\u03BD",loading:"\u03A6\u03CC\u03C1\u03C4\u03C9\u03C3\u03B7...",showingSelectedVendor:"\u0395\u03BC\u03C6\u03AC\u03BD\u03B9\u03C3\u03B7 \u03B5\u03C0\u03B9\u03BB\u03B5\u03B3\u03BC\u03AD\u03BD\u03BF\u03C5 \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B7",clearSelection:"\u0395\u03BA\u03BA\u03B1\u03B8\u03AC\u03C1\u03B9\u03C3\u03B7",customPartner:"\u03A0\u03C1\u03BF\u03C3\u03B1\u03C1\u03BC\u03BF\u03C3\u03BC\u03AD\u03BD\u03BF\u03C2 \u03C3\u03C5\u03BD\u03B5\u03C1\u03B3\u03AC\u03C4\u03B7\u03C2 \u03C0\u03BF\u03C5 \u03B4\u03B5\u03BD \u03B5\u03AF\u03BD\u03B1\u03B9 \u03B5\u03B3\u03B3\u03B5\u03B3\u03C1\u03B1\u03BC\u03BC\u03AD\u03BD\u03BF\u03C2 \u03C3\u03C4\u03BF IAB"}}},it={common:{acceptAll:"Accept All",rejectAll:"Reject All",customize:"Customize",save:"Save Settings"},cookieBanner:{title:"We value your privacy",description:"This site uses cookies to improve your browsing experience, analyze site traffic, and show personalized content."},consentManagerDialog:{title:"Privacy Settings",description:"Customize your privacy settings here. 
You can choose which types of cookies and tracking technologies you allow."},consentTypes:{necessary:{title:"Strictly Necessary",description:"These cookies are essential for the website to function properly and cannot be disabled."},functionality:{title:"Functionality",description:"These cookies enable enhanced functionality and personalization of the website."},marketing:{title:"Marketing",description:"These cookies are used to deliver relevant advertisements and track their effectiveness."},measurement:{title:"Analytics",description:"These cookies help us understand how visitors interact with the website and improve its performance."},experience:{title:"Experience",description:"These cookies help us provide a better user experience and test new features."}},frame:{title:"Accept {category} consent to view this content.",actionButton:"Enable {category} consent"},legalLinks:{privacyPolicy:"Privacy Policy",cookiePolicy:"Cookie Policy",termsOfService:"Terms of Service"},iab:{banner:{title:"Privacy Settings",description:"We and our {partnerCount} partners store and/or access information on your device and process personal data, such as unique identifiers and browsing data, for this website, to:",partnersLink:"{count} partners",andMore:"And {count} more...",legitimateInterestNotice:"Some partners claim a legitimate interest to process your data. You have the right to object to this processing, customize your choices, and withdraw your consent at any time.",scopeServiceSpecific:"Your consent applies only to this website and will not affect other services.",scopeGroup:"Your choice applies across our websites in this group."},preferenceCenter:{title:"Privacy Settings",description:"Customize your privacy settings here. 
You can choose which types of cookies and tracking technologies you allow.",tabs:{purposes:"Purposes",vendors:"Vendors"},purposeItem:{partners:"{count} partners",vendorsUseLegitimateInterest:"{count} vendors claim legitimate interest",examples:"Examples",partnersUsingPurpose:"Partners Using This Purpose",withYourPermission:"With Your Permission",legitimateInterest:"Legitimate Interest",objectButton:"Object",objected:"Objected",rightToObject:"You have the right to object to processing based on legitimate interest."},specialPurposes:{title:"Essential Functions (Required)",tooltip:"These are required for site functionality and security. Per IAB TCF, you cannot object to these special purposes."},vendorList:{search:"Search vendors...",showingCount:"{filtered} of {total} vendors",iabVendorsHeading:"IAB Registered Vendors",iabVendorsNotice:"These partners are registered with the IAB Transparency & Consent Framework (TCF), an industry standard for managing consent",customVendorsHeading:"Custom Partners",customVendorsNotice:"These are custom partners not registered with IAB Transparency & Consent Framework (TCF). They process data based on your consent and may have different privacy practices than IAB-registered vendors.",purposes:"Purposes",specialPurposes:"Special Purposes",specialFeatures:"Special Features",features:"Features",dataCategories:"Data Categories",usesCookies:"Uses Cookies",nonCookieAccess:"Non-Cookie Access",maxAge:"Max Age: {days}d",retention:"Retention: {days}d",legitimateInterest:"Leg. Interest",privacyPolicy:"Privacy Policy",storageDisclosure:"Storage Disclosure",requiredNotice:"Required for site functionality, cannot be disabled"},footer:{consentStorage:'Consent preferences are stored in a cookie named "euconsent-v2" for 13 months. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Accept All",rejectAll:"Reject All",customize:"Customize",saveSettings:"Save Settings",loading:"Loading...",showingSelectedVendor:"Showing selected vendor",clearSelection:"Clear",customPartner:"Custom partner not registered with IAB"}}},li={common:{acceptAll:"Aceptar todo",rejectAll:"Rechazar todo",customize:"Personalizar",save:"Guardar ajustes"},cookieBanner:{title:"Valoramos tu privacidad",description:"Este sitio web utiliza cookies para mejorar tu experiencia de navegaci\xF3n, analizar el tr\xE1fico del sitio y mostrar contenido personalizado."},consentManagerDialog:{title:"Configuraci\xF3n de privacidad",description:"Personaliza tus ajustes de privacidad aqu\xED. Puedes elegir qu\xE9 tipos de cookies y tecnolog\xEDas de seguimiento permites."},consentTypes:{necessary:{title:"Necesario",description:"Estas cookies son esenciales para que el sitio web funcione correctamente y no pueden ser deshabilitadas."},functionality:{title:"Funcionalidad",description:"Estas cookies permiten una mejor funcionalidad y personalizaci\xF3n del sitio web."},marketing:{title:"Marketing",description:"Estas cookies se utilizan para ofrecer anuncios relevantes y realizar un seguimiento de su eficacia."},measurement:{title:"Anal\xEDtica",description:"Estas cookies nos ayudan a comprender c\xF3mo los visitantes interact\xFAan con el sitio web y a mejorar su rendimiento."},experience:{title:"Experiencia",description:"Estas cookies nos ayudan a proporcionar una mejor experiencia de usuario y a probar nuevas funciones."}},frame:{title:"Acepta {category} para ver este contenido.",actionButton:"Habilitar consentimiento de {category}"},legalLinks:{privacyPolicy:"Pol\xEDtica de Privacidad",cookiePolicy:"Pol\xEDtica de Cookies",termsOfService:"T\xE9rminos de Servicio"},iab:{banner:{title:"Configuraci\xF3n de privacidad",description:"Nosotros y nuestros {partnerCount} socios almacenamos y/o accedemos a 
informaci\xF3n en tu dispositivo y procesamos datos personales, como identificadores \xFAnicos y datos de navegaci\xF3n, para este sitio web, con el fin de:",partnersLink:"{count} socios",andMore:"Y {count} m\xE1s...",legitimateInterestNotice:"Algunos socios reclaman un inter\xE9s leg\xEDtimo para procesar tus datos. Tienes derecho a oponerte a este procesamiento, personalizar tus opciones y retirar tu consentimiento en cualquier momento.",scopeServiceSpecific:"Tu consentimiento se aplica solo a este sitio web y no afectar\xE1 a otros servicios.",scopeGroup:"Su elecci\xF3n se aplica a todos nuestros sitios web de este grupo."},preferenceCenter:{title:"Configuraci\xF3n de privacidad",description:"Personaliza tus ajustes de privacidad aqu\xED. Puedes elegir qu\xE9 tipos de cookies y tecnolog\xEDas de seguimiento permites.",tabs:{purposes:"Prop\xF3sitos",vendors:"Proveedores"},purposeItem:{partners:"{count} socios",vendorsUseLegitimateInterest:"{count} proveedores reclaman inter\xE9s leg\xEDtimo",examples:"Ejemplos",partnersUsingPurpose:"Socios que utilizan este prop\xF3sito",withYourPermission:"Con tu permiso",legitimateInterest:"Inter\xE9s leg\xEDtimo",objectButton:"Oponerse",objected:"Opuesto",rightToObject:"Tienes derecho a oponerte al procesamiento basado en inter\xE9s leg\xEDtimo."},specialPurposes:{title:"Funciones esenciales (requeridas)",tooltip:"Estas son necesarias para la funcionalidad y seguridad del sitio. 
Seg\xFAn el TCF de IAB, no puedes oponerte a estos prop\xF3sitos especiales."},vendorList:{search:"Buscar proveedores...",showingCount:"{filtered} de {total} proveedores",iabVendorsHeading:"Proveedores registrados en IAB",iabVendorsNotice:"Estos socios est\xE1n registrados en el Marco de Transparencia y Consentimiento (TCF) de IAB, un est\xE1ndar de la industria para gestionar el consentimiento",customVendorsHeading:"Socios personalizados",customVendorsNotice:"Estos son socios personalizados no registrados en el Marco de Transparencia y Consentimiento de IAB (TCF). Procesan datos bas\xE1ndose en tu consentimiento y pueden tener pr\xE1cticas de privacidad diferentes a las de los proveedores registrados en IAB.",purposes:"Finalidades",specialPurposes:"Finalidades especiales",specialFeatures:"Caracter\xEDsticas especiales",features:"Caracter\xEDsticas",dataCategories:"Categor\xEDas de datos",usesCookies:"Usa cookies",nonCookieAccess:"Acceso sin cookies",maxAge:"Duraci\xF3n m\xE1xima: {days}d",retention:"Retenci\xF3n: {days}d",legitimateInterest:"Inter\xE9s leg\xEDtimo",privacyPolicy:"Pol\xEDtica de privacidad",storageDisclosure:"Divulgaci\xF3n de almacenamiento",requiredNotice:"Requerido para la funcionalidad del sitio, no se puede desactivar"},footer:{consentStorage:'Las preferencias de consentimiento se almacenan en una cookie llamada "euconsent-v2" durante 13 meses. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Aceptar todo",rejectAll:"Rechazar todo",customize:"Personalizar",saveSettings:"Guardar ajustes",loading:"Cargando...",showingSelectedVendor:"Mostrando proveedor seleccionado",clearSelection:"Limpiar",customPartner:"Socio personalizado no registrado en IAB"}}},ui={common:{acceptAll:"N\xF5ustu k\xF5igiga",rejectAll:"Keeldu k\xF5igist",customize:"Kohanda",save:"Salvesta seaded"},cookieBanner:{title:"Hindame teie privaatsust",description:"See sait kasutab k\xFCpsiseid, et parandada teie sirvimiskogemust, anal\xFC\xFCsida saidi liiklust ja n\xE4idata isikup\xE4rastatud sisu."},consentManagerDialog:{title:"Privaatsusseaded",description:"Kohandage siin oma privaatsusseadeid. Saate valida, milliseid k\xFCpsiseid ja j\xE4lgimistehnoloogiaid lubate."},consentTypes:{necessary:{title:"H\xE4davajalikud",description:"Need k\xFCpsised on veebisaidi n\xF5uetekohaseks toimimiseks h\xE4davajalikud ja neid ei saa keelata."},functionality:{title:"Funktsionaalsus",description:"Need k\xFCpsised v\xF5imaldavad veebisaidi t\xE4iustatud funktsionaalsust ja isikup\xE4rastamist."},marketing:{title:"Turundus",description:"Neid k\xFCpsiseid kasutatakse asjakohaste reklaamide edastamiseks ja nende t\xF5hususe j\xE4lgimiseks."},measurement:{title:"Anal\xFC\xFCtika",description:"Need k\xFCpsised aitavad meil m\xF5ista, kuidas k\xFClastajad veebisaidiga suhtlevad, ja parandada selle toimivust."},experience:{title:"Kogemus",description:"Need k\xFCpsised aitavad meil pakkuda paremat kasutajakogemust ja testida uusi funktsioone."}},frame:{title:"Selle sisu vaatamiseks n\xF5ustuge kategooria {category} n\xF5usolekuga.",actionButton:"Luba kategooria {category} n\xF5usolek"},legalLinks:{privacyPolicy:"Privaatsuspoliitika",cookiePolicy:"K\xFCpsiste poliitika",termsOfService:"Kasutustingimused"},iab:{banner:{title:"Privaatsusseaded",description:"Meie ja meie {partnerCount} partnerit salvestavad ja/v\xF5i 
p\xE4\xE4sevad ligi teie seadmes olevatele andmetele ning t\xF6\xF6tlevad isikuandmeid, nagu unikaalsed identifikaatorid ja sirvimisandmed sellel veebilehel, et:",partnersLink:"{count} partnerit",andMore:"Ja veel {count}...",legitimateInterestNotice:"M\xF5ned partnerid v\xE4idavad, et neil on \xF5igustatud huvi teie andmete t\xF6\xF6tlemiseks. Teil on \xF5igus sellele t\xF6\xF6tlemisele vastu vaielda, oma valikuid kohandada ja n\xF5usolek igal ajal tagasi v\xF5tta.",scopeServiceSpecific:"Sinu n\xF5usolek kehtib ainult sellele veebisaidile ega m\xF5juta teisi teenuseid.",scopeGroup:"Teie valik kehtib k\xF5igil meie veebisaitidel selles grupis."},preferenceCenter:{title:"Privaatsusseaded",description:"Kohandage siin oma privaatsusseadeid. Saate valida, milliseid k\xFCpsiseid ja j\xE4lgimistehnoloogiaid lubate.",tabs:{purposes:"Eesm\xE4rgid",vendors:"Teenusepakkujad"},purposeItem:{partners:"{count} partnerit",vendorsUseLegitimateInterest:"{count} teenusepakkujat v\xE4idavad \xF5igustatud huvi",examples:"N\xE4ited",partnersUsingPurpose:"Selle eesm\xE4rgi kasutavad partnerid",withYourPermission:"Teie loal",legitimateInterest:"\xD5igustatud huvi",objectButton:"Vaidle vastu",objected:"Vastu vaieldud",rightToObject:"Teil on \xF5igus vaielda vastu t\xF6\xF6tlemisele, mis p\xF5hineb \xF5igustatud huvil."},specialPurposes:{title:"Olulised funktsioonid (n\xF5utud)",tooltip:"Need on vajalikud saidi toimimiseks ja turvalisuseks. 
IAB TCF-i kohaselt ei saa nendele erieesm\xE4rkidele vastu vaielda."},vendorList:{search:"Otsi teenusepakkujaid...",showingCount:"Kuvatakse {filtered} / {total} teenusepakkujat",iabVendorsHeading:"IAB registreeritud teenusepakkujad",iabVendorsNotice:"Need partnerid on registreeritud IAB l\xE4bipaistvuse ja n\xF5usoleku raamistikus (TCF), mis on t\xF6\xF6stusstandard n\xF5usoleku haldamiseks",customVendorsHeading:"Kohandatud partnerid",customVendorsNotice:"Need on kohandatud partnerid, kes ei ole registreeritud IAB l\xE4bipaistvuse ja n\xF5usoleku raamistikus (TCF). Nad t\xF6\xF6tlevad andmeid teie n\xF5usoleku alusel ning nende privaatsustavad v\xF5ivad erineda IAB-sertifitseeritud partnerite omadest.",purposes:"Eesm\xE4rgid",specialPurposes:"Eriotstarbed",specialFeatures:"Eriomadused",features:"Omadused",dataCategories:"Andmekategooriad",usesCookies:"Kasutab k\xFCpsiseid",nonCookieAccess:"K\xFCpsisteta juurdep\xE4\xE4s",maxAge:"Maksimaalne vanus: {days}p",retention:"S\xE4ilitamine: {days}p",legitimateInterest:"\xD5igustatud huvi",privacyPolicy:"Privaatsuspoliitika",storageDisclosure:"Salvestamise teave",requiredNotice:"Vajalik saidi toimimiseks, ei saa keelata"},footer:{consentStorage:'N\xF5usoleku eelistused salvestatakse k\xFCpsisesse nimega "euconsent-v2" 13 kuuks. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"N\xF5ustu k\xF5igiga",rejectAll:"Keeldu k\xF5igist",customize:"Kohanda",saveSettings:"Salvesta seaded",loading:"Laadimine...",showingSelectedVendor:"Kuvatakse valitud partner",clearSelection:"T\xFChjenda",customPartner:"Kohandatud partner, kes ei ole IAB-s registreeritud"}}},di={common:{acceptAll:"Hyv\xE4ksy kaikki",rejectAll:"Hylk\xE4\xE4 kaikki",customize:"Mukauta",save:"Tallenna asetukset"},cookieBanner:{title:"Arvostamme yksityisyytt\xE4si",description:"T\xE4m\xE4 sivusto k\xE4ytt\xE4\xE4 ev\xE4steit\xE4 parantaakseen selauskokemustasi, analysoidakseen sivuston liikennett\xE4 ja n\xE4ytt\xE4\xE4kseen yksil\xF6llist\xE4 sis\xE4lt\xF6\xE4."},consentManagerDialog:{title:"Tietosuoja-asetukset",description:"Mukauta yksityisyysasetuksiasi t\xE4\xE4ll\xE4. Voit valita, mink\xE4 tyyppiset ev\xE4steet ja seurantatekniikat sallit."},consentTypes:{necessary:{title:"Ehdottoman tarpeellinen",description:"N\xE4m\xE4 ev\xE4steet ovat v\xE4ltt\xE4m\xE4tt\xF6mi\xE4, jotta verkkosivusto toimisi oikein, eik\xE4 niit\xE4 voi poistaa k\xE4yt\xF6st\xE4."},functionality:{title:"Toiminnallisuus",description:"N\xE4m\xE4 ev\xE4steet mahdollistavat verkkosivuston tehostetun toiminnallisuuden ja personoinnin."},marketing:{title:"Markkinointi",description:"N\xE4it\xE4 ev\xE4steit\xE4 k\xE4ytet\xE4\xE4n relevanttien mainosten l\xE4hett\xE4miseen ja niiden tehokkuuden seurantaan."},measurement:{title:"Analytiikka",description:"N\xE4m\xE4 ev\xE4steet auttavat meit\xE4 ymm\xE4rt\xE4m\xE4\xE4n, miten k\xE4vij\xE4t ovat vuorovaikutuksessa verkkosivuston kanssa, ja parantamaan sen suorituskyky\xE4."},experience:{title:"Kokemus",description:"N\xE4m\xE4 ev\xE4steet auttavat meit\xE4 tarjoamaan paremman k\xE4ytt\xF6kokemuksen ja testaamaan uusia ominaisuuksia."}},frame:{title:"Hyv\xE4ksy {category}, jotta voit tarkastella t\xE4t\xE4 sis\xE4lt\xF6\xE4.",actionButton:"Ota {category} 
k\xE4ytt\xF6\xF6n"},legalLinks:{privacyPolicy:"Tietosuojak\xE4yt\xE4nt\xF6",cookiePolicy:"Ev\xE4stek\xE4yt\xE4nt\xF6",termsOfService:"K\xE4ytt\xF6ehdot"},iab:{banner:{title:"Tietosuoja-asetukset",description:"Me ja {partnerCount} kumppaniamme tallennamme ja/tai k\xE4yt\xE4mme tietoja laitteellasi ja k\xE4sittelemme henkil\xF6tietoja, kuten yksil\xF6llisi\xE4 tunnisteita ja selaustietoja, t\xE4ll\xE4 verkkosivustolla seuraaviin tarkoituksiin:",partnersLink:"{count} kumppania",andMore:"Ja {count} muuta...",legitimateInterestNotice:"Jotkut kumppanit vetoavat oikeutettuun etuun tietojesi k\xE4sittelyss\xE4. Sinulla on oikeus vastustaa t\xE4t\xE4 k\xE4sittely\xE4, mukauttaa valintojasi ja peruuttaa suostumuksesi milloin tahansa.",scopeServiceSpecific:"Suostumuksesi koskee vain t\xE4t\xE4 verkkosivustoa eik\xE4 vaikuta muihin palveluihin.",scopeGroup:"Valintasi koskee kaikkia verkkosivujamme t\xE4ss\xE4 ryhm\xE4ss\xE4."},preferenceCenter:{title:"Tietosuoja-asetukset",description:"Mukauta yksityisyysasetuksiasi t\xE4\xE4ll\xE4. Voit valita, mink\xE4 tyyppiset ev\xE4steet ja seurantatekniikat sallit.",tabs:{purposes:"K\xE4ytt\xF6tarkoitukset",vendors:"Kumppanit"},purposeItem:{partners:"{count} kumppania",vendorsUseLegitimateInterest:"{count} kumppania vetoaa oikeutettuun etuun",examples:"Esimerkit",partnersUsingPurpose:"T\xE4t\xE4 k\xE4ytt\xF6tarkoitusta k\xE4ytt\xE4v\xE4t kumppanit",withYourPermission:"Luvallasi",legitimateInterest:"Oikeutettu etu",objectButton:"Vastusta",objected:"Vastustettu",rightToObject:"Sinulla on oikeus vastustaa oikeutettuun etuun perustuvaa k\xE4sittely\xE4."},specialPurposes:{title:"V\xE4ltt\xE4m\xE4tt\xF6m\xE4t toiminnot (pakollinen)",tooltip:"N\xE4m\xE4 ovat v\xE4ltt\xE4m\xE4tt\xF6mi\xE4 sivuston toimivuuden ja turvallisuuden kannalta. 
IAB TCF:n mukaan et voi vastustaa n\xE4it\xE4 erityisi\xE4 k\xE4ytt\xF6tarkoituksia."},vendorList:{search:"Hae kumppaneita...",showingCount:"{filtered}/{total} kumppania",iabVendorsHeading:"IAB-rekister\xF6idyt kumppanit",iabVendorsNotice:"N\xE4m\xE4 kumppanit on rekister\xF6ity IAB Transparency & Consent Framework (TCF) -j\xE4rjestelm\xE4\xE4n, joka on alan standardi suostumusten hallintaan",customVendorsHeading:"Mukautetut kumppanit",customVendorsNotice:"N\xE4m\xE4 ovat mukautettuja kumppaneita, jotka eiv\xE4t ole rekister\xF6ityneet IAB Transparency & Consent Framework (TCF) -j\xE4rjestelm\xE4\xE4n. Ne k\xE4sittelev\xE4t tietoja suostumuksesi perusteella, ja niill\xE4 voi olla erilaiset tietosuojak\xE4yt\xE4nn\xF6t kuin IAB:hen rekister\xF6ityneill\xE4 toimittajilla.",purposes:"Tarkoitukset",specialPurposes:"Erityistarkoitukset",specialFeatures:"Erikoisominaisuudet",features:"Ominaisuudet",dataCategories:"Tietoluokat",usesCookies:"K\xE4ytt\xE4\xE4 ev\xE4steit\xE4",nonCookieAccess:"Muu kuin ev\xE4stepohjainen k\xE4ytt\xF6",maxAge:"Enimm\xE4isik\xE4: {days} pv",retention:"S\xE4ilytys: {days} pv",legitimateInterest:"Oikeutettu etu",privacyPolicy:"Tietosuojak\xE4yt\xE4nt\xF6",storageDisclosure:"Tallennustietojen julkistaminen",requiredNotice:"Vaaditaan sivuston toiminnallisuuden vuoksi, ei voi poistaa k\xE4yt\xF6st\xE4"},footer:{consentStorage:'Suostumusasetukset tallennetaan ev\xE4steeseen nimelt\xE4 "euconsent-v2" 13 kuukaudeksi. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Hyv\xE4ksy kaikki",rejectAll:"Hylk\xE4\xE4 kaikki",customize:"Mukauta",saveSettings:"Tallenna asetukset",loading:"Ladataan...",showingSelectedVendor:"N\xE4ytet\xE4\xE4n valittu toimittaja",clearSelection:"Tyhjenn\xE4",customPartner:"Mukautettu kumppani, joka ei ole rekister\xF6itynyt IAB:hen"}}},pi={common:{acceptAll:"Accepter tout",rejectAll:"Tout rejeter",customize:"Personnaliser",save:"Enregistrer les param\xE8tres"},cookieBanner:{title:"Nous respectons votre vie priv\xE9e",description:"Ce site utilise des cookies pour am\xE9liorer votre exp\xE9rience de navigation, analyser le trafic du site et afficher du contenu personnalis\xE9."},consentManagerDialog:{title:"Param\xE8tres de confidentialit\xE9",description:"Personnalisez vos param\xE8tres de confidentialit\xE9 ici. Vous pouvez choisir les types de cookies et de technologies de suivi que vous autorisez."},consentTypes:{necessary:{title:"Strictement n\xE9cessaire",description:"Ces cookies sont essentiels pour que le site web fonctionne correctement et ne peuvent pas \xEAtre d\xE9sactiv\xE9s."},functionality:{title:"Fonctionnalit\xE9",description:"Ces cookies permettent d'am\xE9liorer la fonctionnalit\xE9 et la personnalisation du site web."},marketing:{title:"Marketing",description:"Ces cookies sont utilis\xE9s pour offrir des publicit\xE9s pertinentes et suivre leur efficacit\xE9."},measurement:{title:"Analyse",description:"Ces cookies nous permettent de comprendre comment les visiteurs interagissent avec le site web et am\xE9liorent ses performances."},experience:{title:"Exp\xE9rience",description:"Ces cookies nous permettent de fournir une meilleure exp\xE9rience utilisateur et de tester de nouvelles fonctionnalit\xE9s."}},frame:{title:"Acceptez {category} pour afficher ce contenu.",actionButton:"Activer le consentement {category}"},legalLinks:{privacyPolicy:"Politique de Confidentialit\xE9",cookiePolicy:"Politique 
des Cookies",termsOfService:"Conditions de Service"},iab:{banner:{title:"Param\xE8tres de confidentialit\xE9",description:"Nous et nos {partnerCount} partenaires stockons et/ou acc\xE9dons \xE0 des informations sur votre appareil et traitons des donn\xE9es personnelles, telles que des identifiants uniques et des donn\xE9es de navigation, pour ce site web, afin de :",partnersLink:"{count} partenaires",andMore:"Et {count} de plus...",legitimateInterestNotice:"Certains partenaires revendiquent un int\xE9r\xEAt l\xE9gitime pour traiter vos donn\xE9es. Vous avez le droit de vous opposer \xE0 ce traitement, de personnaliser vos choix et de retirer votre consentement \xE0 tout moment.",scopeServiceSpecific:"Votre consentement s'applique uniquement \xE0 ce site web et n'affecte pas d'autres services.",scopeGroup:"Votre choix s'applique \xE0 tous nos sites web de ce groupe."},preferenceCenter:{title:"Param\xE8tres de confidentialit\xE9",description:"Personnalisez vos param\xE8tres de confidentialit\xE9 ici. Vous pouvez choisir les types de cookies et de technologies de suivi que vous autorisez.",tabs:{purposes:"Finalit\xE9s",vendors:"Fournisseurs"},purposeItem:{partners:"{count} partenaires",vendorsUseLegitimateInterest:"{count} fournisseurs revendiquent un int\xE9r\xEAt l\xE9gitime",examples:"Exemples",partnersUsingPurpose:"Partenaires utilisant cette finalit\xE9",withYourPermission:"Avec votre autorisation",legitimateInterest:"Int\xE9r\xEAt l\xE9gitime",objectButton:"S'opposer",objected:"Opposition enregistr\xE9e",rightToObject:"Vous avez le droit de vous opposer au traitement fond\xE9 sur l'int\xE9r\xEAt l\xE9gitime."},specialPurposes:{title:"Fonctions essentielles (obligatoires)",tooltip:"Ces fonctions sont n\xE9cessaires au fonctionnement et \xE0 la s\xE9curit\xE9 du site. 
Conform\xE9ment au TCF de l'IAB, vous ne pouvez pas vous opposer \xE0 ces finalit\xE9s sp\xE9ciales."},vendorList:{search:"Rechercher des fournisseurs...",showingCount:"{filtered} sur {total} fournisseurs",iabVendorsHeading:"Fournisseurs enregistr\xE9s IAB",iabVendorsNotice:"Ces partenaires sont enregistr\xE9s aupr\xE8s du Transparency & Consent Framework (TCF) de l'IAB, une norme industrielle pour la gestion du consentement",customVendorsHeading:"Partenaires personnalis\xE9s",customVendorsNotice:"Il s'agit de partenaires personnalis\xE9s non enregistr\xE9s aupr\xE8s de l'IAB Transparency & Consent Framework (TCF). Ils traitent les donn\xE9es sur la base de votre consentement et peuvent avoir des pratiques de confidentialit\xE9 diff\xE9rentes de celles des fournisseurs enregistr\xE9s aupr\xE8s de l'IAB.",purposes:"Finalit\xE9s",specialPurposes:"Finalit\xE9s sp\xE9ciales",specialFeatures:"Fonctionnalit\xE9s sp\xE9ciales",features:"Fonctionnalit\xE9s",dataCategories:"Cat\xE9gories de donn\xE9es",usesCookies:"Utilise des cookies",nonCookieAccess:"Acc\xE8s sans cookie",maxAge:"Dur\xE9e max. : {days} j",retention:"R\xE9tention : {days} j",legitimateInterest:"Int\xE9r\xEAt l\xE9gitime",privacyPolicy:"Politique de confidentialit\xE9",storageDisclosure:"Divulgation du stockage",requiredNotice:"Requis pour le fonctionnement du site, ne peut pas \xEAtre d\xE9sactiv\xE9"},footer:{consentStorage:"Les pr\xE9f\xE9rences de consentement sont stock\xE9es dans un cookie nomm\xE9 \xAB euconsent-v2 \xBB pendant 13 mois. 
The storage duration may be refreshed when you update your preferences."}},common:{acceptAll:"Accepter tout",rejectAll:"Tout rejeter",customize:"Personnaliser",saveSettings:"Enregistrer les param\xE8tres",loading:"Chargement...",showingSelectedVendor:"Affichage du fournisseur s\xE9lectionn\xE9",clearSelection:"Effacer",customPartner:"Partenaire personnalis\xE9 non enregistr\xE9 aupr\xE8s de l'IAB"}}},gi={common:{acceptAll:"Glac le Gach Rud",rejectAll:"Di\xFAltaigh do Gach Rud",customize:"Saincheap",save:"S\xE1bh\xE1il Socruithe"},cookieBanner:{title:"Tugaimid luach do do phr\xEDobh\xE1ideachas",description:"\xDAs\xE1ideann an su\xEDomh seo fian\xE1in chun do thaith\xED bhrabhs\xE1la a fheabhs\xFA, tr\xE1cht su\xEDmh a anail\xEDsi\xFA, agus \xE1bhar pearsantaithe a thaispe\xE1int."},consentManagerDialog:{title:"Socruithe Pr\xEDobh\xE1ideachais",description:"Saincheap do shocruithe pr\xEDobh\xE1ideachais anseo. Is f\xE9idir leat na cine\xE1lacha fian\xE1n agus teicneola\xEDochta\xED rianaithe a cheada\xEDonn t\xFA a roghn\xFA."},consentTypes:{necessary:{title:"F\xEDor-Riachtanach",description:"T\xE1 na fian\xE1in seo riachtanach chun go bhfeidhmeoidh an su\xEDomh gr\xE9as\xE1in i gceart agus n\xED f\xE9idir iad a dh\xEDchumas\xFA."},functionality:{title:"Feidhmi\xFAlacht",description:"Cumasa\xEDonn na fian\xE1in seo feidhmi\xFAlacht fheabhsaithe agus pearsant\xFA an tsu\xEDmh ghr\xE9as\xE1in."},marketing:{title:"Marga\xEDocht",description:"\xDAs\xE1idtear na fian\xE1in seo chun f\xF3gra\xED \xE1bhartha a sheachadadh agus a n-\xE9ifeachtacht a rian\xFA."},measurement:{title:"Anail\xEDs\xEDocht",description:"Cabhra\xEDonn na fian\xE1in seo linn tuiscint a fh\xE1il ar conas a idirghn\xEDomha\xEDonn cuairteoir\xED leis an su\xEDomh gr\xE9as\xE1in agus a fheidhm\xEDocht a fheabhs\xFA."},experience:{title:"Taith\xED",description:"Cabhra\xEDonn na fian\xE1in seo linn taith\xED \xFAs\xE1ideora n\xEDos fearr a shol\xE1thar agus gn\xE9ithe nua a 
th\xE1st\xE1il."}},frame:{title:"Glac le toili\xFA {category} chun an t-\xE1bhar seo a fheice\xE1il.",actionButton:"Cumasaigh toili\xFA {category}"},legalLinks:{privacyPolicy:"Beartas Pr\xEDobh\xE1ideachta",cookiePolicy:"Beartas Fian\xE1n",termsOfService:"T\xE9arma\xED Seirbh\xEDse"},iab:{banner:{title:"Socruithe pr\xEDobh\xE1ideachais",description:"St\xF3r\xE1laimid agus/n\xF3 faighimid rochtain ar fhaisn\xE9is ar do ghl\xE9as, muid f\xE9in agus \xE1r {partnerCount} comhph\xE1irt\xED, agus pr\xF3ise\xE1laimid sonra\xED pearsanta, amhail aitheant\xF3ir\xED uath\xFAla agus sonra\xED brabhs\xE1la, don su\xEDomh gr\xE9as\xE1in seo, chun:",partnersLink:"{count} comhph\xE1irt\xED",andMore:"Agus {count} eile...",legitimateInterestNotice:"\xC9il\xEDonn roinnt comhph\xE1irtithe leas dlisteanach chun do shonra\xED a phr\xF3ise\xE1il. T\xE1 an ceart agat cur in aghaidh an phr\xF3ise\xE1la seo, do roghanna a shaincheapadh, agus do thoili\xFA a tharraingt siar am ar bith.",scopeServiceSpecific:"Baineann do thoili\xFA leis an su\xEDomh gr\xE9as\xE1in seo amh\xE1in agus n\xED dh\xE9anfaidh s\xE9 difear do sheirbh\xEDs\xED eile.",scopeGroup:"Baineann do rogha le gach ceann d\xE1r l\xE1ithre\xE1in ghr\xE9as\xE1in sa ghr\xFApa seo."},preferenceCenter:{title:"Socruithe pr\xEDobh\xE1ideachais",description:"Saincheap do shocruithe pr\xEDobh\xE1ideachais anseo. 
Is f\xE9idir leat na cine\xE1lacha fian\xE1n agus teicneola\xEDochta\xED rianaithe a cheada\xEDonn t\xFA a roghn\xFA.",tabs:{purposes:"Cusp\xF3ir\xED",vendors:"Sol\xE1thr\xF3ir\xED"},purposeItem:{partners:"{count} comhph\xE1irt\xED",vendorsUseLegitimateInterest:"\xC9il\xEDonn {count} sol\xE1thr\xF3ir leas dlisteanach",examples:"Sampla\xED",partnersUsingPurpose:"Comhph\xE1irtithe a \xFAs\xE1ideann an cusp\xF3ir seo",withYourPermission:"Le do chead",legitimateInterest:"Leas dlisteanach",objectButton:"Cuir in aghaidh",objected:"Curtha in aghaidh",rightToObject:"T\xE1 an ceart agat cur in aghaidh pr\xF3ise\xE1la bunaithe ar leas dlisteanach."},specialPurposes:{title:"Feidhmeanna riachtanacha (\xE9igeantach)",tooltip:"T\xE1 siad seo riachtanach d'fheidhmi\xFAlacht agus sl\xE1nd\xE1il an tsu\xEDmh. De r\xE9ir IAB TCF, n\xED f\xE9idir leat cur in aghaidh na gcusp\xF3ir\xED speisialta seo."},vendorList:{search:"Cuardaigh sol\xE1thr\xF3ir\xED...",showingCount:"{filtered} as {total} sol\xE1thr\xF3ir",iabVendorsHeading:"Sol\xE1thr\xF3ir\xED cl\xE1raithe IAB",iabVendorsNotice:"T\xE1 na comhph\xE1irtithe seo cl\xE1raithe le Creat Tr\xE9dhearcachta agus Toilithe IAB (TCF), caighde\xE1n tionscail chun toili\xFA a bhainisti\xFA",customVendorsHeading:"Comhph\xE1irtithe saincheaptha",customVendorsNotice:"Is comhph\xE1irtithe saincheaptha iad seo nach bhfuil cl\xE1raithe le Creat Tr\xE9dhearcachta agus Toilithe IAB (TCF). 
Pr\xF3ise\xE1lann siad sonra\xED bunaithe ar do thoili\xFA agus d'fh\xE9adfadh cleachtais phr\xEDobh\xE1ideachta \xE9ags\xFAla a bheith acu \xF3 dh\xEDolt\xF3ir\xED cl\xE1raithe IAB.",purposes:"Cusp\xF3ir\xED",specialPurposes:"Cusp\xF3ir\xED speisialta",specialFeatures:"Gn\xE9ithe speisialta",features:"Gn\xE9ithe",dataCategories:"Catag\xF3ir\xED sonra\xED",usesCookies:"\xDAs\xE1ideann fian\xE1in",nonCookieAccess:"Rochtain neamh-fhian\xE1n",maxAge:"Uasaois: {days}l",retention:"Coinne\xE1il: {days}l",legitimateInterest:"Leas dlisteanach",privacyPolicy:"Beartas pr\xEDobh\xE1ideachta",storageDisclosure:"Nochtadh st\xF3r\xE1la",requiredNotice:"Riachtanach d'fheidhmi\xFAlacht an tsu\xEDmh, n\xED f\xE9idir \xE9 a dh\xEDchumas\xFA"},footer:{consentStorage:'St\xF3r\xE1iltear roghanna toilithe i bhfian\xE1n darb ainm "euconsent-v2" ar feadh 13 mh\xED. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Glac le gach rud",rejectAll:"Di\xFAltaigh do gach rud",customize:"Saincheap",saveSettings:"S\xE1bh\xE1il socruithe",loading:"\xC1 l\xF3d\xE1il...",showingSelectedVendor:"D\xEDolt\xF3ir roghnaithe \xE1 thaispe\xE1int",clearSelection:"Glan",customPartner:"Comhph\xE1irt\xED saincheaptha nach bhfuil cl\xE1raithe le IAB"}}},mi={common:{acceptAll:"\u05D0\u05E4\u05E9\u05E8 \u05D4\u05DB\u05DC",rejectAll:"\u05D3\u05D7\u05D4 \u05D4\u05DB\u05DC",customize:"\u05D4\u05EA\u05D0\u05DE\u05D4 \u05D0\u05D9\u05E9\u05D9\u05EA",save:"\u05E9\u05DE\u05D5\u05E8 \u05D4\u05D2\u05D3\u05E8\u05D5\u05EA"},cookieBanner:{title:"\u05E4\u05E8\u05D8\u05D9\u05D5\u05EA\u05DA \u05D7\u05E9\u05D5\u05D1\u05D4 \u05DC\u05E0\u05D5",description:"\u05D0\u05EA\u05E8 \u05D6\u05D4 \u05DE\u05E9\u05EA\u05DE\u05E9 \u05D1\u05E2\u05D5\u05D2\u05D9\u05D5\u05EA (\u05E7\u05D5\u05E7\u05D9\u05D6) \u05D1\u05DB\u05D3\u05D9 \u05DC\u05E9\u05E4\u05E8 \u05D0\u05EA \u05D7\u05D5\u05D5\u05D9\u05D9\u05EA \u05D4\u05E9\u05D9\u05DE\u05D5\u05E9, \u05DC\u05E0\u05D8\u05E8 \u05D0\u05EA 
\u05EA\u05E2\u05D1\u05D5\u05E8\u05EA \u05D4\u05D0\u05EA\u05E8 \u05D5\u05DC\u05D4\u05E6\u05D9\u05D2 \u05EA\u05D5\u05DB\u05DF \u05DE\u05D5\u05EA\u05D0\u05DD \u05D0\u05D9\u05E9\u05D9\u05EA."},consentManagerDialog:{title:"\u05D4\u05D2\u05D3\u05E8\u05D5\u05EA \u05E4\u05E8\u05D8\u05D9\u05D5\u05EA",description:"\u05D1\u05D7\u05E8 \u05D0\u05EA \u05D4\u05D2\u05D3\u05E8\u05D5\u05EA \u05D4\u05E4\u05E8\u05D8\u05D9\u05D5\u05EA \u05E9\u05DC\u05DA \u05DB\u05D0\u05DF. \u05D1\u05D0\u05E4\u05E9\u05E8\u05D5\u05EA\u05DA \u05DC\u05D1\u05D7\u05D5\u05E8 \u05D0\u05D9\u05DC\u05D5 \u05E1\u05D5\u05D2\u05D9 \u05E2\u05D5\u05D2\u05D9\u05D5\u05EA \u05D5\u05D8\u05DB\u05E0\u05D5\u05DC\u05D5\u05D2\u05D9\u05D5\u05EA \u05DE\u05E2\u05E7\u05D1 \u05EA\u05E8\u05E6\u05D4 \u05DC\u05D0\u05E4\u05E9\u05E8."},consentTypes:{necessary:{title:"\u05D4\u05DB\u05E8\u05D7\u05D9\u05D5\u05EA",description:"\u05E2\u05D5\u05D2\u05D9\u05D5\u05EA \u05D0\u05DC\u05D5 \u05D3\u05E8\u05D5\u05E9\u05D5\u05EA \u05DC\u05E4\u05E2\u05D5\u05DC\u05EA \u05D4\u05D0\u05EA\u05E8 \u05D5\u05DC\u05D0 \u05E0\u05D9\u05EA\u05DF \u05DC\u05D4\u05E9\u05D1\u05D9\u05EA \u05D0\u05D5\u05EA\u05DF."},functionality:{title:"\u05E4\u05D5\u05E0\u05E7\u05E6\u05D9\u05D5\u05E0\u05DC\u05D9\u05D5\u05EA",description:"\u05E2\u05D5\u05D2\u05D9\u05D5\u05EA \u05D0\u05DC\u05D5 \u05DE\u05D0\u05E4\u05E9\u05E8\u05D5\u05EA \u05E4\u05D5\u05E0\u05E7\u05E6\u05D9\u05D5\u05E0\u05DC\u05D9\u05D5\u05EA \u05DE\u05E9\u05D5\u05E4\u05E8\u05EA \u05D5\u05D4\u05EA\u05D0\u05DE\u05D4 \u05D0\u05D9\u05E9\u05D9\u05EA."},marketing:{title:"\u05E9\u05D9\u05D5\u05D5\u05E7",description:"\u05E2\u05D5\u05D2\u05D9\u05D5\u05EA \u05D0\u05DC\u05D5 \u05DE\u05E9\u05DE\u05E9\u05D5\u05EA \u05DC\u05D4\u05EA\u05D0\u05DE\u05EA \u05E4\u05E8\u05E1\u05D5\u05DE\u05D5\u05EA \u05D5\u05DE\u05E2\u05E7\u05D1 \u05D0\u05D7\u05E8 \u05D9\u05E2\u05D9\u05DC\u05D5\u05EA\u05DF."},measurement:{title:"\u05E0\u05D9\u05EA\u05D5\u05D7",description:"\u05E2\u05D5\u05D2\u05D9\u05D5\u05EA \u05D0\u05DC\u05D5 
\u05DE\u05E1\u05D9\u05D9\u05E2\u05D5\u05EA \u05DC\u05D4\u05D1\u05D9\u05DF \u05D0\u05D9\u05DA \u05DE\u05E9\u05EA\u05DE\u05E9\u05D9\u05DD \u05D1\u05D0\u05EA\u05E8 \u05D5\u05DC\u05E9\u05E4\u05E8 \u05D0\u05EA \u05D1\u05D9\u05E6\u05D5\u05E2\u05D9\u05D5."},experience:{title:"\u05D7\u05D5\u05D5\u05D9\u05D9\u05EA \u05DE\u05E9\u05EA\u05DE\u05E9",description:"\u05E2\u05D5\u05D2\u05D9\u05D5\u05EA \u05D0\u05DC\u05D5 \u05DE\u05D0\u05E4\u05E9\u05E8\u05D5\u05EA \u05D7\u05D5\u05D5\u05D9\u05D9\u05EA \u05DE\u05E9\u05EA\u05DE\u05E9 \u05D8\u05D5\u05D1\u05D4 \u05D9\u05D5\u05EA\u05E8 \u05D5\u05D1\u05D3\u05D9\u05E7\u05EA \u05E4\u05D5\u05E0\u05E7\u05E6\u05D9\u05D5\u05E0\u05DC\u05D9\u05D5\u05EA \u05D7\u05D3\u05E9\u05D4 \u05D1\u05D0\u05EA\u05E8."}},frame:{title:"\u05E7\u05D1\u05DC {category} \u05DB\u05D3\u05D9 \u05DC\u05D4\u05E6\u05D9\u05D2 \u05EA\u05D5\u05DB\u05DF \u05D6\u05D4.",actionButton:"\u05D4\u05E4\u05E2\u05DC {category} \u05E8\u05E9\u05D5\u05EA"},legalLinks:{privacyPolicy:"\u05DE\u05D3\u05D9\u05E0\u05D9\u05D5\u05EA \u05E4\u05E8\u05D8\u05D9\u05D5\u05EA",cookiePolicy:"\u05DE\u05D3\u05D9\u05E0\u05D9\u05D5\u05EA \u05E2\u05D5\u05D2\u05D9\u05D5\u05EA",termsOfService:"\u05EA\u05E0\u05D0\u05D9 \u05E9\u05D9\u05E8\u05D5\u05EA"},iab:{banner:{title:"\u05D4\u05D2\u05D3\u05E8\u05D5\u05EA \u05E4\u05E8\u05D8\u05D9\u05D5\u05EA",description:"\u05D0\u05E0\u05D7\u05E0\u05D5 \u05D5-{partnerCount} \u05D4\u05E9\u05D5\u05EA\u05E4\u05D9\u05DD \u05E9\u05DC\u05E0\u05D5 \u05DE\u05D0\u05D7\u05E1\u05E0\u05D9\u05DD \u05D5/\u05D0\u05D5 \u05E0\u05D9\u05D2\u05E9\u05D9\u05DD \u05DC\u05DE\u05D9\u05D3\u05E2 \u05D1\u05DE\u05DB\u05E9\u05D9\u05E8 \u05E9\u05DC\u05DA \u05D5\u05DE\u05E2\u05D1\u05D3\u05D9\u05DD \u05E0\u05EA\u05D5\u05E0\u05D9\u05DD \u05D0\u05D9\u05E9\u05D9\u05D9\u05DD, \u05DB\u05D2\u05D5\u05DF \u05DE\u05D6\u05D4\u05D9\u05DD \u05D9\u05D9\u05D7\u05D5\u05D3\u05D9\u05D9\u05DD \u05D5\u05E0\u05EA\u05D5\u05E0\u05D9 \u05D2\u05DC\u05D9\u05E9\u05D4, \u05E2\u05D1\u05D5\u05E8 \u05D0\u05EA\u05E8 \u05D6\u05D4, 
\u05DB\u05D3\u05D9:",partnersLink:"{count} \u05E9\u05D5\u05EA\u05E4\u05D9\u05DD",andMore:"\u05D5\u05E2\u05D5\u05D3 {count}...",legitimateInterestNotice:"\u05D7\u05DC\u05E7 \u05DE\u05D4\u05E9\u05D5\u05EA\u05E4\u05D9\u05DD \u05D8\u05D5\u05E2\u05E0\u05D9\u05DD \u05DC\u05D0\u05D9\u05E0\u05D8\u05E8\u05E1 \u05DC\u05D2\u05D9\u05D8\u05D9\u05DE\u05D9 \u05DC\u05E2\u05D1\u05D3 \u05D0\u05EA \u05D4\u05E0\u05EA\u05D5\u05E0\u05D9\u05DD \u05E9\u05DC\u05DA. \u05D9\u05E9 \u05DC\u05DA \u05D6\u05DB\u05D5\u05EA \u05DC\u05D4\u05EA\u05E0\u05D2\u05D3 \u05DC\u05E2\u05D9\u05D1\u05D5\u05D3 \u05D6\u05D4, \u05DC\u05D4\u05EA\u05D0\u05D9\u05DD \u05D0\u05D9\u05E9\u05D9\u05EA \u05D0\u05EA \u05D4\u05D1\u05D7\u05D9\u05E8\u05D5\u05EA \u05E9\u05DC\u05DA \u05D5\u05DC\u05D1\u05D8\u05DC \u05D0\u05EA \u05D4\u05E1\u05DB\u05DE\u05EA\u05DA \u05D1\u05DB\u05DC \u05E2\u05EA.",scopeServiceSpecific:"\u05D4\u05D4\u05E1\u05DB\u05DE\u05D4 \u05E9\u05DC\u05DA \u05D7\u05DC\u05D4 \u05E8\u05E7 \u05E2\u05DC \u05D0\u05EA\u05E8 \u05D6\u05D4 \u05D5\u05DC\u05D0 \u05EA\u05E9\u05E4\u05D9\u05E2 \u05E2\u05DC \u05E9\u05D9\u05E8\u05D5\u05EA\u05D9\u05DD \u05D0\u05D7\u05E8\u05D9\u05DD.",scopeGroup:"\u05D4\u05D1\u05D7\u05D9\u05E8\u05D4 \u05E9\u05DC\u05DA \u05D7\u05DC\u05D4 \u05E2\u05DC \u05DB\u05DC \u05D4\u05D0\u05EA\u05E8\u05D9\u05DD \u05E9\u05DC\u05E0\u05D5 \u05D1\u05E7\u05D1\u05D5\u05E6\u05D4 \u05D6\u05D5."},preferenceCenter:{title:"\u05D4\u05D2\u05D3\u05E8\u05D5\u05EA \u05E4\u05E8\u05D8\u05D9\u05D5\u05EA",description:"\u05D4\u05EA\u05D0\u05DD \u05D0\u05D9\u05E9\u05D9\u05EA \u05D0\u05EA \u05D4\u05D2\u05D3\u05E8\u05D5\u05EA \u05D4\u05E4\u05E8\u05D8\u05D9\u05D5\u05EA \u05E9\u05DC\u05DA \u05DB\u05D0\u05DF. 
\u05D1\u05D0\u05E4\u05E9\u05E8\u05D5\u05EA\u05DA \u05DC\u05D1\u05D7\u05D5\u05E8 \u05D0\u05D9\u05DC\u05D5 \u05E1\u05D5\u05D2\u05D9 \u05E2\u05D5\u05D2\u05D9\u05D5\u05EA \u05D5\u05D8\u05DB\u05E0\u05D5\u05DC\u05D5\u05D2\u05D9\u05D5\u05EA \u05DE\u05E2\u05E7\u05D1 \u05EA\u05E8\u05E6\u05D4 \u05DC\u05D0\u05E4\u05E9\u05E8.",tabs:{purposes:"\u05DE\u05D8\u05E8\u05D5\u05EA",vendors:"\u05E1\u05E4\u05E7\u05D9\u05DD"},purposeItem:{partners:"{count} \u05E9\u05D5\u05EA\u05E4\u05D9\u05DD",vendorsUseLegitimateInterest:"{count} \u05E1\u05E4\u05E7\u05D9\u05DD \u05D8\u05D5\u05E2\u05E0\u05D9\u05DD \u05DC\u05D0\u05D9\u05E0\u05D8\u05E8\u05E1 \u05DC\u05D2\u05D9\u05D8\u05D9\u05DE\u05D9",examples:"\u05D3\u05D5\u05D2\u05DE\u05D0\u05D5\u05EA",partnersUsingPurpose:"\u05E9\u05D5\u05EA\u05E4\u05D9\u05DD \u05D4\u05DE\u05E9\u05EA\u05DE\u05E9\u05D9\u05DD \u05D1\u05DE\u05D8\u05E8\u05D4 \u05D6\u05D5",withYourPermission:"\u05D1\u05D4\u05E1\u05DB\u05DE\u05EA\u05DA",legitimateInterest:"\u05D0\u05D9\u05E0\u05D8\u05E8\u05E1 \u05DC\u05D2\u05D9\u05D8\u05D9\u05DE\u05D9",objectButton:"\u05D4\u05EA\u05E0\u05D2\u05D3",objected:"\u05D4\u05EA\u05E0\u05D2\u05D3\u05EA",rightToObject:"\u05D9\u05E9 \u05DC\u05DA \u05D6\u05DB\u05D5\u05EA \u05DC\u05D4\u05EA\u05E0\u05D2\u05D3 \u05DC\u05E2\u05D9\u05D1\u05D5\u05D3 \u05D4\u05DE\u05D1\u05D5\u05E1\u05E1 \u05E2\u05DC \u05D0\u05D9\u05E0\u05D8\u05E8\u05E1 \u05DC\u05D2\u05D9\u05D8\u05D9\u05DE\u05D9."},specialPurposes:{title:"\u05E4\u05D5\u05E0\u05E7\u05E6\u05D9\u05D5\u05EA \u05D7\u05D9\u05D5\u05E0\u05D9\u05D5\u05EA (\u05E0\u05D3\u05E8\u05E9)",tooltip:"\u05D0\u05DC\u05D5 \u05E0\u05D3\u05E8\u05E9\u05D5\u05EA \u05DC\u05EA\u05E4\u05E7\u05D5\u05D3 \u05D5\u05D0\u05D1\u05D8\u05D7\u05EA \u05D4\u05D0\u05EA\u05E8. 
\u05E2\u05DC \u05E4\u05D9 IAB TCF, \u05D0\u05D9\u05E0\u05DA \u05D9\u05DB\u05D5\u05DC \u05DC\u05D4\u05EA\u05E0\u05D2\u05D3 \u05DC\u05DE\u05D8\u05E8\u05D5\u05EA \u05DE\u05D9\u05D5\u05D7\u05D3\u05D5\u05EA \u05D0\u05DC\u05D5."},vendorList:{search:"\u05D7\u05E4\u05E9 \u05E1\u05E4\u05E7\u05D9\u05DD...",showingCount:"{filtered} \u05DE\u05EA\u05D5\u05DA {total} \u05E1\u05E4\u05E7\u05D9\u05DD",iabVendorsHeading:"\u05E1\u05E4\u05E7\u05D9\u05DD \u05E8\u05E9\u05D5\u05DE\u05D9\u05DD \u05D1-IAB",iabVendorsNotice:"\u05E9\u05D5\u05EA\u05E4\u05D9\u05DD \u05D0\u05DC\u05D5 \u05E8\u05E9\u05D5\u05DE\u05D9\u05DD \u05D1\u05DE\u05E1\u05D2\u05E8\u05EA \u05D4\u05E9\u05E7\u05D9\u05E4\u05D5\u05EA \u05D5\u05D4\u05D4\u05E1\u05DB\u05DE\u05D4 \u05E9\u05DC IAB (TCF), \u05EA\u05E7\u05DF \u05EA\u05E2\u05E9\u05D9\u05D9\u05EA\u05D9 \u05DC\u05E0\u05D9\u05D4\u05D5\u05DC \u05D4\u05E1\u05DB\u05DE\u05D4",customVendorsHeading:"\u05E9\u05D5\u05EA\u05E4\u05D9\u05DD \u05DE\u05D5\u05EA\u05D0\u05DE\u05D9\u05DD \u05D0\u05D9\u05E9\u05D9\u05EA",customVendorsNotice:"\u05D0\u05DC\u05D5 \u05D4\u05DD \u05E9\u05D5\u05EA\u05E4\u05D9\u05DD \u05DE\u05D5\u05EA\u05D0\u05DE\u05D9\u05DD \u05D0\u05D9\u05E9\u05D9\u05EA \u05E9\u05D0\u05D9\u05E0\u05DD \u05E8\u05E9\u05D5\u05DE\u05D9\u05DD \u05D1-IAB Transparency & Consent Framework (TCF). 
\u05D4\u05DD \u05DE\u05E2\u05D1\u05D3\u05D9\u05DD \u05E0\u05EA\u05D5\u05E0\u05D9\u05DD \u05E2\u05DC \u05D1\u05E1\u05D9\u05E1 \u05D4\u05E1\u05DB\u05DE\u05EA\u05DA \u05D5\u05E2\u05E9\u05D5\u05D9\u05D9\u05DD \u05DC\u05D4\u05D9\u05D5\u05EA \u05DC\u05D4\u05DD \u05E0\u05D4\u05DC\u05D9 \u05E4\u05E8\u05D8\u05D9\u05D5\u05EA \u05E9\u05D5\u05E0\u05D9\u05DD \u05DE\u05E9\u05D5\u05EA\u05E4\u05D9\u05DD \u05D4\u05E8\u05E9\u05D5\u05DE\u05D9\u05DD \u05D1-IAB.",purposes:"\u05DE\u05D8\u05E8\u05D5\u05EA",specialPurposes:"\u05DE\u05D8\u05E8\u05D5\u05EA \u05DE\u05D9\u05D5\u05D7\u05D3\u05D5\u05EA",specialFeatures:"\u05EA\u05DB\u05D5\u05E0\u05D5\u05EA \u05DE\u05D9\u05D5\u05D7\u05D3\u05D5\u05EA",features:"\u05EA\u05DB\u05D5\u05E0\u05D5\u05EA",dataCategories:"\u05E7\u05D8\u05D2\u05D5\u05E8\u05D9\u05D5\u05EA \u05E0\u05EA\u05D5\u05E0\u05D9\u05DD",usesCookies:"\u05DE\u05E9\u05EA\u05DE\u05E9 \u05D1\u05E2\u05D5\u05D2\u05D9\u05D5\u05EA",nonCookieAccess:"\u05D2\u05D9\u05E9\u05D4 \u05DC\u05DC\u05D0 \u05E2\u05D5\u05D2\u05D9\u05D5\u05EA",maxAge:"\u05EA\u05D5\u05E7\u05E3 \u05DE\u05E7\u05E1\u05D9\u05DE\u05DC\u05D9: {days} \u05D9\u05DE\u05D9\u05DD",retention:"\u05E9\u05DE\u05D9\u05E8\u05D4: {days} \u05D9\u05DE\u05D9\u05DD",legitimateInterest:"\u05D0\u05D9\u05E0\u05D8\u05E8\u05E1 \u05DC\u05D2\u05D9\u05D8\u05D9\u05DE\u05D9",privacyPolicy:"\u05DE\u05D3\u05D9\u05E0\u05D9\u05D5\u05EA \u05E4\u05E8\u05D8\u05D9\u05D5\u05EA",storageDisclosure:"\u05D2\u05D9\u05DC\u05D5\u05D9 \u05D0\u05D7\u05E1\u05D5\u05DF",requiredNotice:"\u05E0\u05D3\u05E8\u05E9 \u05DC\u05EA\u05E4\u05E2\u05D5\u05DC \u05D4\u05D0\u05EA\u05E8, \u05DC\u05D0 \u05E0\u05D9\u05EA\u05DF \u05DC\u05D4\u05E9\u05D1\u05D9\u05EA"},footer:{consentStorage:'\u05D4\u05E2\u05D3\u05E4\u05D5\u05EA \u05D4\u05E1\u05DB\u05DE\u05D4 \u05E0\u05E9\u05DE\u05E8\u05D5\u05EA \u05D1\u05E2\u05D5\u05D2\u05D9\u05D9\u05D4 \u05D1\u05E9\u05DD "euconsent-v2" \u05DC\u05DE\u05E9\u05DA 13 \u05D7\u05D5\u05D3\u05E9\u05D9\u05DD. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"\u05D0\u05E4\u05E9\u05E8 \u05D4\u05DB\u05DC",rejectAll:"\u05D3\u05D7\u05D4 \u05D4\u05DB\u05DC",customize:"\u05D4\u05EA\u05D0\u05DE\u05D4 \u05D0\u05D9\u05E9\u05D9\u05EA",saveSettings:"\u05E9\u05DE\u05D5\u05E8 \u05D4\u05D2\u05D3\u05E8\u05D5\u05EA",loading:"\u05D8\u05D5\u05E2\u05DF...",showingSelectedVendor:"\u05DE\u05E6\u05D9\u05D2 \u05E9\u05D5\u05EA\u05E3 \u05E0\u05D1\u05D7\u05E8",clearSelection:"\u05E0\u05E7\u05D4",customPartner:"\u05E9\u05D5\u05EA\u05E3 \u05DE\u05D5\u05EA\u05D0\u05DD \u05D0\u05D9\u05E9\u05D9\u05EA \u05E9\u05D0\u05D9\u05E0\u05D5 \u05E8\u05E9\u05D5\u05DD \u05D1-IAB"}}},fi={common:{acceptAll:"Prihvati sve",rejectAll:"Odbij sve",customize:"Prilagodi",save:"Spremi postavke"},cookieBanner:{title:"Cijenimo va\u0161u privatnost",description:"Ova stranica koristi kola\u010Di\u0107e za pobolj\u0161anje va\u0161eg iskustva pregledavanja, analizu prometa na stranici i prikaz personaliziranog sadr\u017Eaja."},consentManagerDialog:{title:"Postavke privatnosti",description:"Ovdje mo\u017Eete prilagoditi svoje postavke privatnosti. 
Mo\u017Eete odabrati koje vrste kola\u010Di\u0107a i tehnologija pra\u0107enja dopu\u0161tate."},consentTypes:{necessary:{title:"Strogo nu\u017Eno",description:"Ovi kola\u010Di\u0107i su klju\u010Dni za ispravno funkcioniranje web stranice i ne mogu se onemogu\u0107iti."},functionality:{title:"Funkcionalnost",description:"Ovi kola\u010Di\u0107i omogu\u0107uju pobolj\u0161anu funkcionalnost i personalizaciju web stranice."},marketing:{title:"Marketing",description:"Ovi kola\u010Di\u0107i se koriste za prikaz relevantnih oglasa i pra\u0107enje njihove u\u010Dinkovitosti."},measurement:{title:"Analitika",description:"Ovi kola\u010Di\u0107i nam poma\u017Eu razumjeti kako posjetitelji koriste web stranicu i pobolj\u0161ati njezine performanse."},experience:{title:"Iskustvo",description:"Ovi kola\u010Di\u0107i nam poma\u017Eu pru\u017Eiti bolje korisni\u010Dko iskustvo i testirati nove zna\u010Dajke."}},frame:{title:"Prihvatite {category} privolu za prikaz ovog sadr\u017Eaja.",actionButton:"Omogu\u0107i {category} privolu"},legalLinks:{privacyPolicy:"Pravila o privatnosti",cookiePolicy:"Pravila o kola\u010Di\u0107ima",termsOfService:"Uvjeti pru\u017Eanja usluge"},iab:{banner:{title:"Postavke privatnosti",description:"Mi i na\u0161ih {partnerCount} partnera pohranjujemo i/ili pristupamo informacijama na va\u0161em ure\u0111aju i obra\u0111ujemo osobne podatke, kao \u0161to su jedinstveni identifikatori i podaci o pregledavanju, za ovu web stranicu, kako bismo:",partnersLink:"{count} partnera",andMore:"I jo\u0161 {count}...",legitimateInterestNotice:"Neki partneri pola\u017Eu pravo na legitimni interes za obradu va\u0161ih podataka. 
Imate pravo prigovora na ovu obradu, prilagodbe svojih izbora i povla\u010Denja privole u bilo kojem trenutku.",scopeServiceSpecific:"Va\u0161 pristanak odnosi se samo na ovu web stranicu i ne\u0107e utjecati na druge usluge.",scopeGroup:"Va\u0161 izbor vrijedi za sve na\u0161e web stranice u ovoj grupi."},preferenceCenter:{title:"Postavke privatnosti",description:"Ovdje mo\u017Eete prilagoditi svoje postavke privatnosti. Mo\u017Eete odabrati koje vrste kola\u010Di\u0107a i tehnologija pra\u0107enja dopu\u0161tate.",tabs:{purposes:"Svrhe",vendors:"Prodava\u010Di"},purposeItem:{partners:"{count} partnera",vendorsUseLegitimateInterest:"{count} prodava\u010Da pola\u017Ee pravo na legitimni interes",examples:"Primjeri",partnersUsingPurpose:"Partneri koji koriste ovu svrhu",withYourPermission:"Uz va\u0161e dopu\u0161tenje",legitimateInterest:"Legitimni interes",objectButton:"Prigovori",objected:"Prigovoreno",rightToObject:"Imate pravo prigovora na obradu temeljenu na legitimnom interesu."},specialPurposes:{title:"Osnovne funkcije (obavezno)",tooltip:"Ove su funkcije potrebne za funkcionalnost i sigurnost stranice. Prema IAB TCF-u, ne mo\u017Eete ulo\u017Eiti prigovor na ove posebne svrhe."},vendorList:{search:"Pretra\u017Ei prodava\u010De...",showingCount:"{filtered} od {total} prodava\u010Da",iabVendorsHeading:"IAB registrirani prodava\u010Di",iabVendorsNotice:"Ovi partneri su registrirani u IAB Transparency & Consent Framework (TCF), industrijskom standardu za upravljanje privolama",customVendorsHeading:"Prilago\u0111eni partneri",customVendorsNotice:"Ovo su prilago\u0111eni partneri koji nisu registrirani u IAB Transparency & Consent Framework (TCF). 
Oni obra\u0111uju podatke na temelju va\u0161e privole i mogu imati druga\u010Dije prakse privatnosti od IAB registriranih prodava\u010Da.",purposes:"Svrhe",specialPurposes:"Posebne svrhe",specialFeatures:"Posebne zna\u010Dajke",features:"Zna\u010Dajke",dataCategories:"Kategorije podataka",usesCookies:"Koristi kola\u010Di\u0107e",nonCookieAccess:"Pristup bez kola\u010Di\u0107a",maxAge:"Maks. starost: {days}d",retention:"Zadr\u017Eavanje: {days}d",legitimateInterest:"Leg. interes",privacyPolicy:"Pravila o privatnosti",storageDisclosure:"Objavljivanje pohrane",requiredNotice:"Potrebno za funkcionalnost stranice, ne mo\u017Ee se onemogu\u0107iti"},footer:{consentStorage:'Postavke privole pohranjuju se u kola\u010Di\u0107u pod nazivom "euconsent-v2" tijekom 13 mjeseci. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Prihvati sve",rejectAll:"Odbij sve",customize:"Prilagodi",saveSettings:"Spremi postavke",loading:"U\u010Ditavanje...",showingSelectedVendor:"Prikaz odabranog prodava\u010Da",clearSelection:"O\u010Disti",customPartner:"Prilago\u0111eni partner koji nije registriran u IAB-u"}}},hi={common:{acceptAll:"\xD6sszes elfogad\xE1sa",rejectAll:"\xD6sszes elutas\xEDt\xE1sa",customize:"Testreszab\xE1s",save:"Be\xE1ll\xEDt\xE1sok ment\xE9se"},cookieBanner:{title:"\xC9rt\xE9kelj\xFCk az adatv\xE9delmet",description:"Ez a webhely s\xFCtiket haszn\xE1l a b\xF6ng\xE9sz\xE9si \xE9lm\xE9ny jav\xEDt\xE1s\xE1ra, a forgalom elemz\xE9s\xE9re \xE9s szem\xE9lyre szabott tartalom megjelen\xEDt\xE9s\xE9re."},consentManagerDialog:{title:"Adatv\xE9delmi be\xE1ll\xEDt\xE1sok",description:"Testreszabhatja adatv\xE9delmi be\xE1ll\xEDt\xE1sait itt. 
Kiv\xE1laszthatja, hogy milyen t\xEDpus\xFA s\xFCtiket \xE9s nyomk\xF6vet\u0151 technol\xF3gi\xE1kat enged\xE9lyez."},consentTypes:{necessary:{title:"Felt\xE9tlen\xFCl sz\xFCks\xE9ges",description:"Ezek a s\xFCtik elengedhetetlenek a weboldal megfelel\u0151 m\u0171k\xF6d\xE9s\xE9hez, \xE9s nem kapcsolhat\xF3k ki."},functionality:{title:"Funkcionalit\xE1s",description:"Ezek a s\xFCtik lehet\u0151v\xE9 teszik a weboldal tov\xE1bbfejlesztett funkci\xF3it \xE9s szem\xE9lyre szab\xE1s\xE1t."},marketing:{title:"Marketing",description:"Ezeket a s\xFCtiket relev\xE1ns hirdet\xE9sek megjelen\xEDt\xE9s\xE9re \xE9s hat\xE9konys\xE1guk nyomon k\xF6vet\xE9s\xE9re haszn\xE1ljuk."},measurement:{title:"Analitika",description:"Ezek a s\xFCtik seg\xEDtenek meg\xE9rteni, hogyan l\xE9pnek kapcsolatba a l\xE1togat\xF3k a weboldallal, \xE9s jav\xEDtj\xE1k annak teljes\xEDtm\xE9ny\xE9t."},experience:{title:"Felhaszn\xE1l\xF3i \xE9lm\xE9ny",description:"Ezek a s\xFCtik seg\xEDtenek jobb felhaszn\xE1l\xF3i \xE9lm\xE9nyt ny\xFAjtani \xE9s \xFAj funkci\xF3kat tesztelni."}},frame:{title:"Fogadja el a(z) {category} hozz\xE1j\xE1rul\xE1st a tartalom megtekint\xE9s\xE9hez.",actionButton:"A(z) {category} hozz\xE1j\xE1rul\xE1s enged\xE9lyez\xE9se"},legalLinks:{privacyPolicy:"Adatv\xE9delmi szab\xE1lyzat",cookiePolicy:"S\xFCti szab\xE1lyzat",termsOfService:"Felhaszn\xE1l\xE1si felt\xE9telek"},iab:{banner:{title:"Adatv\xE9delmi be\xE1ll\xEDt\xE1sok",description:"Mi \xE9s a(z) {partnerCount} partner\xFCnk inform\xE1ci\xF3kat t\xE1rolunk az \xD6n eszk\xF6z\xE9n \xE9s/vagy \xE9r\xFCnk el azokhoz, valamint szem\xE9lyes adatokat, p\xE9ld\xE1ul egyedi azonos\xEDt\xF3kat \xE9s b\xF6ng\xE9sz\xE9si adatokat dolgozunk fel ezen a weboldalon a k\xF6vetkez\u0151 c\xE9lokb\xF3l:",partnersLink:"{count} partner",andMore:"\xC9s m\xE9g {count}...",legitimateInterestNotice:"N\xE9h\xE1ny partner jogos \xE9rdekre hivatkozik az \xD6n adatainak feldolgoz\xE1s\xE1hoz. 
\xD6nnek joga van tiltakozni ez ellen a feldolgoz\xE1s ellen, testreszabni v\xE1laszt\xE1sait, \xE9s b\xE1rmikor visszavonni hozz\xE1j\xE1rul\xE1s\xE1t.",scopeServiceSpecific:"Az \xD6n hozz\xE1j\xE1rul\xE1sa csak erre a webhelyre vonatkozik, \xE9s nem \xE9rinti m\xE1s szolg\xE1ltat\xE1sokat.",scopeGroup:"A v\xE1laszt\xE1sa az ebben a csoportban l\xE9v\u0151 \xF6sszes weboldalunkra vonatkozik."},preferenceCenter:{title:"Adatv\xE9delmi be\xE1ll\xEDt\xE1sok",description:"Testreszabhatja adatv\xE9delmi be\xE1ll\xEDt\xE1sait itt. Kiv\xE1laszthatja, hogy milyen t\xEDpus\xFA s\xFCtiket \xE9s nyomk\xF6vet\u0151 technol\xF3gi\xE1kat enged\xE9lyez.",tabs:{purposes:"C\xE9lok",vendors:"Szolg\xE1ltat\xF3k"},purposeItem:{partners:"{count} partner",vendorsUseLegitimateInterest:"{count} szolg\xE1ltat\xF3 jogos \xE9rdekre hivatkozik",examples:"P\xE9ld\xE1k",partnersUsingPurpose:"Ezt a c\xE9lt haszn\xE1l\xF3 partnerek",withYourPermission:"Az \xD6n enged\xE9ly\xE9vel",legitimateInterest:"Jogos \xE9rdek",objectButton:"Tiltakoz\xE1s",objected:"Tiltakozott",rightToObject:"\xD6nnek joga van tiltakozni a jogos \xE9rdeken alapul\xF3 adatkezel\xE9s ellen."},specialPurposes:{title:"Alapvet\u0151 funkci\xF3k (sz\xFCks\xE9ges)",tooltip:"Ezek a webhely m\u0171k\xF6d\xE9s\xE9hez \xE9s biztons\xE1g\xE1hoz sz\xFCks\xE9gesek. Az IAB TCF szerint \xD6n nem tiltakozhat ezen k\xFCl\xF6nleges c\xE9lok ellen."},vendorList:{search:"Szolg\xE1ltat\xF3k keres\xE9se...",showingCount:"{total} szolg\xE1ltat\xF3b\xF3l {filtered} megjelen\xEDt\xE9se",iabVendorsHeading:"IAB regisztr\xE1lt szolg\xE1ltat\xF3k",iabVendorsNotice:"Ezek a partnerek regisztr\xE1lva vannak az IAB Transparency & Consent Framework (TCF) rendszer\xE9ben, amely a hozz\xE1j\xE1rul\xE1sok kezel\xE9s\xE9nek ipar\xE1gi szabv\xE1nya",customVendorsHeading:"Egyedi partnerek",customVendorsNotice:"Ezek olyan egyedi partnerek, akik nincsenek regisztr\xE1lva az IAB Transparency & Consent Framework (TCF) rendszer\xE9ben. 
Az \xD6n hozz\xE1j\xE1rul\xE1sa alapj\xE1n kezelik az adatokat, \xE9s az IAB-regisztr\xE1lt szolg\xE1ltat\xF3kt\xF3l elt\xE9r\u0151 adatv\xE9delmi gyakorlatot folytathatnak.",purposes:"C\xE9lok",specialPurposes:"K\xFCl\xF6nleges c\xE9lok",specialFeatures:"K\xFCl\xF6nleges funkci\xF3k",features:"Funkci\xF3k",dataCategories:"Adatkateg\xF3ri\xE1k",usesCookies:"S\xFCtiket haszn\xE1l",nonCookieAccess:"Nem s\xFCti alap\xFA hozz\xE1f\xE9r\xE9s",maxAge:"Max. \xE9lettartam: {days} nap",retention:"Meg\u0151rz\xE9s: {days} nap",legitimateInterest:"Jogos \xE9rdek",privacyPolicy:"Adatv\xE9delmi szab\xE1lyzat",storageDisclosure:"T\xE1rol\xE1si t\xE1j\xE9koztat\xF3",requiredNotice:"A webhely m\u0171k\xF6d\xE9s\xE9hez sz\xFCks\xE9ges, nem kapcsolhat\xF3 ki"},footer:{consentStorage:'A hozz\xE1j\xE1rul\xE1si be\xE1ll\xEDt\xE1sokat egy "euconsent-v2" nev\u0171 s\xFCtiben t\xE1roljuk 13 h\xF3napig. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"\xD6sszes elfogad\xE1sa",rejectAll:"\xD6sszes elutas\xEDt\xE1sa",customize:"Testreszab\xE1s",saveSettings:"Be\xE1ll\xEDt\xE1sok ment\xE9se",loading:"Bet\xF6lt\xE9s...",showingSelectedVendor:"A kiv\xE1lasztott szolg\xE1ltat\xF3 megjelen\xEDt\xE9se",clearSelection:"T\xF6rl\xE9s",customPartner:"IAB-n k\xEDv\xFCli egyedi partner"}}},ki={common:{acceptAll:"Terima Semua",rejectAll:"Tolak Semua",customize:"Sesuaikan",save:"Simpan Pengaturan"},cookieBanner:{title:"Kami menghargai privasi Anda",description:"Situs ini menggunakan cookie untuk meningkatkan pengalaman penelusuran Anda, menganalisis lalu lintas situs, dan menampilkan konten yang dipersonalisasi."},consentManagerDialog:{title:"Pengaturan Privasi",description:"Atur preferensi privasi Anda di sini. 
Anda dapat memilih jenis cookie dan teknologi pelacakan yang diizinkan."},consentTypes:{necessary:{title:"Sangat Diperlukan",description:"Cookie ini penting agar situs web dapat berfungsi dengan baik dan tidak dapat dinonaktifkan."},functionality:{title:"Fungsionalitas",description:"Cookie ini memungkinkan peningkatan fungsionalitas dan personalisasi situs web."},marketing:{title:"Pemasaran",description:"Cookie ini digunakan untuk menampilkan iklan yang relevan dan melacak efektivitasnya."},measurement:{title:"Analitik",description:"Cookie ini membantu kami memahami bagaimana pengunjung berinteraksi dengan situs web dan meningkatkan kinerjanya."},experience:{title:"Pengalaman",description:"Cookie ini membantu kami memberikan pengalaman pengguna yang lebih baik dan menguji fitur baru."}},frame:{title:"Setujui {category} untuk melihat konten ini.",actionButton:"Aktifkan persetujuan {category}"},legalLinks:{privacyPolicy:"Kebijakan Privasi",cookiePolicy:"Kebijakan Cookie",termsOfService:"Syarat Layanan"},iab:{banner:{title:"Pengaturan Privasi",description:"Kami dan {partnerCount} mitra kami menyimpan dan/atau mengakses informasi pada perangkat Anda dan memproses data pribadi, seperti pengidentifikasi unik dan data penelusuran, untuk situs web ini, untuk:",partnersLink:"{count} mitra",andMore:"Dan {count} lainnya...",legitimateInterestNotice:"Beberapa mitra mengklaim kepentingan sah untuk memproses data Anda. Anda memiliki hak untuk menolak pemrosesan ini, menyesuaikan pilihan Anda, dan menarik persetujuan Anda kapan saja.",scopeServiceSpecific:"Persetujuan Anda hanya berlaku untuk situs web ini dan tidak memengaruhi layanan lainnya.",scopeGroup:"Pilihan Anda berlaku untuk semua situs web kami dalam grup ini."},preferenceCenter:{title:"Pengaturan Privasi",description:"Atur preferensi privasi Anda di sini. 
Anda dapat memilih jenis cookie dan teknologi pelacakan yang diizinkan.",tabs:{purposes:"Tujuan",vendors:"Vendor"},purposeItem:{partners:"{count} mitra",vendorsUseLegitimateInterest:"{count} vendor mengklaim kepentingan sah",examples:"Contoh",partnersUsingPurpose:"Mitra yang Menggunakan Tujuan Ini",withYourPermission:"Dengan Izin Anda",legitimateInterest:"Kepentingan Sah",objectButton:"Keberatan",objected:"Ditolak",rightToObject:"Anda memiliki hak untuk menolak pemrosesan berdasarkan kepentingan sah."},specialPurposes:{title:"Fungsi Penting (Wajib)",tooltip:"Ini diperlukan untuk fungsionalitas dan keamanan situs. Per IAB TCF, Anda tidak dapat menolak tujuan khusus ini."},vendorList:{search:"Cari vendor...",showingCount:"{filtered} dari {total} vendor",iabVendorsHeading:"Vendor Terdaftar IAB",iabVendorsNotice:"Mitra-mitra ini terdaftar di IAB Transparency & Consent Framework (TCF), standar industri untuk mengelola persetujuan",customVendorsHeading:"Mitra Kustom",customVendorsNotice:"Ini adalah mitra kustom yang tidak terdaftar di IAB Transparency & Consent Framework (TCF). Mereka memproses data berdasarkan persetujuan Anda dan mungkin memiliki praktik privasi yang berbeda dari vendor terdaftar IAB.",purposes:"Tujuan",specialPurposes:"Tujuan Khusus",specialFeatures:"Fitur Khusus",features:"Fitur",dataCategories:"Kategori Data",usesCookies:"Menggunakan Cookie",nonCookieAccess:"Akses Non-Cookie",maxAge:"Usia Maks: {days}h",retention:"Retensi: {days}h",legitimateInterest:"Kepent. Sah",privacyPolicy:"Kebijakan Privasi",storageDisclosure:"Pengungkapan Penyimpanan",requiredNotice:"Diperlukan untuk fungsionalitas situs, tidak dapat dinonaktifkan"},footer:{consentStorage:'Preferensi persetujuan disimpan dalam cookie bernama "euconsent-v2" selama 13 bulan. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Terima Semua",rejectAll:"Tolak Semua",customize:"Sesuaikan",saveSettings:"Simpan Pengaturan",loading:"Memuat...",showingSelectedVendor:"Menampilkan vendor terpilih",clearSelection:"Bersihkan",customPartner:"Mitra kustom tidak terdaftar di IAB"}}},vi={common:{acceptAll:"Sam\xFEykkja allt",rejectAll:"Hafna \xF6llu",customize:"S\xE9rsn\xED\xF0a",save:"Vista stillingar"},cookieBanner:{title:"Vi\xF0 metum fri\xF0helgi \xFE\xEDna",description:"\xDEessi vefur notar vafrak\xF6kur til a\xF0 b\xE6ta vafraupplifun \xFE\xEDna, greina umfer\xF0 \xE1 vefnum og s\xFDna pers\xF3numi\xF0a\xF0 efni."},consentManagerDialog:{title:"Pers\xF3nuverndastillingar",description:"S\xE9rsn\xED\xF0a\xF0u pers\xF3nuverndastillingar \xFE\xEDnar h\xE9r. \xDE\xFA getur vali\xF0 hva\xF0a tegundir af vafrak\xF6kum og rakningart\xE6kni \xFE\xFA leyfir."},consentTypes:{necessary:{title:"Nau\xF0synlegar",description:"\xDEessar vafrak\xF6kur eru nau\xF0synlegar til a\xF0 vefs\xED\xF0an virki r\xE9tt og ekki er h\xE6gt a\xF0 sl\xF6kkva \xE1 \xFEeim."},functionality:{title:"Virkni",description:"\xDEessar vafrak\xF6kur gera m\xF6gulegt a\xF0 auka virkni og pers\xF3numi\xF0a vefs\xED\xF0una."},marketing:{title:"Marka\xF0ssetning",description:"\xDEessar vafrak\xF6kur eru nota\xF0ar til a\xF0 birta vi\xF0eigandi augl\xFDsingar og fylgjast me\xF0 \xE1rangri \xFEeirra."},measurement:{title:"Greining",description:"\xDEessar vafrak\xF6kur hj\xE1lpa okkur a\xF0 skilja hvernig gestir nota vefs\xED\xF0una og b\xE6ta frammist\xF6\xF0u hennar."},experience:{title:"Upplifun",description:"\xDEessar vafrak\xF6kur hj\xE1lpa okkur a\xF0 veita betri notendaupplifun og pr\xF3fa n\xFDja eiginleika."}},frame:{title:"Sam\xFEykktu {category} sam\xFEykki til a\xF0 sko\xF0a \xFEetta efni.",actionButton:"Virkja {category} sam\xFEykki"},legalLinks:{privacyPolicy:"Pers\xF3nuverndarstefna",cookiePolicy:"Stefna um 
vafrak\xF6kur",termsOfService:"\xDEj\xF3nustuskilm\xE1lar"},iab:{banner:{title:"Pers\xF3nuverndastillingar",description:"Vi\xF0 og {partnerCount} samstarfsa\xF0ilar okkar geymum og/e\xF0a h\xF6fum a\xF0gang a\xF0 uppl\xFDsingum \xE1 t\xE6kinu \xFE\xEDnu og vinnum pers\xF3nuuppl\xFDsingar, svo sem einst\xF6k au\xF0kenni og vafrauppl\xFDsingar, fyrir \xFEessa vefs\xED\xF0u, til a\xF0:",partnersLink:"{count} samstarfsa\xF0ilar",andMore:"Og {count} til vi\xF0b\xF3tar...",legitimateInterestNotice:"Sumir samstarfsa\xF0ilar krefjast l\xF6gm\xE6tra hagsmuna til a\xF0 vinna g\xF6gnin \xFE\xEDn. \xDE\xFA \xE1tt r\xE9tt \xE1 a\xF0 andm\xE6la \xFEessari vinnslu, s\xE9rsn\xED\xF0a val \xFEitt og draga sam\xFEykki \xFEitt til baka hven\xE6r sem er.",scopeServiceSpecific:"Sam\xFEykki \xFEitt gildir a\xF0eins fyrir \xFEessa vefs\xED\xF0u og hefur ekki \xE1hrif \xE1 a\xF0rar \xFEj\xF3nustur.",scopeGroup:"Val \xFEitt gildir \xE1 \xF6llum vefs\xED\xF0um okkar \xED \xFEessum h\xF3p."},preferenceCenter:{title:"Pers\xF3nuverndastillingar",description:"S\xE9rsn\xED\xF0a\xF0u pers\xF3nuverndastillingar \xFE\xEDnar h\xE9r. \xDE\xFA getur vali\xF0 hva\xF0a tegundir af vafrak\xF6kum og rakningart\xE6kni \xFE\xFA leyfir.",tabs:{purposes:"Tilgangur",vendors:"S\xF6lua\xF0ilar"},purposeItem:{partners:"{count} samstarfsa\xF0ilar",vendorsUseLegitimateInterest:"{count} s\xF6lua\xF0ilar krefjast l\xF6gm\xE6tra hagsmuna",examples:"D\xE6mi",partnersUsingPurpose:"Samstarfsa\xF0ilar sem nota \xFEennan tilgang",withYourPermission:"Me\xF0 \xFE\xEDnu leyfi",legitimateInterest:"L\xF6gm\xE6tir hagsmunir",objectButton:"Andm\xE6la",objected:"Andm\xE6lt",rightToObject:"\xDE\xFA \xE1tt r\xE9tt \xE1 a\xF0 andm\xE6la vinnslu sem byggir \xE1 l\xF6gm\xE6tum hagsmunum."},specialPurposes:{title:"Nau\xF0synleg virkni (krafist)",tooltip:"\xDEetta er nau\xF0synlegt fyrir virkni og \xF6ryggi vefsins. 
Samkv\xE6mt IAB TCF getur\xF0u ekki andm\xE6lt \xFEessum s\xE9rst\xF6ku markmi\xF0um."},vendorList:{search:"Leita a\xF0 s\xF6lua\xF0ilum...",showingCount:"{filtered} af {total} s\xF6lua\xF0ilum",iabVendorsHeading:"IAB skr\xE1\xF0ir s\xF6lua\xF0ilar",iabVendorsNotice:"\xDEessir samstarfsa\xF0ilar eru skr\xE1\xF0ir hj\xE1 IAB Transparency & Consent Framework (TCF), i\xF0na\xF0arsta\xF0li til a\xF0 stj\xF3rna sam\xFEykki",customVendorsHeading:"S\xE9rsni\xF0nir samstarfsa\xF0ilar",customVendorsNotice:"\xDEetta eru s\xE9rsni\xF0nir samstarfsa\xF0ilar sem eru ekki skr\xE1\xF0ir hj\xE1 IAB Transparency & Consent Framework (TCF). \xDEeir vinna g\xF6gn byggt \xE1 sam\xFEykki \xFE\xEDnu og g\xE6tu haft a\xF0rar pers\xF3nuverndarreglur en IAB-skr\xE1\xF0ir s\xF6lua\xF0ilar.",purposes:"Tilgangur",specialPurposes:"S\xE9rstakur tilgangur",specialFeatures:"S\xE9rstakir eiginleikar",features:"Eiginleikar",dataCategories:"Gagnaflokkar",usesCookies:"Notar vafrak\xF6kur",nonCookieAccess:"A\xF0gangur \xE1n vafrakaka",maxAge:"H\xE1marksaldur: {days}d",retention:"Var\xF0veisla: {days}d",legitimateInterest:"L\xF6gm. hagsmunir",privacyPolicy:"Pers\xF3nuverndarstefna",storageDisclosure:"Uppl\xFDsingar um geymslu",requiredNotice:"Nau\xF0synlegt fyrir virkni vefsins, ekki h\xE6gt a\xF0 sl\xF6kkva \xE1"},footer:{consentStorage:'Sam\xFEykkisstillingar eru geymdar \xED vafrak\xF6ku sem heitir "euconsent-v2" \xED 13 m\xE1nu\xF0i. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Sam\xFEykkja allt",rejectAll:"Hafna \xF6llu",customize:"S\xE9rsn\xED\xF0a",saveSettings:"Vista stillingar",loading:"Hle\xF0ur...",showingSelectedVendor:"S\xFDnir valdan s\xF6lua\xF0ila",clearSelection:"Hreinsa",customPartner:"S\xE9rsni\xF0inn samstarfsa\xF0ili ekki skr\xE1\xF0ur hj\xE1 IAB"}}},yi={common:{acceptAll:"Accetta tutto",rejectAll:"Rifiuta tutto",customize:"Personalizza",save:"Salva impostazioni"},cookieBanner:{title:"Rispettiamo la tua privacy",description:"Questo sito utilizza cookies per migliorare la tua esperienza di navigazione, analizzare il traffico e mostrare contenuti personalizzati."},consentManagerDialog:{title:"Impostazioni di privacy",description:"Personalizza le tue impostazioni di privacy. Puoi scegliere i tipi di cookies e tecnologie di tracciamento che autorizzi."},consentTypes:{necessary:{title:"Strettamente necessari",description:"Questi cookies sono essenziali per il sito web per funzionare correttamente e non possono essere disabilitati."},functionality:{title:"Funzionalit\xE0",description:"Questi cookies permettono di migliorare la funzionalit\xE0 e la personalizzazione del sito web."},marketing:{title:"Marketing",description:"Questi cookies sono utilizzati per fornire pubblicit\xE0 pertinenti e misurare la loro efficacia."},measurement:{title:"Misurazione",description:"Questi cookies ci aiutano a comprendere come i visitatori interagiscano con il sito web per migliorarne le sue prestazioni."},experience:{title:"Esperienza",description:"Questi cookies ci aiutano a fornire una migliore esperienza utente e per testare nuove funzionalit\xE0."}},frame:{title:"Accetta {category} per visualizzare questo contenuto",actionButton:"Abilita consenso {category}"},legalLinks:{privacyPolicy:"Informativa sulla Privacy",cookiePolicy:"Politica sui Cookie",termsOfService:"Termini di Servizio"},iab:{banner:{title:"Impostazioni di privacy",description:"Noi e i 
nostri {partnerCount} partner archiviamo e/o accediamo a informazioni su un dispositivo e trattiamo dati personali, come identificatori univoci e informazioni di navigazione, per questo sito web, per:",partnersLink:"{count} partner",andMore:"E altri {count}...",legitimateInterestNotice:"Alcuni partner rivendicano un interesse legittimo per trattare i tuoi dati. Hai il diritto di opporti a questo trattamento, personalizzare le tue scelte e revocare il tuo consenso in qualsiasi momento.",scopeServiceSpecific:"Il tuo consenso si applica solo a questo sito web e non influisce su altri servizi.",scopeGroup:"La tua scelta si applica a tutti i nostri siti web di questo gruppo."},preferenceCenter:{title:"Impostazioni di privacy",description:"Personalizza le tue impostazioni di privacy. Puoi scegliere i tipi di cookies e tecnologie di tracciamento che autorizzi.",tabs:{purposes:"Finalit\xE0",vendors:"Fornitori"},purposeItem:{partners:"{count} partner",vendorsUseLegitimateInterest:"{count} fornitori rivendicano un interesse legittimo",examples:"Esempi",partnersUsingPurpose:"Partner che utilizzano questa finalit\xE0",withYourPermission:"Con la tua autorizzazione",legitimateInterest:"Interesse legittimo",objectButton:"Opponiti",objected:"Opposizione registrata",rightToObject:"Hai il diritto di opporti al trattamento basato sull\u2019interesse legittimo."},specialPurposes:{title:"Funzioni essenziali (obbligatorie)",tooltip:"Queste sono necessarie per la funzionalit\xE0 e la sicurezza del sito. 
Secondo l\u2019IAB TCF, non puoi opporti a queste finalit\xE0 speciali."},vendorList:{search:"Cerca fornitori...",showingCount:"{filtered} di {total} fornitori",iabVendorsHeading:"Fornitori registrati IAB",iabVendorsNotice:"Questi partner sono registrati presso l\u2019IAB Transparency & Consent Framework (TCF), uno standard industriale per la gestione del consenso",customVendorsHeading:"Partner personalizzati",customVendorsNotice:"Si tratta di partner personalizzati non registrati presso l\u2019IAB Transparency & Consent Framework (TCF). Trattano i dati sulla base del tuo consenso e possono avere pratiche di privacy diverse rispetto ai fornitori registrati IAB.",purposes:"Finalit\xE0",specialPurposes:"Finalit\xE0 speciali",specialFeatures:"Funzionalit\xE0 speciali",features:"Funzionalit\xE0",dataCategories:"Categorie di dati",usesCookies:"Utilizza cookie",nonCookieAccess:"Accesso senza cookie",maxAge:"Durata massima: {days}g",retention:"Conservazione: {days}g",legitimateInterest:"Int. legittimo",privacyPolicy:"Informativa sulla privacy",storageDisclosure:"Informativa sull\u2019archiviazione",requiredNotice:"Richiesto per la funzionalit\xE0 del sito, non pu\xF2 essere disabilitato"},footer:{consentStorage:'Le preferenze di consenso vengono memorizzate in un cookie denominato "euconsent-v2" per 13 mesi. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Accetta tutto",rejectAll:"Rifiuta tutto",customize:"Personalizza",saveSettings:"Salva impostazioni",loading:"Caricamento...",showingSelectedVendor:"Visualizzazione del fornitore selezionato",clearSelection:"Cancella",customPartner:"Partner personalizzato non registrato presso l\u2019IAB"}}},bi={common:{acceptAll:"All akzept\xE9ieren",rejectAll:"All refus\xE9ieren",customize:"Upassen",save:"Astellunge sp\xE4icheren"},cookieBanner:{title:"Mir sch\xE4tzen \xC4r Privatsph\xE4r",description:"D\xEBs Webs\xE4it benotzt Cookien fir \xC4r Surferfahrung ze verbesseren, Webs\xE4it-Verk\xE9ier ze analys\xE9ieren an personalis\xE9ierten Inhalt unzebidden."},consentManagerDialog:{title:"Privatsph\xE4r Astellungen",description:"Passt \xC4r Privatsph\xE4r Astellungen hei un. Dir k\xEBnnt wielen w\xE9i eng Zorte vu Cookien an Tracking-Technologien Dir erlaabt."},consentTypes:{necessary:{title:"Strikt n\xE9ideg",description:"D\xEBs Cookien si wesentlech fir datt d'Webs\xE4it richteg funktion\xE9iert a k\xEBnnen net desaktiv\xE9iert ginn."},functionality:{title:"Funktionalit\xE9it",description:"D\xEBs Cookien erm\xE9iglechen erweidert Funktionalit\xE9it a Personalis\xE9ierung vun der Webs\xE4it."},marketing:{title:"Marketing",description:"D\xEBs Cookien ginn benotzt fir relevant Reklammen ze liwweren an hir Wierksamkeet ze verfolgen."},measurement:{title:"Analytik",description:"D\xEBs Cookien h\xEBllefen eis ze verstoen w\xE9i d'Besicher mat der Webs\xE4it interag\xE9ieren an hir Leeschtung verbesseren."},experience:{title:"Erfahrung",description:"D\xEBs Cookien h\xEBllefen eis eng besser Benotzererfabrung ze bidden an nei Funktiounen ze testen."}},frame:{title:"Akzept\xE9iert {category} Zoust\xEBmmung fir d\xEBsen Inhalt ze gesinn.",actionButton:"{category} Zoust\xEBmmung 
aktiv\xE9ieren"},legalLinks:{privacyPolicy:"Dateschutzrichtlinn",cookiePolicy:"Cookie-Politik",termsOfService:"Notzungsbedingungen"},iab:{banner:{title:"Privatsph\xE4r Astellungen",description:"Mir an eis {partnerCount} Partner sp\xE4icheren an/oder gr\xE4ifen op Informatiounen op \xC4rem Apparat zou a veraarbechten pers\xE9inlech Daten, w\xE9i eenzegaarteg Identifiz\xE9ierer a Browserdaten, fir d\xEBs Webs\xE4it, fir:",partnersLink:"{count} Partner",andMore:"An nach {count}...",legitimateInterestNotice:"E puer Partner behaapten e berechtegten Interessi fir \xC4r Daten ze veraarbechten. Dir hutt d\u2019Recht g\xE9int d\xEBs Veraarbechtung ze protest\xE9ieren, \xC4r Wiel unzepassen an \xC4r Zoust\xEBmmung zu all Moment zr\xE9ckzez\xE9ien.",scopeServiceSpecific:"\xC4r Zoust\xEBmmung g\xEBllt n\xEBmme fir d\xEBs Webs\xE4it a w\xE4ert aner Servicer net beaflossen.",scopeGroup:"\xC4r Auswiel g\xEBllt fir all eis Webs\xE4iten an d\xEBser Grupp."},preferenceCenter:{title:"Privatsph\xE4r Astellungen",description:"Passt \xC4r Privatsph\xE4r Astellungen hei un. Dir k\xEBnnt wielen w\xE9i eng Zorte vu Cookien an Tracking-Technologien Dir erlaabt.",tabs:{purposes:"Zwecker",vendors:"Ubidder"},purposeItem:{partners:"{count} Partner",vendorsUseLegitimateInterest:"{count} Ubidder behaapten berechtegten Interessi",examples:"Beispiller",partnersUsingPurpose:"Partner d\xE9i d\xEBsen Zweck benotzen",withYourPermission:"Mat \xC4rer Erlaabnis",legitimateInterest:"Berechtegten Interessi",objectButton:"Protest\xE9ieren",objected:"Protest\xE9iert",rightToObject:"Dir hutt d\u2019Recht g\xE9int d\u2019Veraarbechtung op Basis vu berechtegten Interessi ze protest\xE9ieren."},specialPurposes:{title:"Wichteg Funktiounen (erfuerderlech)",tooltip:"D\xEBs sinn erfuerderlech fir d\u2019Funktionalit\xE9it an d\u2019S\xE9cherheet vum Site. 
Gem\xE9iss IAB TCF k\xEBnnt Dir net g\xE9int d\xEBs speziell Zwecker protest\xE9ieren."},vendorList:{search:"Ubidder sichen...",showingCount:"{filtered} vun {total} Ubidder",iabVendorsHeading:"IAB registr\xE9iert Ubidder",iabVendorsNotice:"D\xEBs Partner sinn am IAB Transparency & Consent Framework (TCF) registr\xE9iert, en Industriestandard fir d\u2019Gestioun vun der Zoust\xEBmmung",customVendorsHeading:"Benotzerdefin\xE9iert Partner",customVendorsNotice:"D\xEBst si benotzerdefin\xE9iert Partner d\xE9i net am IAB Transparency & Consent Framework (TCF) registr\xE9iert sinn. Si veraarbechten Daten op Basis vun \xC4rer Zoust\xEBmmung a k\xEBnnen aner Dateschutzpraktiken hunn w\xE9i IAB-registr\xE9iert Ubidder.",purposes:"Zwecker",specialPurposes:"Speziell Zwecker",specialFeatures:"Speziell Fonctiounen",features:"Fonctiounen",dataCategories:"Datekategorien",usesCookies:"Benotzt Cookien",nonCookieAccess:"Net-Cookie-Zougang",maxAge:"Max Alter: {days}d",retention:"Bewaaren: {days}d",legitimateInterest:"Ber. Interessi",privacyPolicy:"Dateschutzrichtlinn",storageDisclosure:"Sp\xE4icher-Offenlegung",requiredNotice:"Erfuerderlech fir d\u2019Funktionalit\xE9it vum Site, kann net desaktiv\xE9iert ginn"},footer:{consentStorage:'Zoust\xEBmmungsvirl\xE9iften ginn an engem Cookie mam Numm "euconsent-v2" fir 13 M\xE9int gesp\xE4ichert. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"All akzept\xE9ieren",rejectAll:"All refus\xE9ieren",customize:"Upassen",saveSettings:"Astellunge sp\xE4icheren",loading:"Lueden...",showingSelectedVendor:"Gewielten Ubidder g\xEBtt ugewisen",clearSelection:"L\xE4schen",customPartner:"Benotzerdefin\xE9ierte Partner net am IAB registr\xE9iert"}}},wi={common:{acceptAll:"Priimti visus",rejectAll:"Atmesti visus",customize:"Tinkinti",save:"I\u0161saugoti nustatymus"},cookieBanner:{title:"Mes vertiname j\u016Bs\u0173 privatum\u0105",description:"\u0160i svetain\u0117 naudoja slapukus nar\u0161ymo patir\u010Diai gerinti, svetain\u0117s srautui analizuoti ir rodyti jums pritaikyt\u0105 turin\u012F."},consentManagerDialog:{title:"Privatumo nustatymai",description:"\u010Cia galite tinkinti savo privatumo nustatymus. Galite pasirinkti, koki\u0173 tip\u0173 slapukus ir sekimo technologijas leid\u017Eiate naudoti."},consentTypes:{necessary:{title:"B\u016Btinieji",description:"\u0160ie slapukai yra b\u016Btini tinkamam svetain\u0117s veikimui ir negali b\u016Bti i\u0161jungti."},functionality:{title:"Funkcionalumo",description:"\u0160ie slapukai \u012Fgalina i\u0161pl\u0117stin\u012F funkcionalum\u0105 ir svetain\u0117s personalizavim\u0105."},marketing:{title:"Rinkodaros",description:"\u0160ie slapukai naudojami pateikti aktualius skelbimus ir sekti j\u0173 efektyvum\u0105."},measurement:{title:"Analitikos",description:"\u0160ie slapukai padeda mums suprasti, kaip lankytojai s\u0105veikauja su svetaine, ir pagerinti jos veikim\u0105."},experience:{title:"Patirties",description:"\u0160ie slapukai padeda mums u\u017Etikrinti geresn\u0119 vartotojo patirt\u012F ir i\u0161bandyti naujas funkcijas."}},frame:{title:"Priimkite {category} sutikim\u0105, kad gal\u0117tum\u0117te per\u017Ei\u016Br\u0117ti \u0161\u012F turin\u012F.",actionButton:"\u012Egalinti {category} sutikim\u0105"},legalLinks:{privacyPolicy:"Privatumo 
politika",cookiePolicy:"Slapuk\u0173 politika",termsOfService:"Naudojimosi s\u0105lygos"},iab:{banner:{title:"Privatumo nustatymai",description:"Mes ir m\u016Bs\u0173 {partnerCount} partneriai saugome ir (arba) pasiekiame informacij\u0105 j\u016Bs\u0173 \u012Frenginyje ir tvarkome asmens duomenis, tokius kaip unikal\u016Bs identifikatoriai ir nar\u0161ymo duomenys, \u0161ioje svetain\u0117je, kad gal\u0117tume:",partnersLink:"{count} partneriai",andMore:"Ir dar {count}...",legitimateInterestNotice:"Kai kurie partneriai teigia turintys teis\u0117t\u0105 interes\u0105 tvarkyti j\u016Bs\u0173 duomenis. J\u016Bs turite teis\u0119 nesutikti su tokiu tvarkymu, tinkinti savo pasirinkimus ir bet kada at\u0161aukti sutikim\u0105.",scopeServiceSpecific:"J\u016Bs\u0173 sutikimas taikomas tik \u0161iai svetainei ir netur\u0117s \u012Ftakos kitoms paslaugoms.",scopeGroup:"J\u016Bs\u0173 pasirinkimas taikomas visoms m\u016Bs\u0173 svetain\u0117ms \u0161ioje grup\u0117je."},preferenceCenter:{title:"Privatumo nustatymai",description:"\u010Cia galite tinkinti savo privatumo nustatymus. Galite pasirinkti, koki\u0173 tip\u0173 slapukus ir sekimo technologijas leid\u017Eiate naudoti.",tabs:{purposes:"Tikslai",vendors:"Tiek\u0117jai"},purposeItem:{partners:"{count} partneriai",vendorsUseLegitimateInterest:"{count} tiek\u0117jai teigia turintys teis\u0117t\u0105 interes\u0105",examples:"Pavyzd\u017Eiai",partnersUsingPurpose:"Partneriai, naudojantys \u0161\u012F tiksl\u0105",withYourPermission:"Su j\u016Bs\u0173 leidimu",legitimateInterest:"Teis\u0117tas interesas",objectButton:"Nesutikti",objected:"Prie\u0161tarauta",rightToObject:"J\u016Bs turite teis\u0119 nesutikti su tvarkymu, pagr\u012Fstu teis\u0117tu interesu."},specialPurposes:{title:"Esmin\u0117s funkcijos (privaloma)",tooltip:"Jos reikalingos svetain\u0117s funkcionalumui ir saugumui u\u017Etikrinti. 
Pagal IAB TCF negalite nesutikti su \u0161iais specialiais tikslais."},vendorList:{search:"Ie\u0161koti tiek\u0117j\u0173...",showingCount:"Rodoma {filtered} i\u0161 {total} tiek\u0117j\u0173",iabVendorsHeading:"IAB registruoti tiek\u0117jai",iabVendorsNotice:"\u0160ie partneriai yra u\u017Eregistruoti IAB Transparency & Consent Framework (TCF) \u2013 pramon\u0117s standarte, skirtame sutikim\u0173 valdymui",customVendorsHeading:"Pasirinktiniai partneriai",customVendorsNotice:"Tai yra pasirinktiniai partneriai, kurie n\u0117ra u\u017Eregistruoti IAB Transparency & Consent Framework (TCF). Jie tvarko duomenis remdamiesi j\u016Bs\u0173 sutikimu ir gali taikyti kitoki\u0105 privatumo praktik\u0105 nei IAB registruoti tiek\u0117jai.",purposes:"Tikslai",specialPurposes:"Special\u016Bs tikslai",specialFeatures:"Specialios funkcijos",features:"Funkcijos",dataCategories:"Duomen\u0173 kategorijos",usesCookies:"Naudoja slapukus",nonCookieAccess:"Prieiga be slapuk\u0173",maxAge:"Maks. am\u017Eius: {days}d",retention:"Saugojimas: {days}d",legitimateInterest:"Teis\u0117tas int.",privacyPolicy:"Privatumo politika",storageDisclosure:"Informacija apie saugojim\u0105",requiredNotice:"Reikalinga svetain\u0117s funkcionalumui, negalima i\u0161jungti"},footer:{consentStorage:"Sutikimo nuostatos saugomos slapuke pavadinimu \u201Eeuconsent-v2\u201C 13 m\u0117nesi\u0173. 
The storage duration may be refreshed when you update your preferences."}},common:{acceptAll:"Priimti visus",rejectAll:"Atmesti visus",customize:"Tinkinti",saveSettings:"I\u0161saugoti nustatymus",loading:"\u012Ekeliama...",showingSelectedVendor:"Rodomas pasirinktas tiek\u0117jas",clearSelection:"I\u0161valyti",customPartner:"Pasirinktinis partneris, ne\u012Fregistruotas IAB"}}},Ci={common:{acceptAll:"Pie\u0146emt visu",rejectAll:"Noraid\u012Bt visu",customize:"Piel\u0101got",save:"Saglab\u0101t iestat\u012Bjumus"},cookieBanner:{title:"M\u0113s nov\u0113rt\u0113jam j\u016Bsu priv\u0101tumu",description:"\u0160\u012B vietne izmanto s\u012Bkdatnes, lai uzlabotu j\u016Bsu p\u0101rl\u016Bko\u0161anas pieredzi, analiz\u0113tu vietnes datpl\u016Bsmu un r\u0101d\u012Btu personaliz\u0113tu saturu."},consentManagerDialog:{title:"Priv\u0101tuma iestat\u012Bjumi",description:"Piel\u0101gojiet savus priv\u0101tuma iestat\u012Bjumus \u0161eit. J\u016Bs varat izv\u0113l\u0113ties, k\u0101da veida s\u012Bkdatnes un izseko\u0161anas tehnolo\u0123ijas at\u013Caut."},consentTypes:{necessary:{title:"Stingri nepiecie\u0161am\u0101s",description:"\u0160\u012Bs s\u012Bkdatnes ir b\u016Btiskas, lai vietne darbotos pareizi, un t\u0101s nevar atsp\u0113jot."},functionality:{title:"Funkcionalit\u0101te",description:"\u0160\u012Bs s\u012Bkdatnes nodro\u0161ina uzlabotu funkcionalit\u0101ti un vietnes personaliz\u0101ciju."},marketing:{title:"M\u0101rketings",description:"\u0160\u012Bs s\u012Bkdatnes tiek izmantotas, lai pieg\u0101d\u0101tu atbilsto\u0161as rekl\u0101mas un izsekotu to efektivit\u0101ti."},measurement:{title:"Anal\u012Btika",description:"\u0160\u012Bs s\u012Bkdatnes pal\u012Bdz mums saprast, k\u0101 apmekl\u0113t\u0101ji mijiedarbojas ar vietni un uzlabo t\u0101s veiktsp\u0113ju."},experience:{title:"Pieredze",description:"\u0160\u012Bs s\u012Bkdatnes pal\u012Bdz mums nodro\u0161in\u0101t lab\u0101ku lietot\u0101ja pieredzi un test\u0113t jaunas 
funkcijas."}},frame:{title:"Pie\u0146emiet {category} piekri\u0161anu, lai skat\u012Btu \u0161o saturu.",actionButton:"Iesp\u0113jot {category} piekri\u0161anu"},legalLinks:{privacyPolicy:"Priv\u0101tuma politika",cookiePolicy:"S\u012Bkdat\u0146u politika",termsOfService:"Pakalpojumu snieg\u0161anas noteikumi"},iab:{banner:{title:"Priv\u0101tuma iestat\u012Bjumi",description:"M\u0113s un m\u016Bsu {partnerCount} partneri uzglab\u0101jam un/vai piek\u013C\u016Bstam inform\u0101cijai j\u016Bsu ier\u012Bc\u0113 un apstr\u0101d\u0101jam personas datus, piem\u0113ram, unik\u0101lus identifikatorus un p\u0101rl\u016Bko\u0161anas datus, \u0161ai vietnei, lai:",partnersLink:"{count} partneri",andMore:"Un v\u0113l {count}...",legitimateInterestNotice:"Da\u017Ei partneri pieprasa le\u0123it\u012Bmas intereses j\u016Bsu datu apstr\u0101dei. Jums ir ties\u012Bbas iebilst pret \u0161o apstr\u0101di, piel\u0101got savu izv\u0113li un jebkur\u0101 laik\u0101 atsaukt savu piekri\u0161anu.",scopeServiceSpecific:"J\u016Bsu piekri\u0161ana attiecas tikai uz \u0161o vietni un neietekm\u0113s citus pakalpojumus.",scopeGroup:"J\u016Bsu izv\u0113le attiecas uz vis\u0101m m\u016Bsu vietn\u0113m \u0161aj\u0101 grup\u0101."},preferenceCenter:{title:"Priv\u0101tuma iestat\u012Bjumi",description:"Piel\u0101gojiet savus priv\u0101tuma iestat\u012Bjumus \u0161eit. 
J\u016Bs varat izv\u0113l\u0113ties, k\u0101da veida s\u012Bkdatnes un izseko\u0161anas tehnolo\u0123ijas at\u013Caut.",tabs:{purposes:"M\u0113r\u0137i",vendors:"Pieg\u0101d\u0101t\u0101ji"},purposeItem:{partners:"{count} partneri",vendorsUseLegitimateInterest:"{count} pieg\u0101d\u0101t\u0101ji pieprasa le\u0123it\u012Bmas intereses",examples:"Piem\u0113ri",partnersUsingPurpose:"Partneri, kas izmanto \u0161o m\u0113r\u0137i",withYourPermission:"Ar j\u016Bsu at\u013Cauju",legitimateInterest:"Le\u0123it\u012Bm\u0101s intereses",objectButton:"Iebilst",objected:"Iebilsts",rightToObject:"Jums ir ties\u012Bbas iebilst pret apstr\u0101di, kuras pamat\u0101 ir le\u0123it\u012Bmas intereses."},specialPurposes:{title:"B\u016Btiskas funkcijas (nepiecie\u0161ams)",tooltip:"T\u0101s ir nepiecie\u0161amas vietnes funkcionalit\u0101tei un dro\u0161\u012Bbai. Saska\u0146\u0101 ar IAB TCF j\u016Bs nevarat iebilst pret \u0161iem \u012Bpa\u0161ajiem m\u0113r\u0137iem."},vendorList:{search:"Mekl\u0113t pieg\u0101d\u0101t\u0101jus...",showingCount:"R\u0101da {filtered} no {total} pieg\u0101d\u0101t\u0101jiem",iabVendorsHeading:"IAB re\u0123istr\u0113tie pieg\u0101d\u0101t\u0101ji",iabVendorsNotice:"\u0160ie partneri ir re\u0123istr\u0113ti IAB Transparency & Consent Framework (TCF) \u2014 nozares standart\u0101 piekri\u0161anas p\u0101rvald\u012Bbai",customVendorsHeading:"Piel\u0101goti partneri",customVendorsNotice:"\u0160ie ir piel\u0101goti partneri, kas nav re\u0123istr\u0113ti IAB Transparency & Consent Framework (TCF). 
Vi\u0146i apstr\u0101d\u0101 datus, pamatojoties uz j\u016Bsu piekri\u0161anu, un vi\u0146iem var b\u016Bt at\u0161\u0137ir\u012Bga priv\u0101tuma prakse nek\u0101 IAB re\u0123istr\u0113tajiem pieg\u0101d\u0101t\u0101jiem.",purposes:"M\u0113r\u0137i",specialPurposes:"\u012Apa\u0161ie m\u0113r\u0137i",specialFeatures:"\u012Apa\u0161\u0101s funkcijas",features:"Funkcijas",dataCategories:"Datu kategorijas",usesCookies:"Izmanto s\u012Bkdatnes",nonCookieAccess:"Piek\u013Cuve bez s\u012Bkdatn\u0113m",maxAge:"Maks. vecums: {days}d",retention:"Saglab\u0101\u0161ana: {days}d",legitimateInterest:"Le\u0123. intereses",privacyPolicy:"Priv\u0101tuma politika",storageDisclosure:"Inform\u0101cija par glab\u0101\u0161anu",requiredNotice:"Nepiecie\u0161ams vietnes funkcionalit\u0101tei, nevar atsp\u0113jot"},footer:{consentStorage:'Piekri\u0161anas preferences tiek glab\u0101tas s\u012Bkdatn\u0113 ar nosaukumu "euconsent-v2" 13 m\u0113ne\u0161us. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Pie\u0146emt visu",rejectAll:"Noraid\u012Bt visu",customize:"Piel\u0101got",saveSettings:"Saglab\u0101t iestat\u012Bjumus",loading:"Iel\u0101d\u0113...",showingSelectedVendor:"R\u0101da atlas\u012Bto pieg\u0101d\u0101t\u0101ju",clearSelection:"Not\u012Br\u012Bt",customPartner:"Piel\u0101gots partneris, kas nav re\u0123istr\u0113ts IAB"}}},Ii={common:{acceptAll:"A\u010B\u010Betta kollox",rejectAll:"Irrifjuta kollox",customize:"Personalizza",save:"Issejvja s-settings"},cookieBanner:{title:"Napprezzaw il-privatezza tieg\u0127ek",description:"Dan is-sit ju\u017Ca cookies biex itejjeb l-esperjenza tal-browsing tieg\u0127ek, janalizza t-traffiku tas-sit, u juri kontenut personalizzat."},consentManagerDialog:{title:"Settings tal-privatezza",description:"Personalizza s-settings tal-privatezza tieg\u0127ek hawn. 
Tista' tag\u0127\u017Cel liema tipi ta' cookies u teknolo\u0121iji ta' tra\u010B\u010Bar tippermetti."},consentTypes:{necessary:{title:"Strettament ne\u010Bessarji",description:"Dawn il-cookies huma essenzjali biex is-sit web ja\u0127dem sew u ma jistg\u0127ux ji\u0121u di\u017Cattivati."},functionality:{title:"Funzjonalit\xE0",description:"Dawn il-cookies jippermettu funzjonalit\xE0 mtejba u personalizzazzjoni tas-sit web."},marketing:{title:"Marketing",description:"Dawn il-cookies jintu\u017Caw biex iwasslu riklami rilevanti u jittra\u010B\u010Baw l-effettivit\xE0 tag\u0127hom."},measurement:{title:"Analitika",description:"Dawn il-cookies jg\u0127inuna nifhmu kif il-vi\u017Citaturi jintera\u0121ixxu mas-sit web u ntejbu l-prestazzjoni tieg\u0127u."},experience:{title:"Esperjenza",description:"Dawn il-cookies jg\u0127inuna nipprovdu esperjenza a\u0127jar g\u0127all-utent u nittestjaw karatteristi\u010Bi \u0121odda."}},frame:{title:"A\u010B\u010Betta l-kunsens ta' {category} biex tara dan il-kontenut.",actionButton:"Attiva l-kunsens ta' {category}"},legalLinks:{privacyPolicy:"Politika tal-Privatezza",cookiePolicy:"Politika tal-Cookies",termsOfService:"Termini tas-Servizz"},iab:{banner:{title:"Settings tal-privatezza",description:"A\u0127na u l-{partnerCount} sie\u0127eb tag\u0127na na\u0127\u017Cnu u/jew na\u010B\u010Bessaw informazzjoni fuq apparat u nippro\u010Bessaw data personali, b\u0127al identifikaturi uni\u010Bi u data tal-browsing, g\u0127al dan is-sit web, biex:",partnersLink:"{count} sie\u0127eb",andMore:"U {count} o\u0127ra...",legitimateInterestNotice:"Xi s\u0127ab jitolbu interess le\u0121ittimu biex jippro\u010Bessaw id-data tieg\u0127ek. 
G\u0127andek id-dritt li to\u0121\u0121ezzjona g\u0127al dan il-pro\u010Bessar, tippersonalizza l-g\u0127a\u017Cliet tieg\u0127ek, u tirtira l-kunsens tieg\u0127ek fi kwalunkwe \u0127in.",scopeServiceSpecific:"Il-kunsens tieg\u0127ek japplika biss g\u0127al dan is-sit web u ma jaffettwax servizzi o\u0127ra.",scopeGroup:"L-g\u0127a\u017Cla tieg\u0127ek tapplika g\u0127al kull sit web tag\u0127na f'din il-grupp."},preferenceCenter:{title:"Settings tal-privatezza",description:"Personalizza s-settings tal-privatezza tieg\u0127ek hawn. Tista' tag\u0127\u017Cel liema tipi ta' cookies u teknolo\u0121iji ta' tra\u010B\u010Bar tippermetti.",tabs:{purposes:"G\u0127anijiet",vendors:"Bejjieg\u0127a"},purposeItem:{partners:"{count} sie\u0127eb",vendorsUseLegitimateInterest:"{count} bejjieg\u0127 jitolbu interess le\u0121ittimu",examples:"E\u017Cempji",partnersUsingPurpose:"S\u0127ab li Ju\u017Caw dan l-G\u0127an",withYourPermission:"Bil-Permess Tieg\u0127ek",legitimateInterest:"Interess Le\u0121ittimu",objectButton:"O\u0121\u0121ezzjona",objected:"O\u0121\u0121ezzjonat",rightToObject:"G\u0127andek id-dritt li to\u0121\u0121ezzjona g\u0127all-ippro\u010Bessar ibba\u017Cat fuq interess le\u0121ittimu."},specialPurposes:{title:"Funzjonijiet Essenzjali (Me\u0127tie\u0121a)",tooltip:"Dawn huma me\u0127tie\u0121a g\u0127all-funzjonalit\xE0 u s-sigurt\xE0 tas-sit. Skont l-IAB TCF, ma tistax to\u0121\u0121ezzjona g\u0127al dawn l-g\u0127anijiet spe\u010Bjali."},vendorList:{search:"Fittex bejjieg\u0127a...",showingCount:"Qed jintwerew {filtered} minn {total} bejjieg\u0127",iabVendorsHeading:"Bejjieg\u0127a Re\u0121istrati fl-IAB",iabVendorsNotice:"Dawn is-s\u0127ab huma re\u0121istrati mal-IAB Transparency & Consent Framework (TCF), standard tal-industrija g\u0127all-immani\u0121\u0121jar tal-kunsens",customVendorsHeading:"S\u0127ab Personalizzati",customVendorsNotice:"Dawn huma s\u0127ab personalizzati mhux re\u0121istrati mal-IAB Transparency & Consent Framework (TCF). 
Huma jippro\u010Bessaw id-data abba\u017Ci tal-kunsens tieg\u0127ek u jista\u2019 jkollhom prattiki ta\u2019 privatezza differenti minn bejjieg\u0127a re\u0121istrati fl-IAB.",purposes:"G\u0127anijiet",specialPurposes:"G\u0127anijiet Spe\u010Bjali",specialFeatures:"Karatteristi\u010Bi Spe\u010Bjali",features:"Karatteristi\u010Bi",dataCategories:"Kategoriji tad-Data",usesCookies:"Ju\u017Ca l-Cookies",nonCookieAccess:"A\u010B\u010Bess Mhux tal-Cookie",maxAge:"Et\xE0 Massima: {days}j",retention:"\u017Bamma: {days}j",legitimateInterest:"Int. Le\u0121ittimu",privacyPolicy:"Politika tal-Privatezza",storageDisclosure:"\u017Bvelar tal-\u0126a\u017Cna",requiredNotice:"Me\u0127tie\u0121 g\u0127all-funzjonalit\xE0 tas-sit, ma jistax ji\u0121i di\u017Cattivat"},footer:{consentStorage:'Il-preferenzi tal-kunsens huma ma\u0127\u017Cuna f\u2019cookie msemmija "euconsent-v2" g\u0127al 13-il xahar. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"A\u010B\u010Betta kollox",rejectAll:"Irrifjuta kollox",customize:"Personalizza",saveSettings:"Issejvja s-settings",loading:"Qed jillowdja...",showingSelectedVendor:"Qed jintwera l-bejjieg\u0127 mag\u0127\u017Cul",clearSelection:"Ikklerja",customPartner:"Sie\u0127eb personalizzat mhux re\u0121istrat mal-IAB"}}},ji={common:{acceptAll:"Godta alle",rejectAll:"Avsl\xE5 alle",customize:"Tilpass",save:"Lagre innstillinger"},cookieBanner:{title:"Vi verdsetter ditt personvern",description:"Dette nettstedet bruker informasjonskapsler for \xE5 forbedre din nettopplevelse, analysere trafikk og vise personlig tilpasset innhold."},consentManagerDialog:{title:"Personverninnstillinger",description:"Tilpass personverninnstillingene dine her. 
Du kan velge hvilke typer informasjonskapsler og sporingsteknologier du vil tillate."},consentTypes:{necessary:{title:"Strengt n\xF8dvendige",description:"Disse informasjonskapslene er essensielle for at nettstedet skal fungere riktig og kan ikke deaktiveres."},functionality:{title:"Funksjonalitet",description:"Disse informasjonskapslene muliggj\xF8r forbedret funksjonalitet og personalisering av nettstedet."},marketing:{title:"Markedsf\xF8ring",description:"Disse informasjonskapslene brukes til \xE5 levere relevante annonser og spore deres effektivitet."},measurement:{title:"Analyse",description:"Disse informasjonskapslene hjelper oss med \xE5 forst\xE5 hvordan bes\xF8kende samhandler med nettstedet og forbedre ytelsen."},experience:{title:"Opplevelse",description:"Disse informasjonskapslene hjelper oss med \xE5 gi en bedre brukeropplevelse og teste nye funksjoner."}},frame:{title:"Godta {category}-samtykke for \xE5 se dette innholdet.",actionButton:"Aktiver {category}-samtykke"},legalLinks:{privacyPolicy:"Personvernerkl\xE6ring",cookiePolicy:"Retningslinjer for informasjonskapsler",termsOfService:"Vilk\xE5r for bruk"},iab:{banner:{title:"Personverninnstillinger",description:"Vi og v\xE5re {partnerCount} partnere lagrer og/eller har tilgang til informasjon p\xE5 enheten din og behandler personopplysninger, som unike identifikatorer og nettleserdata, for dette nettstedet, for \xE5:",partnersLink:"{count} partnere",andMore:"Og {count} til...",legitimateInterestNotice:"Noen partnere krever legitim interesse for \xE5 behandle dataene dine. Du har rett til \xE5 protestere mot denne behandlingen, tilpasse valgene dine og trekke tilbake samtykket ditt n\xE5r som helst.",scopeServiceSpecific:"Samtykket ditt gjelder bare for dette nettstedet og p\xE5virker ikke andre tjenester.",scopeGroup:"Valget ditt gjelder p\xE5 tvers av v\xE5re nettsider i denne gruppen."},preferenceCenter:{title:"Personverninnstillinger",description:"Tilpass personverninnstillingene dine her. 
Du kan velge hvilke typer informasjonskapsler og sporingsteknologier du vil tillate.",tabs:{purposes:"Form\xE5l",vendors:"Leverand\xF8rer"},purposeItem:{partners:"{count} partnere",vendorsUseLegitimateInterest:"{count} leverand\xF8rer krever legitim interesse",examples:"Eksempler",partnersUsingPurpose:"Partnere som bruker dette form\xE5let",withYourPermission:"Med din tillatelse",legitimateInterest:"Legitim interesse",objectButton:"Protester",objected:"Protestert",rightToObject:"Du har rett til \xE5 protestere mot behandling basert p\xE5 legitim interesse."},specialPurposes:{title:"Viktige funksjoner (p\xE5krevd)",tooltip:"Disse er n\xF8dvendige for nettstedets funksjonalitet og sikkerhet. I henhold til IAB TCF kan du ikke protestere mot disse spesielle form\xE5lene."},vendorList:{search:"S\xF8k etter leverand\xF8rer...",showingCount:"{filtered} av {total} leverand\xF8rer",iabVendorsHeading:"IAB-registrerte leverand\xF8rer",iabVendorsNotice:"Disse partnerne er registrert i IAB Transparency & Consent Framework (TCF), en bransjestandard for administrasjon av samtykke",customVendorsHeading:"Egendefinerte partnere",customVendorsNotice:"Dette er egendefinerte partnere som ikke er registrert i IAB Transparency & Consent Framework (TCF). De behandler data basert p\xE5 ditt samtykke og kan ha annen personvernpraksis enn IAB-registrerte leverand\xF8rer.",purposes:"Form\xE5l",specialPurposes:"Spesielle form\xE5l",specialFeatures:"Spesielle funksjoner",features:"Funksjoner",dataCategories:"Datakategorier",usesCookies:"Bruker informasjonskapsler",nonCookieAccess:"Ikke-informasjonskapsel-tilgang",maxAge:"Maks alder: {days}d",retention:"Oppbevaring: {days}d",legitimateInterest:"Leg. interesse",privacyPolicy:"Personvernerkl\xE6ring",storageDisclosure:"Lagringsinformasjon",requiredNotice:"P\xE5krevd for nettstedets funksjonalitet, kan ikke deaktiveres"},footer:{consentStorage:'Samtykkepreferanser lagres i en informasjonskapsel kalt "euconsent-v2" i 13 m\xE5neder. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Godta alle",rejectAll:"Avsl\xE5 alle",customize:"Tilpass",saveSettings:"Lagre innstillinger",loading:"Laster...",showingSelectedVendor:"Viser valgt leverand\xF8r",clearSelection:"T\xF8m",customPartner:"Egendefinert partner ikke registrert i IAB"}}},Si={common:{acceptAll:"Alles accepteren",rejectAll:"Alles weigeren",customize:"Aanpassen",save:"Instellingen opslaan"},cookieBanner:{title:"Wij hechten waarde aan uw privacy",description:"Deze site gebruikt cookies om uw surfervaring te verbeteren, het verkeer op de site te analyseren en gepersonaliseerde inhoud te tonen"},consentManagerDialog:{title:"Privacy-instellingen",description:"Pas hier uw privacyinstellingen aan. U kunt kiezen welke soorten cookies en trackingtechnologie\xEBn u toestaat."},consentTypes:{necessary:{title:"Strikt noodzakelijk",description:"Deze cookies zijn essentieel voor het goed functioneren van de website en kunnen niet worden uitgeschakeld"},functionality:{title:"Functionaliteit",description:"Deze cookies maken verbeterde functionaliteit en personalisatie van de website mogelijk."},marketing:{title:"Marketing",description:"Deze cookies worden gebruikt om relevante advertenties aan te bieden en de effectiviteit ervan bij te houden"},measurement:{title:"Analytics",description:"Deze cookies helpen ons te begrijpen hoe bezoekers omgaan met de website en de prestaties ervan te verbeteren"},experience:{title:"Ervaring",description:"Deze cookies helpen ons om een betere gebruikerservaring te bieden en nieuwe functies te testen"}},frame:{title:"Accepteer {category} om deze inhoud te bekijken",actionButton:"Schakel {category} toestemming in"},legalLinks:{privacyPolicy:"Privacybeleid",cookiePolicy:"Cookiebeleid",termsOfService:"Servicevoorwaarden"},iab:{banner:{title:"Privacy-instellingen",description:"Wij en onze {partnerCount} partners slaan informatie op een apparaat op en/of openen deze en verwerken 
persoonlijke gegevens, zoals unieke identificatoren en browsegegevens, voor deze website, om:",partnersLink:"{count} partners",andMore:"En nog {count}...",legitimateInterestNotice:"Sommige partners maken aanspraak op een gerechtvaardigd belang om uw gegevens te verwerken. U heeft het recht om bezwaar te maken tegen deze verwerking, uw keuzes aan te passen en uw toestemming op elk moment in te trekken.",scopeServiceSpecific:"Je toestemming geldt alleen voor deze website en heeft geen invloed op andere diensten.",scopeGroup:"Uw keuze geldt voor al onze websites in deze groep."},preferenceCenter:{title:"Privacy-instellingen",description:"Pas hier uw privacyinstellingen aan. U kunt kiezen welke soorten cookies en trackingtechnologie\xEBn u toestaat.",tabs:{purposes:"Doeleinden",vendors:"Leveranciers"},purposeItem:{partners:"{count} partners",vendorsUseLegitimateInterest:"{count} leveranciers maken aanspraak op gerechtvaardigd belang",examples:"Voorbeelden",partnersUsingPurpose:"Partners die dit doeleinde gebruiken",withYourPermission:"Met uw toestemming",legitimateInterest:"Gerechtvaardigd belang",objectButton:"Bezwaar maken",objected:"Bezwaar gemaakt",rightToObject:"U heeft het recht om bezwaar te maken tegen verwerking op basis van gerechtvaardigd belang."},specialPurposes:{title:"Essenti\xEBle functies (vereist)",tooltip:"Deze zijn vereist voor de functionaliteit en beveiliging van de site. Volgens IAB TCF kunt u geen bezwaar maken tegen deze speciale doeleinden."},vendorList:{search:"Zoek leveranciers...",showingCount:"{filtered} van {total} leveranciers",iabVendorsHeading:"IAB-geregistreerde leveranciers",iabVendorsNotice:"Deze partners zijn geregistreerd bij het IAB Transparency & Consent Framework (TCF), een industriestandaard voor het beheren van toestemming",customVendorsHeading:"Aangepaste partners",customVendorsNotice:"Dit zijn aangepaste partners die niet zijn geregistreerd bij het IAB Transparency & Consent Framework (TCF). 
Ze verwerken gegevens op basis van uw toestemming en kunnen andere privacypraktijken hebben dan IAB-geregistreerde leveranciers.",purposes:"Doeleinden",specialPurposes:"Speciale doeleinden",specialFeatures:"Speciale functies",features:"Functies",dataCategories:"Datacategorie\xEBn",usesCookies:"Gebruikt cookies",nonCookieAccess:"Toegang zonder cookies",maxAge:"Max. leeftijd: {days}d",retention:"Bewaartermijn: {days}d",legitimateInterest:"Gerechtv. belang",privacyPolicy:"Privacybeleid",storageDisclosure:"Openbaarmaking van opslag",requiredNotice:"Vereist voor websitefunctionaliteit, kan niet worden uitgeschakeld"},footer:{consentStorage:'Toestemmingsvoorkeuren worden gedurende 13 maanden opgeslagen in een cookie genaamd "euconsent-v2". The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Alles accepteren",rejectAll:"Alles weigeren",customize:"Aanpassen",saveSettings:"Instellingen opslaan",loading:"Laden...",showingSelectedVendor:"Geselecteerde leverancier wordt getoond",clearSelection:"Wissen",customPartner:"Aangepaste partner niet geregistreerd bij het IAB"}}},Ai={common:{acceptAll:"Godta alle",rejectAll:"Avvis alle",customize:"Tilpass",save:"Lagre innstillingar"},cookieBanner:{title:"Vi verdset personvernet ditt",description:"Denne nettstaden brukar informasjonskapslar for \xE5 forbetre nettopplevinga di, analysere nettstadtrafikk og vise personleg tilpassa innhald."},consentManagerDialog:{title:"Personverninnstillingar",description:"Tilpass personverninnstillingane dine her. 
Du kan velje kva typar informasjonskapslar og sporingsteknologiar du till\xE8t."},consentTypes:{necessary:{title:"Strengt n\xF8dvendige",description:"Desse informasjonskapslane er n\xF8dvendige for at nettstaden skal fungere riktig og kan ikkje deaktiverast."},functionality:{title:"Funksjonalitet",description:"Desse informasjonskapslane gjer det mogleg med forbetra funksjonalitet og personleggjering av nettstaden."},marketing:{title:"Marknadsf\xF8ring",description:"Desse informasjonskapslane blir brukte til \xE5 levere relevante annonsar og spore effektiviteten deira."},measurement:{title:"Analyse",description:"Desse informasjonskapslane hjelper oss \xE5 forst\xE5 korleis bes\xF8kande samhandlar med nettstaden og forbetre ytinga."},experience:{title:"Oppleving",description:"Desse informasjonskapslane hjelper oss \xE5 gi ei betre brukaroppleving og teste nye funksjonar."}},frame:{title:"Godta {category}-samtykke for \xE5 sj\xE5 dette innhaldet.",actionButton:"Aktiver {category}-samtykke"},legalLinks:{privacyPolicy:"Personvernerkl\xE6ring",cookiePolicy:"Retningslinjer for informasjonskapslar",termsOfService:"Brukarvilk\xE5r"},iab:{banner:{title:"Personverninnstillingar",description:"Vi og v\xE5re {partnerCount} partnarar lagrar og/eller har tilgang til informasjon p\xE5 eininga di og behandlar personopplysningar, som unike identifikatorar og nettlesardata, for denne nettstaden, for \xE5:",partnersLink:"{count} partnarar",andMore:"Og {count} til...",legitimateInterestNotice:"Nokre partnarar krev legitim interesse for \xE5 behandle dataa dine. Du har rett til \xE5 protestere mot denne behandlinga, tilpasse vala dine og trekkje tilbake samtykket ditt n\xE5r som helst.",scopeServiceSpecific:"Samtykket ditt gjeld berre for denne nettstaden og p\xE5verkar ikkje andre tenester.",scopeGroup:"Valet ditt gjeld p\xE5 tvers av nettsidene v\xE5re i denne gruppa."},preferenceCenter:{title:"Personverninnstillingar",description:"Tilpass personverninnstillingane dine her. 
Du kan velje kva typar informasjonskapslar og sporingsteknologiar du till\xE8t.",tabs:{purposes:"F\xF8rem\xE5l",vendors:"Leverand\xF8rar"},purposeItem:{partners:"{count} partnarar",vendorsUseLegitimateInterest:"{count} leverand\xF8rar krev legitim interesse",examples:"D\xF8me",partnersUsingPurpose:"Partnarar som brukar dette f\xF8rem\xE5let",withYourPermission:"Med di tillating",legitimateInterest:"Legitim interesse",objectButton:"Protester",objected:"Protestert",rightToObject:"Du har rett til \xE5 protestere mot behandling basert p\xE5 legitim interesse."},specialPurposes:{title:"Viktige funksjonar (p\xE5kravd)",tooltip:"Desse er n\xF8dvendige for funksjonaliteten og tryggleiken til nettstaden. I f\xF8lgje IAB TCF kan du ikkje protestere mot desse spesielle f\xF8rem\xE5la."},vendorList:{search:"S\xF8k etter leverand\xF8rar...",showingCount:"{filtered} av {total} leverand\xF8rar",iabVendorsHeading:"IAB-registrerte leverand\xF8rar",iabVendorsNotice:"Disse partnarane er registrerte i IAB Transparency & Consent Framework (TCF), ein bransjestandard for administrasjon av samtykke",customVendorsHeading:"Eigendefinerte partnarar",customVendorsNotice:"Dette er eigendefinerte partnarar som ikkje er registrerte i IAB Transparency & Consent Framework (TCF). Dei behandlar data basert p\xE5 ditt samtykke og kan ha annan personvernpraksis enn IAB-registrerte leverand\xF8rar.",purposes:"F\xF8rem\xE5l",specialPurposes:"Spesielle f\xF8rem\xE5l",specialFeatures:"Spesielle funksjonar",features:"Funksjonar",dataCategories:"Datakategoriar",usesCookies:"Brukar informasjonskapslar",nonCookieAccess:"Ikkje-informasjonskapsel-tilgang",maxAge:"Maks alder: {days}d",retention:"Lagring: {days}d",legitimateInterest:"Leg. 
interesse",privacyPolicy:"Personvernerkl\xE6ring",storageDisclosure:"Lagringsinformasjon",requiredNotice:"P\xE5kravd for funksjonaliteten til nettstaden, kan ikkje deaktiverast"},footer:{consentStorage:'Samtykkepreferansar blir lagra i ein informasjonskapsel kalla "euconsent-v2" i 13 m\xE5nader. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Godta alle",rejectAll:"Avvis alle",customize:"Tilpass",saveSettings:"Lagre innstillingar",loading:"Lastar...",showingSelectedVendor:"Viser vald leverand\xF8r",clearSelection:"T\xF8m",customPartner:"Eigendefinert partnar ikkje registrert i IAB"}}},zi={common:{acceptAll:"Zaakceptuj wszystkie",rejectAll:"Odrzu\u0107 wszystkie",customize:"Dostosuj",save:"Zapisz ustawienia"},cookieBanner:{title:"Cenimy Twoj\u0105 prywatno\u015B\u0107",description:"Ta strona u\u017Cywa plik\xF3w cookie, aby poprawi\u0107 Twoje wra\u017Cenia z przegl\u0105dania, analizowa\u0107 ruch na stronie i wy\u015Bwietla\u0107 spersonalizowane tre\u015Bci."},consentManagerDialog:{title:"Ustawienia prywatno\u015Bci",description:"Dostosuj tutaj swoje ustawienia prywatno\u015Bci. 
Mo\u017Cesz wybra\u0107, kt\xF3re rodzaje plik\xF3w cookie i technologii \u015Bledzenia chcesz zaakceptowa\u0107."},consentTypes:{necessary:{title:"\u015Aci\u015Ble niezb\u0119dne",description:"Te pliki cookie s\u0105 niezb\u0119dne do prawid\u0142owego funkcjonowania strony internetowej i nie mo\u017Cna ich wy\u0142\u0105czy\u0107."},functionality:{title:"Funkcjonalno\u015B\u0107",description:"Te pliki cookie umo\u017Cliwiaj\u0105 ulepszon\u0105 funkcjonalno\u015B\u0107 i personalizacj\u0119 strony internetowej."},marketing:{title:"Marketing",description:"Te pliki cookie s\u0105 u\u017Cywane do dostarczania odpowiednich reklam i \u015Bledzenia ich skuteczno\u015Bci."},measurement:{title:"Analityka",description:"Te pliki cookie pomagaj\u0105 nam zrozumie\u0107, jak odwiedzaj\u0105cy korzystaj\u0105 ze strony internetowej, i poprawi\u0107 jej wydajno\u015B\u0107."},experience:{title:"Do\u015Bwiadczenie",description:"Te pliki cookie pomagaj\u0105 nam zapewni\u0107 lepsze wra\u017Cenia u\u017Cytkownika i testowa\u0107 nowe funkcje."}},frame:{title:"Zaakceptuj zgod\u0119 na {category}, aby wy\u015Bwietli\u0107 t\u0119 tre\u015B\u0107.",actionButton:"W\u0142\u0105cz zgod\u0119 na {category}"},legalLinks:{privacyPolicy:"Polityka prywatno\u015Bci",cookiePolicy:"Polityka plik\xF3w cookie",termsOfService:"Regulamin"},iab:{banner:{title:"Ustawienia prywatno\u015Bci",description:"My i nasi {partnerCount} partnerzy przechowujemy i/lub uzyskujemy dost\u0119p do informacji na urz\u0105dzeniu oraz przetwarzamy dane osobowe, takie jak unikalne identyfikatory i dane dotycz\u0105ce przegl\u0105dania, w tej witrynie, aby:",partnersLink:"{count} partner\xF3w",andMore:"I {count} wi\u0119cej...",legitimateInterestNotice:"Niekt\xF3rzy partnerzy powo\u0142uj\u0105 si\u0119 na uzasadniony interes w przetwarzaniu Twoich danych. 
Masz prawo sprzeciwi\u0107 si\u0119 temu przetwarzaniu, dostosowa\u0107 swoje wybory i wycofa\u0107 zgod\u0119 w dowolnym momencie.",scopeServiceSpecific:"Twoja zgoda dotyczy tylko tej strony internetowej i nie wp\u0142ywa na inne us\u0142ugi.",scopeGroup:"Tw\xF3j wyb\xF3r ma zastosowanie do wszystkich naszych stron w tej grupie."},preferenceCenter:{title:"Ustawienia prywatno\u015Bci",description:"Dostosuj tutaj swoje ustawienia prywatno\u015Bci. Mo\u017Cesz wybra\u0107, kt\xF3re rodzaje plik\xF3w cookie i technologii \u015Bledzenia chcesz zaakceptowa\u0107.",tabs:{purposes:"Cele",vendors:"Dostawcy"},purposeItem:{partners:"{count} partner\xF3w",vendorsUseLegitimateInterest:"{count} dostawc\xF3w powo\u0142uje si\u0119 na uzasadniony interes",examples:"Przyk\u0142ady",partnersUsingPurpose:"Partnerzy korzystaj\u0105cy z tego celu",withYourPermission:"Za Twoj\u0105 zgod\u0105",legitimateInterest:"Uzasadniony interes",objectButton:"Sprzeciw",objected:"Zg\u0142oszono sprzeciw",rightToObject:"Masz prawo sprzeciwi\u0107 si\u0119 przetwarzaniu opartemu na uzasadnionym interesie."},specialPurposes:{title:"Funkcje niezb\u0119dne (wymagane)",tooltip:"S\u0105 one wymagane dla funkcjonalno\u015Bci i bezpiecze\u0144stwa witryny. Zgodnie z IAB TCF nie mo\u017Cna sprzeciwi\u0107 si\u0119 tym celom specjalnym."},vendorList:{search:"Szukaj dostawc\xF3w...",showingCount:"{filtered} z {total} dostawc\xF3w",iabVendorsHeading:"Dostawcy zarejestrowani w IAB",iabVendorsNotice:"Ci partnerzy s\u0105 zarejestrowani w IAB Transparency & Consent Framework (TCF), standardzie bran\u017Cowym dotycz\u0105cym zarz\u0105dzania zgodami",customVendorsHeading:"Partnerzy niestandardowi",customVendorsNotice:"S\u0105 to partnerzy niestandardowi, kt\xF3rzy nie s\u0105 zarejestrowani w IAB Transparency & Consent Framework (TCF). 
Przetwarzaj\u0105 dane na podstawie Twojej zgody i mog\u0105 stosowa\u0107 inne praktyki prywatno\u015Bci ni\u017C dostawcy zarejestrowani w IAB.",purposes:"Cele",specialPurposes:"Cele specjalne",specialFeatures:"Funkcje specjalne",features:"Funkcje",dataCategories:"Kategorie danych",usesCookies:"U\u017Cywa plik\xF3w cookie",nonCookieAccess:"Dost\u0119p bez plik\xF3w cookie",maxAge:"Maks. wiek: {days}d",retention:"Przechowywanie: {days}d",legitimateInterest:"Uzasadn. interes",privacyPolicy:"Polityka prywatno\u015Bci",storageDisclosure:"Ujawnienie informacji o przechowywaniu",requiredNotice:"Wymagane dla funkcjonalno\u015Bci witryny, nie mo\u017Cna wy\u0142\u0105czy\u0107"},footer:{consentStorage:"Preferencje dotycz\u0105ce zgody s\u0105 przechowywane w pliku cookie o nazwie \u201Eeuconsent-v2\u201D przez 13 miesi\u0119cy. The storage duration may be refreshed when you update your preferences."}},common:{acceptAll:"Zaakceptuj wszystkie",rejectAll:"Odrzu\u0107 wszystkie",customize:"Dostosuj",saveSettings:"Zapisz ustawienia",loading:"\u0141adowanie...",showingSelectedVendor:"Pokazywanie wybranego dostawcy",clearSelection:"Wyczy\u015B\u0107",customPartner:"Partner niestandardowy niezarejestrowany w IAB"}}},Pi={common:{acceptAll:"Aceitar todos",rejectAll:"Rejeitar todos",customize:"Personalizar",save:"Salvar configura\xE7\xF5es"},cookieBanner:{title:"Respeitamos a sua privacidade",description:"Este site utiliza cookies para melhorar a sua experi\xEAncia de navega\xE7\xE3o, analisar o tr\xE1fego do site e mostrar conte\xFAdos personalizados."},consentManagerDialog:{title:"Configura\xE7\xF5es",description:"Personalize suas configura\xE7\xF5es de privacidade aqui. 
Voc\xEA pode escolher quais tipos de cookies e tecnologias de rastreamento voc\xEA permite."},consentTypes:{necessary:{title:"Estritamente necess\xE1rio",description:"Estes cookies s\xE3o essenciais para o site funcionar corretamente e n\xE3o podem ser desativados."},functionality:{title:"Funcionalidade",description:"Estes cookies permitem funcionalidades aprimoradas e personaliza\xE7\xE3o do site."},marketing:{title:"Marketing",description:"Estes cookies s\xE3o utilizados para fornecer publicidade relevante e rastrear a sua efic\xE1cia."},measurement:{title:"An\xE1lise",description:"Estes cookies nos ajudam a compreender como os visitantes interagem com o site e melhoram o seu desempenho."},experience:{title:"Experi\xEAncia",description:"Estes cookies nos ajudam a fornecer uma experi\xEAncia de usu\xE1rio melhor e testar novas funcionalidades."}},frame:{title:"Aceite {category} para ver este conte\xFAdo",actionButton:"Ativar consentimento {category}"},legalLinks:{privacyPolicy:"Pol\xEDtica de Privacidade",cookiePolicy:"Pol\xEDtica de Cookies",termsOfService:"Termos de Servi\xE7o"},iab:{banner:{title:"Configura\xE7\xF5es de privacidade",description:"N\xF3s e os nossos {partnerCount} parceiros armazenamos e/ou acedemos a informa\xE7\xF5es num dispositivo e processamos dados pessoais, tais como identificadores \xFAnicos e informa\xE7\xF5es de navega\xE7\xE3o, para este website, para:",partnersLink:"{count} parceiros",andMore:"E mais {count}...",legitimateInterestNotice:"Alguns parceiros alegam um interesse leg\xEDtimo para processar os seus dados. 
Tem o direito de se opor a este processamento, personalizar as suas escolhas e retirar o seu consentimento a qualquer momento.",scopeServiceSpecific:"O seu consentimento aplica-se apenas a este site e n\xE3o afetar\xE1 outros servi\xE7os.",scopeGroup:"A sua escolha aplica-se a todos os nossos sites neste grupo."},preferenceCenter:{title:"Configura\xE7\xF5es de privacidade",description:"Personalize suas configura\xE7\xF5es de privacidade aqui. Voc\xEA pode escolher quais tipos de cookies e tecnologias de rastreamento voc\xEA permite.",tabs:{purposes:"Finalidades",vendors:"Fornecedores"},purposeItem:{partners:"{count} parceiros",vendorsUseLegitimateInterest:"{count} fornecedores alegam interesse leg\xEDtimo",examples:"Exemplos",partnersUsingPurpose:"Parceiros que utilizam esta finalidade",withYourPermission:"Com a sua permiss\xE3o",legitimateInterest:"Interesse leg\xEDtimo",objectButton:"Opor-se",objected:"Oposi\xE7\xE3o registada",rightToObject:"Tem o direito de se opor ao processamento baseado no interesse leg\xEDtimo."},specialPurposes:{title:"Fun\xE7\xF5es essenciais (obrigat\xF3rias)",tooltip:"Estas s\xE3o necess\xE1rias para a funcionalidade e seguran\xE7a do site. De acordo com o IAB TCF, n\xE3o pode opor-se a estas finalidades especiais."},vendorList:{search:"Procurar fornecedores...",showingCount:"{filtered} de {total} fornecedores",iabVendorsHeading:"Fornecedores registados no IAB",iabVendorsNotice:"Estes parceiros est\xE3o registados no IAB Transparency & Consent Framework (TCF), um padr\xE3o da ind\xFAstria para gerir o consentimento",customVendorsHeading:"Parceiros personalizados",customVendorsNotice:"Estes s\xE3o parceiros personalizados n\xE3o registados no IAB Transparency & Consent Framework (TCF). 
Processam dados com base no seu consentimento e podem ter pr\xE1ticas de privacidade diferentes das dos fornecedores registados no IAB.",purposes:"Finalidades",specialPurposes:"Finalidades especiais",specialFeatures:"Funcionalidades especiais",features:"Funcionalidades",dataCategories:"Categorias de dados",usesCookies:"Utiliza cookies",nonCookieAccess:"Acesso sem cookies",maxAge:"Idade m\xE1x.: {days}d",retention:"Reten\xE7\xE3o: {days}d",legitimateInterest:"Int. leg\xEDtimo",privacyPolicy:"Pol\xEDtica de privacidade",storageDisclosure:"Divulga\xE7\xE3o de armazenamento",requiredNotice:"Necess\xE1rio para a funcionalidade do site, n\xE3o pode ser desativado"},footer:{consentStorage:'As prefer\xEAncias de consentimento s\xE3o armazenadas num cookie chamado "euconsent-v2" durante 13 meses. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Aceitar todos",rejectAll:"Rejeitar todos",customize:"Personalizar",saveSettings:"Salvar configura\xE7\xF5es",loading:"A carregar...",showingSelectedVendor:"A mostrar o fornecedor selecionado",clearSelection:"Limpar",customPartner:"Parceiro personalizado n\xE3o registado no IAB"}}},Ti={common:{acceptAll:"Acceptar tut",rejectAll:"Refusar tut",customize:"Persunalisar",save:"Memorisar las configuraziuns"},cookieBanner:{title:"Nus stimain vossa sfera privata",description:"Questa pagina d'internet dovra cookies per meglierar vossa experientscha da navigaziun, analisar il traffic da la pagina e mussar cuntegns persunalisads."},consentManagerDialog:{title:"Configuraziuns da la sfera privata",description:"Persunalisai vossas configuraziuns da la sfera privata qua. 
Vus pudais tscherner tge tips da cookies e tecnologias da tracking che vus lubis."},consentTypes:{necessary:{title:"Absolutamain necessari",description:"Quests cookies \xE8n essenzials per il funcziunament da la pagina d'internet e na pon betg vegnir deactivads."},functionality:{title:"Funcziunalitad",description:"Quests cookies permettan funcziunalitads avanzadas e la persunalisaziun da la pagina d'internet."},marketing:{title:"Marketing",description:"Quests cookies vegnan duvrads per mussar reclamas relevantas e per evaluar lur efficacitad."},measurement:{title:"Analisa",description:"Quests cookies ans gidan a chapir co ils visitaders interageschan cun la pagina d'internet e meglierar sia prestaziun."},experience:{title:"Experientscha",description:"Quests cookies ans gidan a porscher ina meglra experientscha d'utilisader e testar novas funcziuns."}},frame:{title:"Acceptai il consentiment da {category} per vesair quest cuntegn.",actionButton:"Activar il consentiment da {category}"},legalLinks:{privacyPolicy:"Directivas da protecziun da datas",cookiePolicy:"Directivas da cookies",termsOfService:"Cundiziuns d'utilisaziun"},iab:{banner:{title:"Configuraziuns da la sfera privata",description:"Nus ed noss {partnerCount} partunaris memorisain e/u accessain ad infurmaziuns sin voss apparat e processain datas persunalas, sco identificaturs unics e datas da navigaziun, per questa pagina d\u2019internet, per:",partnersLink:"{count} partunaris",andMore:"Ed anc {count}...",legitimateInterestNotice:"Inscunter partunaris pretendan in interess legitim per processar vossas datas. 
Vus avais il dretg da far opposiziun cunter quest processament, persunalisar vossas tschernas e revocar voss consentiment en mintga mument.",scopeServiceSpecific:"Voss consent vala be per questa pagina web e na pertutga betg auters servetschs.",scopeGroup:"Vossa tscherna vala per tut nossas websites en quest gruppa."},preferenceCenter:{title:"Configuraziuns da la sfera privata",description:"Persunalisai vossas configuraziuns da la sfera privata qua. Vus pudais tscherner tge tips da cookies e tecnologias da tracking che vus lubis.",tabs:{purposes:"Finamiras",vendors:"Proveders"},purposeItem:{partners:"{count} partunaris",vendorsUseLegitimateInterest:"{count} proveders pretendan in interess legitim",examples:"Exempels",partnersUsingPurpose:"Partunaris che duvran questa finamira",withYourPermission:"Cun vossa permissiun",legitimateInterest:"Interess legitim",objectButton:"Far opposiziun",objected:"Opposiziun fatta",rightToObject:"Vus avais il dretg da far opposiziun cunter il processament sa basond sin in interess legitim."},specialPurposes:{title:"Funcziuns essenzialas (necessari)",tooltip:"Questas \xE8n necessarias per la funcziunalitad e la segirezza da la pagina. Tenor IAB TCF na pudais vus betg far opposiziun cunter questas finamiras spezialas."},vendorList:{search:"Tscherchar proveders...",showingCount:"Mussa {filtered} da {total} proveders",iabVendorsHeading:"Proveders registrads tar l\u2019IAB",iabVendorsNotice:"Quests partunaris \xE8n registrads tar l\u2019IAB Transparency & Consent Framework (TCF), in standard industrial per la gestiun dal consentiment",customVendorsHeading:"Partunaris persunalisads",customVendorsNotice:"Quai \xE8n partunaris persunalisads che n\u2019\xE8n betg registrads tar l\u2019IAB Transparency & Consent Framework (TCF). 
Els processan datas sa basond sin voss consentiment e pon avair autras praticas da protecziun da datas che proveders registrads tar l\u2019IAB.",purposes:"Finamiras",specialPurposes:"Finamiras spezialas",specialFeatures:"Funcziuns spezialas",features:"Funcziuns",dataCategories:"Categorias da datas",usesCookies:"Dovra cookies",nonCookieAccess:"Access betg tras cookies",maxAge:"Gradi maximal: {days}d",retention:"Retegnida: {days}d",legitimateInterest:"Int. legitim",privacyPolicy:"Directivas da protecziun da datas",storageDisclosure:"Infurmaziun davart la memorisaziun",requiredNotice:"Necessari per la funcziunalitad da la pagina, na po betg vegnir deactiv\xE0"},footer:{consentStorage:'Las preferenzas da consentiment vegnan memorisadas en in cookie numn\xE0 "euconsent-v2" per 13 mais. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Acceptar tut",rejectAll:"Refusar tut",customize:"Persunalisar",saveSettings:"Memorisar las configuraziuns",loading:"Chargia...",showingSelectedVendor:"Mussa il proveder tschern\xEC",clearSelection:"Stizzar",customPartner:"Partunari persunalis\xE0 betg registr\xE0 tar l\u2019IAB"}}},Li={common:{acceptAll:"Accept\u0103 toate",rejectAll:"Respinge toate",customize:"Personalizeaz\u0103",save:"Salveaz\u0103 set\u0103rile"},cookieBanner:{title:"Pre\u021Buim confiden\u021Bialitatea ta",description:"Acest site folose\u0219te cookie-uri pentru a \xEEmbun\u0103t\u0103\u021Bi experien\u021Ba de navigare, a analiza traficul site-ului \u0219i a afi\u0219a con\u021Binut personalizat."},consentManagerDialog:{title:"Set\u0103ri de confiden\u021Bialitate",description:"Personalizeaz\u0103 set\u0103rile de confiden\u021Bialitate aici. 
Po\u021Bi alege ce tipuri de cookie-uri \u0219i tehnologii de urm\u0103rire permi\u021Bi."},consentTypes:{necessary:{title:"Strict necesare",description:"Aceste cookie-uri sunt esen\u021Biale pentru func\u021Bionarea corect\u0103 a site-ului \u0219i nu pot fi dezactivate."},functionality:{title:"Func\u021Bionalitate",description:"Aceste cookie-uri permit func\u021Bionalit\u0103\u021Bi avansate \u0219i personalizarea site-ului."},marketing:{title:"Marketing",description:"Aceste cookie-uri sunt utilizate pentru a livra reclame relevante \u0219i pentru a urm\u0103ri eficien\u021Ba acestora."},measurement:{title:"Analitice",description:"Aceste cookie-uri ne ajut\u0103 s\u0103 \xEEn\u021Belegem cum interac\u021Bioneaz\u0103 vizitatorii cu site-ul \u0219i s\u0103 \xEEi \xEEmbun\u0103t\u0103\u021Bim performan\u021Ba."},experience:{title:"Experien\u021B\u0103",description:"Aceste cookie-uri ne ajut\u0103 s\u0103 oferim o experien\u021B\u0103 mai bun\u0103 utilizatorilor \u0219i s\u0103 test\u0103m func\u021Bionalit\u0103\u021Bi noi."}},frame:{title:"Accept\u0103 consim\u021B\u0103m\xE2ntul pentru {category} pentru a vizualiza acest con\u021Binut.",actionButton:"Activeaz\u0103 consim\u021B\u0103m\xE2ntul pentru {category}"},legalLinks:{privacyPolicy:"Politica de confiden\u021Bialitate",cookiePolicy:"Politica privind cookie-urile",termsOfService:"Termeni \u0219i condi\u021Bii"},iab:{banner:{title:"Set\u0103ri de confiden\u021Bialitate",description:"Noi \u0219i cei {partnerCount} parteneri ai no\u0219tri stoc\u0103m \u0219i/sau acces\u0103m informa\u021Bii pe dispozitivul t\u0103u \u0219i proces\u0103m date personale, cum ar fi identificatori unici \u0219i date de navigare, pentru acest site web, pentru:",partnersLink:"{count} parteneri",andMore:"\u0218i \xEEnc\u0103 {count}...",legitimateInterestNotice:"Unii parteneri invoc\u0103 un interes legitim pentru a procesa datele tale. 
Ai dreptul de a te opune acestei proces\u0103ri, de a-\u021Bi personaliza alegerile \u0219i de a-\u021Bi retrage consim\u021B\u0103m\xE2ntul \xEEn orice moment.",scopeServiceSpecific:"Consim\u021B\u0103m\xE2ntul t\u0103u se aplic\u0103 doar acestui site web \u0219i nu va afecta alte servicii.",scopeGroup:"Alegerea dvs. se aplic\u0103 tuturor site-urilor noastre din acest grup."},preferenceCenter:{title:"Set\u0103ri de confiden\u021Bialitate",description:"Personalizeaz\u0103 set\u0103rile de confiden\u021Bialitate aici. Po\u021Bi alege ce tipuri de cookie-uri \u0219i tehnologii de urm\u0103rire permi\u021Bi.",tabs:{purposes:"Scopuri",vendors:"Furnizori"},purposeItem:{partners:"{count} parteneri",vendorsUseLegitimateInterest:"{count} furnizori invoc\u0103 interes legitim",examples:"Exemple",partnersUsingPurpose:"Parteneri care utilizeaz\u0103 acest scop",withYourPermission:"Cu permisiunea ta",legitimateInterest:"Interes legitim",objectButton:"Opunere",objected:"Opozi\u021Bie exprimat\u0103",rightToObject:"Ai dreptul de a te opune proces\u0103rii bazate pe interesul legitim."},specialPurposes:{title:"Func\u021Bii esen\u021Biale (obligatorii)",tooltip:"Acestea sunt necesare pentru func\u021Bionalitatea \u0219i securitatea site-ului. Conform IAB TCF, nu te po\u021Bi opune acestor scopuri speciale."},vendorList:{search:"Caut\u0103 furnizori...",showingCount:"Se afi\u0219eaz\u0103 {filtered} din {total} furnizori",iabVendorsHeading:"Furnizori \xEEnregistra\u021Bi IAB",iabVendorsNotice:"Ace\u0219ti parteneri sunt \xEEnregistra\u021Bi \xEEn cadrul IAB Transparency & Consent Framework (TCF), un standard industrial pentru gestionarea consim\u021B\u0103m\xE2ntului",customVendorsHeading:"Parteneri personaliza\u021Bi",customVendorsNotice:"Ace\u0219tia sunt parteneri personaliza\u021Bi care nu sunt \xEEnregistra\u021Bi \xEEn IAB Transparency & Consent Framework (TCF). 
Ei proceseaz\u0103 datele pe baza consim\u021B\u0103m\xE2ntului t\u0103u \u0219i pot avea practici de confiden\u021Bialitate diferite de cele ale furnizorilor \xEEnregistra\u021Bi IAB.",purposes:"Scopuri",specialPurposes:"Scopuri speciale",specialFeatures:"Func\u021Bionalit\u0103\u021Bi speciale",features:"Func\u021Bionalit\u0103\u021Bi",dataCategories:"Categorii de date",usesCookies:"Utilizeaz\u0103 cookie-uri",nonCookieAccess:"Acces non-cookie",maxAge:"V\xE2rst\u0103 max.: {days}z",retention:"Reten\u021Bie: {days}z",legitimateInterest:"Int. legitim",privacyPolicy:"Politic\u0103 de confiden\u021Bialitate",storageDisclosure:"Prezentarea stoc\u0103rii",requiredNotice:"Necesar pentru func\u021Bionalitatea site-ului, nu poate fi dezactivat"},footer:{consentStorage:"Preferin\u021Bele de consim\u021B\u0103m\xE2nt sunt stocate \xEEntr-un cookie numit \u201Eeuconsent-v2\u201D timp de 13 luni. The storage duration may be refreshed when you update your preferences."}},common:{acceptAll:"Accept\u0103 toate",rejectAll:"Respinge toate",customize:"Personalizeaz\u0103",saveSettings:"Salveaz\u0103 set\u0103rile",loading:"Se \xEEncarc\u0103...",showingSelectedVendor:"Se afi\u0219eaz\u0103 furnizorul selectat",clearSelection:"\u0218terge",customPartner:"Partener personalizat ne\xEEnregistrat \xEEn IAB"}}},Ei={common:{acceptAll:"Prija\u0165 v\u0161etko",rejectAll:"Odmietnu\u0165 v\u0161etko",customize:"Prisp\xF4sobi\u0165",save:"Ulo\u017Ei\u0165 nastavenia"},cookieBanner:{title:"V\xE1\u017Eime si va\u0161e s\xFAkromie",description:"T\xE1to str\xE1nka pou\u017E\xEDva cookies na zlep\u0161enie v\xE1\u0161ho prehliadania, anal\xFDzu n\xE1v\u0161tevnosti a zobrazovanie personalizovan\xE9ho obsahu."},consentManagerDialog:{title:"Nastavenia s\xFAkromia",description:"Prisp\xF4sobte si nastavenia s\xFAkromia tu. 
M\xF4\u017Eete si vybra\u0165, ktor\xE9 typy cookies a sledovac\xEDch technol\xF3gi\xED povol\xEDte."},consentTypes:{necessary:{title:"Nevyhnutn\xE9",description:"Tieto cookies s\xFA nevyhnutn\xE9 pre spr\xE1vne fungovanie webovej str\xE1nky a nemo\u017Eno ich deaktivova\u0165."},functionality:{title:"Funk\u010Dnos\u0165",description:"Tieto cookies umo\u017E\u0148uj\xFA roz\u0161\xEDren\xFA funk\u010Dnos\u0165 a personaliz\xE1ciu webovej str\xE1nky."},marketing:{title:"Marketing",description:"Tieto cookies sa pou\u017E\xEDvaj\xFA na doru\u010Dovanie relevantn\xFDch rekl\xE1m a sledovanie ich \xFA\u010Dinnosti."},measurement:{title:"Analytika",description:"Tieto cookies n\xE1m pom\xE1haj\xFA pochopi\u0165, ako n\xE1v\u0161tevn\xEDci interaguj\xFA s webovou str\xE1nkou a zlep\u0161i\u0165 jej v\xFDkon."},experience:{title:"Pou\u017E\xEDvate\u013Esk\xE1 sk\xFAsenos\u0165",description:"Tieto cookies n\xE1m pom\xE1haj\xFA poskytova\u0165 lep\u0161iu pou\u017E\xEDvate\u013Esk\xFA sk\xFAsenos\u0165 a testova\u0165 nov\xE9 funkcie."}},frame:{title:"Prijmite s\xFAhlas pre kateg\xF3riu {category} na zobrazenie tohto obsahu.",actionButton:"Povoli\u0165 s\xFAhlas pre {category}"},legalLinks:{privacyPolicy:"Z\xE1sady ochrany osobn\xFDch \xFAdajov",cookiePolicy:"Z\xE1sady pou\u017E\xEDvania s\xFAborov cookie",termsOfService:"Podmienky pou\u017E\xEDvania slu\u017Eby"},iab:{banner:{title:"Nastavenia s\xFAkromia",description:"My a na\u0161i {partnerCount} partneri uklad\xE1me a/alebo pristupujeme k inform\xE1ci\xE1m vo va\u0161om zariaden\xED a sprac\xFAvame osobn\xE9 \xFAdaje, ako s\xFA jedine\u010Dn\xE9 identifik\xE1tory a \xFAdaje o prehliadan\xED, pre t\xFAto webov\xFA str\xE1nku s cie\u013Eom:",partnersLink:"{count} partneri",andMore:"A \u010Fal\u0161\xEDch {count}...",legitimateInterestNotice:"Niektor\xED partneri si uplat\u0148uj\xFA opr\xE1vnen\xFD z\xE1ujem na sprac\xFAvanie va\u0161ich \xFAdajov. 
M\xE1te pr\xE1vo vznies\u0165 n\xE1mietku proti tomuto sprac\xFAvaniu, prisp\xF4sobi\u0165 svoje vo\u013Eby a kedyko\u013Evek odvola\u0165 svoj s\xFAhlas.",scopeServiceSpecific:"V\xE1\u0161 s\xFAhlas plat\xED len pre t\xFAto webov\xFA str\xE1nku a neovplyvn\xED in\xE9 slu\u017Eby.",scopeGroup:"Va\u0161a vo\u013Eba plat\xED pre v\u0161etky na\u0161e weby v tejto skupine."},preferenceCenter:{title:"Nastavenia s\xFAkromia",description:"Prisp\xF4sobte si nastavenia s\xFAkromia tu. M\xF4\u017Eete si vybra\u0165, ktor\xE9 typy cookies a sledovac\xEDch technol\xF3gi\xED povol\xEDte.",tabs:{purposes:"\xDA\u010Dely",vendors:"Dod\xE1vatelia"},purposeItem:{partners:"{count} partneri",vendorsUseLegitimateInterest:"{count} dod\xE1vatelia si uplat\u0148uj\xFA opr\xE1vnen\xFD z\xE1ujem",examples:"Pr\xEDklady",partnersUsingPurpose:"Partneri vyu\u017E\xEDvaj\xFAci tento \xFA\u010Del",withYourPermission:"S va\u0161\xEDm povolen\xEDm",legitimateInterest:"Opr\xE1vnen\xFD z\xE1ujem",objectButton:"Vznies\u0165 n\xE1mietku",objected:"N\xE1mietka vznesen\xE1",rightToObject:"M\xE1te pr\xE1vo vznies\u0165 n\xE1mietku proti sprac\xFAvaniu zalo\u017Een\xE9mu na opr\xE1vnenom z\xE1ujme."},specialPurposes:{title:"Z\xE1kladn\xE9 funkcie (povinn\xE9)",tooltip:"Tieto s\xFA potrebn\xE9 pre funk\u010Dnos\u0165 a bezpe\u010Dnos\u0165 str\xE1nky. Pod\u013Ea IAB TCF nem\xF4\u017Eete vznies\u0165 n\xE1mietku proti t\xFDmto osobitn\xFDm \xFA\u010Delom."},vendorList:{search:"H\u013Eada\u0165 dod\xE1vate\u013Eov...",showingCount:"Zobrazuje sa {filtered} z {total} dod\xE1vate\u013Eov",iabVendorsHeading:"Dod\xE1vatelia registrovan\xED v IAB",iabVendorsNotice:"T\xEDto partneri s\xFA registrovan\xED v r\xE1mci IAB Transparency & Consent Framework (TCF), priemyseln\xE9ho \u0161tandardu pre spr\xE1vu s\xFAhlasu",customVendorsHeading:"Vlastn\xED partneri",customVendorsNotice:"Toto s\xFA vlastn\xED partneri, ktor\xED nie s\xFA registrovan\xED v r\xE1mci IAB Transparency & Consent Framework (TCF). 
Sprac\xFAvaj\xFA \xFAdaje na z\xE1klade v\xE1\u0161ho s\xFAhlasu a m\xF4\u017Eu ma\u0165 in\xE9 postupy ochrany s\xFAkromia ako dod\xE1vatelia registrovan\xED v IAB.",purposes:"\xDA\u010Dely",specialPurposes:"Osobitn\xE9 \xFA\u010Dely",specialFeatures:"Osobitn\xE9 funkcie",features:"Funkcie",dataCategories:"Kateg\xF3rie \xFAdajov",usesCookies:"Pou\u017E\xEDva cookies",nonCookieAccess:"Pr\xEDstup bez cookies",maxAge:"Max. vek: {days}d",retention:"Uchov\xE1vanie: {days}d",legitimateInterest:"Opr\xE1v. z\xE1ujem",privacyPolicy:"Z\xE1sady ochrany s\xFAkromia",storageDisclosure:"Zverejnenie inform\xE1ci\xED o ukladan\xED",requiredNotice:"Vy\u017Eaduje sa pre funk\u010Dnos\u0165 str\xE1nky, nemo\u017Eno zak\xE1za\u0165"},footer:{consentStorage:'Predvo\u013Eby s\xFAhlasu s\xFA ulo\u017Een\xE9 v cookie s n\xE1zvom "euconsent-v2" po dobu 13 mesiacov. The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Prija\u0165 v\u0161etko",rejectAll:"Odmietnu\u0165 v\u0161etko",customize:"Prisp\xF4sobi\u0165",saveSettings:"Ulo\u017Ei\u0165 nastavenia",loading:"Na\u010D\xEDtava sa...",showingSelectedVendor:"Zobrazenie vybran\xE9ho dod\xE1vate\u013Ea",clearSelection:"Vymaza\u0165",customPartner:"Vlastn\xFD partner neregistrovan\xFD v IAB"}}},Vi={common:{acceptAll:"Sprejmi vse",rejectAll:"Zavrni vse",customize:"Prilagodi",save:"Shrani nastavitve"},cookieBanner:{title:"Cenimo va\u0161o zasebnost",description:"Ta spletna stran uporablja pi\u0161kotke za izbolj\u0161anje va\u0161e uporabni\u0161ke izku\u0161nje, analizo prometa na strani in prikaz personaliziranih vsebin."},consentManagerDialog:{title:"Nastavitve zasebnosti",description:"Tukaj prilagodite svoje nastavitve zasebnosti. 
Izberete lahko, katere vrste pi\u0161kotkov in tehnologij sledenja dovolite."},consentTypes:{necessary:{title:"Nujno potrebni",description:"Ti pi\u0161kotki so bistveni za pravilno delovanje spletne strani in jih ni mogo\u010De onemogo\u010Diti."},functionality:{title:"Funkcionalnost",description:"Ti pi\u0161kotki omogo\u010Dajo izbolj\u0161ano funkcionalnost in personalizacijo spletne strani."},marketing:{title:"Tr\u017Eenje",description:"Ti pi\u0161kotki se uporabljajo za prikazovanje relevantnih oglasov in spremljanje njihove u\u010Dinkovitosti."},measurement:{title:"Analitika",description:"Ti pi\u0161kotki nam pomagajo razumeti, kako obiskovalci uporabljajo spletno stran, in izbolj\u0161ati njeno delovanje."},experience:{title:"Izku\u0161nja",description:"Ti pi\u0161kotki nam pomagajo zagotoviti bolj\u0161o uporabni\u0161ko izku\u0161njo in testirati nove funkcije."}},frame:{title:"Za ogled te vsebine sprejmite soglasje za kategorijo {category}.",actionButton:"Omogo\u010Di soglasje za {category}"},legalLinks:{privacyPolicy:"Pravilnik o zasebnosti",cookiePolicy:"Pravilnik o pi\u0161kotkih",termsOfService:"Pogoji uporabe"},iab:{banner:{title:"Nastavitve zasebnosti",description:"Mi in na\u0161ih {partnerCount} partnerjev shranjujemo in/ali dostopamo do informacij na va\u0161i napravi ter obdelujemo osebne podatke, kot so edinstveni identifikatorji in podatki o brskanju, za to spletno mesto, da bi:",partnersLink:"{count} partnerjev",andMore:"In \u0161e {count}...",legitimateInterestNotice:"Nekateri partnerji uveljavljajo zakoniti interes za obdelavo va\u0161ih podatkov. Imate pravico do ugovora tej obdelavi, prilagoditve svojih izbir in preklica soglasja kadar koli.",scopeServiceSpecific:"Va\u0161e soglasje velja samo za to spletno mesto in ne bo vplivalo na druge storitve.",scopeGroup:"Va\u0161a izbira velja za vse na\u0161e spletne strani v tej skupini."},preferenceCenter:{title:"Nastavitve zasebnosti",description:"Tukaj prilagodite svoje nastavitve zasebnosti. 
Izberete lahko, katere vrste pi\u0161kotkov in tehnologij sledenja dovolite.",tabs:{purposes:"Nameni",vendors:"Ponudniki"},purposeItem:{partners:"{count} partnerjev",vendorsUseLegitimateInterest:"{count} ponudnikov uveljavlja zakoniti interes",examples:"Primeri",partnersUsingPurpose:"Partnerji, ki uporabljajo ta namen",withYourPermission:"Z va\u0161im dovoljenjem",legitimateInterest:"Zakoniti interes",objectButton:"Ugovarjaj",objected:"Ugovarjano",rightToObject:"Imate pravico do ugovora obdelavi, ki temelji na zakonitem interesu."},specialPurposes:{title:"Bistvene funkcije (obvezno)",tooltip:"Te so potrebne for funkcionalnost in varnost spletnega mesta. V skladu z IAB TCF ne morete ugovarjati tem posebnim namenom."},vendorList:{search:"I\u0161\u010Di ponudnike...",showingCount:"Prikazano {filtered} od {total} ponudnikov",iabVendorsHeading:"Ponudniki, registrirani v IAB",iabVendorsNotice:"Ti partnerji so registrirani v okviru IAB Transparency & Consent Framework (TCF), industrijskega standarda za upravljanje soglasij",customVendorsHeading:"Partnerji po meri",customVendorsNotice:"To so partnerji po meri, ki niso registrirani v okviru IAB Transparency & Consent Framework (TCF). Podatke obdelujejo na podlagi va\u0161ega soglasja in imajo lahko druga\u010Dne prakse zasebnosti kot ponudniki, registrirani v IAB.",purposes:"Nameni",specialPurposes:"Posebni nameni",specialFeatures:"Posebne funkcije",features:"Funkcije",dataCategories:"Kategorije podatkov",usesCookies:"Uporablja pi\u0161kotke",nonCookieAccess:"Dostop brez pi\u0161kotkov",maxAge:"Najv. starost: {days}d",retention:"Hramba: {days}d",legitimateInterest:"Zakoniti int.",privacyPolicy:"Pravilnik o zasebnosti",storageDisclosure:"Razkritje shranjevanja",requiredNotice:"Zahtevano za delovanje spletnega mesta, ni mogo\u010De onemogo\u010Diti"},footer:{consentStorage:'Preference glede soglasja so shranjene v pi\u0161kotku z imenom "euconsent-v2" 13 mesecev. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Sprejmi vse",rejectAll:"Zavrni vse",customize:"Prilagodi",saveSettings:"Shrani nastavitve",loading:"Nalaganje...",showingSelectedVendor:"Prikaz izbranega ponudnika",clearSelection:"Po\u010Disti",customPartner:"Partner po meri, ki ni registriran v IAB"}}},xi={common:{acceptAll:"Acceptera alla",rejectAll:"Avvisa alla",customize:"Anpassa",save:"Spara inst\xE4llningar"},cookieBanner:{title:"Vi v\xE4rdes\xE4tter din integritet",description:"Den h\xE4r webbplatsen anv\xE4nder cookies f\xF6r att f\xF6rb\xE4ttra din surfupplevelse, analysera webbplatstrafik och visa personligt anpassat inneh\xE5ll."},consentManagerDialog:{title:"Integritetsinst\xE4llningar",description:"Anpassa dina integritetsinst\xE4llningar h\xE4r. Du kan v\xE4lja vilka typer av cookies och sp\xE5rningstekniker du till\xE5ter."},consentTypes:{necessary:{title:"Absolut n\xF6dv\xE4ndiga",description:"Dessa cookies \xE4r n\xF6dv\xE4ndiga f\xF6r att webbplatsen ska fungera korrekt och kan inte inaktiveras."},functionality:{title:"Funktionalitet",description:"Dessa cookies m\xF6jligg\xF6r f\xF6rb\xE4ttrad funktionalitet och personalisering av webbplatsen."},marketing:{title:"Marknadsf\xF6ring",description:"Dessa cookies anv\xE4nds f\xF6r att leverera relevanta annonser och sp\xE5ra deras effektivitet."},measurement:{title:"Analys",description:"Dessa cookies hj\xE4lper oss att f\xF6rst\xE5 hur bes\xF6kare interagerar med webbplatsen och f\xF6rb\xE4ttra dess prestanda."},experience:{title:"Upplevelse",description:"Dessa cookies hj\xE4lper oss att ge en b\xE4ttre anv\xE4ndarupplevelse och testa nya funktioner."}},frame:{title:"Acceptera {category}-samtycke f\xF6r att visa detta inneh\xE5ll.",actionButton:"Aktivera {category}-samtycke"},legalLinks:{privacyPolicy:"Integritetspolicy",cookiePolicy:"Cookiepolicy",termsOfService:"Anv\xE4ndarvillkor"},iab:{banner:{title:"Integritetsinst\xE4llningar",description:"Vi och 
v\xE5ra {partnerCount} partner lagrar och/eller f\xE5r tillg\xE5ng till information p\xE5 din enhet och behandlar personuppgifter, s\xE5som unika identifierare och webbl\xE4sardata, f\xF6r denna webbplats, f\xF6r att:",partnersLink:"{count} partner",andMore:"Och {count} till...",legitimateInterestNotice:"Vissa partner h\xE4vdar ett ber\xE4ttigat intresse f\xF6r att behandla dina uppgifter. Du har r\xE4tt att inv\xE4nda mot denna behandling, anpassa dina val och n\xE4r som helst \xE5terkalla ditt samtycke.",scopeServiceSpecific:"Ditt samtycke g\xE4ller endast f\xF6r den h\xE4r webbplatsen och p\xE5verkar inte andra tj\xE4nster.",scopeGroup:"Ditt val g\xE4ller f\xF6r alla v\xE5ra webbplatser i denna grupp."},preferenceCenter:{title:"Integritetsinst\xE4llningar",description:"Anpassa dina integritetsinst\xE4llningar h\xE4r. Du kan v\xE4lja vilka typer av cookies och sp\xE5rningstekniker du till\xE5ter.",tabs:{purposes:"\xC4ndam\xE5l",vendors:"Leverant\xF6rer"},purposeItem:{partners:"{count} partner",vendorsUseLegitimateInterest:"{count} leverant\xF6rer h\xE4vdar ber\xE4ttigat intresse",examples:"Exempel",partnersUsingPurpose:"Partner som anv\xE4nder detta \xE4ndam\xE5l",withYourPermission:"Med ditt tillst\xE5nd",legitimateInterest:"Ber\xE4ttigat intresse",objectButton:"Inv\xE4nd",objected:"Inv\xE4nt",rightToObject:"Du har r\xE4tt att inv\xE4nda mot behandling baserad p\xE5 ber\xE4ttigat intresse."},specialPurposes:{title:"Viktiga funktioner (kr\xE4vs)",tooltip:"Dessa kr\xE4vs f\xF6r webbplatsens funktionalitet och s\xE4kerhet. 
Enligt IAB TCF kan du inte inv\xE4nda mot dessa speciella \xE4ndam\xE5l."},vendorList:{search:"S\xF6k leverant\xF6rer...",showingCount:"{filtered} av {total} leverant\xF6rer",iabVendorsHeading:"IAB-registrerade leverant\xF6rer",iabVendorsNotice:"Dessa partner \xE4r registrerade i IAB Transparency & Consent Framework (TCF), en branschstandard f\xF6r hantering av samtycke",customVendorsHeading:"Anpassade partner",customVendorsNotice:"Dessa \xE4r anpassade partner som inte \xE4r registrerade i IAB Transparency & Consent Framework (TCF). De behandlar data baserat p\xE5 ditt samtycke och kan ha andra integritetspraxis \xE4n IAB-registrerade leverant\xF6rer.",purposes:"\xC4ndam\xE5l",specialPurposes:"Speciella \xE4ndam\xE5l",specialFeatures:"Speciella funktioner",features:"Funktioner",dataCategories:"Datakategorier",usesCookies:"Anv\xE4nder cookies",nonCookieAccess:"Icke-cookie-\xE5tkomst",maxAge:"Max \xE5lder: {days}d",retention:"Lagring: {days}d",legitimateInterest:"Ber\xE4tt. intresse",privacyPolicy:"Integritetspolicy",storageDisclosure:"Lagringsinformation",requiredNotice:"Kr\xE4vs f\xF6r webbplatsens funktionalitet, kan inte inaktiveras"},footer:{consentStorage:'Samtyckesinst\xE4llningar lagras i en cookie med namnet "euconsent-v2" i 13 m\xE5nader. 
The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"Acceptera alla",rejectAll:"Avvisa alla",customize:"Anpassa",saveSettings:"Spara inst\xE4llningar",loading:"Laddar...",showingSelectedVendor:"Visar vald leverant\xF6r",clearSelection:"Rensa",customPartner:"Anpassad partner som inte \xE4r registrerad i IAB"}}},Fi={common:{acceptAll:"\u5168\u90E8\u540C\u610F",rejectAll:"\u5168\u90E8\u62D2\u7EDD",customize:"\u81EA\u5B9A\u4E49\u8BBE\u7F6E",save:"\u4FDD\u5B58\u8BBE\u7F6E"},cookieBanner:{title:"\u6211\u4EEC\u91CD\u89C6\u60A8\u7684\u9690\u79C1",description:"\u672C\u7F51\u7AD9\u4F7F\u7528cookies\u6765\u63D0\u5347\u60A8\u7684\u6D4F\u89C8\u4F53\u9A8C\u3001\u5206\u6790\u7F51\u7AD9\u6D41\u91CF\u5E76\u5C55\u793A\u4E2A\u6027\u5316\u5185\u5BB9\u3002"},consentManagerDialog:{title:"\u9690\u79C1\u8BBE\u7F6E",description:"\u5728\u6B64\u81EA\u5B9A\u4E49\u60A8\u7684\u9690\u79C1\u8BBE\u7F6E\u3002\u60A8\u53EF\u4EE5\u9009\u62E9\u5141\u8BB8\u54EA\u4E9B\u7C7B\u578B\u7684cookies\u548C\u8DDF\u8E2A\u6280\u672F\u3002"},consentTypes:{necessary:{title:"\u4E25\u683C\u5FC5\u8981\u7C7B",description:"\u8FD9\u4E9Bcookies\u662F\u7F51\u7AD9\u6B63\u5E38\u8FD0\u884C\u6240\u5FC5\u9700\u7684\uFF0C\u65E0\u6CD5\u88AB\u7981\u7528\u3002"},functionality:{title:"\u529F\u80FD\u7C7B",description:"\u8FD9\u4E9Bcookies\u53EF\u589E\u5F3A\u7F51\u7AD9\u7684\u529F\u80FD\u548C\u4E2A\u6027\u5316\u4F53\u9A8C\u3002"},marketing:{title:"\u8425\u9500\u7C7B",description:"\u8FD9\u4E9Bcookies\u7528\u4E8E\u6295\u653E\u76F8\u5173\u5E7F\u544A\u5E76\u8DDF\u8E2A\u5E7F\u544A\u6548\u679C\u3002"},measurement:{title:"\u5206\u6790\u7C7B",description:"\u8FD9\u4E9Bcookies\u5E2E\u52A9\u6211\u4EEC\u4E86\u89E3\u8BBF\u5BA2\u5982\u4F55\u4E0E\u7F51\u7AD9\u4E92\u52A8\u5E76\u6539\u8FDB\u5176\u6027\u80FD\u3002"},experience:{title:"\u4F53\u9A8C\u7C7B",description:"\u8FD9\u4E9Bcookies\u5E2E\u52A9\u6211\u4EEC\u63D0\u4F9B\u66F4\u597D\u7684\u7528\u6237\u4F53\u9A8C\u5E76\u6D4B\u8BD5\u65B0\u529F\u80FD\u3002"}},frame:{t
itle:"\u63A5\u53D7 {category} \u4EE5\u67E5\u770B\u6B64\u5185\u5BB9\u3002",actionButton:"\u542F\u7528 {category} \u540C\u610F"},legalLinks:{privacyPolicy:"\u9690\u79C1\u653F\u7B56",cookiePolicy:"Cookie\u653F\u7B56",termsOfService:"\u670D\u52A1\u6761\u6B3E"},iab:{banner:{title:"\u9690\u79C1\u8BBE\u7F6E",description:"\u6211\u4EEC\u548C\u6211\u4EEC\u7684 {partnerCount} \u4E2A\u5408\u4F5C\u4F19\u4F34\u5728\u60A8\u7684\u8BBE\u5907\u4E0A\u5B58\u50A8\u548C/\u6216\u8BBF\u95EE\u4FE1\u606F\uFF0C\u5E76\u4E3A\u6B64\u7F51\u7AD9\u5904\u7406\u4E2A\u4EBA\u6570\u636E\uFF08\u5982\u552F\u4E00\u6807\u8BC6\u7B26\u548C\u6D4F\u89C8\u6570\u636E\uFF09\uFF0C\u4EE5\u4FBF\uFF1A",partnersLink:"{count} \u4E2A\u5408\u4F5C\u4F19\u4F34",andMore:"\u8FD8\u6709 {count} \u4E2A...",legitimateInterestNotice:"\u67D0\u4E9B\u5408\u4F5C\u4F19\u4F34\u58F0\u79F0\u5BF9\u5904\u7406\u60A8\u7684\u6570\u636E\u5177\u6709\u6B63\u5F53\u5229\u76CA\u3002\u60A8\u6709\u6743\u53CD\u5BF9\u8FD9\u79CD\u5904\u7406\u3001\u81EA\u5B9A\u4E49\u60A8\u7684\u9009\u62E9\u5E76\u968F\u65F6\u64A4\u56DE\u60A8\u7684\u540C\u610F\u3002",scopeServiceSpecific:"\u60A8\u7684\u540C\u610F\u4EC5\u9002\u7528\u4E8E\u672C\u7F51\u7AD9\uFF0C\u4E0D\u4F1A\u5F71\u54CD\u5176\u4ED6\u670D\u52A1\u3002",scopeGroup:"\u60A8\u7684\u9009\u62E9\u9002\u7528\u4E8E\u672C\u7EC4\u5185\u7684\u6240\u6709\u7F51\u7AD9\u3002"},preferenceCenter:{title:"\u9690\u79C1\u8BBE\u7F6E",description:"\u5728\u6B64\u81EA\u5B9A\u4E49\u60A8\u7684\u9690\u79C1\u8BBE\u7F6E\u3002\u60A8\u53EF\u4EE5\u9009\u62E9\u5141\u8BB8\u54EA\u4E9B\u7C7B\u578B\u7684 cookies \u548C\u8DDF\u8E2A\u6280\u672F\u3002",tabs:{purposes:"\u76EE\u7684",vendors:"\u4F9B\u5E94\u5546"},purposeItem:{partners:"{count} \u4E2A\u5408\u4F5C\u4F19\u4F34",vendorsUseLegitimateInterest:"{count} 
\u4E2A\u4F9B\u5E94\u5546\u58F0\u79F0\u5177\u6709\u6B63\u5F53\u5229\u76CA",examples:"\u793A\u4F8B",partnersUsingPurpose:"\u4F7F\u7528\u6B64\u76EE\u7684\u7684\u5408\u4F5C\u4F19\u4F34",withYourPermission:"\u5F81\u5F97\u60A8\u7684\u8BB8\u53EF",legitimateInterest:"\u6B63\u5F53\u5229\u76CA",objectButton:"\u53CD\u5BF9",objected:"\u5DF2\u53CD\u5BF9",rightToObject:"\u60A8\u6709\u6743\u53CD\u5BF9\u57FA\u4E8E\u6B63\u5F53\u5229\u76CA\u7684\u5904\u7406\u3002"},specialPurposes:{title:"\u57FA\u672C\u529F\u80FD\uFF08\u5FC5\u9700\uFF09",tooltip:"\u8FD9\u4E9B\u662F\u7F51\u7AD9\u529F\u80FD\u548C\u5B89\u5168\u6240\u5FC5\u9700\u7684\u3002\u6839\u636E IAB TCF\uFF0C\u60A8\u4E0D\u80FD\u53CD\u5BF9\u8FD9\u4E9B\u7279\u6B8A\u76EE\u7684\u3002"},vendorList:{search:"\u641C\u7D22\u4F9B\u5E94\u5546...",showingCount:"\u663E\u793A {total} \u4E2A\u4F9B\u5E94\u5546\u4E2D\u7684 {filtered} \u4E2A",iabVendorsHeading:"IAB \u6CE8\u518C\u4F9B\u5E94\u5546",iabVendorsNotice:"\u8FD9\u4E9B\u5408\u4F5C\u4F19\u4F34\u5DF2\u5728 IAB \u900F\u660E\u5EA6\u4E0E\u540C\u610F\u6846\u67B6 (TCF) \u6CE8\u518C\uFF0C\u8FD9\u662F\u7BA1\u7406\u540C\u610F\u7684\u884C\u4E1A\u6807\u51C6",customVendorsHeading:"\u81EA\u5B9A\u4E49\u5408\u4F5C\u4F19\u4F34",customVendorsNotice:"\u8FD9\u4E9B\u662F\u672A\u5728 IAB \u900F\u660E\u5EA6\u4E0E\u540C\u610F\u6846\u67B6 (TCF) \u6CE8\u518C\u7684\u81EA\u5B9A\u4E49\u5408\u4F5C\u4F19\u4F34\u3002\u4ED6\u4EEC\u6839\u636E\u60A8\u7684\u540C\u610F\u5904\u7406\u6570\u636E\uFF0C\u5E76\u4E14\u53EF\u80FD\u5177\u6709\u4E0E IAB \u6CE8\u518C\u4F9B\u5E94\u5546\u4E0D\u540C\u7684\u9690\u79C1\u60EF\u4F8B\u3002",purposes:"\u76EE\u7684",specialPurposes:"\u7279\u6B8A\u76EE\u7684",specialFeatures:"\u7279\u6B8A\u529F\u80FD",features:"\u529F\u80FD",dataCategories:"\u6570\u636E\u7C7B\u522B",usesCookies:"\u4F7F\u7528 Cookies",nonCookieAccess:"\u975E Cookie 
\u8BBF\u95EE",maxAge:"\u6700\u957F\u671F\u9650\uFF1A{days}\u5929",retention:"\u4FDD\u7559\u671F\u9650\uFF1A{days}\u5929",legitimateInterest:"\u6B63\u5F53\u5229\u76CA",privacyPolicy:"\u9690\u79C1\u653F\u7B56",storageDisclosure:"\u5B58\u50A8\u62AB\u9732",requiredNotice:"\u7F51\u7AD9\u529F\u80FD\u5FC5\u9700\uFF0C\u65E0\u6CD5\u7981\u7528"},footer:{consentStorage:'\u540C\u610F\u504F\u597D\u5B58\u50A8\u5728\u540D\u4E3A "euconsent-v2" \u7684 cookie \u4E2D\uFF0C\u6709\u6548\u671F\u4E3A 13 \u4E2A\u6708\u3002 The storage duration may be refreshed when you update your preferences.'}},common:{acceptAll:"\u5168\u90E8\u540C\u610F",rejectAll:"\u5168\u90E8\u62D2\u7EDD",customize:"\u81EA\u5B9A\u4E49\u8BBE\u7F6E",saveSettings:"\u4FDD\u5B58\u8BBE\u7F6E",loading:"\u52A0\u8F7D\u4E2D...",showingSelectedVendor:"\u663E\u793A\u9009\u5B9A\u7684\u4F9B\u5E94\u5546",clearSelection:"\u6E05\u9664",customPartner:"\u672A\u5728 IAB \u6CE8\u518C\u7684\u81EA\u5B9A\u4E49\u5408\u4F5C\u4F19\u4F34"}}},Bi={bg:ii,cs:si,da:oi,de:ai,el:ci,en:it,es:li,et:ui,fi:di,fr:pi,ga:gi,he:mi,hr:fi,hu:hi,id:ki,it:yi,lt:wi,lv:Ci,mt:Ii,nl:Si,pl:zi,pt:Pi,ro:Li,sk:Ei,sl:Vi,sv:xi,zh:Fi,is:vi,nb:ji,nn:Ai,lb:bi,rm:Ti,cy:ri};function _t(n){return!(!n||typeof n!="object"||Array.isArray(n))}function Ot(n,e){if(!n&&!e)return{};let t={};if(n)for(let i of Object.keys(n))t[i]=n[i];if(!e)return t;for(let i of Object.keys(e)){let s=e[i];if(s===void 0)continue;let r=n?n[i]:void 0;_t(r)&&_t(s)?t[i]=Ot(r,s):t[i]=s}return t}function Rt(n,e){let t=["cookieBanner","consentManagerDialog","common","consentTypes","frame","legalLinks","iab"],i={};for(let s of t){let r=n[s],o=e[s];(r||o)&&(i[s]=Ot(r,o))}return i}function Mt(n){return n?n.split(",").map(e=>e.split(";")[0]?.trim().toLowerCase()).filter(e=>!!e).map(e=>e.split("-")[0]??e):[]}function Di(n,e){let t=e?.fallback??"en";if(!n.length)return t;let i=Mt(e?.header);for(let s of i)if(n.includes(s))return s;return t}function Ut(n,e){let t={en:it},i=[n.translations,e?.translations];for(let s of 
i)if(s)for(let[r,o]of Object.entries(s)){if(!o)continue;let a=t[r]||t.en;t[r]=Rt(a,o)}return{...n,...e,translations:t}}function Gt(n,e,t=!1){if(t||typeof window>"u")return e||"en";let i=window.navigator.language?.split("-")[0]||"";return i&&i in n?i:e||"en"}function Ni(n,e){let t=Ut(n,e),i=Gt(t.translations,t.defaultLanguage,t.disableAutoLanguageSwitch);return{...t,defaultLanguage:i}}var Ht=n=>{let e,t=new Set,i=(u,p)=>{let d=typeof u=="function"?u(e):u;if(!Object.is(d,e)){let l=e;e=p??(typeof d!="object"||d===null)?d:Object.assign({},e,d),t.forEach(y=>y(e,l))}},s=()=>e,a={setState:i,getState:s,getInitialState:()=>c,subscribe:u=>(t.add(u),()=>t.delete(u))},c=e=n(i,s,a);return a},qt=(n=>n?Ht(n):Ht);var _i={"./src/libs/cookie/index.ts"(n,e,t){t.d(e,{_y:()=>D,If:()=>O,TV:()=>y,Yj:()=>b,Xk:()=>s,jD:()=>X,Ri:()=>m});function i(g){return{expiryDays:g?.defaultExpiryDays??365,crossSubdomain:g?.crossSubdomain??!1,domain:g?.defaultDomain??"",path:"/",secure:typeof window<"u"&&window.location.protocol==="https:",sameSite:"Lax"}}function s(){if(typeof window>"u")return"";let g=window.location.hostname;if(g==="localhost"||/^\d+\.\d+\.\d+\.\d+$/.test(g))return g;let f=g.split(".");return f.length>=2?`.${f.slice(-2).join(".")}`:g}let r={consents:"c",consentInfo:"i",timestamp:"ts",iabCustomVendorConsents:"icv",iabCustomVendorLegitimateInterests:"icvli",time:"t",type:"y",id:"id",subjectId:"sid",externalId:"eid",identityProvider:"idp"},o=Object.entries(r).reduce((g,[f,k])=>(g[k]=f,g),{});function a(g){let f={};for(let[k,j]of Object.entries(g)){let z=k.split(".").map(T=>r[T]||T);f[z.join(".")]=j}return f}function c(g){let f={};for(let[k,j]of Object.entries(g)){let z=k.split(".").map(T=>o[T]||T);f[z.join(".")]=j}return f}function u(g,f=""){let k={};for(let[j,I]of Object.entries(g)){let z=f?`${f}.${j}`:j;I==null?k[z]="":typeof I=="boolean"?I&&(k[z]="1"):typeof I!="object"||Array.isArray(I)?k[z]=String(I):Object.assign(k,u(I,z))}return k}function p(g){let f={};for(let[k,j]of 
Object.entries(g)){let I=k.split(".");if(I.length===0)continue;let z=f;for(let E=0;E`${f}:${k}`).join(",")}function l(g){if(!g)return{};let f={},k=g.split(",");for(let j of k){let I=j.indexOf(":");if(I===-1)continue;let z=j.substring(0,I),T=j.substring(I+1);f[z]=T}return f}function y(g,f,k,j){if(typeof document>"u")return;let I={...i(j),...k};I.crossSubdomain&&!k?.domain&&(I.domain=s());try{let z;if(typeof f=="string")z=f;else{let _=u(f),R=a(_);z=d(R)}let T=new Date;T.setTime(T.getTime()+24*I.expiryDays*36e5);let E=`expires=${T.toUTCString()}`,V=[`${g}=${z}`,E,`path=${I.path}`];I.domain&&V.push(`domain=${I.domain}`),I.secure&&V.push("secure"),I.sameSite&&V.push(`SameSite=${I.sameSite}`),document.cookie=V.join("; ")}catch(z){console.warn(`Failed to set cookie "${g}":`,z)}}function m(g){if(typeof document>"u")return null;try{let f=`${g}=`,k=document.cookie.split(";");for(let j of k){let I=j;for(;I.charAt(0)===" ";)I=I.substring(1);if(I.indexOf(f)===0){let z=I.substring(f.length);if(z.includes(":")){let T=l(z),E=c(T);return p(E)}return z}}return null}catch(f){return console.warn(`Failed to get cookie "${g}":`,f),null}}function b(g,f,k){if(typeof document>"u")return;let j={...i(k),...f};j.crossSubdomain&&!f?.domain&&(j.domain=s());try{let I=[`${g}=`,"expires=Thu, 01 Jan 1970 00:00:00 GMT",`path=${j.path}`];j.domain&&I.push(`domain=${j.domain}`),document.cookie=I.join("; ")}catch(I){console.warn(`Failed to delete cookie "${g}":`,I)}}var h=t("./src/store/initial-state.ts"),w=t("./src/types/consent-types.ts"),C=t("./src/libs/debug.ts");function A(g){if(typeof g!="object"||g===null)return!1;let k=g.consentInfo;if(!k||typeof k!="object")return!1;let j=typeof k.id=="string",I=typeof k.subjectId=="string";return j&&!I}function x(g){let f=g?.storageKey||h.ln,k=h.AQ;if(f!==k)try{if(typeof window<"u"&&window.localStorage){if(window.localStorage.getItem(f))return void window.localStorage.removeItem(k);let 
I=window.localStorage.getItem(k);I&&(window.localStorage.setItem(f,I),window.localStorage.removeItem(k),(0,C.YA)().log(`Migrated consent data from "${k}" to "${f}"`))}}catch(j){console.warn("[c15t] Failed to migrate legacy storage:",j)}}function D(g,f,k){let j=!1,I=!1,z=k?.storageKey||h.ln,T=O(k),V={...{...T,...g,iabCustomVendorConsents:g.iabCustomVendorConsents??T?.iabCustomVendorConsents,iabCustomVendorLegitimateInterests:g.iabCustomVendorLegitimateInterests??T?.iabCustomVendorLegitimateInterests}};(!V.iabCustomVendorConsents||Object.keys(V.iabCustomVendorConsents).length===0)&&delete V.iabCustomVendorConsents,(!V.iabCustomVendorLegitimateInterests||Object.keys(V.iabCustomVendorLegitimateInterests).length===0)&&delete V.iabCustomVendorLegitimateInterests;try{typeof window<"u"&&window.localStorage&&(window.localStorage.setItem(z,JSON.stringify(V)),j=!0)}catch(_){console.warn("Failed to save consent to localStorage:",_)}try{y(z,V,f,k),I=!0}catch(_){console.warn("Failed to save consent to cookie:",_)}if(!j&&!I)throw new Error("Failed to save consent to any storage method")}function F(g){let f=g.consents||{},k={...f};for(let j of w.W)k[j]=f[j]??!1;return{...g,consents:k}}function O(g){x(g);let f=g?.storageKey||h.ln,k=null,j=null;try{if(typeof window<"u"&&window.localStorage){let T=window.localStorage.getItem(f);T&&(k=JSON.parse(T))}}catch(T){console.warn("Failed to read consent from localStorage:",T)}try{j=m(f)}catch(T){console.warn("Failed to read consent from cookie:",T)}let I=null,z=null;if(j?(I=j,z="cookie"):k&&(I=k,z="localStorage"),I&&z){let T=g?.crossSubdomain===!0||!!g?.defaultDomain;if(z!=="localStorage"||j){if(z==="cookie")try{if(typeof window<"u"&&window.localStorage){let E=I;typeof E=="object"&&E!==null&&"consents"in E&&(E=F(E));let V=null;try{let ue=window.localStorage.getItem(f);if(ue){let re=JSON.parse(ue);V=typeof re=="object"&&re!==null&&"consents"in re?F(re):re}}catch{V=null}let 
_=JSON.stringify(E),R=JSON.stringify(V);_!==R&&(window.localStorage.setItem(f,_),V?T?(0,C.YA)().log("Updated localStorage with consent from cookie (cross-subdomain mode)"):(0,C.YA)().log("Updated localStorage with consent from cookie"):(0,C.YA)().log("Synced consent from cookie to localStorage"))}}catch(E){console.warn("[c15t] Failed to sync consent to localStorage:",E)}}else try{y(f,I,void 0,g),(0,C.YA)().log("Synced consent from localStorage to cookie")}catch(E){console.warn("[c15t] Failed to sync consent to cookie:",E)}}return I&&A(I)?((0,C.YA)().log("Detected legacy consent format (v1.x). Re-consent required for v2.0."),X(void 0,g),null):I&&typeof I=="object"?F(I):I}function X(g,f){let k=f?.storageKey||h.ln;try{typeof window<"u"&&window.localStorage&&(window.localStorage.removeItem(k),k!==h.AQ&&window.localStorage.removeItem(h.AQ))}catch(j){console.warn("Failed to remove consent from localStorage:",j)}try{b(k,g,f),k!==h.AQ&&b(h.AQ,g,f)}catch(j){console.warn("Failed to remove consent cookie:",j)}}},"./src/libs/debug.ts"(n,e,t){t.d(e,{YA:()=>o,tJ:()=>a});let i=()=>{};function s(c){return c?{log:(...u)=>console.log("[c15t]",...u),debug:(...u)=>console.debug("[c15t]",...u)}:{log:i,debug:i}}let r=s(!1);function o(){return r}function a(c){r=s(c)}},"./src/libs/generate-subject-id.ts"(n,e,t){t.d(e,{L:()=>o,U:()=>a});let i="123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz";function s(c){let u=BigInt(58),p=BigInt(0);for(let l of c)p=p*BigInt(256)+BigInt(l);let d=[];for(;p>0;){let l=p%u;d.unshift(i.charAt(Number(l))),p/=u}for(let l of c)if(l===0)d.unshift(i.charAt(0));else break;return d.join("")||i.charAt(0)}let r=17e11;function o(){let c=crypto.getRandomValues(new Uint8Array(20)),u=Date.now()-r,p=Math.floor(u/4294967296),d=u>>>0;return c[0]=p>>>24&255,c[1]=p>>>16&255,c[2]=p>>>8&255,c[3]=255&p,c[4]=d>>>24&255,c[5]=d>>>16&255,c[6]=d>>>8&255,c[7]=255&d,`sub_${s(c)}`}function a(c){if(!c.startsWith("sub_"))return!1;let u=c.slice(4);if(u.length===0)return!1;for(let 
p of u)if(!i.includes(p))return!1;return!0}},"./src/libs/iab-tcf/cmp-defaults.ts"(n,e,t){t.d(e,{D:()=>s,I:()=>r});var i=t("./src/version.ts");let s=0,r=i.r},"./src/libs/iab-tcf/fetch-gvl.ts"(n,e,t){t.d(e,{Ww:()=>a,ix:()=>o,wL:()=>p,xe:()=>c});var i=t("./src/libs/iab-tcf/types.ts");let s=new Map,r;async function o(d,l={}){let y=typeof window<"u"?window.__c15t_mock_gvl:void 0;if(y!==void 0)return r=y,y;if(u!==void 0)return r=u,u;let{endpoint:m=i.w,headers:b}=l,h=d?[...d].sort((F,O)=>F-O):[],w=b?JSON.stringify(b):"",C=`${m}|${h.join(",")}|${w}`,A=s.get(C);if(A)return A;let x=new URL(m);h.length>0&&x.searchParams.set("vendorIds",h.join(","));let D=(async()=>{try{let F=await fetch(x.toString(),{headers:b});if(F.status===204)return r=null,null;if(!F.ok)throw new Error(`Failed to fetch GVL: ${F.status} ${F.statusText}`);let O=await F.json();if(!O.vendorListVersion||!O.purposes||!O.vendors)throw new Error("Invalid GVL response: missing required fields");return r=O,O}finally{s.delete(C)}})();return s.set(C,D),D}function a(){return r}function c(){s.clear(),r=void 0,u=void 0}let u;function p(d){u=d,d!==void 0&&(r=d)}},"./src/libs/iab-tcf/index.ts"(n,e,t){t.d(e,{generateTCString:()=>h,initializeIABStub:()=>p,wL:()=>F.wL,Ww:()=>F.Ww,decodeTCString:()=>w,xe:()=>F.xe,fetchGVL:()=>F.ix,iabPurposesToC15tConsents:()=>X,createCMPApi:()=>D});var i=t("./src/libs/iab-tcf/cmp-defaults.ts"),s=t("./src/version.ts");let r=!1;function o(){return{gdprApplies:void 0,cmpLoaded:!1,cmpStatus:"stub",displayStatus:"hidden",apiVersion:"2.3",cmpVersion:s.r,cmpId:0,gvlVersion:0,tcfPolicyVersion:5}}function a(){let g=[],f=(k,j,I,z)=>{k==="ping"?I(o(),!0):g.push([k,j,I,z])};return f.queue=g,f}function c(){if(typeof document>"u"||document.querySelector('iframe[name="__tcfapiLocator"]'))return null;let g=document.createElement("iframe");return 
g.name="__tcfapiLocator",g.style.display="none",g.setAttribute("aria-hidden","true"),g.tabIndex=-1,(document.body??document.documentElement).appendChild(g),g}function u(g){if(typeof window>"u"||!window.__tcfapi)return;let{data:f}=g;if(!f||typeof f!="object"||!("__tcfapiCall"in f))return;let k=f.__tcfapiCall;!k||!k.command||!k.callId||window.__tcfapi(k.command,k.version,(j,I)=>{let z={__tcfapiReturn:{returnValue:j,success:I,callId:k.callId}};g.source&&typeof g.source.postMessage=="function"&&g.source.postMessage(z,"*")},k.parameter)}function p(){typeof window>"u"||r||(window.__tcfapi||(window.__tcfapi=a()),c(),window.addEventListener("message",u),r=!0)}function d(){return typeof window>"u"||!window.__tcfapi?[]:window.__tcfapi.queue??[]}function l(){typeof window<"u"&&window.__tcfapi?.queue&&(window.__tcfapi.queue=[])}let y=null,m=null;async function b(){return y||m||(m=Promise.resolve().then(()=>(An(),Sn)).then(g=>(y=g,m=null,g)).catch(g=>{throw m=null,new Error(`Failed to load @iabtechlabtcf/core: ${g instanceof Error?g.message:"Unknown error"}. 
Make sure it is installed as a dependency.`)}),m)}async function h(g,f,k){let{TCModel:j,TCString:I,GVL:z}=await b(),T=new z(f),E=new j(T);E.cmpId=k.cmpId,E.cmpVersion=typeof k.cmpVersion=="number"?k.cmpVersion:Number.parseInt(String(k.cmpVersion??"1"),10)||1,E.consentScreen=k.consentScreen??1,E.consentLanguage=k.consentLanguage??"EN",E.publisherCountryCode=k.publisherCountryCode??"US",E.isServiceSpecific=k.isServiceSpecific??!0;for(let[V,_]of Object.entries(g.purposeConsents))_&&E.purposeConsents.set(Number(V));for(let[V,_]of Object.entries(g.purposeLegitimateInterests))_&&E.purposeLegitimateInterests.set(Number(V));for(let[V,_]of Object.entries(g.vendorConsents)){let R=Number(V);_&&Number.isFinite(R)&&E.vendorConsents.set(R)}for(let[V,_]of Object.entries(g.vendorLegitimateInterests)){let R=Number(V);_&&Number.isFinite(R)&&E.vendorLegitimateInterests.set(R)}for(let[V,_]of Object.entries(g.specialFeatureOptIns))_&&E.specialFeatureOptins.set(Number(V));for(let[V,_]of Object.entries(g.vendorsDisclosed))_&&E.vendorsDisclosed.set(Number(V));return I.encode(E)}async function w(g){let{TCString:f}=await b(),k=f.decode(g),j=(I,z)=>{let T={};for(let E=1;E<=z;E++)I.has(E)&&(T[E]=!0);return T};return{cmpId:k.cmpId,cmpVersion:k.cmpVersion,consentLanguage:k.consentLanguage,isServiceSpecific:k.isServiceSpecific,purposeConsents:j(k.purposeConsents,11),purposeLegitimateInterests:j(k.purposeLegitimateInterests,11),vendorConsents:j(k.vendorConsents,1e3),vendorLegitimateInterests:j(k.vendorLegitimateInterests,1e3),specialFeatureOptIns:j(k.specialFeatureOptins,2),vendorsDisclosed:j(k.vendorsDisclosed,1e3),created:k.created,lastUpdated:k.lastUpdated,vendorListVersion:k.vendorListVersion,policyVersion:k.policyVersion}}var C=t("./src/libs/iab-tcf/types.ts");function A(g,f,k){if(typeof document>"u")return;let j=24*k*3600;document.cookie=`${g}=${encodeURIComponent(f)}; max-age=${j}; path=/; SameSite=Lax`}function x(g){if(typeof document>"u")return null;let f=document.cookie.match(new 
RegExp(`(^| )${g}=([^;]+)`));return f?.[2]?decodeURIComponent(f[2]):null}function D(g){let{cmpId:f=i.D,cmpVersion:k=i.I,gvl:j,gdprApplies:I=!0}=g,z="",T="loading",E="hidden",V=new Map,_=0,R=null;async function ue(B,M){if(R&&R.tcString===z&&!B)return R;let se={},ie={},Ie={},Ft={},Bt={};if(z)try{let Le=await w(z);se=Le.purposeConsents,ie=Le.purposeLegitimateInterests,Ie=Le.vendorConsents,Ft=Le.vendorLegitimateInterests,Bt=Le.specialFeatureOptIns}catch{}let ti=typeof k=="number"?k:Number.parseInt(String(k),10)||1,Dt={tcString:z,tcfPolicyVersion:j.tcfPolicyVersion,cmpId:f,cmpVersion:ti,gdprApplies:I,listenerId:M,eventStatus:B,cmpStatus:T,isServiceSpecific:!0,useNonStandardTexts:!1,publisherCC:"US",purposeOneTreatment:!1,purpose:{consents:se,legitimateInterests:ie},vendor:{consents:Ie,legitimateInterests:Ft},specialFeatureOptins:Bt,publisher:{consents:{},legitimateInterests:{},customPurpose:{consents:{},legitimateInterests:{}},restrictions:{}}};return B||(R=Dt),Dt}function re(B){let M={gdprApplies:I,cmpLoaded:T==="loaded",cmpStatus:T,displayStatus:E,apiVersion:"2.3",cmpVersion:typeof k=="string"?k:String(k),cmpId:f,gvlVersion:j.vendorListVersion,tcfPolicyVersion:j.tcfPolicyVersion};B(M,!0)}async function ee(B,M){let se=await ue();B(se,!0)}async function Ce(B){return ee(B)}function Zn(B,M){B(j,!0)}async function Qn(B){let M=_++;V.set(M,B);let se=await ue("tcloaded",M);B(se,!0)}function Xn(B,M){let se=V.has(M);V.delete(M),B(se,!0)}async function Ue(B){for(let[M,se]of V){let ie=await ue(B,M);se(ie,!0)}}function ei(){if(typeof window>"u")return;let B=d();window.__tcfapi=(M,se,ie,Ie)=>{switch(M){case"ping":re(ie);break;case"getTCData":ee(ie,Ie);break;case"getInAppTCData":Ce(ie);break;case"getVendorList":Zn(ie,Ie);break;case"addEventListener":Qn(ie);break;case"removeEventListener":Xn(ie,Ie);break;default:ie(null,!1)}},l();for(let M of B)window.__tcfapi?.(...M);T="loaded"}return 
ei(),{updateConsent:B=>{z=B,R=null,T="loaded",Ue("useractioncomplete")},setDisplayStatus:B=>{E=B,B==="visible"&&Ue("cmpuishown")},loadFromStorage:()=>{let B=x(C.Y.TC_STRING_COOKIE);if(B)return z=B,R=null,Ue("tcloaded"),B;if(typeof localStorage<"u")try{let M=localStorage.getItem(C.Y.TC_STRING_LOCAL);if(M)return z=M,R=null,Ue("tcloaded"),M}catch{}return null},saveToStorage:B=>{if(A(C.Y.TC_STRING_COOKIE,B,395),typeof localStorage<"u")try{localStorage.setItem(C.Y.TC_STRING_LOCAL,B)}catch{}},getTcString:()=>z,destroy:()=>{V.clear(),R=null,typeof window<"u"&&delete window.__tcfapi}}}var F=t("./src/libs/iab-tcf/fetch-gvl.ts");let O={necessary:[1],marketing:[2,3,4],experience:[5,6],measurement:[7,8,9],functionality:[10,11]};function X(g){let f={necessary:!1,marketing:!1,experience:!1,measurement:!1,functionality:!1};for(let[k,j]of Object.entries(O)){let I=j.every(z=>g[z]===!0);f[k]=I}return f}t("./src/libs/iab-tcf/store.ts")},"./src/libs/iab-tcf/store.ts"(n,e,t){t.d(e,{yx:()=>c});var i=t("./src/libs/cookie/index.ts"),s=t("./src/libs/generate-subject-id.ts"),r=t("./src/libs/iab-tcf/cmp-defaults.ts");function o(l){return{config:l,gvl:null,isLoadingGVL:!1,nonIABVendors:[],tcString:null,vendorConsents:{},vendorLegitimateInterests:{},purposeConsents:{},purposeLegitimateInterests:{},specialFeatureOptIns:{},vendorsDisclosed:{},cmpApi:null,preferenceCenterTab:"purposes"}}function a(l,y,m){let 
/* IAB TCF store slice (./src/libs/iab-tcf/store.ts): the arrow `b` below merges a partial update into the `iab` sub-state; the returned actions (setPurposeConsent, setVendorConsent, setSpecialFeatureOptIn, acceptAll, rejectAll, save, ...) all read current state through `l()` and write through `b`. rejectAll keeps purpose 1 (strictly necessary) set to true. NOTE(review): this whole chunk is minified bundle output -- the line breaks here are wrap artifacts; edit the original TypeScript sources, not this file. */
b=h=>{let{iab:w}=l();w&&y({iab:{...w,...h}})};return{_updateState:b,setPurposeConsent:(h,w)=>{let{iab:C}=l();C&&b({purposeConsents:{...C.purposeConsents,[h]:w}})},setPurposeLegitimateInterest:(h,w)=>{let{iab:C}=l();C&&b({purposeLegitimateInterests:{...C.purposeLegitimateInterests,[h]:w}})},setVendorConsent:(h,w)=>{let{iab:C}=l();C&&b({vendorConsents:{...C.vendorConsents,[String(h)]:w}})},setVendorLegitimateInterest:(h,w)=>{let{iab:C}=l();C&&b({vendorLegitimateInterests:{...C.vendorLegitimateInterests,[String(h)]:w}})},setSpecialFeatureOptIn:(h,w)=>{let{iab:C}=l();C&&b({specialFeatureOptIns:{...C.specialFeatureOptIns,[h]:w}})},setPreferenceCenterTab:h=>{b({preferenceCenterTab:h})},acceptAll:()=>{let{iab:h}=l();if(!h?.gvl)return;let{purposeConsents:w,purposeLegitimateInterests:C}=u(h.gvl,!0),{vendorConsents:A,vendorLegitimateInterests:x}=p(h.gvl,h.nonIABVendors,!0),D=d(h.gvl,!0);b({purposeConsents:w,purposeLegitimateInterests:C,vendorConsents:A,vendorLegitimateInterests:x,specialFeatureOptIns:D})},rejectAll:()=>{let{iab:h}=l();if(!h?.gvl)return;let w={1:!0},C={};for(let F of Object.keys(h.gvl.purposes))Number(F)!==1&&(w[Number(F)]=!1,C[Number(F)]=!1);let{vendorConsents:A,vendorLegitimateInterests:x}=p(h.gvl,h.nonIABVendors,!1),D=d(h.gvl,!1);b({purposeConsents:w,purposeLegitimateInterests:C,vendorConsents:A,vendorLegitimateInterests:x,specialFeatureOptIns:D})},save:async()=>{let{iab:h,locationInfo:w,user:C,callbacks:A}=l();if(!h?.cmpApi||!h.gvl)return;let{config:x,gvl:D,cmpApi:F,purposeConsents:O,purposeLegitimateInterests:X,vendorConsents:g,vendorLegitimateInterests:f,specialFeatureOptIns:k}=h,{generateTCString:j,iabPurposesToC15tConsents:I}=await Promise.resolve().then(t.bind(t,"./src/libs/iab-tcf/index.ts")),z={};for(let ee of Object.keys(D.vendors))z[Number(ee)]=!0;let T=await 
/* save(): encodes current purpose/vendor/special-feature state into a TC string via generateTCString, persists it (cmpApi.saveToStorage + updateConsent), maps IAB purposes to c15t consent categories, writes cookie/local storage, refreshes scripts, then POSTs the consent to the backend; failures surface through callbacks.onError (or console.error when no handler is set). */
j({purposeConsents:O,purposeLegitimateInterests:X,vendorConsents:g,vendorLegitimateInterests:f,specialFeatureOptIns:k,vendorsDisclosed:z},D,{cmpId:x.cmpId??r.D,cmpVersion:x.cmpVersion??r.I,publisherCountryCode:x.publisherCountryCode??"GB",isServiceSpecific:x.isServiceSpecific??!0});F.saveToStorage(T),F.updateConsent(T);let E=I(O),V=Date.now();b({tcString:T,vendorsDisclosed:z});let _=l().consentInfo?.subjectId;_||(_=(0,s.L)()),y({consents:E,selectedConsents:E,activeUI:"none",consentInfo:{time:V,subjectId:_,externalId:C?.id,identityProvider:C?.identityProvider}});let R={},ue={};for(let ee of h.nonIABVendors){let Ce=String(ee.id);ee.purposes&&ee.purposes.length>0&&(R[Ce]=g[Ce]??!1),ee.legIntPurposes&&ee.legIntPurposes.length>0&&(ue[Ce]=f[Ce]??!0)}(0,i._y)({consents:E,consentInfo:{time:V,subjectId:_,externalId:C?.id,identityProvider:C?.identityProvider},iabCustomVendorConsents:R,iabCustomVendorLegitimateInterests:ue},void 0,l().storageConfig),l().updateScripts();let re=await m.setConsent({body:{subjectId:_,givenAt:V,type:"cookie_banner",domain:typeof window<"u"?window.location.hostname:"",preferences:E,externalSubjectId:C?.id,identityProvider:C?.identityProvider,tcString:T,jurisdiction:w?.jurisdiction??void 0,jurisdictionModel:"iab",metadata:{source:"iab_tcf",acceptanceMethod:"iab"}}});if(!re.ok){let ee=re.error?.message??"Failed to save IAB consents";A.onError?.({error:ee}),A.onError||console.error(ee)}}}}function c(l,y,m,b){let h=o(l),w=a(y,m,b);return{...h,...w}}function u(l,y){let m={},b={};for(let h of Object.keys(l.purposes))m[Number(h)]=y,b[Number(h)]=y;return{purposeConsents:m,purposeLegitimateInterests:b}}function p(l,y,m){let b={},h={};for(let[w,C]of Object.entries(l.vendors)){let A=String(w);C.purposes&&C.purposes.length>0&&(b[A]=m),C.legIntPurposes&&C.legIntPurposes.length>0&&(h[A]=m)}return y.forEach(w=>{let 
C=String(w.id);w.purposes&&w.purposes.length>0&&(b[C]=m),w.legIntPurposes&&w.legIntPurposes.length>0&&(h[C]=m)}),{vendorConsents:b,vendorLegitimateInterests:h}}function d(l,y){let m={};for(let b of Object.keys(l.specialFeatures))m[Number(b)]=y;return m}},"./src/libs/iab-tcf/types.ts"(n,e,t){t.d(e,{Y:()=>i,w:()=>s});let i={TC_STRING_COOKIE:"euconsent-v2",TC_STRING_LOCAL:"euconsent-v2"},s="https://gvl.consent.io"},"./src/store/initial-state.ts"(n,e,t){t.d(e,{AQ:()=>a,ln:()=>o,ue:()=>c});var i=t("./src/translations/index.ts"),s=t("./src/types/consent-types.ts"),r=t("./src/version.ts");let o="c15t",a="privacy-consent-storage",c={debug:!1,config:{pkg:"c15t",version:r.r,mode:"Unknown"},consents:s.y.reduce((u,p)=>(u[p.name]=p.defaultValue,u),{}),selectedConsents:s.y.reduce((u,p)=>(u[p.name]=p.defaultValue,u),{}),consentInfo:null,branding:"c15t",activeUI:"none",isLoadingConsentInfo:!1,hasFetchedBanner:!1,lastBannerFetchData:null,consentCategories:["necessary"],callbacks:{},locationInfo:null,overrides:void 0,legalLinks:{},translationConfig:i.Z,user:void 0,networkBlocker:void 0,storageConfig:void 0,includeNonDisplayedConsents:!1,consentTypes:s.y,iframeBlockerConfig:{disableAutomaticBlocking:!1},scripts:[],loadedScripts:{},scriptIdMap:{},model:"opt-in",iab:null,reloadOnConsentRevoked:!0,ssrDataUsed:!1,ssrSkippedReason:null}},"./src/translations/index.ts"(n,e,t){t.d(e,{Z:()=>s});var i=t("@c15t/translations");let s={translations:{en:i.enTranslations},defaultLanguage:"en",disableAutoLanguageSwitch:!1}},"./src/types/consent-types.ts"(n,e,t){t.d(e,{W:()=>s,y:()=>i});let i=[{defaultValue:!0,description:"These trackers are used for activities that are strictly necessary to operate or deliver the service you requested from us and, therefore, do not require you to consent.",disabled:!0,display:!0,gdprType:1,name:"necessary"},{defaultValue:!1,description:"These trackers enable basic interactions and functionalities that allow you to access selected features of our service and 
facilitate your communication with us.",display:!1,gdprType:2,name:"functionality"},{defaultValue:!1,description:"These trackers help us to measure traffic and analyze your behavior to improve our service.",display:!1,gdprType:4,name:"measurement"},{defaultValue:!1,description:"These trackers help us to improve the quality of your user experience and enable interactions with external content, networks, and platforms.",display:!1,gdprType:3,name:"experience"},{defaultValue:!1,description:"These trackers help us to deliver personalized ads or marketing content to you, and to measure their performance.",display:!1,gdprType:5,name:"marketing"}],s=i.map(r=>r.name)},"./src/version.ts"(n,e,t){t.d(e,{r:()=>i});let i="2.0.0-rc.3"},"@c15t/translations"(n){n.exports=st}},zn={};function U(n){var e=zn[n];if(e!==void 0)return e.exports;var t=zn[n]={exports:{}};return _i[n](t,t.exports,U),t.exports}U.d=(n,e)=>{for(var t in e)U.o(e,t)&&!U.o(n,t)&&Object.defineProperty(n,t,{enumerable:!0,get:e[t]})};U.o=(n,e)=>Object.prototype.hasOwnProperty.call(n,e);var le=U("@c15t/translations"),_n=/^\/+/;function Lt(n,e=null,t=null,i=null){return{data:e,error:t,ok:n,response:i}}function Oi(n,e=500,t="ERROR",i){return Lt(!1,null,{message:n,status:e,code:t,cause:i},null)}var de={maxRetries:3,initialDelayMs:100,backoffFactor:2,retryableStatusCodes:[500,502,503,504],nonRetryableStatusCodes:[400,401,403,404],retryOnNetworkError:!0,shouldRetry:void 0},Ri=/^(?:[a-z+]+:)?\/\//i,Pn=_n,K=U("./src/libs/debug.ts"),Je=n=>new Promise(e=>setTimeout(e,n));function Mi(){return"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,n=>{let e=16*Math.random()|0;return(n==="x"?e:3&e|8).toString(16)})}function Tn(n){let e=n.length;for(;e>0&&n[e-1]==="/";)e--;return n.slice(0,e)}function Ui(n,e){if(Ri.test(n)){let s=new URL(n),r=Tn(s.pathname),o=e.replace(Pn,""),a=`${r}/${o}`;return s.pathname=a,s.toString()}let t=Tn(n),i=e.replace(Pn,"");return`${t}/${i}`}var me=Lt;async function _e(n,e,t){let 
/* Retrying fetch layer _e(context, path, options): merges per-call retry config over the client defaults, builds the target URL (Ui), appends query params, tags each attempt with an X-Request-ID (Mi), parses JSON responses, and retries with exponential backoff (Je delay, factor `o`) on retryable status codes or network errors. NOTE(review): the pending-submission retry loops further down (functions Ki and Wi) contain garbled loop headers such as "for(let r=0;r0;r++)" -- the "<"-comparisons appear to have been lost when this text was extracted; restore them from the original source before relying on this copy. */
i={...n.retryConfig,...t?.retryConfig||{},retryableStatusCodes:t?.retryConfig?.retryableStatusCodes??n.retryConfig.retryableStatusCodes??de.retryableStatusCodes,nonRetryableStatusCodes:t?.retryConfig?.nonRetryableStatusCodes??n.retryConfig.nonRetryableStatusCodes??de.nonRetryableStatusCodes},{maxRetries:s,initialDelayMs:r,backoffFactor:o,retryableStatusCodes:a,nonRetryableStatusCodes:c,retryOnNetworkError:u}=i,p=0,d=r,l=null;for(;p<=(s??0);){let m=Mi(),b=n.customFetch||globalThis.fetch,h=Ui(n.backendURL,e),w;try{w=new URL(h)}catch{w=new URL(h,window.location.origin)}if(t?.query)for(let[A,x]of Object.entries(t.query))x!==void 0&&w.searchParams.append(A,String(x));let C={method:t?.method||"GET",mode:n.corsMode,credentials:"include",headers:{...n.headers,"X-Request-ID":m,...t?.headers},...t?.fetchOptions};t?.body&&C.method!=="GET"&&(C.body=JSON.stringify(t.body));try{let A=await b(w.toString(),C),x=null,D=null;try{A.headers.get("content-type")?.includes("application/json")&&A.status!==204&&A.headers.get("content-length")!=="0"?x=await A.json():A.status===204&&(x=null)}catch(f){D=f}if(D){let f=me(!1,null,{message:"Failed to parse response",status:A.status,code:"PARSE_ERROR",cause:D},A);if(t?.onError?.(f,e),t?.throw)throw new Error("Failed to parse response");return f}if(A.status>=200&&A.status<300){let f=me(!0,x,null,A);return t?.onSuccess?.(f),f}let O=x,X=me(!1,null,{message:O?.message||`Request failed with status ${A.status}`,status:A.status,code:O?.code||"API_ERROR",details:O?.details||null},A);l=X;let g=!1;if(c?.includes(A.status))(0,K.YA)().debug(`Not retrying request to ${e} with status ${A.status} (nonRetryableStatusCodes)`),g=!1;else if(typeof i.shouldRetry=="function")try{g=i.shouldRetry(A,{attemptsMade:p,url:w.toString(),method:C.method||"GET"}),(0,K.YA)().debug(`Custom retry strategy for ${e} with status ${A.status}: ${g}`)}catch{g=a?.includes(A.status)??!1,(0,K.YA)().debug(`Custom retry strategy failed, falling back to status code check: ${g}`)}else 
/* Retry decision precedence: nonRetryableStatusCodes always wins; then a user-supplied shouldRetry hook (falling back to the retryableStatusCodes check if the hook throws); otherwise the plain retryableStatusCodes membership test below. */
g=a?.includes(A.status)??!1,(0,K.YA)().debug(`Standard retry check for ${e} with status ${A.status}: ${g}`);if(!g||p>=(s??0)){if(t?.onError?.(X,e),t?.throw)throw new Error(X.error?.message||"Request failed");return X}p++,await Je(d??0),d=(d??0)*(o??2)}catch(A){if(A&&A.message==="Failed to parse response")throw A;let x=!(A instanceof Response),D=me(!1,null,{message:A instanceof Error?A.message:String(A),status:0,code:"NETWORK_ERROR",cause:A},null);if(l=D,!(x&&u)||p>=(s??0)){if(t?.onError?.(D,e),t?.throw)throw A;return D}p++,await Je(d??0),d=(d??0)*(o??2)}}let y=l||me(!1,null,{message:`Request failed after ${s} retries`,status:0,code:"MAX_RETRIES_EXCEEDED"},null);if(t?.onError?.(y,e),t?.throw)throw new Error(`Request failed after ${s} retries`);return y}var Y=U("./src/libs/cookie/index.ts"),Oe={INIT:"/init",POST_SUBJECT:"/subjects",GET_SUBJECT:"/subjects",PATCH_SUBJECT:"/subjects",CHECK_CONSENT:"/consents/check",LIST_SUBJECTS:"/subjects"};async function On(n,e,t,i,s){try{let r=await _e(n,e,{method:t,...i});return r.ok?r:(console.warn(`API request failed, falling back to offline mode for ${e}`),s(i))}catch(r){return console.warn(`Error calling ${e}, falling back to offline mode:`,r),s(i)}}async function Gi(n){let e="c15t-pending-identify-submissions";try{if(typeof window<"u"&&n?.body&&window.localStorage){let i=[];try{let o=window.localStorage.getItem(e);o&&(i=JSON.parse(o))}catch(o){console.warn("Error parsing pending identify submissions:",o),i=[]}let s=n.body;i.some(o=>o.id===s.id&&o.externalId===s.externalId)||(i.push(s),window.localStorage.setItem(e,JSON.stringify(i)),(0,K.YA)().log("Queued identify user submission for retry on next page load"))}}catch(i){console.warn("Failed to write to localStorage in identify offline fallback:",i)}let t=me(!0,null,null,null);return n?.onSuccess&&await n.onSuccess(t),t}async function Hi(n,e,t){let{body:i,...s}=t;if(!i?.id)return{ok:!1,data:null,response:null,error:{message:"Subject ID is required to identify 
user",status:400,code:"MISSING_SUBJECT_ID"}};let r=(0,Y.If)(e);(0,Y._y)({consents:r?.consents||{},consentInfo:{...r?.consentInfo,time:r?.consentInfo?.time||Date.now(),subjectId:i.id,externalId:i.externalId,identityProvider:i.identityProvider}},void 0,e);let o=`${Oe.PATCH_SUBJECT}/${i.id}`,{id:a,...c}=i;return On(n,o,"PATCH",{...s,body:c},async u=>{let p={id:i.id,...u?.body};return Gi({...u,body:p})})}var Rn=U("./src/libs/iab-tcf/fetch-gvl.ts");async function Ln(n,e){let t=null;if(e?.enabled)try{t=await(0,Rn.ix)(e.vendorIds)}catch(s){console.warn("Failed to fetch GVL in offline fallback:",s)}let i=me(!0,{jurisdiction:"NONE",location:{countryCode:null,regionCode:null},translations:{language:"en",translations:le.enTranslations},branding:"c15t",gvl:t},null,null);return n?.onSuccess&&await n.onSuccess(i),i}async function qi(n,e,t){try{let i=await _e(n,Oe.INIT,{method:"GET",...e});return i.ok?i:(console.warn("API request failed, falling back to offline mode for consent banner"),Ln(e,t))}catch(i){return console.warn("Error fetching consent banner info, falling back to offline mode:",i),Ln(e,t)}}var Mn="c15t-pending-consent-submissions",Ze="c15t-pending-identify-submissions";function $i(n,e){let t=Mn;if(!(typeof window>"u"||!window.localStorage))try{window.localStorage.setItem("c15t-storage-test-key","test"),window.localStorage.removeItem("c15t-storage-test-key");let i=window.localStorage.getItem(t);if(!i)return;let s=JSON.parse(i);if(!s.length)return void window.localStorage.removeItem(t);(0,K.YA)().log(`Found ${s.length} pending consent submission(s) to retry`),setTimeout(()=>{e(s)},2e3)}catch(i){console.warn("Failed to check for pending consent submissions:",i)}}async function Ki(n,e){let t=Mn,i=3,s=[...e];for(let r=0;r0;r++){let o=[];for(let a=0;a=0;a--){let c=o[a];c!==void 0&&s.splice(c,1)}if(s.length===0)break;r0?(window.localStorage.setItem(t,JSON.stringify(s)),(0,K.YA)().log(`${s.length} consent submissions still pending for future 
retry`)):(window.localStorage.removeItem(t),(0,K.YA)().log("All pending consent submissions processed successfully")))}catch(r){console.warn("Error updating pending submissions storage:",r)}}function Yi(n,e){if(!(typeof window>"u"||!window.localStorage))try{let t=window.localStorage.getItem(Ze);if(!t)return;let i=JSON.parse(t);if(!i.length)return void window.localStorage.removeItem(Ze);(0,K.YA)().log(`Found ${i.length} pending identify user submission(s) to retry`),setTimeout(()=>{e(i)},2500)}catch(t){console.warn("Failed to check for pending identify submissions:",t)}}async function Wi(n,e){let i=[...e];for(let s=0;s<3&&i.length>0;s++){let r=[];for(let o=0;o=0;o--){let a=r[o];a!==void 0&&i.splice(a,1)}if(i.length===0)break;s<2&&await Je(1e3*(s+1))}try{typeof window<"u"&&window.localStorage&&(i.length>0?(window.localStorage.setItem(Ze,JSON.stringify(i)),(0,K.YA)().log(`${i.length} identify submissions still pending for future retry`)):(window.localStorage.removeItem(Ze),(0,K.YA)().log("All pending identify submissions processed successfully")))}catch(s){console.warn("Error updating pending identify submissions storage:",s)}}async function Ji(n,e){let t="c15t-pending-consent-submissions",i=e?.body?.subjectId;try{if(typeof window<"u"&&((0,Y._y)({consents:e?.body?.preferences||{},consentInfo:{time:Date.now(),subjectId:i,externalId:e?.body?.externalSubjectId,identityProvider:e?.body?.identityProvider}},void 0,n),e?.body&&window.localStorage)){let r=[];try{let c=window.localStorage.getItem(t);c&&(r=JSON.parse(c))}catch(c){console.warn("Error parsing pending submissions:",c),r=[]}let o=e.body;r.some(c=>JSON.stringify(c)===JSON.stringify(o))||(r.push(o),window.localStorage.setItem(t,JSON.stringify(r)),(0,K.YA)().log("Queued consent submission for retry on next page load"))}}catch(r){console.warn("Failed to write to localStorage in offline fallback:",r)}let s=me(!0,null,null,null);return e?.onSuccess&&await e.onSuccess(s),s}async function 
/* Zi: persist-then-POST consent -- writes the preferences to local storage first, then POSTs to /subjects via On with an offline fallback (Ji queues the submission in localStorage for retry on next page load). Below: Qe, the HTTP client class -- normalizes backendURL (strips a trailing slash), builds headers/retry config from defaults `de`, and from its constructor kicks off retries of any pending consent/identify submissions. */
Zi(n,e,t){return(0,Y._y)({consents:t?.body?.preferences||{},consentInfo:{time:Date.now(),subjectId:t?.body?.subjectId,externalId:t?.body?.externalSubjectId,identityProvider:t?.body?.identityProvider}},void 0,e),await On(n,Oe.POST_SUBJECT,"POST",t,async s=>Ji(e,s))}var Qe=class{backendURL;storageConfig;iabConfig;headers;customFetch;corsMode;retryConfig;fetcherContext;constructor(e){this.backendURL=e.backendURL.endsWith("/")?e.backendURL.slice(0,-1):e.backendURL,this.headers={"Content-Type":"application/json",...e.headers},this.customFetch=e.customFetch,this.corsMode=e.corsMode||"cors",this.storageConfig=e.storageConfig,this.iabConfig=e.iabConfig,this.retryConfig={maxRetries:e.retryConfig?.maxRetries??de.maxRetries??3,initialDelayMs:e.retryConfig?.initialDelayMs??de.initialDelayMs??100,backoffFactor:e.retryConfig?.backoffFactor??de.backoffFactor??2,retryableStatusCodes:e.retryConfig?.retryableStatusCodes??de.retryableStatusCodes,nonRetryableStatusCodes:e.retryConfig?.nonRetryableStatusCodes??de.nonRetryableStatusCodes,shouldRetry:e.retryConfig?.shouldRetry??de.shouldRetry,retryOnNetworkError:e.retryConfig?.retryOnNetworkError??de.retryOnNetworkError},this.fetcherContext={backendURL:this.backendURL,headers:this.headers,customFetch:this.customFetch,corsMode:this.corsMode,retryConfig:this.retryConfig},this.checkPendingConsentSubmissions(),this.checkPendingIdentifySubmissions()}async init(e){return qi(this.fetcherContext,e,this.iabConfig)}async setConsent(e){return Zi(this.fetcherContext,this.storageConfig,e)}async identifyUser(e){return Hi(this.fetcherContext,this.storageConfig,e)}async $fetch(e,t){return _e(this.fetcherContext,e,t)}checkPendingConsentSubmissions(){$i(this.fetcherContext,e=>this.processPendingConsentSubmissions(e))}async processPendingConsentSubmissions(e){return Ki(this.fetcherContext,e)}checkPendingIdentifySubmissions(){Yi(this.fetcherContext,e=>this.processPendingIdentifySubmissions(e))}async processPendingIdentifySubmissions(e){return 
Wi(this.fetcherContext,e)}};function Xe(n,e=500,t="HANDLER_ERROR",i){return Oi(n,e,t,i)}async function Et(n,e,t){let i=n[e];if(!i){let s=Xe(`No endpoint handler found for '${String(e)}'`,404,"ENDPOINT_NOT_FOUND");if(t?.throw)throw new Error(`No endpoint handler found for '${String(e)}'`);return s}try{let s=await i(t);return{data:s.data,error:s.error,ok:s.ok??!s.error,response:s.response}}catch(s){let r=Xe(s instanceof Error?s.message:String(s),0,"HANDLER_ERROR",s);if(t?.throw)throw s;return r}}async function Qi(n,e,t,i){let s=t.replace(_n,"").split("/")[0],r=e[t];if(r)try{return await r(i)}catch(o){return Xe(o instanceof Error?o.message:String(o),0,"HANDLER_ERROR",o)}return!s||!(s in n)?Xe(`No endpoint handler found for '${t}'`,404,"ENDPOINT_NOT_FOUND"):await Et(n,s,i)}async function Xi(n,e){let t=("init"in n&&n.init!==void 0,"init");return await Et(n,t,e)}async function es(n,e){return await Et(n,"setConsent",e)}var jt=class{endpointHandlers;dynamicHandlers={};constructor(e){this.endpointHandlers=e.endpointHandlers}async init(e){return Xi(this.endpointHandlers,e)}async setConsent(e){return es(this.endpointHandlers,e)}async identifyUser(e){if(this.endpointHandlers.identifyUser)return this.endpointHandlers.identifyUser(e);let t=e.body?.id;return t?this.$fetch(`/subjects/${t}`,{...e,method:"PATCH"}):{ok:!1,data:null,response:null,error:{message:"Subject ID is required to identify user",status:400,code:"MISSING_SUBJECT_ID"}}}registerHandler(e,t){this.dynamicHandlers[e]=t}async $fetch(e,t){return Qi(this.endpointHandlers,this.dynamicHandlers,e,t)}};function ts(n,e){let t={EU:new Set(["AT","BE","BG","HR","CY","CZ","DK","EE","FI","FR","DE","GR","HU","IE","IT","LV","LT","LU","MT","NL","PL","PT","RO","SK","SI","ES","SE"]),EEA:new Set(["IS","NO","LI"]),UK:new Set(["GB"]),CH:new Set(["CH"]),BR:new Set(["BR"]),CA:new Set(["CA"]),AU:new Set(["AU"]),JP:new Set(["JP"]),KR:new Set(["KR"]),CA_QC_REGIONS:new Set(["QC"])},i="NONE";if(n){let s=n.toUpperCase(),r=e&&typeof 
/* ts(country, region) continued: uppercases the country, extracts the region suffix from codes like "CA-QC" (CA+QC short-circuits to "QC_LAW25"), then maps country membership to a jurisdiction: GDPR (EU/EEA/UK), CH, BR, PIPEDA (CA), AU, APPI (JP), PIPA (KR), defaulting to "NONE". */
e=="string"?(e.includes("-")?e.split("-").pop():e).toUpperCase():null;if(s==="CA"&&r&&t.CA_QC_REGIONS.has(r))return"QC_LAW25";let o=[{sets:[t.EU,t.EEA,t.UK],code:"GDPR"},{sets:[t.CH],code:"CH"},{sets:[t.BR],code:"BR"},{sets:[t.CA],code:"PIPEDA"},{sets:[t.AU],code:"AU"},{sets:[t.JP],code:"APPI"},{sets:[t.KR],code:"PIPA"}];for(let{sets:a,code:c}of o)if(a.some(u=>u.has(s))){i=c;break}}return i}var We=U("./src/translations/index.ts");function Un(n=null){return Lt(!0,n)}async function St(n){let e=Un();return n?.onSuccess&&await n.onSuccess(e),e}async function ns(n,e,t){let i=e?.headers?.["x-c15t-country"]??"GB",s=e?.headers?.["x-c15t-region"]??null,r,o,a=e?.headers?.["accept-language"]??null;if(n?.translations&&Object.keys(n.translations).length>0){let d=n.translations,l=Array.from(new Set(["en",...Object.keys(d)])),y=n.defaultLanguage??"en";r=(0,le.selectLanguage)(l,{header:a,fallback:y});let m=d[r]??{};o=(0,le.deepMergeTranslations)(le.enTranslations,m)}else{let d=Object.keys(We.Z.translations),l=We.Z.defaultLanguage??"en";r=(0,le.selectLanguage)(d,{header:a,fallback:l}),o=We.Z.translations[r]}let c=ts(i,s),u=null;if(t?.enabled)if(t.gvl)u=t.gvl;else try{u=await(0,Rn.ix)(t.vendorIds)}catch(d){console.warn("Failed to fetch GVL in offline mode:",d)}let p=Un({jurisdiction:c,location:{countryCode:i,regionCode:s},translations:{language:r,translations:o},branding:"c15t",gvl:u});return e?.onSuccess&&await e.onSuccess(p),p}async function is(n,e){let t=e?.body?.subjectId;try{typeof window<"u"&&(0,Y._y)({consentInfo:{time:Date.now(),subjectId:t,externalId:e?.body?.externalSubjectId,identityProvider:e?.body?.identityProvider},consents:e?.body?.preferences||{}},void 0,n)}catch(i){console.warn("Failed to write to storage:",i)}return await St(e)}var At=class{storageConfig;initialTranslationConfig;iabConfig;constructor(e,t,i){this.storageConfig=e,this.initialTranslationConfig=t,this.iabConfig=i}async init(e){return ns(this.initialTranslationConfig,e,this.iabConfig)}async 
setConsent(e){return is(this.storageConfig,e)}async identifyUser(e){return console.warn("identifyUser called in offline mode - external ID will not be linked"),St(e)}async $fetch(e,t){return await St(t)}},ss="/api/c15t",rs="c15t",Ye=new Map;function os(n){return n?Object.keys(n).sort().map(t=>{let i=n[t];return i==null?`${t}:null`:`${t}:${String(i)}`}).join("|"):""}function as(n){let e=os(n.storageConfig),t=e?`:storage:${e}`:"";if(n.mode==="offline"){let s=n.store?.initialTranslationConfig?.translations,r=s?`:translations:${Object.keys(s).sort().join(",")}`:"",a=n.store?.iab?.enabled?":iab:enabled":"";return`offline${t}${r}${a}`}if(n.mode==="custom")return`custom:${Object.keys(n.endpointHandlers||{}).sort().join(",")}${t}`;let i="";return"headers"in n&&n.headers&&(i=`:headers:${Object.keys(n.headers).sort().map(r=>`${r}=${n.headers?.[r]}`).join(",")}`),`c15t:${n.backendURL||""}${i}${t}`}function wt(n){let e=as(n);if(Ye.has(e)){if(n.mode!=="offline"&&n.mode!=="custom"&&"headers"in n&&n.headers){let r=Ye.get(e);r instanceof Qe&&(r.headers={"Content-Type":"application/json",...n.headers})}let s=Ye.get(e);if(s)return new Proxy(s,{get(r,o){return r[o]}})}let t=n.mode||rs,i;switch(t){case"custom":{let s=n;i=new jt({endpointHandlers:s.endpointHandlers});break}case"offline":{let s=n.store?.iab;i=new At(n.storageConfig,n.store?.initialTranslationConfig,s?{enabled:s.enabled,vendorIds:s.vendors,gvl:s.gvl}:void 0);break}default:{let s=n,r=n.store?.iab;i=new Qe({backendURL:s.backendURL||ss,headers:s.headers,customFetch:s.customFetch,retryConfig:s.retryConfig,storageConfig:n.storageConfig,iabConfig:r?{enabled:r.enabled,vendorIds:r.vendors,gvl:r.gvl}:void 0});break}}return Ye.set(e,i),i}var Vt=U("./src/libs/generate-subject-id.ts");function Gn(n,e){if(n.length===0)throw new TypeError(`${e} condition cannot be empty`)}function cs(n,e){if(!(n in e))throw new Error(`Consent category "${n}" not found in consent state`);return e[n]||!1}function ls(n,e){let 
/* Consent-condition evaluation: et/tt walk a condition tree of category names combined via {and:[...]}, {or:[...]}, {not:...}; ls/us handle AND/OR (Gn rejects empty lists with a TypeError), cs resolves a single category name (throwing if the category is absent from state), and Hn collects every category name referenced by a condition. */
t=Array.isArray(n)?n:[n];return Gn(t,"AND"),t.every(i=>et(i,e))}function us(n,e){let t=Array.isArray(n)?n:[n];return Gn(t,"OR"),t.some(i=>et(i,e))}function et(n,e){if(typeof n=="string")return cs(n,e);if(typeof n=="object"&&n!==null){if("and"in n)return ls(n.and,e);if("or"in n)return us(n.or,e);if("not"in n)return!et(n.not,e)}throw new TypeError(`Invalid condition structure: ${JSON.stringify(n)}`)}function tt(n,e){return et(n,e)}function Hn(n){let e=new Set;function t(i){if(typeof i=="string")return void e.add(i);typeof i=="object"&&i!==null&&("and"in i?(Array.isArray(i.and)?i.and:[i.and]).forEach(t):"or"in i?(Array.isArray(i.or)?i.or:[i.or]).forEach(t):"not"in i&&t(i.not))}return t(n),Array.from(e)}var nt=U("./src/libs/iab-tcf/index.ts"),Te=U("./src/types/consent-types.ts");function ds(n){let e=n.getAttribute("data-category");if(e){if(!Te.W.includes(e))throw new Error(`Invalid category attribute "${e}" on iframe. Must be one of: ${Te.W.join(", ")}`);return e}}function zt(n,e){let t=n.getAttribute("data-src"),i=ds(n);if(!i)return;tt(i,e)?t&&!n.src&&(n.src=t,n.removeAttribute("data-src")):n.src&&n.removeAttribute("src")}function qn(){if(typeof document>"u")return[];let n=document.querySelectorAll("iframe[data-category]"),e=new Set;return n?(n.forEach(t=>{let i=t.getAttribute("data-category");if(!i)return;let s=i.trim();Te.W.includes(s)&&e.add(s)}),Array.from(e)):[]}function En(n){if(typeof document>"u")return;let e=document.querySelectorAll("iframe");e&&e.forEach(t=>{zt(t,n)})}function ps(n,e){let t=new MutationObserver(i=>{let s=n(),r=!1;if(i.forEach(o=>{o.addedNodes.forEach(a=>{if(a.nodeType===Node.ELEMENT_NODE){let c=a;c.tagName&&c.tagName.toUpperCase()==="IFRAME"&&(zt(c,s),c.hasAttribute("data-category")&&(r=!0));let u=c.querySelectorAll?.("iframe");u&&u.length>0&&u.forEach(p=>{zt(p,s),p.hasAttribute("data-category")&&(r=!0)})}})}),r&&e){let o=qn();o.length>0&&e(o)}});return t.observe(document.body,{childList:!0,subtree:!0}),t}function $n(){if(typeof 
/* $n(): 8-character script-element id -- prefers crypto.randomUUID, then crypto.getRandomValues over a 4-byte buffer, finally Math.random. Ct maps a script id to its DOM element id (a cached anonymized id when anonymization is on, otherwise the literal "c15t-script-<id>"). The Re map below tracks injected <script> elements by script id. */
crypto<"u"&&crypto.randomUUID)return crypto.randomUUID().replace(/-/g,"").substring(0,8);if(typeof crypto<"u"&&crypto.getRandomValues){let e=new Uint8Array(4);return crypto.getRandomValues(e),Array.from(e,t=>t.toString(36)).join("").padEnd(8,"0").substring(0,8)}return Math.random().toString(36).substring(2).padEnd(8,"0").substring(0,8)}function Ct(n,e,t){return e?(t[n]||(t[n]=$n()),t[n]):`c15t-script-${n}`}var Re=new Map;function Me(n){return Re.has(n)}function xt(n){return Re.get(n)}function It(n,e){Re.set(n,e)}function Pe(n){Re.delete(n)}function gs(){return Re}function ms(n,e){if(n.vendorId!==void 0){let t=String(n.vendorId);if(!e.vendorConsents[t])return!1}return!(n.iabPurposes&&n.iabPurposes.length>0&&!n.iabPurposes.every(i=>e.purposeConsents[i]===!0)||n.iabLegIntPurposes&&n.iabLegIntPurposes.length>0&&!n.iabLegIntPurposes.every(i=>e.purposeLegitimateInterests[i]===!0)||n.iabSpecialFeatures&&n.iabSpecialFeatures.length>0&&!n.iabSpecialFeatures.every(i=>e.specialFeatureOptIns[i]===!0))}function we(n,e,t){return t?.model==="iab"&&t.iabConsent&&(n.vendorId!==void 0||n.iabPurposes||n.iabLegIntPurposes||n.iabSpecialFeatures)?ms(n,t.iabConsent):tt(n.category,e)}function Kn(n,e,t={},i){let s=[];return n.forEach(r=>{if(!r.alwaysLoad&&!we(r,e,i))return;if(Me(r.id))return void r.onConsentChange?.({id:r.id,elementId:Ct(r.id,r.anonymizeId!==!1,t),hasConsent:we(r,e,i),consents:e});if(r.src&&r.textContent)throw new Error(`Script '${r.id}' cannot have both 'src' and 'textContent'. 
Choose one.`);if(!r.src&&!r.textContent&&!r.callbackOnly)throw new Error(`Script '${r.id}' must have either 'src', 'textContent', or 'callbackOnly' set to true.`);if(r.callbackOnly===!0){let l=r.anonymizeId!==!1,y=Ct(r.id,l,t),m={id:r.id,elementId:y,consents:e,hasConsent:we(r,e,i)};r.onBeforeLoad&&r.onBeforeLoad(m),r.onLoad&&r.onLoad(m),It(r.id,null),s.push(r.id);return}let o=r.anonymizeId!==!1,a=Ct(r.id,o,t);if(r.persistAfterConsentRevoked===!0){let l=document.getElementById(a);if(l){let y={id:r.id,hasConsent:we(r,e,i),elementId:a,consents:e,element:l};r.onConsentChange?.(y),r.onLoad?.(y),It(r.id,l),s.push(r.id);return}}let c=document.createElement("script");c.id=a,r.src?c.src=r.src:r.textContent&&(c.textContent=r.textContent),r.fetchPriority&&(c.fetchPriority=r.fetchPriority),r.async&&(c.async=!0),r.defer&&(c.defer=!0),r.nonce&&(c.nonce=r.nonce),r.attributes&&Object.entries(r.attributes).forEach(([l,y])=>{c.setAttribute(l,y)});let u={id:r.id,hasConsent:we(r,e,i),elementId:a,consents:e,element:c};r.onLoad&&(r.textContent?setTimeout(()=>{r.onLoad?.({...u})},0):c.addEventListener("load",()=>{r.onLoad?.({...u})})),r.onError&&(r.textContent||c.addEventListener("error",()=>{r.onError?.({...u,error:new Error(`Failed to load script: ${r.src}`)})})),r.onBeforeLoad&&r.onBeforeLoad(u);let p=r.target??"head",d=p==="body"?document.body:document.head;if(!d)throw new Error(`Document ${p} is not available for script injection`);d.appendChild(c),It(r.id,c),s.push(r.id)}),s}function fs(n,e,t={},i){let s=[];return n.forEach(r=>{if(Me(r.id)&&!r.alwaysLoad&&!we(r,e,i)){let o=xt(r.id);r.callbackOnly===!0||o===null?(Pe(r.id),s.push(r.id)):o&&(r.persistAfterConsentRevoked?(Pe(r.id),s.push(r.id)):(o.remove(),Pe(r.id),s.push(r.id)))}}),s}function hs(n,e,t={},i){let s=fs(n,e,t,i);return{loaded:Kn(n,e,t,i),unloaded:s}}function ks(n){return Me(n)}function vs(){return Array.from(gs().keys())}function ys(n,e,t,i={},s){let r=e.find(o=>o.id===n);if(!r)return!1;if(Me(n)){let 
/* Script lifecycle continued: ys (reloadScript) removes any existing element for the id, then re-runs the loader when the script still has consent; bs builds the store slice (updateScripts, setScripts, removeScript, reloadScript, isScriptLoaded, getLoadedScriptIds); Is wires the iframe blocker (MutationObserver via ps). Further down, Ss is the central consent-save flow and Ps/Ts handle IAB-mode initialization and TC-string restore. */
o=xt(n);r.callbackOnly===!0||o===null?Pe(n):o&&(r.persistAfterConsentRevoked||o.remove(),Pe(n))}return!r.alwaysLoad&&!we(r,t,s)?!1:(Kn([r],t,i,s),!0)}function bs(n,e){let t=()=>{let{scripts:i,consents:s,scriptIdMap:r,model:o,iab:a}=n(),c=a?.config.enabled?{vendorConsents:a.vendorConsents,vendorLegitimateInterests:a.vendorLegitimateInterests,purposeConsents:a.purposeConsents,purposeLegitimateInterests:a.purposeLegitimateInterests,specialFeatureOptIns:a.specialFeatureOptIns}:void 0,u=hs(i,s,r,{model:o,iabConsent:c}),p={...n().loadedScripts};return u.loaded.forEach(d=>{p[d]=!0}),u.unloaded.forEach(d=>{p[d]=!1}),e({loadedScripts:p}),u};return{updateScripts:()=>t(),setScripts:i=>{let s=n(),r={...s.scriptIdMap};i.forEach(u=>{u.anonymizeId!==!1&&(r[u.id]=$n())});let o=i.flatMap(u=>Hn(u.category)),a=new Set([...s.consentCategories,...o]),c=Array.from(a);e({scripts:[...s.scripts,...i],scriptIdMap:r,consentCategories:c}),t()},removeScript:i=>{let s=n();if(Me(i)){let o=xt(i);o&&(o.remove(),Pe(i))}let r={...s.scriptIdMap};delete r[i],e({scripts:s.scripts.filter(o=>o.id!==i),loadedScripts:{...s.loadedScripts,[i]:!1},scriptIdMap:r})},reloadScript:i=>{let s=n();return ys(i,s.scripts,s.consents,s.scriptIdMap)},isScriptLoaded:i=>ks(i),getLoadedScriptIds:()=>vs()}}var ws=U("./src/version.ts"),Cs=U("./src/libs/iab-tcf/store.ts");function Is(n,e){let t=null,i=!1;return{initializeIframeBlocker:()=>{if(i||typeof document>"u")return;let s=n();if(s.iframeBlockerConfig?.disableAutomaticBlocking)return;let r=()=>{let o=qn();o.length>0&&n().updateConsentCategories(o)};document.readyState==="loading"?document.addEventListener("DOMContentLoaded",r):r(),setTimeout(r,100),En(s.consents),t=ps(()=>n().consents,o=>n().updateConsentCategories(o)),i=!0},updateIframeConsents:()=>{if(!i||typeof document>"u")return;let s=n(),{consents:r,iframeBlockerConfig:o}=s;o?.disableAutomaticBlocking||En(r)},destroyIframeBlocker:()=>{if(!i||typeof document>"u")return;let 
s=n(),{iframeBlockerConfig:r}=s;r?.disableAutomaticBlocking||(t&&(t.disconnect(),t=null),i=!1)}}}var Pt="c15t:pending-consent-sync";function js(n,e,t,i,s){if(!i||t===null)return!1;let r=new Set(s.filter(a=>a.disabled).map(a=>a.name));return Object.entries(e).some(([a,c])=>!r.has(a)&&n[a]===!0&&c===!1)}async function Ss({manager:n,type:e,get:t,set:i,options:s}){let{callbacks:r,selectedConsents:o,consents:a,consentTypes:c,updateScripts:u,updateIframeConsents:p,updateNetworkBlockerConsents:d,consentCategories:l,locationInfo:y,model:m,consentInfo:b,reloadOnConsentRevoked:h}=t(),w={...a},C=b,A={...o??a??{}},x=Date.now();if(e==="all")for(let f of c)l.includes(f.name)&&(A[f.name]=!0);else if(e==="necessary")for(let f of c)A[f.name]=f.disabled===!0?f.defaultValue:!1;let D=b?.subjectId;D||(D=(0,Vt.L)());let F=t().consentInfo?.externalId||t().user?.id,O=t().consentInfo?.identityProvider||t().user?.identityProvider,X=js(w,A,C,h,c);if(i({consents:A,selectedConsents:A,activeUI:"none",consentInfo:{time:x,subjectId:D,externalId:F,identityProvider:O}}),X){let f={type:e,subjectId:D,externalId:F,identityProvider:O,preferences:A,givenAt:x,jurisdiction:y?.jurisdiction??void 0,jurisdictionModel:m,domain:window.location.hostname,uiSource:s?.uiSource??"api"};try{localStorage.setItem(Pt,JSON.stringify(f))}catch{}r.onConsentSet?.({preferences:A}),r.onBeforeConsentRevocationReload?.({preferences:A}),window.location.reload();return}await new Promise(f=>setTimeout(f,0)),p(),u(),d(),r.onConsentSet?.({preferences:A});let g=await n.setConsent({body:{type:"cookie_banner",domain:window.location.hostname,preferences:A,subjectId:D,externalSubjectId:String(F),identityProvider:O,jurisdiction:y?.jurisdiction??void 0,jurisdictionModel:m??void 0,givenAt:x,uiSource:s?.uiSource??"api",consentAction:e}});if(!g.ok){let f=g.error?.message??"Failed to save consents";r.onError?.({error:f}),r.onError||console.error(f)}}function As(n,e){return 
n==null||n==="NONE"?null:e&&["UK_GDPR","GDPR"].includes(n)?"iab":["UK_GDPR","GDPR","CH","BR","APPI","PIPA","QC_LAW25"].includes(n)?"opt-in":["CCPA","AU","PIPEDA"].includes(n)?"opt-out":"opt-in"}function zs(){if(typeof window>"u")return!1;try{let e=window.navigator.globalPrivacyControl;return e===!0||e==="1"}catch{return!1}}var Vn=U("./src/libs/iab-tcf/cmp-defaults.ts");function be({get:n,set:e},t){let{iab:i}=n();i&&e({iab:{...i,...t}})}async function Ps(n,e,t){let{get:i}=e;if(t!==null){be(e,{isLoadingGVL:!0,nonIABVendors:n.customVendors??[]});try{let{initializeIABStub:s,fetchGVL:r,createCMPApi:o}=await Promise.resolve().then(U.bind(U,"./src/libs/iab-tcf/index.ts"));s();let a;if(t)a=t;else if(a=await r(),a===null)return void be(e,{isLoadingGVL:!1});be(e,{gvl:a,isLoadingGVL:!1});let c={},u={};for(let[h,w]of Object.entries(a.vendors)){let C=String(h);w.purposes&&w.purposes.length>0&&(c[C]=!1),w.legIntPurposes&&w.legIntPurposes.length>0&&(u[C]=!0)}(n.customVendors??[]).forEach(h=>{let w=String(h.id);h.purposes&&h.purposes.length>0&&(c[w]=!1),h.legIntPurposes&&h.legIntPurposes.length>0&&(u[w]=!0)});let d=(0,Y.If)(i().storageConfig);d?.iabCustomVendorConsents&&Object.assign(c,d.iabCustomVendorConsents),d?.iabCustomVendorLegitimateInterests&&Object.assign(u,d.iabCustomVendorLegitimateInterests),be(e,{vendorConsents:c,vendorLegitimateInterests:u});let l=n.cmpId??Vn.D,y=n.cmpVersion??Vn.I;if(l===0)throw new Error("[c15t] IAB TCF Error: CMP ID is 0. 
A valid CMP ID registered with IAB Europe is required for IAB TCF compliance.\nIf using consent.io, the CMP ID should be provided automatically via /init.\nIf self-hosting, configure it on the backend via `advanced.iab.cmpId` or on the client via `iab.cmpId`.\nTo register your own CMP: https://iabeurope.eu/tcf-for-cmps/");let m=o({cmpId:l,cmpVersion:y,gvl:a,gdprApplies:!0});be(e,{cmpApi:m});let b=m.loadFromStorage();b&&await Ts(b,e),i().updateScripts()}catch(s){console.error("Failed to initialize IAB mode:",s),be(e,{isLoadingGVL:!1})}}}async function Ts(n,e){let{set:t}=e;try{let{decodeTCString:i,iabPurposesToC15tConsents:s}=await Promise.resolve().then(U.bind(U,"./src/libs/iab-tcf/index.ts")),r=await i(n),o=(0,Y.If)(e.get().storageConfig),a={...r.vendorConsents,...o?.iabCustomVendorConsents??{}},c={...r.vendorLegitimateInterests,...o?.iabCustomVendorLegitimateInterests??{}},u=s(r.purposeConsents);be(e,{tcString:n,purposeConsents:r.purposeConsents,purposeLegitimateInterests:r.purposeLegitimateInterests,vendorConsents:a,vendorLegitimateInterests:c,specialFeatureOptIns:r.specialFeatureOptIns}),t({consents:u,selectedConsents:u,activeUI:"none"})}catch{}}function Ls(n,e){return n?{necessary:!0,functionality:!0,experience:!0,marketing:!e,measurement:!e}:null}function Yn(n,e,t,i){let s=As(n,e),r=i!==void 0?i:zs(),a=Ls((s===null||s==="opt-out")&&t===null,r);return{consentModel:s,autoGrantedConsents:a}}function Es(n,e,t,i){let{get:s,initialTranslationConfig:r}=e,{consentInfo:o}=s(),{translations:a,location:c}=n,{consentModel:u,autoGrantedConsents:p}=Yn(n.jurisdiction??null,i,o,e.get().overrides?.gpc),d={model:u,isLoadingConsentInfo:!1,branding:n.branding??"c15t",hasFetchedBanner:!0,lastBannerFetchData:n,locationInfo:{countryCode:c?.countryCode??null,regionCode:c?.regionCode??null,jurisdiction:n.jurisdiction??null}};return 
o===null&&(d.activeUI=u?"banner":"none"),p&&(d.consents=p,d.selectedConsents=p),a?.language&&a?.translations&&(d.translationConfig=(0,le.prepareTranslationConfig)({translations:{[a.language]:a.translations},disableAutoLanguageSwitch:!0,defaultLanguage:a.language},r)),d}function Vs(n,e,t){let{get:i}=e,{callbacks:s}=i(),{translations:r}=n;t&&s?.onConsentSet?.({preferences:t}),r?.language&&r?.translations&&s?.onBannerFetched?.({jurisdiction:n.jurisdiction,location:n.location,translations:{language:r.language,translations:r.translations}})}function Wn(n,e,t,i){let{set:s,get:r}=e,{consentInfo:o,iab:a}=r(),c=a?.config.enabled&&!i,u=a?.config.enabled&&!c;c&&console.warn("IAB mode disabled: Server returned 200 without GVL. Client IAB settings overridden.");let{consentModel:p,autoGrantedConsents:d}=Yn(n.jurisdiction??null,u,o,r().overrides?.gpc),l=Es(n,e,t,u);if(c&&a?l.iab={...a,config:{...a.config,enabled:!1}}:a&&n.cmpId!=null&&(l.iab={...a,config:{...a.config,cmpId:n.cmpId}}),s(l),Vs(n,e,d),r().updateScripts(),u&&p==="iab"&&a){let y=n.customVendors??[],m=a.config.customVendors??[],b=new Set(y.map(C=>C.id)),h=[...y,...m.filter(C=>!b.has(C.id))],w={...a.config,customVendors:h,...n.cmpId!=null&&{cmpId:n.cmpId}};Ps(w,{set:s,get:r},i).catch(C=>{console.error("Failed to initialize IAB mode in updateStore:",C)})}}function xs(n){try{if(window.localStorage)return window.localStorage.setItem("c15t-storage-test-key","test"),window.localStorage.removeItem("c15t-storage-test-key"),!0}catch(e){console.warn("localStorage not available, skipping consent banner:",e),n({isLoadingConsentInfo:!1,activeUI:"none"})}return!1}async function xn(n){let{get:e,set:t,manager:i}=n,{callbacks:s}=e();if(typeof window>"u")return;let r=xs(t);if(!r)return;t({isLoadingConsentInfo:!0}),Ds(i,s);let o=await Fs(n);return o||Bs(n,r,i,s)}async function Fs(n){let{ssrData:e,get:t,set:i}=n;if(!e||t().overrides)return void i({ssrDataUsed:!1,ssrSkippedReason:"no_data"});let s=await e;if(s?.init)return 
Wn(s.init,n,!0,s.gvl),i({ssrDataUsed:!0,ssrSkippedReason:null}),s.init;i({ssrDataUsed:!1,ssrSkippedReason:"fetch_failed"})}async function Bs(n,e,t,i){let{set:s}=n;try{let{language:r,country:o,region:a}=n.get().overrides??{},{data:c,error:u}=await t.init({headers:{...r&&{"accept-language":r},...o&&{"x-c15t-country":o},...a&&{"x-c15t-region":a}},onError:i.onError?p=>{i.onError?.({error:p.error?.message||"Unknown error"})}:void 0});if(u||!c)throw new Error(`Failed to fetch consent banner info: ${u?.message}`);return Wn(c,n,e,c.gvl??void 0),c}catch(r){console.error("Error fetching consent banner information:",r),s({isLoadingConsentInfo:!1,activeUI:"none"});let o=r instanceof Error?r.message:"Unknown error fetching consent banner information";i.onError?.({error:o});return}}function Ds(n,e){try{let t=localStorage.getItem(Pt);if(!t)return;localStorage.removeItem(Pt);let i=JSON.parse(t);n.setConsent({body:{type:"cookie_banner",domain:i.domain,preferences:i.preferences,subjectId:i.subjectId,externalSubjectId:i.externalId,identityProvider:i.identityProvider,jurisdiction:i.jurisdiction,jurisdictionModel:i.jurisdictionModel??void 0,givenAt:i.givenAt,uiSource:i.uiSource??"api"}}).then(s=>{if(!s.ok){let r=s.error?.message??"Failed to sync consent after reload";e.onError?.({error:r}),e.onError||console.error("Failed to sync consent after reload:",r)}}).catch(s=>{let r=s instanceof Error?s.message:"Failed to sync consent after reload";e.onError?.({error:r}),e.onError||console.error("Failed to sync consent after reload:",s)})}catch{}}function Tt(n){return n?n.toUpperCase():"GET"}function Ns(n){if(!n)return null;try{return typeof window>"u"?null:new URL(n,window.location.href)}catch{return null}}function _s(n,e){if(!n)return!1;let t=e.domain.trim().toLowerCase(),i=n.trim().toLowerCase();if(!t||!i)return!1;if(i===t)return!0;let s=`.${t}`;return i.endsWith(s)}function Os(n,e){return typeof e.pathIncludes=="string"?n?n.includes(e.pathIncludes):!1:!0}function 
Rs(n,e){if(!e.methods||e.methods.length===0)return!0;if(!n)return!1;let t=Tt(n);return e.methods.some(i=>Tt(i)===t)}function Ms(n,e,t){return!(!_s(n.hostname,t)||!Os(n.pathname,t)||!Rs(e,t))}function Fn(n,e,t){if(!t)return{shouldBlock:!1};if(!(t.enabled!==!1))return{shouldBlock:!1};if(!t.rules||t.rules.length===0)return{shouldBlock:!1};let s=Ns(n.url);if(!s)return{shouldBlock:!1};let r=Tt(n.method);for(let o of t.rules){if(!Ms(s,r,o))continue;if(!tt(o.category,e))return{shouldBlock:!0,rule:o}}return{shouldBlock:!1}}function Us(n,e){let t=null,i=null,s=null,r=!1,o=null,a=(d,l)=>{if(d){if(d.logBlockedRequests!==!1){let y=l.rule?.id??"unknown";console.warn("[c15t] Network request blocked by consent manager",{method:l.method,url:l.url,ruleId:y})}d.onRequestBlocked&&d.onRequestBlocked(l)}},c=()=>o||n().consents,u=()=>{typeof window>"u"||!(typeof window.fetch=="function")||t||(t=window.fetch,window.fetch=(l,y)=>{let b=n().networkBlocker;if(!t)throw new Error("Network blocker fetch wrapper not initialized.");if(!(b?.enabled&&b?.rules&&b?.rules.length>0))return t.call(window,l,y);let w="GET";y?.method?w=y.method:l instanceof Request&&(w=l.method);let C;C=typeof l=="string"||l instanceof URL?l.toString():l.url;let A=c(),{shouldBlock:x,rule:D}=Fn({url:C,method:w},A,b);if(x){a(b,{method:w,url:C,rule:D});let F=new Response(null,{status:451,statusText:"Request blocked by consent manager"});return Promise.resolve(F)}return t.call(window,l,y)})},p=()=>{typeof window>"u"||!(window.XMLHttpRequest!==void 0&&typeof window.XMLHttpRequest.prototype.open=="function"&&typeof window.XMLHttpRequest.prototype.send=="function")||i||s||(i=window.XMLHttpRequest.prototype.open,s=window.XMLHttpRequest.prototype.send,window.XMLHttpRequest.prototype.open=function(l,y,m,b,h){let w=this;if(w.__c15tMethod=l,w.__c15tUrl=y,!i)throw new Error("Network blocker XHR open wrapper not initialized.");return i.call(this,l,y,m??!0,b,h)},window.XMLHttpRequest.prototype.send=function(l){let 
m=n().networkBlocker;if(m?.enabled!==!1&&m?.rules&&m?.rules.length>0){let w=this,C=w.__c15tMethod||"GET",A=w.__c15tUrl||"",x=c(),{shouldBlock:D,rule:F}=Fn({url:A,method:C},x,m);if(D){a(m,{method:C,url:A,rule:F});try{this.abort()}catch{}let O=new ProgressEvent("error");typeof this.onerror=="function"&&this.onerror(O),this.dispatchEvent(O);return}}if(!s)throw new Error("Network blocker XHR send wrapper not initialized.");return s.call(this,l)})};return{initializeNetworkBlocker:()=>{if(r||typeof window>"u")return;let d=n(),l=d.networkBlocker;l?.enabled&&l?.rules&&l?.rules.length>0&&(o=d.consents,u(),p(),r=!0)},updateNetworkBlockerConsents:()=>{r&&(o=n().consents)},setNetworkBlocker:d=>{let y=d?.enabled!==!1&&d?.rules&&d?.rules.length>0;if(e({networkBlocker:d}),!y){if(!r||typeof window>"u")return;t&&(window.fetch=t,t=null),i&&s&&(window.XMLHttpRequest.prototype.open=i,window.XMLHttpRequest.prototype.send=s,i=null,s=null),o=null,r=!1;return}r||(o=n().consents,u(),p(),r=!0)},destroyNetworkBlocker:()=>{r&&(typeof window>"u"||(t&&(window.fetch=t,t=null),i&&s&&(window.XMLHttpRequest.prototype.open=i,window.XMLHttpRequest.prototype.send=s,i=null,s=null),o=null,r=!1))}}}var Gs=U("./src/store/initial-state.ts"),Bn=n=>{if(typeof window>"u")return null;try{return(0,Y.If)(n)}catch(e){return console.error("Failed to retrieve stored consent:",e),null}},Hs=(n,e={})=>{let{namespace:t="c15tStore",iab:i,ssrData:s,initialConsentCategories:r,initialTranslationConfig:o,enabled:a,debug:c,...u}=e;(0,K.tJ)(e.debug===!0);let p=Bn(e.storageConfig),d=qt((l,y)=>({...Gs.ue,...u,namespace:t,iab:i?(0,Cs.yx)(i,y,l,n):null,...r&&{consentCategories:r},...p?{consents:p.consents,selectedConsents:p.consents,consentInfo:p.consentInfo,user:p.consentInfo?.externalId?{id:p.consentInfo.externalId,identityProvider:p.consentInfo.identityProvider}:void 0,activeUI:"none",isLoadingConsentInfo:!1}:{activeUI:"none",isLoadingConsentInfo:!0},setActiveUI:(m,b={})=>{if(m==="none"||m==="dialog")return void 
l({activeUI:m});if(b.force)return void l({activeUI:"banner"});let h=y();!Bn()&&!h.consentInfo&&!h.isLoadingConsentInfo&&l({activeUI:"banner"})},setSelectedConsent:(m,b)=>{l(h=>h.consentTypes.find(C=>C.name===m)?.disabled?h:{selectedConsents:{...h.selectedConsents,[m]:b}})},saveConsents:async(m,b)=>await Ss({manager:n,type:m,get:y,set:l,options:b}),setConsent:(m,b)=>{l(h=>h.consentTypes.find(A=>A.name===m)?.disabled?h:{selectedConsents:{...h.consents,[m]:b}}),y().saveConsents("custom")},resetConsents:()=>{l(()=>{let m=Te.y.reduce((h,w)=>(h[w.name]=w.defaultValue,h),{}),b={consents:m,selectedConsents:m,consentInfo:null};return(0,Y.jD)(void 0,e.storageConfig),b})},setConsentCategories:m=>l({consentCategories:m}),setCallback:(m,b)=>{let h=y();if(l(w=>({callbacks:{...w.callbacks,[m]:b}})),m==="onConsentSet"&&b&&typeof b=="function"&&b?.({preferences:h.consents}),m==="onBannerFetched"&&h.hasFetchedBanner&&h.lastBannerFetchData&&b&&typeof b=="function"){let{lastBannerFetchData:w}=h,C=w.jurisdiction??"NONE";b?.({jurisdiction:{code:C,message:""},location:{countryCode:w.location.countryCode??null,regionCode:w.location.regionCode??null},translations:{language:w.translations.language,translations:w.translations.translations}})}},setLocationInfo:m=>l({locationInfo:m}),initConsentManager:()=>xn({manager:n,ssrData:e.ssrData,initialTranslationConfig:e.initialTranslationConfig,get:y,set:l}),getDisplayedConsents:()=>{let{consentCategories:m,consentTypes:b}=y();return b.filter(h=>m.includes(h.name))},hasConsented:()=>{let{consentInfo:m}=y();return m!=null},has:m=>{let{consents:b}=y();return tt(m,b)},setTranslationConfig:m=>{l({translationConfig:m})},updateConsentCategories:m=>{let b=new Set([...y().consentCategories,...m]),h=Array.from(b);l({consentCategories:h})},identifyUser:async m=>{let b=y().consentInfo,h=b?.subjectId;l({user:m}),h&&(String(b?.externalId)===String(m.id)&&b?.identityProvider===m.identityProvider||(await 
n.identifyUser({body:{id:h,externalId:m.id,identityProvider:m.identityProvider}}),l({consentInfo:{...b,time:b?.time||Date.now(),subjectId:h,externalId:m.id,identityProvider:m.identityProvider}})))},setOverrides:async m=>(l({overrides:{...y().overrides,...m}}),await xn({manager:n,initialTranslationConfig:e.initialTranslationConfig,get:y,set:l})),setLanguage:async m=>await y().setOverrides({...y().overrides??{},language:m}),...bs(y,l),...Is(y,l),...Us(y,l)}));return d.getState().initializeIframeBlocker(),e.networkBlocker&&(d.setState({networkBlocker:e.networkBlocker}),d.getState().initializeNetworkBlocker()),e.scripts&&e.scripts.length>0&&d.getState().updateConsentCategories(e.scripts.flatMap(l=>Hn(l.category))),typeof window<"u"&&(window[t]=d,d.getState().callbacks.onConsentSet?.({preferences:d.getState().consents}),e.user&&d.getState().identifyUser(e.user),d.getState().initConsentManager()),d},qs="/api/c15t",Dn=new Map,Nn=new Map;function $s(n){let e=n.enabled===!1?"disabled":"enabled";return`${n.mode??"c15t"}:${n.backendURL??"default"}:${n.endpointHandlers?"custom":"none"}:${n.storageConfig?.storageKey??"default"}:${n.defaultLanguage??"default"}:${e}`}function Jn(n,e){let{mode:t,backendURL:i,store:s,translations:r,storageConfig:o,enabled:a,iab:c,consentCategories:u,debug:p}=n,d=$s({mode:t,backendURL:i,endpointHandlers:"endpointHandlers"in n?n.endpointHandlers:void 0,storageConfig:o,defaultLanguage:r?.defaultLanguage,enabled:a}),l=Dn.get(d);if(!l){let m={...s,initialTranslationConfig:r,iab:c};l=t==="offline"?wt({mode:"offline",store:m,storageConfig:o}):t==="custom"&&"endpointHandlers"in n?wt({mode:"custom",endpointHandlers:n.endpointHandlers,store:m,storageConfig:o}):wt({mode:"c15t",backendURL:i||qs,store:m,storageConfig:o}),Dn.set(d,l)}let y=Nn.get(d);return 
y||(y=Hs(l,{config:{pkg:e?.pkg||"c15t",version:e?.version||ws.r,mode:t||"Unknown"},...n,...s,initialTranslationConfig:r,initialConsentCategories:u,debug:p}),Nn.set(d,y)),{consentManager:l,consentStore:y,cacheKey:d}}var Ha=Te.W,qa=nt.xe,$a=Te.y,Ka=le.deepMergeTranslations,Ya=We.Z,Wa=Y.jD,Ja=Y.Yj,Za=le.detectBrowserLanguage,Qa=nt.fetchGVL,Xa=Vt.L,ec=nt.Ww,tc=Y.If,nc=Y.Ri,ic=Y.Xk,sc=Vt.U,rc=le.mergeTranslationConfigs,oc=le.prepareTranslationConfig,ac=Y._y,cc=Y.TV,lc=nt.wL;window.c15t={getOrCreateConsentRuntime:Jn};})(); diff --git a/docs/theme/consent-banner.css b/docs/theme/consent-banner.css new file mode 100644 index 0000000000000000000000000000000000000000..bdebbed80a997ca57be2516cdd0472fd4f52cae9 --- /dev/null +++ b/docs/theme/consent-banner.css @@ -0,0 +1,292 @@ +#c15t-banner { + --color-offgray-50: hsl(218, 12%, 95%); + --color-offgray-100: hsl(218, 12%, 88%); + --color-offgray-200: hsl(218, 12%, 80%); + --color-offgray-300: hsl(218, 12%, 75%); + --color-offgray-400: hsl(218, 12%, 64%); + --color-offgray-500: hsl(218, 12%, 56%); + --color-offgray-600: hsl(218, 12%, 48%); + --color-offgray-700: hsl(218, 12%, 40%); + --color-offgray-800: hsl(218, 12%, 34%); + --color-offgray-900: hsl(218, 12%, 24%); + --color-offgray-950: hsl(218, 12%, 15%); + --color-offgray-1000: hsl(218, 12%, 5%); + + --color-blue-50: oklch(97% 0.014 254.604); + --color-blue-100: oklch(93.2% 0.032 255.585); + --color-blue-200: oklch(88.2% 0.059 254.128); + --color-blue-300: oklch(80.9% 0.105 251.813); + --color-blue-400: oklch(70.7% 0.165 254.624); + --color-blue-500: oklch(62.3% 0.214 259.815); + --color-blue-600: oklch(54.6% 0.245 262.881); + --color-blue-700: oklch(48.8% 0.243 264.376); + --color-blue-800: oklch(42.4% 0.199 265.638); + --color-blue-900: oklch(37.9% 0.146 265.522); + --color-blue-950: oklch(28.2% 0.091 267.935); + + --color-accent-blue: hsla(218, 93%, 42%, 1); + + position: fixed; + z-index: 9999; + bottom: 16px; + right: 16px; + border-radius: 4px; + max-width: 300px; + 
background: white; + border: 1px solid + color-mix(in oklab, var(--color-offgray-200) 50%, transparent); + box-shadow: 6px 6px 0 + color-mix(in oklab, var(--color-accent-blue) 6%, transparent); +} + +.dark #c15t-banner { + border-color: color-mix(in oklab, var(--color-offgray-600) 14%, transparent); + background: var(--color-offgray-1000); + box-shadow: 5px 5px 0 + color-mix(in oklab, var(--color-accent-blue) 8%, transparent); +} + +#c15t-banner > div:first-child { + padding: 12px; + display: flex; + flex-direction: column; +} + +#c15t-banner a { + color: var(--links); + text-decoration: underline; + text-decoration-color: var(--link-line-decoration); +} + +#c15t-banner a:hover { + text-decoration-color: var(--link-line-decoration-hover); +} + +#c15t-description { + font-size: 12px; + margin: 0; + margin-top: 4px; +} + +#c15t-configure-section { + display: flex; + flex-direction: column; + gap: 8px; + border-top: 1px solid var(--divider); + padding: 12px; +} + +#c15t-configure-section > div { + display: flex; + align-items: center; + justify-content: space-between; +} + +#c15t-configure-section label { + text-transform: uppercase; + font-size: 11px; +} + +#c15t-footer { + padding: 12px; + display: flex; + justify-content: space-between; + border-top: 1px solid var(--divider); + background-color: color-mix( + in oklab, + var(--color-offgray-50) 50%, + transparent + ); +} + +.dark #c15t-footer { + background-color: color-mix( + in oklab, + var(--color-offgray-600) 4%, + transparent + ); +} + +.c15t-button { + display: inline-flex; + align-items: center; + justify-content: center; + max-height: 28px; + color: black; + padding: 4px 8px; + font-size: 14px; + border-radius: 4px; + background: transparent; + border: 1px solid transparent; + transition: 100ms; + transition-property: box-shadow, border-color, background-color; +} + +.c15t-button:hover { + background: color-mix(in oklab, var(--color-offgray-100) 50%, transparent); +} + +.dark .c15t-button { + color: 
var(--color-offgray-50); +} + +.dark .c15t-button:hover { + background: color-mix(in oklab, var(--color-offgray-500) 10%, transparent); +} + +.c15t-button.icon { + padding: 0; + width: 24px; + height: 24px; +} + +.c15t-button.primary { + color: var(--color-blue-700); + background: color-mix(in oklab, var(--color-blue-50) 60%, transparent); + border-color: color-mix(in oklab, var(--color-blue-500) 20%, transparent); + box-shadow: color-mix(in oklab, var(--color-blue-400) 10%, transparent) 0 -2px + 0 0 inset; +} + +.c15t-button.primary:hover { + background: color-mix(in oklab, var(--color-blue-100) 50%, transparent); + box-shadow: none; +} + +.dark .c15t-button.primary { + color: var(--color-blue-50); + background: color-mix(in oklab, var(--color-blue-500) 10%, transparent); + border-color: color-mix(in oklab, var(--color-blue-300) 10%, transparent); + box-shadow: color-mix(in oklab, var(--color-blue-300) 8%, transparent) 0 -2px + 0 0 inset; +} + +.dark .c15t-button.primary:hover { + background: color-mix(in oklab, var(--color-blue-500) 20%, transparent); + box-shadow: none; +} + +.c15t-button.secondary { + background: color-mix(in oklab, var(--color-offgray-50) 60%, transparent); + border-color: color-mix(in oklab, var(--color-offgray-200) 50%, transparent); + box-shadow: color-mix(in oklab, var(--color-offgray-500) 10%, transparent) + 0 -2px 0 0 inset; +} + +.c15t-button.secondary:hover { + background: color-mix(in oklab, var(--color-offgray-100) 50%, transparent); + box-shadow: none; +} + +.dark .c15t-button.secondary { + background: color-mix(in oklab, var(--color-offgray-300) 5%, transparent); + border-color: color-mix(in oklab, var(--color-offgray-400) 20%, transparent); + box-shadow: color-mix(in oklab, var(--color-offgray-300) 8%, transparent) + 0 -2px 0 0 inset; +} + +.dark .c15t-button.secondary:hover { + background: color-mix(in oklab, var(--color-offgray-200) 10%, transparent); + box-shadow: none; +} + +.c15t-switch { + position: relative; + display: 
inline-block; + width: 32px; + height: 20px; + flex-shrink: 0; +} + +.c15t-switch input { + opacity: 0; + width: 0; + height: 0; + position: absolute; +} + +.c15t-slider { + position: absolute; + cursor: pointer; + inset: 0; + background-color: color-mix( + in oklab, + var(--color-offgray-100) 80%, + transparent + ); + border-radius: 20px; + box-shadow: inset 0 0 0 1px color-mix(in oklab, #000 5%, transparent); + transition: background-color 0.2s; +} + +.c15t-slider:hover { + background-color: var(--color-offgray-100); +} + +.dark .c15t-slider { + background-color: color-mix(in oklab, #fff 5%, transparent); + box-shadow: inset 0 0 0 1px color-mix(in oklab, #fff 15%, transparent); +} + +.dark .c15t-slider:hover { + background-color: color-mix(in oklab, #fff 10%, transparent); +} + +.c15t-slider:before { + position: absolute; + content: ""; + height: 14px; + width: 14px; + left: 3px; + bottom: 3px; + background-color: white; + border-radius: 50%; + box-shadow: + 0 1px 3px 0 rgb(0 0 0 / 0.1), + 0 1px 2px -1px rgb(0 0 0 / 0.1); + transition: transform 0.2s; +} + +.c15t-switch input:checked + .c15t-slider { + background-color: var(--color-accent-blue); + box-shadow: inset 0 0 0 1px color-mix(in oklab, #000 5%, transparent); +} + +.c15t-switch input:checked + .c15t-slider:hover { + background-color: var(--color-accent-blue); +} + +.dark .c15t-switch input:checked + .c15t-slider { + background-color: var(--color-accent-blue); + box-shadow: inset 0 0 0 1px color-mix(in oklab, #fff 15%, transparent); +} + +.c15t-switch input:checked + .c15t-slider:before { + transform: translateX(12px); +} + +.c15t-switch input:disabled + .c15t-slider { + opacity: 0.5; + cursor: default; + pointer-events: none; +} + +.c15t-switch input:disabled + .c15t-slider:hover { + background-color: color-mix( + in oklab, + var(--color-offgray-100) 80%, + transparent + ); +} + +#c15t-manage-consent-btn { + appearance: none; + background: none; + border: none; + padding: 0; + cursor: pointer; +} + 
+#c15t-manage-consent-btn:hover { + text-decoration-color: var(--link-line-decoration-hover); +} diff --git a/docs/theme/index.hbs b/docs/theme/index.hbs index 8e6d185a57874a84bd373115e2f4b988a6c0b864..1c833ee94d428a1578b35c7944c4d300a04a21db 100644 --- a/docs/theme/index.hbs +++ b/docs/theme/index.hbs @@ -70,6 +70,8 @@ {{/if}} + +
@@ -343,6 +345,13 @@ href="https://zed.dev/blog" >Blog + +
@@ -444,23 +453,82 @@ {{/if}} {{/if}} - - + +
diff --git a/typos.toml b/typos.toml index 6f76cc75d25add39d841c07bbde82f93514adac5..c4e326359dec6e2a47861df1aab7b66f0644d7a3 100644 --- a/typos.toml +++ b/typos.toml @@ -42,6 +42,8 @@ extend-exclude = [ "crates/gpui_windows/src/window.rs", # Some typos in the base mdBook CSS. "docs/theme/css/", + # Automatically generated JS. + "docs/theme/c15t@*.js", # Spellcheck triggers on `|Fixe[sd]|` regex part. "script/danger/dangerfile.ts", # Eval examples for prompts and criteria From a1d40370cfbcc086a89350cc798125d055773947 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Wed, 4 Mar 2026 15:45:33 +0100 Subject: [PATCH 302/548] cloud_api_client: Send the organization ID in LLM token requests (#50517) This is already expected on the cloud side. This lets us know under which organization the user is logged in when requesting an llm_api token. Closes CLO-337 Release Notes: - N/A --- Cargo.lock | 1 + .../cloud_api_client/src/cloud_api_client.rs | 10 +- crates/cloud_api_types/src/cloud_api_types.rs | 6 + crates/edit_prediction/src/edit_prediction.rs | 67 ++++++++--- crates/edit_prediction/src/zeta.rs | 13 +++ crates/http_client/src/async_body.rs | 14 +++ crates/http_client/src/http_client.rs | 2 +- .../language_model/src/model/cloud_model.rs | 28 ++++- crates/language_models/src/provider/cloud.rs | 104 ++++++++++++++---- crates/web_search_providers/Cargo.toml | 1 + crates/web_search_providers/src/cloud.rs | 36 ++++-- .../src/web_search_providers.rs | 22 +++- crates/zed/src/main.rs | 2 +- crates/zed/src/zed.rs | 2 +- 14 files changed, 247 insertions(+), 61 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4e4d86b947be1f68d03b225d4a62747659c99bf8..b1ff28fcf52e118830e2100d35a6cdbca6f6f013 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19851,6 +19851,7 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "cloud_api_types", "cloud_llm_client", "futures 0.3.31", "gpui", diff --git 
a/crates/cloud_api_client/src/cloud_api_client.rs b/crates/cloud_api_client/src/cloud_api_client.rs index f485e2d20c619715ea342fccd2a5cec0ecaa6f4e..13d67838b216f4990f15ec22c1701aa7aef9dbf2 100644 --- a/crates/cloud_api_client/src/cloud_api_client.rs +++ b/crates/cloud_api_client/src/cloud_api_client.rs @@ -9,7 +9,9 @@ use futures::AsyncReadExt as _; use gpui::{App, Task}; use gpui_tokio::Tokio; use http_client::http::request; -use http_client::{AsyncBody, HttpClientWithUrl, HttpRequestExt, Method, Request, StatusCode}; +use http_client::{ + AsyncBody, HttpClientWithUrl, HttpRequestExt, Json, Method, Request, StatusCode, +}; use parking_lot::RwLock; use thiserror::Error; use yawc::WebSocket; @@ -141,6 +143,7 @@ impl CloudApiClient { pub async fn create_llm_token( &self, system_id: Option, + organization_id: Option, ) -> Result { let request_builder = Request::builder() .method(Method::POST) @@ -153,7 +156,10 @@ impl CloudApiClient { builder.header(ZED_SYSTEM_ID_HEADER_NAME, system_id) }); - let request = self.build_request(request_builder, AsyncBody::default())?; + let request = self.build_request( + request_builder, + Json(CreateLlmTokenBody { organization_id }), + )?; let mut response = self.http_client.send(request).await?; diff --git a/crates/cloud_api_types/src/cloud_api_types.rs b/crates/cloud_api_types/src/cloud_api_types.rs index 2d457fc6630d5b32f049e67a6a460047e925973a..42d3442bfc016f5cb1a39ba421ccdfe386bcbc65 100644 --- a/crates/cloud_api_types/src/cloud_api_types.rs +++ b/crates/cloud_api_types/src/cloud_api_types.rs @@ -52,6 +52,12 @@ pub struct AcceptTermsOfServiceResponse { #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct LlmToken(pub String); +#[derive(Debug, Default, PartialEq, Clone, Serialize, Deserialize)] +pub struct CreateLlmTokenBody { + #[serde(default)] + pub organization_id: Option, +} + #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] pub struct CreateLlmTokenResponse { pub token: LlmToken, diff --git 
a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 33c3ea1e56648c73682e06f685f91f54344200d6..6b2019aa30030b0852f74bc851e2012feac4f0e2 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -1,7 +1,7 @@ use anyhow::Result; use arrayvec::ArrayVec; use client::{Client, EditPredictionUsage, UserStore}; -use cloud_api_types::SubmitEditPredictionFeedbackBody; +use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody}; use cloud_llm_client::predict_edits_v3::{ PredictEditsV3Request, PredictEditsV3Response, RawCompletionRequest, RawCompletionResponse, }; @@ -143,7 +143,7 @@ pub struct EditPredictionStore { pub sweep_ai: SweepAi, pub mercury: Mercury, data_collection_choice: DataCollectionChoice, - reject_predictions_tx: mpsc::UnboundedSender, + reject_predictions_tx: mpsc::UnboundedSender, settled_predictions_tx: mpsc::UnboundedSender, shown_predictions: VecDeque, rated_predictions: HashSet, @@ -151,6 +151,11 @@ pub struct EditPredictionStore { settled_event_callback: Option>, } +pub(crate) struct EditPredictionRejectionPayload { + rejection: EditPredictionRejection, + organization_id: Option, +} + #[derive(Copy, Clone, PartialEq, Eq)] pub enum EditPredictionModel { Zeta, @@ -719,8 +724,13 @@ impl EditPredictionStore { |this, _listener, _event, cx| { let client = this.client.clone(); let llm_token = this.llm_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); cx.spawn(async move |_this, _cx| { - llm_token.refresh(&client).await?; + llm_token.refresh(&client, organization_id).await?; anyhow::Ok(()) }) .detach_and_log_err(cx); @@ -781,11 +791,17 @@ impl EditPredictionStore { let client = self.client.clone(); let llm_token = self.llm_token.clone(); let app_version = AppVersion::global(cx); + let organization_id = self + .user_store + .read(cx) + 
.current_organization() + .map(|organization| organization.id.clone()); + cx.spawn(async move |this, cx| { let experiments = cx .background_spawn(async move { let http_client = client.http_client(); - let token = llm_token.acquire(&client).await?; + let token = llm_token.acquire(&client, organization_id).await?; let url = http_client.build_zed_llm_url("/edit_prediction_experiments", &[])?; let request = http_client::Request::builder() .method(Method::GET) @@ -1424,7 +1440,7 @@ impl EditPredictionStore { } async fn handle_rejected_predictions( - rx: UnboundedReceiver, + rx: UnboundedReceiver, client: Arc, llm_token: LlmApiToken, app_version: Version, @@ -1433,7 +1449,11 @@ impl EditPredictionStore { let mut rx = std::pin::pin!(rx.peekable()); let mut batched = Vec::new(); - while let Some(rejection) = rx.next().await { + while let Some(EditPredictionRejectionPayload { + rejection, + organization_id, + }) = rx.next().await + { batched.push(rejection); if batched.len() < MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST / 2 { @@ -1471,6 +1491,7 @@ impl EditPredictionStore { }, client.clone(), llm_token.clone(), + organization_id, app_version.clone(), true, ) @@ -1676,13 +1697,23 @@ impl EditPredictionStore { all_language_settings(None, cx).edit_predictions.provider, EditPredictionProvider::Ollama | EditPredictionProvider::OpenAiCompatibleApi ); + if is_cloud { + let organization_id = self + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); + self.reject_predictions_tx - .unbounded_send(EditPredictionRejection { - request_id: prediction_id.to_string(), - reason, - was_shown, - model_version, + .unbounded_send(EditPredictionRejectionPayload { + rejection: EditPredictionRejection { + request_id: prediction_id.to_string(), + reason, + was_shown, + model_version, + }, + organization_id, }) .log_err(); } @@ -2337,6 +2368,7 @@ impl EditPredictionStore { client: Arc, custom_url: Option>, llm_token: LlmApiToken, + organization_id: 
Option, app_version: Version, ) -> Result<(RawCompletionResponse, Option)> { let url = if let Some(custom_url) = custom_url { @@ -2356,6 +2388,7 @@ impl EditPredictionStore { }, client, llm_token, + organization_id, app_version, true, ) @@ -2366,6 +2399,7 @@ impl EditPredictionStore { input: ZetaPromptInput, client: Arc, llm_token: LlmApiToken, + organization_id: Option, app_version: Version, trigger: PredictEditsRequestTrigger, ) -> Result<(PredictEditsV3Response, Option)> { @@ -2388,6 +2422,7 @@ impl EditPredictionStore { }, client, llm_token, + organization_id, app_version, true, ) @@ -2441,6 +2476,7 @@ impl EditPredictionStore { build: impl Fn(http_client::http::request::Builder) -> Result>, client: Arc, llm_token: LlmApiToken, + organization_id: Option, app_version: Version, require_auth: bool, ) -> Result<(Res, Option)> @@ -2450,9 +2486,12 @@ impl EditPredictionStore { let http_client = client.http_client(); let mut token = if require_auth { - Some(llm_token.acquire(&client).await?) + Some(llm_token.acquire(&client, organization_id.clone()).await?) 
} else { - llm_token.acquire(&client).await.ok() + llm_token + .acquire(&client, organization_id.clone()) + .await + .ok() }; let mut did_retry = false; @@ -2494,7 +2533,7 @@ impl EditPredictionStore { return Ok((serde_json::from_slice(&body)?, usage)); } else if !did_retry && token.is_some() && response.needs_llm_token_refresh() { did_retry = true; - token = Some(llm_token.refresh(&client).await?); + token = Some(llm_token.refresh(&client, organization_id.clone()).await?); } else { let mut body = String::new(); response.body_mut().read_to_string(&mut body).await?; diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index f038d2a4ca1929faee2a02391534539b5b63e2d0..8c158c074bf926d2cee9b77cec65b28c4317a22a 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -66,6 +66,11 @@ pub fn request_prediction_with_zeta( let client = store.client.clone(); let llm_token = store.llm_token.clone(); + let organization_id = store + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); let app_version = AppVersion::global(cx); let request_task = cx.background_spawn({ @@ -201,6 +206,7 @@ pub fn request_prediction_with_zeta( client, None, llm_token, + organization_id, app_version, ) .await?; @@ -219,6 +225,7 @@ pub fn request_prediction_with_zeta( prompt_input.clone(), client, llm_token, + organization_id, app_version, trigger, ) @@ -430,6 +437,11 @@ pub(crate) fn edit_prediction_accepted( let require_auth = custom_accept_url.is_none(); let client = store.client.clone(); let llm_token = store.llm_token.clone(); + let organization_id = store + .user_store + .read(cx) + .current_organization() + .map(|organization| organization.id.clone()); let app_version = AppVersion::global(cx); cx.background_spawn(async move { @@ -454,6 +466,7 @@ pub(crate) fn edit_prediction_accepted( }, client, llm_token, + organization_id, app_version, require_auth, ) diff --git 
a/crates/http_client/src/async_body.rs b/crates/http_client/src/async_body.rs index 8fb49f218568ea36078d772a7225229f31a916c4..a59a7339db1e4449b875e2c539e98c86b4279365 100644 --- a/crates/http_client/src/async_body.rs +++ b/crates/http_client/src/async_body.rs @@ -7,6 +7,7 @@ use std::{ use bytes::Bytes; use futures::AsyncRead; use http_body::{Body, Frame}; +use serde::Serialize; /// Based on the implementation of AsyncBody in /// . @@ -88,6 +89,19 @@ impl From<&'static str> for AsyncBody { } } +/// Newtype wrapper that serializes a value as JSON into an `AsyncBody`. +pub struct Json(pub T); + +impl From> for AsyncBody { + fn from(json: Json) -> Self { + Self::from_bytes( + serde_json::to_vec(&json.0) + .expect("failed to serialize JSON") + .into(), + ) + } +} + impl> From> for AsyncBody { fn from(body: Option) -> Self { match body { diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 5cf25a8277872ba3c6d502565e8057623b267d42..bbbe3b1a832332bd6bee693b4c0b916b4f4c182a 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -5,7 +5,7 @@ pub mod github; pub mod github_download; pub use anyhow::{Result, anyhow}; -pub use async_body::{AsyncBody, Inner}; +pub use async_body::{AsyncBody, Inner, Json}; use derive_more::Deref; use http::HeaderValue; pub use http::{self, Method, Request, Response, StatusCode, Uri, request::Builder}; diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index 18e099b4d6fc62867bf35fbd1d4573093af44744..b2af80a3c295cab1cf40a330eb8d84f94a137eb7 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; use client::Client; use cloud_api_client::ClientApiError; +use cloud_api_types::OrganizationId; use cloud_api_types::websocket_protocol::MessageToClient; use 
cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME}; use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _}; @@ -26,29 +27,46 @@ impl fmt::Display for PaymentRequiredError { pub struct LlmApiToken(Arc>>); impl LlmApiToken { - pub async fn acquire(&self, client: &Arc) -> Result { + pub async fn acquire( + &self, + client: &Arc, + organization_id: Option, + ) -> Result { let lock = self.0.upgradable_read().await; if let Some(token) = lock.as_ref() { Ok(token.to_string()) } else { - Self::fetch(RwLockUpgradableReadGuard::upgrade(lock).await, client).await + Self::fetch( + RwLockUpgradableReadGuard::upgrade(lock).await, + client, + organization_id, + ) + .await } } - pub async fn refresh(&self, client: &Arc) -> Result { - Self::fetch(self.0.write().await, client).await + pub async fn refresh( + &self, + client: &Arc, + organization_id: Option, + ) -> Result { + Self::fetch(self.0.write().await, client, organization_id).await } async fn fetch( mut lock: RwLockWriteGuard<'_, Option>, client: &Arc, + organization_id: Option, ) -> Result { let system_id = client .telemetry() .system_id() .map(|system_id| system_id.to_string()); - let result = client.cloud_client().create_llm_token(system_id).await; + let result = client + .cloud_client() + .create_llm_token(system_id, organization_id) + .await; match result { Ok(response) => { *lock = Some(response.token.0.clone()); diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 19009013bf84ad9751e9ed0de2d3338b279a258e..b84b19b038905ba9f3d9a0637c770acc95687976 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -3,7 +3,7 @@ use anthropic::AnthropicModelMode; use anyhow::{Context as _, Result, anyhow}; use chrono::{DateTime, Utc}; use client::{Client, UserStore, zed_urls}; -use cloud_api_types::Plan; +use cloud_api_types::{OrganizationId, Plan}; use 
cloud_llm_client::{ CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME, CLIENT_SUPPORTS_STATUS_STREAM_ENDED_HEADER_NAME, CLIENT_SUPPORTS_X_AI_HEADER_NAME, CompletionBody, CompletionEvent, CompletionRequestStatus, @@ -122,15 +122,25 @@ impl State { recommended_models: Vec::new(), _fetch_models_task: cx.spawn(async move |this, cx| { maybe!(async move { - let (client, llm_api_token) = this - .read_with(cx, |this, _cx| (client.clone(), this.llm_api_token.clone()))?; + let (client, llm_api_token, organization_id) = + this.read_with(cx, |this, cx| { + ( + client.clone(), + this.llm_api_token.clone(), + this.user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()), + ) + })?; while current_user.borrow().is_none() { current_user.next().await; } let response = - Self::fetch_models(client.clone(), llm_api_token.clone()).await?; + Self::fetch_models(client.clone(), llm_api_token.clone(), organization_id) + .await?; this.update(cx, |this, cx| this.update_models(response, cx))?; anyhow::Ok(()) }) @@ -146,9 +156,17 @@ impl State { move |this, _listener, _event, cx| { let client = this.client.clone(); let llm_api_token = this.llm_api_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()); cx.spawn(async move |this, cx| { - llm_api_token.refresh(&client).await?; - let response = Self::fetch_models(client, llm_api_token).await?; + llm_api_token + .refresh(&client, organization_id.clone()) + .await?; + let response = + Self::fetch_models(client, llm_api_token, organization_id).await?; this.update(cx, |this, cx| { this.update_models(response, cx); }) @@ -209,9 +227,10 @@ impl State { async fn fetch_models( client: Arc, llm_api_token: LlmApiToken, + organization_id: Option, ) -> Result { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client).await?; + let token = llm_api_token.acquire(&client, organization_id).await?; let request = http_client::Request::builder() 
.method(Method::GET) @@ -273,11 +292,13 @@ impl CloudLanguageModelProvider { &self, model: Arc, llm_api_token: LlmApiToken, + user_store: Entity, ) -> Arc { Arc::new(CloudLanguageModel { id: LanguageModelId(SharedString::from(model.id.0.clone())), model, llm_api_token, + user_store, client: self.client.clone(), request_limiter: RateLimiter::new(4), }) @@ -306,36 +327,46 @@ impl LanguageModelProvider for CloudLanguageModelProvider { } fn default_model(&self, cx: &App) -> Option> { - let default_model = self.state.read(cx).default_model.clone()?; - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - Some(self.create_language_model(default_model, llm_api_token)) + let state = self.state.read(cx); + let default_model = state.default_model.clone()?; + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + Some(self.create_language_model(default_model, llm_api_token, user_store)) } fn default_fast_model(&self, cx: &App) -> Option> { - let default_fast_model = self.state.read(cx).default_fast_model.clone()?; - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - Some(self.create_language_model(default_fast_model, llm_api_token)) + let state = self.state.read(cx); + let default_fast_model = state.default_fast_model.clone()?; + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + Some(self.create_language_model(default_fast_model, llm_api_token, user_store)) } fn recommended_models(&self, cx: &App) -> Vec> { - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - self.state - .read(cx) + let state = self.state.read(cx); + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + state .recommended_models .iter() .cloned() - .map(|model| self.create_language_model(model, llm_api_token.clone())) + .map(|model| { + self.create_language_model(model, llm_api_token.clone(), user_store.clone()) + }) .collect() } fn 
provided_models(&self, cx: &App) -> Vec> { - let llm_api_token = self.state.read(cx).llm_api_token.clone(); - self.state - .read(cx) + let state = self.state.read(cx); + let llm_api_token = state.llm_api_token.clone(); + let user_store = state.user_store.clone(); + state .models .iter() .cloned() - .map(|model| self.create_language_model(model, llm_api_token.clone())) + .map(|model| { + self.create_language_model(model, llm_api_token.clone(), user_store.clone()) + }) .collect() } @@ -367,6 +398,7 @@ pub struct CloudLanguageModel { id: LanguageModelId, model: Arc, llm_api_token: LlmApiToken, + user_store: Entity, client: Arc, request_limiter: RateLimiter, } @@ -380,12 +412,15 @@ impl CloudLanguageModel { async fn perform_llm_completion( client: Arc, llm_api_token: LlmApiToken, + organization_id: Option, app_version: Option, body: CompletionBody, ) -> Result { let http_client = &client.http_client(); - let mut token = llm_api_token.acquire(&client).await?; + let mut token = llm_api_token + .acquire(&client, organization_id.clone()) + .await?; let mut refreshed_token = false; loop { @@ -416,7 +451,9 @@ impl CloudLanguageModel { } if !refreshed_token && response.needs_llm_token_refresh() { - token = llm_api_token.refresh(&client).await?; + token = llm_api_token + .refresh(&client, organization_id.clone()) + .await?; refreshed_token = true; continue; } @@ -670,12 +707,17 @@ impl LanguageModel for CloudLanguageModel { cloud_llm_client::LanguageModelProvider::Google => { let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); + let organization_id = self + .user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()); let model_id = self.model.id.to_string(); let generate_content_request = into_google(request, model_id.clone(), GoogleModelMode::Default); async move { let http_client = &client.http_client(); - let token = llm_api_token.acquire(&client).await?; + let token = llm_api_token.acquire(&client, organization_id).await?; let 
request_body = CountTokensBody { provider: cloud_llm_client::LanguageModelProvider::Google, @@ -736,6 +778,13 @@ impl LanguageModel for CloudLanguageModel { let prompt_id = request.prompt_id.clone(); let intent = request.intent; let app_version = Some(cx.update(|cx| AppVersion::global(cx))); + let user_store = self.user_store.clone(); + let organization_id = cx.update(|cx| { + user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()) + }); let thinking_allowed = request.thinking_allowed; let enable_thinking = thinking_allowed && self.model.supports_thinking; let provider_name = provider_name(&self.model.provider); @@ -767,6 +816,7 @@ impl LanguageModel for CloudLanguageModel { let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); + let organization_id = organization_id.clone(); let future = self.request_limiter.stream(async move { let PerformLlmCompletionResponse { response, @@ -774,6 +824,7 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, @@ -803,6 +854,7 @@ impl LanguageModel for CloudLanguageModel { cloud_llm_client::LanguageModelProvider::OpenAi => { let client = self.client.clone(); let llm_api_token = self.llm_api_token.clone(); + let organization_id = organization_id.clone(); let effort = request .thinking_effort .as_ref() @@ -828,6 +880,7 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, @@ -861,6 +914,7 @@ impl LanguageModel for CloudLanguageModel { None, ); let llm_api_token = self.llm_api_token.clone(); + let organization_id = organization_id.clone(); let future = self.request_limiter.stream(async move { let PerformLlmCompletionResponse { response, @@ -868,6 +922,7 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), 
llm_api_token, + organization_id, app_version, CompletionBody { thread_id, @@ -902,6 +957,7 @@ impl LanguageModel for CloudLanguageModel { } = Self::perform_llm_completion( client.clone(), llm_api_token, + organization_id, app_version, CompletionBody { thread_id, diff --git a/crates/web_search_providers/Cargo.toml b/crates/web_search_providers/Cargo.toml index ecdca5883ff541459e94170986df3b7f16036c5a..ff264edcb150063237c633de746b2f6b9f6f250c 100644 --- a/crates/web_search_providers/Cargo.toml +++ b/crates/web_search_providers/Cargo.toml @@ -14,6 +14,7 @@ path = "src/web_search_providers.rs" [dependencies] anyhow.workspace = true client.workspace = true +cloud_api_types.workspace = true cloud_llm_client.workspace = true futures.workspace = true gpui.workspace = true diff --git a/crates/web_search_providers/src/cloud.rs b/crates/web_search_providers/src/cloud.rs index 2f3ccdbb52a884471250ad458e8b7922437cb9ae..c8bc89953f2b2d3ec62bac07e80f2737522824f7 100644 --- a/crates/web_search_providers/src/cloud.rs +++ b/crates/web_search_providers/src/cloud.rs @@ -1,7 +1,8 @@ use std::sync::Arc; use anyhow::{Context as _, Result}; -use client::Client; +use client::{Client, UserStore}; +use cloud_api_types::OrganizationId; use cloud_llm_client::{WebSearchBody, WebSearchResponse}; use futures::AsyncReadExt as _; use gpui::{App, AppContext, Context, Entity, Subscription, Task}; @@ -14,8 +15,8 @@ pub struct CloudWebSearchProvider { } impl CloudWebSearchProvider { - pub fn new(client: Arc, cx: &mut App) -> Self { - let state = cx.new(|cx| State::new(client, cx)); + pub fn new(client: Arc, user_store: Entity, cx: &mut App) -> Self { + let state = cx.new(|cx| State::new(client, user_store, cx)); Self { state } } @@ -23,24 +24,31 @@ impl CloudWebSearchProvider { pub struct State { client: Arc, + user_store: Entity, llm_api_token: LlmApiToken, _llm_token_subscription: Subscription, } impl State { - pub fn new(client: Arc, cx: &mut Context) -> Self { + pub fn new(client: Arc, user_store: 
Entity, cx: &mut Context) -> Self { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); Self { client, + user_store, llm_api_token: LlmApiToken::default(), _llm_token_subscription: cx.subscribe( &refresh_llm_token_listener, |this, _, _event, cx| { let client = this.client.clone(); let llm_api_token = this.llm_api_token.clone(); + let organization_id = this + .user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()); cx.spawn(async move |_this, _cx| { - llm_api_token.refresh(&client).await?; + llm_api_token.refresh(&client, organization_id).await?; anyhow::Ok(()) }) .detach_and_log_err(cx); @@ -61,21 +69,31 @@ impl WebSearchProvider for CloudWebSearchProvider { let state = self.state.read(cx); let client = state.client.clone(); let llm_api_token = state.llm_api_token.clone(); + let organization_id = state + .user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()); let body = WebSearchBody { query }; - cx.background_spawn(async move { perform_web_search(client, llm_api_token, body).await }) + cx.background_spawn(async move { + perform_web_search(client, llm_api_token, organization_id, body).await + }) } } async fn perform_web_search( client: Arc, llm_api_token: LlmApiToken, + organization_id: Option, body: WebSearchBody, ) -> Result { const MAX_RETRIES: usize = 3; let http_client = &client.http_client(); let mut retries_remaining = MAX_RETRIES; - let mut token = llm_api_token.acquire(&client).await?; + let mut token = llm_api_token + .acquire(&client, organization_id.clone()) + .await?; loop { if retries_remaining == 0 { @@ -100,7 +118,9 @@ async fn perform_web_search( response.body_mut().read_to_string(&mut body).await?; return Ok(serde_json::from_str(&body)?); } else if response.needs_llm_token_refresh() { - token = llm_api_token.refresh(&client).await?; + token = llm_api_token + .refresh(&client, organization_id.clone()) + .await?; retries_remaining -= 1; } else { // For now we will only retry if the LLM token 
is expired, diff --git a/crates/web_search_providers/src/web_search_providers.rs b/crates/web_search_providers/src/web_search_providers.rs index 8ab0aee47a414c4cc669ab05e727a827d17c2844..509632429fb167cd489cd4253ceae0ce479b10a8 100644 --- a/crates/web_search_providers/src/web_search_providers.rs +++ b/crates/web_search_providers/src/web_search_providers.rs @@ -1,26 +1,28 @@ mod cloud; -use client::Client; +use client::{Client, UserStore}; use gpui::{App, Context, Entity}; use language_model::LanguageModelRegistry; use std::sync::Arc; use web_search::{WebSearchProviderId, WebSearchRegistry}; -pub fn init(client: Arc, cx: &mut App) { +pub fn init(client: Arc, user_store: Entity, cx: &mut App) { let registry = WebSearchRegistry::global(cx); registry.update(cx, |registry, cx| { - register_web_search_providers(registry, client, cx); + register_web_search_providers(registry, client, user_store, cx); }); } fn register_web_search_providers( registry: &mut WebSearchRegistry, client: Arc, + user_store: Entity, cx: &mut Context, ) { register_zed_web_search_provider( registry, client.clone(), + user_store.clone(), &LanguageModelRegistry::global(cx), cx, ); @@ -29,7 +31,13 @@ fn register_web_search_providers( &LanguageModelRegistry::global(cx), move |this, registry, event, cx| { if let language_model::Event::DefaultModelChanged = event { - register_zed_web_search_provider(this, client.clone(), ®istry, cx) + register_zed_web_search_provider( + this, + client.clone(), + user_store.clone(), + ®istry, + cx, + ) } }, ) @@ -39,6 +47,7 @@ fn register_web_search_providers( fn register_zed_web_search_provider( registry: &mut WebSearchRegistry, client: Arc, + user_store: Entity, language_model_registry: &Entity, cx: &mut Context, ) { @@ -47,7 +56,10 @@ fn register_zed_web_search_provider( .default_model() .is_some_and(|default| default.is_provided_by_zed()); if using_zed_provider { - registry.register_provider(cloud::CloudWebSearchProvider::new(client, cx), cx) + 
registry.register_provider( + cloud::CloudWebSearchProvider::new(client, user_store, cx), + cx, + ) } else { registry.unregister_provider(WebSearchProviderId( cloud::ZED_WEB_SEARCH_PROVIDER_ID.into(), diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 38238d8af519c0506ab451bccaa1abe3a893e4c9..a3379a6017b7e3b7c26e2a98346e4926e90e0999 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -645,7 +645,7 @@ fn main() { zed::remote_debug::init(cx); edit_prediction_ui::init(cx); web_search::init(cx); - web_search_providers::init(app_state.client.clone(), cx); + web_search_providers::init(app_state.client.clone(), app_state.user_store.clone(), cx); snippet_provider::init(cx); edit_prediction_registry::init(app_state.client.clone(), app_state.user_store.clone(), cx); let prompt_builder = PromptBuilder::load(app_state.fs.clone(), stdout_is_a_pty(), cx); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 17832bdd1833cabb42af2195f9d9aab1a6bf3fab..20629785c7172241f49a0e7a69f9dcc1953f6a95 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -5021,7 +5021,7 @@ mod tests { language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx); web_search::init(cx); git_graph::init(cx); - web_search_providers::init(app_state.client.clone(), cx); + web_search_providers::init(app_state.client.clone(), app_state.user_store.clone(), cx); let prompt_builder = PromptBuilder::load(app_state.fs.clone(), false, cx); project::AgentRegistryStore::init_global( cx, From 9b8ad0176928a324d828294c67b10fab272c2f95 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Wed, 4 Mar 2026 16:56:56 +0200 Subject: [PATCH 303/548] ep: Option to configure custom Baseten environment (#50706) Release Notes: - N/A --- crates/edit_prediction/src/edit_prediction.rs | 8 +++++++- crates/edit_prediction/src/zeta.rs | 6 +++++- crates/edit_prediction_cli/src/predict.rs | 7 ++++++- 3 files changed, 18 insertions(+), 3 deletions(-) diff --git 
a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 6b2019aa30030b0852f74bc851e2012feac4f0e2..5c7ce045121739f341b84dd87d827878550f4048 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -125,6 +125,7 @@ impl Global for EditPredictionStoreGlobal {} #[derive(Clone)] pub struct Zeta2RawConfig { pub model_id: Option, + pub environment: Option, pub format: ZetaFormat, } @@ -760,7 +761,12 @@ impl EditPredictionStore { let version_str = env::var("ZED_ZETA_FORMAT").ok()?; let format = ZetaFormat::parse(&version_str).ok()?; let model_id = env::var("ZED_ZETA_MODEL").ok(); - Some(Zeta2RawConfig { model_id, format }) + let environment = env::var("ZED_ZETA_ENVIRONMENT").ok(); + Some(Zeta2RawConfig { + model_id, + environment, + format, + }) } pub fn set_edit_prediction_model(&mut self, model: EditPredictionModel) { diff --git a/crates/edit_prediction/src/zeta.rs b/crates/edit_prediction/src/zeta.rs index 8c158c074bf926d2cee9b77cec65b28c4317a22a..ccb058e1193eaf2919c286c6e675a907e4af159f 100644 --- a/crates/edit_prediction/src/zeta.rs +++ b/crates/edit_prediction/src/zeta.rs @@ -186,13 +186,17 @@ pub fn request_prediction_with_zeta( let prompt = format_zeta_prompt(&prompt_input, config.format); let prefill = get_prefill(&prompt_input, config.format); let prompt = format!("{prompt}{prefill}"); + let environment = config + .environment + .clone() + .or_else(|| Some(config.format.to_string().to_lowercase())); let request = RawCompletionRequest { model: config.model_id.clone().unwrap_or_default(), prompt, temperature: None, stop: vec![], max_tokens: Some(2048), - environment: Some(config.format.to_string().to_lowercase()), + environment, }; editable_range_in_excerpt = zeta_prompt::excerpt_range_for_format( diff --git a/crates/edit_prediction_cli/src/predict.rs b/crates/edit_prediction_cli/src/predict.rs index 
8f537dc0817a9cb0b4fd74348ae5e43d4f63beb9..bd89d54ab37521ecb9661b6f1bb0156f30ba1acb 100644 --- a/crates/edit_prediction_cli/src/predict.rs +++ b/crates/edit_prediction_cli/src/predict.rs @@ -148,7 +148,12 @@ pub async fn run_prediction( if let PredictionProvider::Zeta2(format) = provider { if format != ZetaFormat::default() { let model_id = std::env::var("ZED_ZETA_MODEL").ok(); - store.set_zeta2_raw_config(Zeta2RawConfig { model_id, format }); + let environment = std::env::var("ZED_ZETA_ENVIRONMENT").ok(); + store.set_zeta2_raw_config(Zeta2RawConfig { + model_id, + environment, + format, + }); } } }); From 866ec42371b091c3a4d451a8d62182c9e40bdc14 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Wed, 4 Mar 2026 10:53:26 -0500 Subject: [PATCH 304/548] Remove deprecated Gemini 3 Pro Preview (#50503) Gemini 3 Pro Preview has been deprecated in favor of Gemini 3.1 Pro. This removes the `Gemini3Pro` variant from the `Model` enum and all associated match arms, updates eval model lists, docs, and test fixtures. A serde alias (`"gemini-3-pro-preview"`) is kept on `Gemini31Pro` so existing user settings gracefully migrate to the replacement model. Closes AI-66 Release Notes: - Removed deprecated Gemini 3 Pro Preview model; existing configurations automatically migrate to Gemini 3.1 Pro. 
--- .github/ISSUE_TEMPLATE/10_bug_report.yml | 2 +- .github/workflows/run_cron_unit_evals.yml | 2 +- crates/google_ai/src/google_ai.rs | 14 ++------------ crates/language_models/src/provider/open_router.rs | 8 ++++---- crates/ui/src/components/callout.rs | 2 +- docs/src/ai/models.md | 8 ++------ .../xtask/src/tasks/workflows/run_agent_evals.rs | 2 +- 7 files changed, 12 insertions(+), 26 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/10_bug_report.yml b/.github/ISSUE_TEMPLATE/10_bug_report.yml index 13e43219dd65a78af4afec479330bbc5fd85fe42..5eb8e8a6299c5189384b6d060e12cd61a2249a3c 100644 --- a/.github/ISSUE_TEMPLATE/10_bug_report.yml +++ b/.github/ISSUE_TEMPLATE/10_bug_report.yml @@ -100,7 +100,7 @@ body: label: (for AI issues) Model provider details placeholder: | - Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc.) - - Model Name: (Claude Sonnet 4.5, Gemini 3 Pro, GPT-5) + - Model Name: (Claude Sonnet 4.5, Gemini 3.1 Pro, GPT-5) - Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads) - Other details (ACPs, MCPs, other settings, etc.): validations: diff --git a/.github/workflows/run_cron_unit_evals.yml b/.github/workflows/run_cron_unit_evals.yml index e57b54e4f2249b92630b2d3636ce2316a0814625..2a204a9d40d78bf52f38825b4db060216e348a87 100644 --- a/.github/workflows/run_cron_unit_evals.yml +++ b/.github/workflows/run_cron_unit_evals.yml @@ -16,7 +16,7 @@ jobs: model: - anthropic/claude-sonnet-4-5-latest - anthropic/claude-opus-4-5-latest - - google/gemini-3-pro + - google/gemini-3.1-pro - openai/gpt-5 fail-fast: false steps: diff --git a/crates/google_ai/src/google_ai.rs b/crates/google_ai/src/google_ai.rs index 3a686f97a8825b30a8f02f4149b110c3d1aacb1e..7659be8ab44da35efd16389c4abd0bf99d8cf3a4 100644 --- a/crates/google_ai/src/google_ai.rs +++ b/crates/google_ai/src/google_ai.rs @@ -510,11 +510,9 @@ pub enum Model { alias = "gemini-2.5-pro-preview-06-05" )] Gemini25Pro, - #[serde(rename = 
"gemini-3-pro-preview")] - Gemini3Pro, #[serde(rename = "gemini-3-flash-preview")] Gemini3Flash, - #[serde(rename = "gemini-3.1-pro-preview")] + #[serde(rename = "gemini-3.1-pro-preview", alias = "gemini-3-pro-preview")] Gemini31Pro, #[serde(rename = "custom")] Custom { @@ -537,7 +535,6 @@ impl Model { Self::Gemini25FlashLite => "gemini-2.5-flash-lite", Self::Gemini25Flash => "gemini-2.5-flash", Self::Gemini25Pro => "gemini-2.5-pro", - Self::Gemini3Pro => "gemini-3-pro-preview", Self::Gemini3Flash => "gemini-3-flash-preview", Self::Gemini31Pro => "gemini-3.1-pro-preview", Self::Custom { name, .. } => name, @@ -548,7 +545,6 @@ impl Model { Self::Gemini25FlashLite => "gemini-2.5-flash-lite", Self::Gemini25Flash => "gemini-2.5-flash", Self::Gemini25Pro => "gemini-2.5-pro", - Self::Gemini3Pro => "gemini-3-pro-preview", Self::Gemini3Flash => "gemini-3-flash-preview", Self::Gemini31Pro => "gemini-3.1-pro-preview", Self::Custom { name, .. } => name, @@ -560,7 +556,6 @@ impl Model { Self::Gemini25FlashLite => "Gemini 2.5 Flash-Lite", Self::Gemini25Flash => "Gemini 2.5 Flash", Self::Gemini25Pro => "Gemini 2.5 Pro", - Self::Gemini3Pro => "Gemini 3 Pro", Self::Gemini3Flash => "Gemini 3 Flash", Self::Gemini31Pro => "Gemini 3.1 Pro", Self::Custom { @@ -574,7 +569,6 @@ impl Model { Self::Gemini25FlashLite | Self::Gemini25Flash | Self::Gemini25Pro - | Self::Gemini3Pro | Self::Gemini3Flash | Self::Gemini31Pro => 1_048_576, Self::Custom { max_tokens, .. } => *max_tokens, @@ -586,7 +580,6 @@ impl Model { Model::Gemini25FlashLite | Model::Gemini25Flash | Model::Gemini25Pro - | Model::Gemini3Pro | Model::Gemini3Flash | Model::Gemini31Pro => Some(65_536), Model::Custom { .. 
} => None, @@ -603,10 +596,7 @@ impl Model { pub fn mode(&self) -> GoogleModelMode { match self { - Self::Gemini25FlashLite - | Self::Gemini25Flash - | Self::Gemini25Pro - | Self::Gemini3Pro => { + Self::Gemini25FlashLite | Self::Gemini25Flash | Self::Gemini25Pro => { GoogleModelMode::Thinking { // By default these models are set to "auto", so we preserve that behavior // but indicate they are capable of thinking mode diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index 3e5128fcc5a366b4156afe6b28f3efc7bd697e12..7a74125d606ddc4be56d113fbbf3fa66866fb595 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -889,7 +889,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -914,7 +914,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -940,7 +940,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { @@ -967,7 +967,7 @@ mod tests { ResponseStreamEvent { id: Some("response_123".into()), created: 1234567890, - model: "google/gemini-3-pro-preview".into(), + model: "google/gemini-3.1-pro-preview".into(), choices: vec![ChoiceDelta { index: 0, delta: ResponseMessageDelta { diff --git a/crates/ui/src/components/callout.rs b/crates/ui/src/components/callout.rs index 
24762ec1765a58259b061194ea31ed7e8721c2a0..23c820cd545adff2985a4116a6efb00c1e731693 100644 --- a/crates/ui/src/components/callout.rs +++ b/crates/ui/src/components/callout.rs @@ -295,7 +295,7 @@ impl Component for Callout { "Error details:", "• Quota exceeded for metric", "• Limit: 0", - "• Model: gemini-3-pro", + "• Model: gemini-3.1-pro", "Please retry in 26.33s.", "Additional details:", "- Request ID: abc123def456", diff --git a/docs/src/ai/models.md b/docs/src/ai/models.md index a86b873ef8aff112ceddbe7da000e4350023ec42..bbf41cf66cc4d93b38123c12fadd7a60c119dfef 100644 --- a/docs/src/ai/models.md +++ b/docs/src/ai/models.md @@ -43,10 +43,6 @@ Zed's plans offer hosted versions of major LLMs with higher rate limits than dir | | OpenAI | Cached Input | $0.005 | $0.0055 | | Gemini 3.1 Pro | Google | Input | $2.00 | $2.20 | | | Google | Output | $12.00 | $13.20 | -| Gemini 3.1 Pro | Google | Input | $2.00 | $2.20 | -| | Google | Output | $12.00 | $13.20 | -| Gemini 3 Pro | Google | Input | $2.00 | $2.20 | -| | Google | Output | $12.00 | $13.20 | | Gemini 3 Flash | Google | Input | $0.30 | $0.33 | | | Google | Output | $2.50 | $2.75 | | Grok 4 | X.ai | Input | $3.00 | $3.30 | @@ -70,7 +66,8 @@ As of February 19, 2026, Zed Pro serves newer model versions in place of the ret - Claude Sonnet 4 → Claude Sonnet 4.5 or Claude Sonnet 4.6 - Claude Sonnet 3.7 (retired Feb 19) → Claude Sonnet 4.5 or Claude Sonnet 4.6 - GPT-5.1 and GPT-5 → GPT-5.2 or GPT-5.2 Codex -- Gemini 2.5 Pro → Gemini 3 Pro or Gemini 3.1 Pro +- Gemini 2.5 Pro → Gemini 3.1 Pro +- Gemini 3 Pro → Gemini 3.1 Pro - Gemini 2.5 Flash → Gemini 3 Flash ## Usage {#usage} @@ -95,7 +92,6 @@ A context window is the maximum span of text and code an LLM can consider at onc | GPT-5 mini | OpenAI | 400k | | GPT-5 nano | OpenAI | 400k | | Gemini 3.1 Pro | Google | 200k | -| Gemini 3 Pro | Google | 200k | | Gemini 3 Flash | Google | 200k | > Context window limits for hosted Sonnet 4.5/4.6 and Gemini 3.1 Pro/3 Pro/Flash may 
increase in future releases. diff --git a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs index e83d3a07f079c1f40360f413f3007813dbe552ce..521f419d9b317c42a1106ebe8500ccf0a3f494ec 100644 --- a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs +++ b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs @@ -123,7 +123,7 @@ fn cron_unit_evals() -> NamedJob { const UNIT_EVAL_MODELS: &[&str] = &[ "anthropic/claude-sonnet-4-5-latest", "anthropic/claude-opus-4-5-latest", - "google/gemini-3-pro", + "google/gemini-3.1-pro", "openai/gpt-5", ]; From 489ec6611ea2db9f1366670911ba95e3536b7b2c Mon Sep 17 00:00:00 2001 From: "Joseph T. Lyons" Date: Wed, 4 Mar 2026 10:54:11 -0500 Subject: [PATCH 305/548] Bump Zed to v0.228 (#50710) Release Notes: - N/A --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b1ff28fcf52e118830e2100d35a6cdbca6f6f013..02d2026fe828d956bae8d134d7d6acef91c7fec6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21725,7 +21725,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.227.0" +version = "0.228.0" dependencies = [ "acp_thread", "acp_tools", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index c04e10636f9088cf5f12dbda526a4e933a5e37e3..3d9e433d73dac7d79fc008c79b3ab2db5863a8db 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." 
edition.workspace = true name = "zed" -version = "0.227.0" +version = "0.228.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] From d329961d7c6468b9760307f82b6d3d044ec38e67 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 4 Mar 2026 17:02:26 +0100 Subject: [PATCH 306/548] workspace: Remove superfluous call dependency (#50713) Closes #50701 Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- Cargo.lock | 1 - crates/workspace/Cargo.toml | 2 -- 2 files changed, 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 02d2026fe828d956bae8d134d7d6acef91c7fec6..dabf43599e8a44396935235c773c4609e84f76f2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21272,7 +21272,6 @@ dependencies = [ "any_vec", "anyhow", "async-recursion", - "call", "chrono", "client", "clock", diff --git a/crates/workspace/Cargo.toml b/crates/workspace/Cargo.toml index dcd0bf640fdf279fb1874ba77307ccbd3c431393..84fd10c8c03e4f7411fc8c813b70255f5e00031d 100644 --- a/crates/workspace/Cargo.toml +++ b/crates/workspace/Cargo.toml @@ -14,7 +14,6 @@ doctest = false [features] test-support = [ - "call/test-support", "client/test-support", "http_client/test-support", "db/test-support", @@ -72,7 +71,6 @@ zed_actions.workspace = true windows.workspace = true [dev-dependencies] -call = { workspace = true, features = ["test-support"] } client = { workspace = true, features = ["test-support"] } dap = { workspace = true, features = ["test-support"] } db = { workspace = true, features = ["test-support"] } From 731a80053cdaf95b567a333fd786ed7130ebaa3a Mon Sep 17 00:00:00 2001 From: MostlyK 
<135974627+MostlyKIGuess@users.noreply.github.com> Date: Wed, 4 Mar 2026 21:44:32 +0530 Subject: [PATCH 307/548] repl: Bump `runtimed` ecosystem packages and add support for V3 Jupyter Notebooks (#49914) - Add support for v3 Jupyter Notebooks ( nbformat 1.2.0 <-> https://github.com/runtimed/runtimed/pull/275 ) - This means that we can now open notebooks like [Signal Processing for Python](https://nbviewer.org/github/unpingco/Python-for-Signal-Processing/tree/master/) and much more. Release Notes: - N/A --- Cargo.lock | 12 ++++++------ Cargo.toml | 6 +++--- crates/repl/src/notebook/notebook_ui.rs | 6 ++++++ 3 files changed, 15 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dabf43599e8a44396935235c773c4609e84f76f2..e09d057f706615a58f8762b51fd01965c0c43614 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9141,9 +9141,9 @@ dependencies = [ [[package]] name = "jupyter-protocol" -version = "1.2.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c75a69caf8b8e781224badfb76c4a8da4d49856de36ce72ae3cf5d4a1c94e42" +checksum = "4649647741f9794a7a02e3be976f1b248ba28a37dbfc626d5089316fd4fbf4c8" dependencies = [ "async-trait", "bytes 1.11.1", @@ -10785,9 +10785,9 @@ dependencies = [ [[package]] name = "nbformat" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b10a89a2d910233ec3fca4de359b16ebe95e833c8b2162643ef98c6053a0549d" +checksum = "d4983a40792c45e8639f77ef8e4461c55679cbc618f4b9e83830e8c7e79c8383" dependencies = [ "anyhow", "chrono", @@ -14648,9 +14648,9 @@ dependencies = [ [[package]] name = "runtimelib" -version = "1.2.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d80685459e1e5fa5603182058351ae91c98ca458dfef4e85f0a37be4f7cf1e6c" +checksum = "fa84884e45ed4a1e663120cef3fc11f14d1a2a1933776e1c31599f7bd2dd0c9e" dependencies = [ "async-dispatcher", "async-std", diff --git a/Cargo.toml 
b/Cargo.toml index 15d39992804b5ed7ad99fadd46e350b1357b17d1..40a81636a4fd558ddae317f051587f09409cb748 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -574,7 +574,7 @@ itertools = "0.14.0" json_dotpath = "1.1" jsonschema = "0.37.0" jsonwebtoken = "10.0" -jupyter-protocol = "1.2.0" +jupyter-protocol = "1.4.0" jupyter-websocket-client = "1.0.0" libc = "0.2" libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } @@ -590,7 +590,7 @@ minidumper = "0.8" moka = { version = "0.12.10", features = ["sync"] } naga = { version = "28.0", features = ["wgsl-in"] } nanoid = "0.4" -nbformat = "1.1.0" +nbformat = "1.2.0" nix = "0.29" num-format = "0.4.4" objc = "0.2" @@ -660,7 +660,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662 "stream", ], package = "zed-reqwest", version = "0.12.15-zed" } rsa = "0.9.6" -runtimelib = { version = "1.2.0", default-features = false, features = [ +runtimelib = { version = "1.4.0", default-features = false, features = [ "async-dispatcher-runtime", "aws-lc-rs" ] } rust-embed = { version = "8.4", features = ["include-exclude"] } diff --git a/crates/repl/src/notebook/notebook_ui.rs b/crates/repl/src/notebook/notebook_ui.rs index 5b8c0746cdf1289ac3c612139fab1819b5596c07..87f18708a1988c70d66dc4cef5355d4cbcb11dba 100644 --- a/crates/repl/src/notebook/notebook_ui.rs +++ b/crates/repl/src/notebook/notebook_ui.rs @@ -1514,6 +1514,9 @@ impl project::ProjectItem for NotebookItem { nbformat::upgrade_legacy_notebook(legacy_notebook)? } + nbformat::Notebook::V3(v3_notebook) => { + nbformat::upgrade_v3_notebook(v3_notebook)? + } } }; @@ -1791,6 +1794,9 @@ impl Item for NotebookEditor { Ok(nbformat::Notebook::Legacy(legacy_notebook)) => { nbformat::upgrade_legacy_notebook(legacy_notebook)? } + Ok(nbformat::Notebook::V3(v3_notebook)) => { + nbformat::upgrade_v3_notebook(v3_notebook)? 
+ } Err(e) => { anyhow::bail!("Failed to parse notebook: {:?}", e); } From 0394341c814c64711272fca19e4e7fc0370cdf35 Mon Sep 17 00:00:00 2001 From: Oleksiy Syvokon Date: Wed, 4 Mar 2026 18:28:48 +0200 Subject: [PATCH 308/548] ep: Collapse whitespace in deltaChrF (#50716) Release Notes: - N/A --- crates/edit_prediction_cli/src/metrics.rs | 50 +++++++++++++++++++++-- 1 file changed, 46 insertions(+), 4 deletions(-) diff --git a/crates/edit_prediction_cli/src/metrics.rs b/crates/edit_prediction_cli/src/metrics.rs index fc870c36c9c62f4d74486ddd4b2d35176b00bb5c..1bfd8e542fa3d74b55f091d2ac13aa22883f6a2f 100644 --- a/crates/edit_prediction_cli/src/metrics.rs +++ b/crates/edit_prediction_cli/src/metrics.rs @@ -76,14 +76,21 @@ impl ClassificationMetrics { } enum ChrfWhitespace { + /// Preserve whitespace as-is #[allow(unused)] Unchanged, + + /// Ignore all whitespace differences + #[allow(unused)] Ignore, + + /// Collapse whitespace into single spaces + Collapse, } const CHR_F_CHAR_ORDER: usize = 6; const CHR_F_BETA: f64 = 2.0; -const CHR_F_WHITESPACE: ChrfWhitespace = ChrfWhitespace::Ignore; +const CHR_F_WHITESPACE: ChrfWhitespace = ChrfWhitespace::Collapse; /// Computes a delta-chrF score that compares two sets of edits. /// @@ -196,9 +203,34 @@ fn filter_whitespace_chars(text: &str) -> Vec { match CHR_F_WHITESPACE { ChrfWhitespace::Unchanged => text.chars().collect(), ChrfWhitespace::Ignore => text.chars().filter(|c| !c.is_whitespace()).collect(), + ChrfWhitespace::Collapse => collapse_whitespace(text.chars()), } } +/// Collapse whitespace into single spaces. +/// Newlines and spaces are collapsed separately. 
+fn collapse_whitespace(chars: impl Iterator) -> Vec { + let mut result = Vec::new(); + let mut last_whitespace = None; + for c in chars { + if c.is_whitespace() && c != '\n' { + if last_whitespace != Some(' ') { + result.push(' '); + last_whitespace = Some(' '); + } + } else if c == '\n' { + if last_whitespace != Some('\n') { + result.push(c); + last_whitespace = Some('\n'); + } + } else { + result.push(c); + last_whitespace = None; + } + } + result +} + /// Extract only the changed regions between two texts, with context for n-gram boundaries. /// /// Returns (original_affected_region, modified_affected_region) as Vec. @@ -269,15 +301,15 @@ fn count_ngrams_from_chars(chars: &[char], n: usize) -> Counts { #[allow(dead_code)] fn chr_f_ngram_counts(text: &str) -> Vec { - // Ignore whitespace. The original chrF implementation skips all - // whitespace. We should consider compressing multiple consecutive - // spaces into one -- this may reflect our task more closely. let text = match CHR_F_WHITESPACE { ChrfWhitespace::Unchanged => text.to_string(), ChrfWhitespace::Ignore => text .chars() .filter(|c| !c.is_whitespace()) .collect::(), + ChrfWhitespace::Collapse => collapse_whitespace(text.chars()) + .into_iter() + .collect::(), }; (1..=CHR_F_CHAR_ORDER) @@ -1175,4 +1207,14 @@ index abc123..def456 100644 assert!(counts.deleted_tokens >= 2); assert!(counts.inserted_tokens >= 2); } + + #[test] + fn test_whitespace_collapse() { + let text = "abc \n\n\n 123"; + let collapsed = collapse_whitespace(text.chars()); + assert_eq!( + collapsed, + vec!['a', 'b', 'c', ' ', '\n', ' ', '1', '2', '3'] + ); + } } From 87bc2aac5cc99e8425e1c29c1af6bb7bc15e280f Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 4 Mar 2026 17:36:25 +0100 Subject: [PATCH 309/548] Add support for streaming tool input to more providers (#50682) To test: - [x] Bedrock - [x] Copilot Chat - [x] Deepseek - [x] Open AI - [x] Open Router - [x] Vercel - [x] Vercel AI Gateway - [x] xAI - [x] Mistral Release 
Notes: - N/A --- crates/agent/src/thread.rs | 12 +- .../language_models/src/provider/bedrock.rs | 23 ++- .../src/provider/copilot_chat.rs | 21 +++ .../language_models/src/provider/deepseek.rs | 21 +++ .../language_models/src/provider/mistral.rs | 21 +++ .../language_models/src/provider/open_ai.rs | 142 +++++++++++++++++- .../src/provider/open_ai_compatible.rs | 4 + .../src/provider/open_router.rs | 21 +++ crates/language_models/src/provider/vercel.rs | 4 + .../src/provider/vercel_ai_gateway.rs | 4 + crates/language_models/src/provider/x_ai.rs | 7 +- crates/x_ai/src/x_ai.rs | 12 ++ 12 files changed, 279 insertions(+), 13 deletions(-) diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 616ae414d4d51a384a18460e8339fd07770fa6b9..be87a6a1e1e5ddba8a5d4b3b5bca82168a141840 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -2335,20 +2335,18 @@ impl Thread { ) { // Ensure the last message ends in the current tool use let last_message = self.pending_message(); - let push_new_tool_use = last_message.content.last_mut().is_none_or(|content| { + + let has_tool_use = last_message.content.iter_mut().rev().any(|content| { if let AgentMessageContent::ToolUse(last_tool_use) = content { if last_tool_use.id == tool_use.id { *last_tool_use = tool_use.clone(); - false - } else { - true + return true; } - } else { - true } + false }); - if push_new_tool_use { + if !has_tool_use { event_stream.send_tool_call( &tool_use.id, &tool_use.name, diff --git a/crates/language_models/src/provider/bedrock.rs b/crates/language_models/src/provider/bedrock.rs index bcf8401c1c14ae1a74bb7136141d0b35509cdd40..5b493fdf1087911372d8796cc88f4ad14eef8df0 100644 --- a/crates/language_models/src/provider/bedrock.rs +++ b/crates/language_models/src/provider/bedrock.rs @@ -658,6 +658,10 @@ impl LanguageModel for BedrockModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn telemetry_id(&self) -> String { format!("bedrock/{}", self.model.id()) } @@ 
-1200,8 +1204,25 @@ pub fn map_to_language_model_completion_events( .get_mut(&cb_delta.content_block_index) { tool_use.input_json.push_str(tool_output.input()); + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&tool_use.input_json), + ) { + Some(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: tool_use.id.clone().into(), + name: tool_use.name.clone().into(), + is_input_complete: false, + raw_input: tool_use.input_json.clone(), + input, + thought_signature: None, + }, + ))) + } else { + None + } + } else { + None } - None } Some(ContentBlockDelta::ReasoningContent(thinking)) => match thinking { ReasoningContentBlockDelta::Text(thoughts) => { diff --git a/crates/language_models/src/provider/copilot_chat.rs b/crates/language_models/src/provider/copilot_chat.rs index 4363430f865de63ed5fec0d6b40b085d9413fc2a..7d714cd93a2a93dbb9fd02ec4d2b95149bb43330 100644 --- a/crates/language_models/src/provider/copilot_chat.rs +++ b/crates/language_models/src/provider/copilot_chat.rs @@ -246,6 +246,10 @@ impl LanguageModel for CopilotChatLanguageModel { self.model.supports_tools() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_images(&self) -> bool { self.model.supports_vision() } @@ -455,6 +459,23 @@ pub fn map_to_language_model_completion_events( entry.thought_signature = Some(thought_signature); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: entry.thought_signature.clone(), + }, + ))); + } + } } if let Some(usage) = event.usage { diff --git a/crates/language_models/src/provider/deepseek.rs b/crates/language_models/src/provider/deepseek.rs index 
2a9f7322b1fb5d3d1e6713c5a084b83dc2b01ce2..0bf86ef15c91b16dbc496ff732b087fedd0da0a9 100644 --- a/crates/language_models/src/provider/deepseek.rs +++ b/crates/language_models/src/provider/deepseek.rs @@ -246,6 +246,10 @@ impl LanguageModel for DeepSeekLanguageModel { true } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool { true } @@ -469,6 +473,23 @@ impl DeepSeekEventMapper { entry.arguments.push_str(&arguments); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))); + } + } } } diff --git a/crates/language_models/src/provider/mistral.rs b/crates/language_models/src/provider/mistral.rs index 02d46dcaa7ce7acc76d85c93cad610a7d2489bf0..6af66f4e9a9d257b385c84a6c0c6d989f04c013f 100644 --- a/crates/language_models/src/provider/mistral.rs +++ b/crates/language_models/src/provider/mistral.rs @@ -280,6 +280,10 @@ impl LanguageModel for MistralLanguageModel { self.model.supports_tools() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool { self.model.supports_tools() } @@ -629,6 +633,23 @@ impl MistralEventMapper { entry.arguments.push_str(&arguments); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + 
thought_signature: None, + }, + ))); + } + } } } diff --git a/crates/language_models/src/provider/open_ai.rs b/crates/language_models/src/provider/open_ai.rs index 7fb65df0a534c7600f7315fd85d7adda0d66314a..57b3a6b20a9712e7c4d99b3ccfc48719e632da9d 100644 --- a/crates/language_models/src/provider/open_ai.rs +++ b/crates/language_models/src/provider/open_ai.rs @@ -328,6 +328,10 @@ impl LanguageModel for OpenAiLanguageModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_thinking(&self) -> bool { self.model.reasoning_effort().is_some() } @@ -824,6 +828,23 @@ impl OpenAiEventMapper { entry.arguments.push_str(&arguments); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))); + } + } } } } @@ -954,6 +975,20 @@ impl OpenAiResponseEventMapper { ResponsesStreamEvent::FunctionCallArgumentsDelta { item_id, delta, .. 
} => { if let Some(entry) = self.function_calls_by_item.get_mut(&item_id) { entry.arguments.push_str(&delta); + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + return vec![Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: LanguageModelToolUseId::from(entry.call_id.clone()), + name: entry.name.clone(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: None, + }, + ))]; + } } Vec::new() } @@ -1670,19 +1705,30 @@ mod tests { ]; let mapped = map_response_events(events); + assert_eq!(mapped.len(), 3); + // First event is the partial tool use (from FunctionCallArgumentsDelta) assert!(matches!( mapped[0], + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: false, + .. + }) + )); + // Second event is the complete tool use (from FunctionCallArgumentsDone) + assert!(matches!( + mapped[1], LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref id, ref name, ref raw_input, + is_input_complete: true, .. }) if id.to_string() == "call_123" && name.as_ref() == "get_weather" && raw_input == "{\"city\":\"Boston\"}" )); assert!(matches!( - mapped[1], + mapped[2], LanguageModelCompletionEvent::Stop(StopReason::ToolUse) )); } @@ -1878,13 +1924,27 @@ mod tests { ]; let mapped = map_response_events(events); + assert_eq!(mapped.len(), 3); + // First event is the partial tool use (from FunctionCallArgumentsDelta) assert!(matches!( mapped[0], - LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { ref raw_input, .. }) - if raw_input == "{\"city\":\"Boston\"}" + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: false, + .. + }) )); + // Second event is the complete tool use (from the Incomplete response output) assert!(matches!( mapped[1], + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + ref raw_input, + is_input_complete: true, + .. 
+ }) + if raw_input == "{\"city\":\"Boston\"}" + )); + assert!(matches!( + mapped[2], LanguageModelCompletionEvent::Stop(StopReason::MaxTokens) )); } @@ -1976,4 +2036,80 @@ mod tests { LanguageModelCompletionEvent::Stop(StopReason::ToolUse) )); } + + #[test] + fn responses_stream_emits_partial_tool_use_events() { + let events = vec![ + ResponsesStreamEvent::OutputItemAdded { + output_index: 0, + sequence_number: None, + item: ResponseOutputItem::FunctionCall(ResponseFunctionToolCall { + id: Some("item_fn".to_string()), + status: Some("in_progress".to_string()), + name: Some("get_weather".to_string()), + call_id: Some("call_abc".to_string()), + arguments: String::new(), + }), + }, + ResponsesStreamEvent::FunctionCallArgumentsDelta { + item_id: "item_fn".into(), + output_index: 0, + delta: "{\"city\":\"Bos".into(), + sequence_number: None, + }, + ResponsesStreamEvent::FunctionCallArgumentsDelta { + item_id: "item_fn".into(), + output_index: 0, + delta: "ton\"}".into(), + sequence_number: None, + }, + ResponsesStreamEvent::FunctionCallArgumentsDone { + item_id: "item_fn".into(), + output_index: 0, + arguments: "{\"city\":\"Boston\"}".into(), + sequence_number: None, + }, + ResponsesStreamEvent::Completed { + response: ResponseSummary::default(), + }, + ]; + + let mapped = map_response_events(events); + // Two partial events + one complete event + Stop + assert!(mapped.len() >= 3); + + // The last complete ToolUse event should have is_input_complete: true + let complete_tool_use = mapped.iter().find(|e| { + matches!( + e, + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: true, + .. 
+ }) + ) + }); + assert!( + complete_tool_use.is_some(), + "should have a complete tool use event" + ); + + // All ToolUse events before the final one should have is_input_complete: false + let tool_uses: Vec<_> = mapped + .iter() + .filter(|e| matches!(e, LanguageModelCompletionEvent::ToolUse(_))) + .collect(); + assert!( + tool_uses.len() >= 2, + "should have at least one partial and one complete event" + ); + + let last = tool_uses.last().unwrap(); + assert!(matches!( + last, + LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + is_input_complete: true, + .. + }) + )); + } } diff --git a/crates/language_models/src/provider/open_ai_compatible.rs b/crates/language_models/src/provider/open_ai_compatible.rs index d47ea26c594ab0abb5c859ed549d43e0ed3f859b..b478bc843c05e01d428561d9c255ef0d2ca97148 100644 --- a/crates/language_models/src/provider/open_ai_compatible.rs +++ b/crates/language_models/src/provider/open_ai_compatible.rs @@ -319,6 +319,10 @@ impl LanguageModel for OpenAiCompatibleLanguageModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_split_token_display(&self) -> bool { true } diff --git a/crates/language_models/src/provider/open_router.rs b/crates/language_models/src/provider/open_router.rs index 7a74125d606ddc4be56d113fbbf3fa66866fb595..e0e56bc1beadd8309a4c1b3c7626efa99c1c6473 100644 --- a/crates/language_models/src/provider/open_router.rs +++ b/crates/language_models/src/provider/open_router.rs @@ -314,6 +314,10 @@ impl LanguageModel for OpenRouterLanguageModel { self.model.supports_tool_calls() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_thinking(&self) -> bool { matches!(self.model.mode, OpenRouterModelMode::Thinking { .. 
}) } @@ -650,6 +654,23 @@ impl OpenRouterEventMapper { entry.thought_signature = Some(signature); } } + + if !entry.id.is_empty() && !entry.name.is_empty() { + if let Ok(input) = serde_json::from_str::( + &partial_json_fixer::fix_json(&entry.arguments), + ) { + events.push(Ok(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: entry.id.clone().into(), + name: entry.name.as_str().into(), + is_input_complete: false, + input, + raw_input: entry.arguments.clone(), + thought_signature: entry.thought_signature.clone(), + }, + ))); + } + } } } diff --git a/crates/language_models/src/provider/vercel.rs b/crates/language_models/src/provider/vercel.rs index 3b324e46927f5864d83a5e4b74c46f5e39e8ab3a..b71da5b7db05710ee30115ab54379c9ee4e4c750 100644 --- a/crates/language_models/src/provider/vercel.rs +++ b/crates/language_models/src/provider/vercel.rs @@ -248,6 +248,10 @@ impl LanguageModel for VercelLanguageModel { true } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { match choice { LanguageModelToolChoice::Auto diff --git a/crates/language_models/src/provider/vercel_ai_gateway.rs b/crates/language_models/src/provider/vercel_ai_gateway.rs index 69c54e624b9e7289abaefbe7ab654d73df385b62..78f900de0c94fd3bbbff3962e92d1a8cb9f3e118 100644 --- a/crates/language_models/src/provider/vercel_ai_gateway.rs +++ b/crates/language_models/src/provider/vercel_ai_gateway.rs @@ -385,6 +385,10 @@ impl LanguageModel for VercelAiGatewayLanguageModel { } } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_split_token_display(&self) -> bool { true } diff --git a/crates/language_models/src/provider/x_ai.rs b/crates/language_models/src/provider/x_ai.rs index 06564224dea9621d594e5cf3f4a84093f1620446..f1f8bb658f04a91341951d1602af04f858af7bd3 100644 --- a/crates/language_models/src/provider/x_ai.rs +++ b/crates/language_models/src/provider/x_ai.rs @@ -257,6 +257,10 @@ impl 
LanguageModel for XAiLanguageModel { self.model.supports_images() } + fn supports_streaming_tools(&self) -> bool { + true + } + fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool { match choice { LanguageModelToolChoice::Auto @@ -265,8 +269,7 @@ impl LanguageModel for XAiLanguageModel { } } fn tool_input_format(&self) -> LanguageModelToolSchemaFormat { - let model_id = self.model.id().trim().to_lowercase(); - if model_id.eq(x_ai::Model::Grok4.id()) || model_id.eq(x_ai::Model::GrokCodeFast1.id()) { + if self.model.requires_json_schema_subset() { LanguageModelToolSchemaFormat::JsonSchemaSubset } else { LanguageModelToolSchemaFormat::JsonSchema diff --git a/crates/x_ai/src/x_ai.rs b/crates/x_ai/src/x_ai.rs index 072a893a6a8f4fc7fbc8a6f4f5ed43316915b974..1abb2b53771fa1e29e2979560e9f394744b26158 100644 --- a/crates/x_ai/src/x_ai.rs +++ b/crates/x_ai/src/x_ai.rs @@ -165,6 +165,18 @@ impl Model { } } + pub fn requires_json_schema_subset(&self) -> bool { + match self { + Self::Grok4 + | Self::Grok4FastReasoning + | Self::Grok4FastNonReasoning + | Self::Grok41FastNonReasoning + | Self::Grok41FastReasoning + | Self::GrokCodeFast1 => true, + _ => false, + } + } + pub fn supports_prompt_cache_key(&self) -> bool { false } From 68cb60afdd53007496320a54b9ef8da12abfa5d9 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 4 Mar 2026 17:41:34 +0100 Subject: [PATCH 310/548] Staff-ship streaming edit file tool (#50720) Release Notes: - N/A --- crates/feature_flags/src/flags.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/feature_flags/src/flags.rs b/crates/feature_flags/src/flags.rs index c8524022d9d8295900638a09c528dfc3fdb85afd..77a98aae05572ac72b239db8bb3d4496bd1c0f4d 100644 --- a/crates/feature_flags/src/flags.rs +++ b/crates/feature_flags/src/flags.rs @@ -69,6 +69,6 @@ impl FeatureFlag for StreamingEditFileToolFeatureFlag { const NAME: &'static str = "streaming-edit-file-tool"; fn enabled_for_staff() -> bool { - false + true 
} } From 83b05f1cbba7ab38d2aaae869ecfc3fef57cfcc0 Mon Sep 17 00:00:00 2001 From: xcb3d <122720156+xcb3d@users.noreply.github.com> Date: Wed, 4 Mar 2026 23:42:14 +0700 Subject: [PATCH 311/548] Fix terminal path click failing when path is prefixed with '0:' (#50663) The path hyperlink regex's middle-char pattern [[:(][^0-9()]](cci:2://file:///d:/zed/crates/fs/src/fs.rs:89:0-157:1) allowed colon+space because space was not in the exclusion set. This caused `0: foo/bar.txt` to be matched as a single path instead of just `foo/bar.txt`. Fix: add space to the exclusion class: [[:(][^0-9()\\ ]](cci:2://file:///d:/zed/crates/fs/src/fs.rs:89:0-157:1) Closes #50531 - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) (N/A) Release Notes: - Fixed terminal Ctrl-click path detection failing when path is preceded by a prefix like `0:` (#50531) --- assets/settings/default.json | 4 ++-- crates/terminal/src/terminal_hyperlinks.rs | 12 ++++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index b193c0f60d0087972381f4f85f2b864b52fdbc7d..6593c3b192cb9ac388c67170fe20787bdbcf1bbc 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -1831,8 +1831,8 @@ " (", " # multi-char path: first char (not opening delimiter, space, or box drawing char)", " [^({\\[<\"'`\\ \\u2500-\\u257F]", - " # middle chars: non-space, and colon/paren only if not followed by digit/paren", - " ([^\\ :(]|[:(][^0-9()])*", + " # middle chars: non-space, and colon/paren only if not followed by digit/paren/space", + " ([^\\ :(]|[:(][^0-9()\\ ])*", " # last char: not closing delimiter or colon", " [^()}\\]>\"'`.,;:\\ ]", " |", diff --git a/crates/terminal/src/terminal_hyperlinks.rs 
b/crates/terminal/src/terminal_hyperlinks.rs index d239f680f9e2ecbd3d320e731d3cc74303a552ed..0ca6cb2edd916019a4a7822830faa1fdfaa238f3 100644 --- a/crates/terminal/src/terminal_hyperlinks.rs +++ b/crates/terminal/src/terminal_hyperlinks.rs @@ -905,6 +905,18 @@ mod tests { ); } + #[test] + // + fn issue_50531() { + // Paths preceded by "N:" prefix (e.g. grep output line numbers) + // should still be clickable + test_path!("0: ‹«foo/👉bar.txt»›"); + test_path!("0: ‹«👉foo/bar.txt»›"); + test_path!("42: ‹«👉foo/bar.txt»›"); + test_path!("1: ‹«/👉test/cool.rs»›"); + test_path!("1: ‹«/👉test/cool.rs»:«4»:«2»›"); + } + #[test] // fn issue_46795() { From 55ae7b09e68d579b7d7937066941070609d54c96 Mon Sep 17 00:00:00 2001 From: Cole Miller Date: Wed, 4 Mar 2026 12:25:12 -0500 Subject: [PATCH 312/548] Increase timeout for `test_random_blocks` (#50724) See https://github.com/zed-industries/zed/actions/runs/22679055818 Release Notes: - N/A --- .config/nextest.toml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.config/nextest.toml b/.config/nextest.toml index ab03abd839600e1a84ebd5eea9709f60cea1c7f0..b18a3f31e4a75af0636b4d8d8fdd81f48d8d93e6 100644 --- a/.config/nextest.toml +++ b/.config/nextest.toml @@ -42,3 +42,7 @@ slow-timeout = { period = "300s", terminate-after = 1 } [[profile.default.overrides]] filter = 'package(editor) and test(test_random_split_editor)' slow-timeout = { period = "300s", terminate-after = 1 } + +[[profile.default.overrides]] +filter = 'package(editor) and test(test_random_blocks)' +slow-timeout = { period = "300s", terminate-after = 1 } From 74e747a6c77ee7c5a6eb5dd70e4aae23002a2947 Mon Sep 17 00:00:00 2001 From: Kyle Kelley Date: Wed, 4 Mar 2026 09:58:51 -0800 Subject: [PATCH 313/548] repl: Support kernel language aliases in REPL (#49762) Add a `kernel_language_names` field to `LanguageConfig` that allows languages to declare alternative names that Jupyter kernels may use. 
This fixes REPL matching for cases where a kernel reports a different language identifier than Zed's language name. For example, the Nu extension would set `kernel_language_names = ["nushell", "nu"]` in its config.toml, enabling REPL support for nu-jupyter-kernel which reports `"language": "nushell"` in its kernelspec. The change consolidates kernel language matching logic into a single `Language::matches_kernel_language()` method that checks the code fence block name, language name, and the new aliases list (all case-insensitive). - [x] Done a self-review taking into account security and performance aspects Release Notes: - Added `kernel_language_names` field for extensions to self identify REPL mappings --- crates/language/src/language.rs | 23 +++++++++++++++++++++++ crates/repl/src/repl_editor.rs | 9 +++------ crates/repl/src/repl_store.rs | 9 ++++----- 3 files changed, 30 insertions(+), 11 deletions(-) diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index fe5c5d09aa0765e2c305d88c65e86d6832443b1e..435d3d4e27998cb135dc3145ad7800ed8da97c9e 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -835,6 +835,11 @@ pub struct LanguageConfig { pub name: LanguageName, /// The name of this language for a Markdown code fence block pub code_fence_block_name: Option>, + /// Alternative language names that Jupyter kernels may report for this language. + /// Used when a kernel's `language` field differs from Zed's language name. + /// For example, the Nu extension would set this to `["nushell"]`. + #[serde(default)] + pub kernel_language_names: Vec>, // The name of the grammar in a WASM bundle (experimental). pub grammar: Option>, /// The criteria for matching this language to a given file. 
@@ -1141,6 +1146,7 @@ impl Default for LanguageConfig { Self { name: LanguageName::new_static(""), code_fence_block_name: None, + kernel_language_names: Default::default(), grammar: None, matcher: LanguageMatcher::default(), brackets: Default::default(), @@ -2075,6 +2081,23 @@ impl Language { .unwrap_or_else(|| self.config.name.as_ref().to_lowercase().into()) } + pub fn matches_kernel_language(&self, kernel_language: &str) -> bool { + let kernel_language_lower = kernel_language.to_lowercase(); + + if self.code_fence_block_name().to_lowercase() == kernel_language_lower { + return true; + } + + if self.config.name.as_ref().to_lowercase() == kernel_language_lower { + return true; + } + + self.config + .kernel_language_names + .iter() + .any(|name| name.to_lowercase() == kernel_language_lower) + } + pub fn context_provider(&self) -> Option> { self.context_provider.clone() } diff --git a/crates/repl/src/repl_editor.rs b/crates/repl/src/repl_editor.rs index 6e061c3e2e37aa94074f17f94791ad147f56f344..56b79e20ffca74ab3f9f9c7948a7caeffc4ad4ce 100644 --- a/crates/repl/src/repl_editor.rs +++ b/crates/repl/src/repl_editor.rs @@ -636,12 +636,9 @@ fn language_supported(language: &Arc, cx: &mut App) -> bool { let store = ReplStore::global(cx); let store_read = store.read(cx); - // Since we're just checking for general language support, we only need to look at - // the pure Jupyter kernels - these are all the globally available ones - store_read.pure_jupyter_kernel_specifications().any(|spec| { - // Convert to lowercase for case-insensitive comparison since kernels might report "python" while our language is "Python" - spec.language().as_ref().to_lowercase() == language.name().as_ref().to_lowercase() - }) + store_read + .pure_jupyter_kernel_specifications() + .any(|spec| language.matches_kernel_language(spec.language().as_ref())) } fn get_language(editor: WeakEntity, cx: &mut App) -> Option> { diff --git a/crates/repl/src/repl_store.rs b/crates/repl/src/repl_store.rs index 
1c6ce99c2177260c1b9aaf1733326ddbda85a64f..8da94eaa7fe40e28a1d6336a648d7eae5c6767ae 100644 --- a/crates/repl/src/repl_store.rs +++ b/crates/repl/src/repl_store.rs @@ -289,7 +289,6 @@ impl ReplStore { } let language_at_cursor = language_at_cursor?; - let language_name = language_at_cursor.code_fence_block_name().to_lowercase(); // Prefer the recommended (active toolchain) kernel if it has ipykernel if let Some(active_path) = self.active_python_toolchain_path(worktree_id) { @@ -297,7 +296,7 @@ impl ReplStore { .kernel_specifications_for_worktree(worktree_id) .find(|spec| { spec.has_ipykernel() - && spec.language().as_ref().to_lowercase() == language_name + && language_at_cursor.matches_kernel_language(spec.language().as_ref()) && spec.path().as_ref() == active_path.as_ref() }) .cloned(); @@ -312,7 +311,7 @@ impl ReplStore { .find(|spec| { matches!(spec, KernelSpecification::PythonEnv(_)) && spec.has_ipykernel() - && spec.language().as_ref().to_lowercase() == language_name + && language_at_cursor.matches_kernel_language(spec.language().as_ref()) }) .cloned(); if python_env.is_some() { @@ -350,10 +349,10 @@ impl ReplStore { return Some(found_by_name); } - let language_name = language_at_cursor.code_fence_block_name().to_lowercase(); self.kernel_specifications_for_worktree(worktree_id) .find(|spec| { - spec.has_ipykernel() && spec.language().as_ref().to_lowercase() == language_name + spec.has_ipykernel() + && language_at_cursor.matches_kernel_language(spec.language().as_ref()) }) .cloned() } From f3e4c152a366123e3abe3fb992998874f27a8ea6 Mon Sep 17 00:00:00 2001 From: Viraj Bhartiya Date: Wed, 4 Mar 2026 23:43:32 +0530 Subject: [PATCH 314/548] project_panel: Fix scrolling in empty area below file list (#50683) Closes #50624 The empty bottom section of the project panel showed a horizontal scrollbar on hover, but scrolling didn't work there. 
Added a scroll wheel handler to the blank area that forwards scroll events to the uniform list's scroll handle, making both horizontal and vertical scrolling work from anywhere in the panel. Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zedindustries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed project panel empty area showing a non-functional scrollbar; scrolling now works from anywhere in the panel --------- Co-authored-by: MrSubidubi --- Cargo.lock | 60 ++++++------- Cargo.toml | 11 ++- .../src/session/running/memory_view.rs | 4 +- crates/gpui/src/elements/div.rs | 12 +-- crates/gpui/src/elements/list.rs | 4 +- crates/gpui/src/elements/svg.rs | 5 +- crates/gpui/src/geometry.rs | 90 +++---------------- crates/miniprofiler_ui/src/miniprofiler_ui.rs | 2 +- crates/project_panel/src/project_panel.rs | 19 ++++ .../terminal_view/src/terminal_scrollbar.rs | 6 +- crates/ui/src/components/scrollbar.rs | 18 ++-- crates/workspace/src/pane.rs | 4 +- 12 files changed, 95 insertions(+), 140 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e09d057f706615a58f8762b51fd01965c0c43614..d1b0a39869a44af1295235214836d446c509c360 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -170,7 +170,7 @@ dependencies = [ "context_server", "ctor", "db", - "derive_more 0.99.20", + "derive_more", "editor", "env_logger 0.11.8", "eval_utils", @@ -242,7 +242,7 @@ dependencies = [ "anyhow", "async-broadcast", "async-trait", - "derive_more 2.0.1", + "derive_more", "futures 0.3.31", "log", "serde", @@ -256,7 +256,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44bc1fef9c32f03bce2ab44af35b6f483bfd169bf55cc59beeb2e3b1a00ae4d1" dependencies = [ "anyhow", - "derive_more 2.0.1", + "derive_more", 
"schemars", "serde", "serde_json", @@ -815,7 +815,7 @@ dependencies = [ "anyhow", "async-trait", "collections", - "derive_more 0.99.20", + "derive_more", "extension", "futures 0.3.31", "gpui", @@ -3002,7 +3002,7 @@ dependencies = [ "cloud_llm_client", "collections", "credentials_provider", - "derive_more 0.99.20", + "derive_more", "feature_flags", "fs", "futures 0.3.31", @@ -3440,7 +3440,7 @@ name = "command_palette_hooks" version = "0.1.0" dependencies = [ "collections", - "derive_more 0.99.20", + "derive_more", "gpui", "workspace", ] @@ -3616,15 +3616,18 @@ dependencies = [ [[package]] name = "convert_case" -version = "0.4.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" +dependencies = [ + "unicode-segmentation", +] [[package]] name = "convert_case" -version = "0.8.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" dependencies = [ "unicode-segmentation", ] @@ -4794,34 +4797,23 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" -dependencies = [ - "convert_case 0.4.0", - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.106", -] - -[[package]] -name = "derive_more" -version = "2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" dependencies = [ "derive_more-impl", ] [[package]] name = "derive_more-impl" -version = 
"2.0.1" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" dependencies = [ + "convert_case 0.10.0", "proc-macro2", "quote", + "rustc_version", "syn 2.0.106", "unicode-xid", ] @@ -7130,7 +7122,7 @@ version = "0.8.0" source = "git+https://github.com/zed-industries/gh-workflow?rev=c9eac0ed361583e1072860d96776fa52775b82ac#c9eac0ed361583e1072860d96776fa52775b82ac" dependencies = [ "async-trait", - "derive_more 2.0.1", + "derive_more", "derive_setters", "gh-workflow-macros", "indexmap", @@ -7199,7 +7191,7 @@ dependencies = [ "askpass", "async-trait", "collections", - "derive_more 0.99.20", + "derive_more", "futures 0.3.31", "git2", "gpui", @@ -7578,7 +7570,7 @@ dependencies = [ "core-text", "core-video", "ctor", - "derive_more 0.99.20", + "derive_more", "embed-resource", "env_logger 0.11.8", "etagere", @@ -7706,7 +7698,7 @@ dependencies = [ "core-text", "core-video", "ctor", - "derive_more 0.99.20", + "derive_more", "dispatch2", "etagere", "foreign-types 0.5.0", @@ -8264,7 +8256,7 @@ dependencies = [ "async-fs", "async-tar", "bytes 1.11.1", - "derive_more 0.99.20", + "derive_more", "futures 0.3.31", "http 1.3.1", "http-body 1.0.1", @@ -15556,7 +15548,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more 0.99.20", + "derive_more", "gpui", "log", "schemars", @@ -17339,7 +17331,7 @@ version = "0.1.0" dependencies = [ "anyhow", "collections", - "derive_more 0.99.20", + "derive_more", "fs", "futures 0.3.31", "gpui", diff --git a/Cargo.toml b/Cargo.toml index 40a81636a4fd558ddae317f051587f09409cb748..d88868f9582e34228991847e30aeaeab565933a1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -538,7 +538,16 @@ criterion = { version = "0.5", features = ["html_reports"] } ctor = "0.4.0" dap-types = { git = "https://github.com/zed-industries/dap-types", rev = 
"1b461b310481d01e02b2603c16d7144b926339f8" } dashmap = "6.0" -derive_more = "0.99.17" +derive_more = { version = "2.1.1", features = [ + "add", + "add_assign", + "deref", + "deref_mut", + "from_str", + "mul", + "mul_assign", + "not", +] } dirs = "4.0" documented = "0.9.1" dotenvy = "0.15.0" diff --git a/crates/debugger_ui/src/session/running/memory_view.rs b/crates/debugger_ui/src/session/running/memory_view.rs index f10e5179e37f87be0e27985b557fcb63cf089a42..69ea556018fdadeb1e270b1d7c2520d25752e670 100644 --- a/crates/debugger_ui/src/session/running/memory_view.rs +++ b/crates/debugger_ui/src/session/running/memory_view.rs @@ -133,7 +133,7 @@ impl ViewState { fn set_offset(&mut self, point: Point) { if point.y >= -Pixels::ZERO { self.schedule_scroll_up(); - } else if point.y <= -self.scroll_handle.max_offset().height { + } else if point.y <= -self.scroll_handle.max_offset().y { self.schedule_scroll_down(); } self.scroll_handle.set_offset(point); @@ -141,7 +141,7 @@ impl ViewState { } impl ScrollableHandle for ViewStateHandle { - fn max_offset(&self) -> gpui::Size { + fn max_offset(&self) -> gpui::Point { self.0.borrow().scroll_handle.max_offset() } diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 2b4a3c84e8111796bf7ce32a4c6ad83854ded6fd..58f11a7fa1fb876ef4b4ef80fedf1948423a24f5 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -1886,18 +1886,18 @@ impl Interactivity { // high for the maximum scroll, we round the scroll max to 2 decimal // places here. let padded_content_size = self.content_size + padding_size; - let scroll_max = (padded_content_size - bounds.size) + let scroll_max = Point::from(padded_content_size - bounds.size) .map(round_to_two_decimals) .max(&Default::default()); // Clamp scroll offset in case scroll max is smaller now (e.g., if children // were removed or the bounds became larger). 
let mut scroll_offset = scroll_offset.borrow_mut(); - scroll_offset.x = scroll_offset.x.clamp(-scroll_max.width, px(0.)); + scroll_offset.x = scroll_offset.x.clamp(-scroll_max.x, px(0.)); if scroll_to_bottom { - scroll_offset.y = -scroll_max.height; + scroll_offset.y = -scroll_max.y; } else { - scroll_offset.y = scroll_offset.y.clamp(-scroll_max.height, px(0.)); + scroll_offset.y = scroll_offset.y.clamp(-scroll_max.y, px(0.)); } if let Some(mut scroll_handle_state) = tracked_scroll_handle { @@ -3285,7 +3285,7 @@ impl ScrollAnchor { struct ScrollHandleState { offset: Rc>>, bounds: Bounds, - max_offset: Size, + max_offset: Point, child_bounds: Vec>, scroll_to_bottom: bool, overflow: Point, @@ -3329,7 +3329,7 @@ impl ScrollHandle { } /// Get the maximum scroll offset. - pub fn max_offset(&self) -> Size { + pub fn max_offset(&self) -> Point { self.0.borrow().max_offset } diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index 5403bf10eb9a078dfd113462644636b49d1840e4..92b5389fecf219c0c113f682463498902df4c07d 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -491,7 +491,7 @@ impl ListState { /// Returns the maximum scroll offset according to the items we have measured. /// This value remains constant while dragging to prevent the scrollbar from moving away unexpectedly. 
- pub fn max_offset_for_scrollbar(&self) -> Size { + pub fn max_offset_for_scrollbar(&self) -> Point { let state = self.0.borrow(); let bounds = state.last_layout_bounds.unwrap_or_default(); @@ -499,7 +499,7 @@ impl ListState { .scrollbar_drag_start_height .unwrap_or_else(|| state.items.summary().height); - Size::new(Pixels::ZERO, Pixels::ZERO.max(height - bounds.size.height)) + point(Pixels::ZERO, Pixels::ZERO.max(height - bounds.size.height)) } /// Returns the current scroll offset adjusted for the scrollbar diff --git a/crates/gpui/src/elements/svg.rs b/crates/gpui/src/elements/svg.rs index dff389fb93fe7abd2862be70731cc9e6fb613e94..a29b106c0e223b01340ecab27b45fdb94163d207 100644 --- a/crates/gpui/src/elements/svg.rs +++ b/crates/gpui/src/elements/svg.rs @@ -3,8 +3,7 @@ use std::{fs, path::Path, sync::Arc}; use crate::{ App, Asset, Bounds, Element, GlobalElementId, Hitbox, InspectorElementId, InteractiveElement, Interactivity, IntoElement, LayoutId, Pixels, Point, Radians, SharedString, Size, - StyleRefinement, Styled, TransformationMatrix, Window, geometry::Negate as _, point, px, - radians, size, + StyleRefinement, Styled, TransformationMatrix, Window, point, px, radians, size, }; use gpui_util::ResultExt; @@ -254,7 +253,7 @@ impl Transformation { .translate(center.scale(scale_factor) + self.translate.scale(scale_factor)) .rotate(self.rotate) .scale(self.scale) - .translate(center.scale(scale_factor).negate()) + .translate(center.scale(-scale_factor)) } } diff --git a/crates/gpui/src/geometry.rs b/crates/gpui/src/geometry.rs index 73fa9906267412c9f1c840d8403beeef4718119e..76157a06a587ac851d19f19fc5a4ed23c634bab5 100644 --- a/crates/gpui/src/geometry.rs +++ b/crates/gpui/src/geometry.rs @@ -78,6 +78,7 @@ pub trait Along { Deserialize, JsonSchema, Hash, + Neg, )] #[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)] #[repr(C)] @@ -182,12 +183,6 @@ impl Along for Point { } } -impl Negate for Point { - fn negate(self) -> Self { - 
self.map(Negate::negate) - } -} - impl Point { /// Scales the point by a given factor, which is typically derived from the resolution /// of a target display to ensure proper sizing of UI elements. @@ -393,7 +388,9 @@ impl Display for Point { /// /// This struct is generic over the type `T`, which can be any type that implements `Clone`, `Default`, and `Debug`. /// It is commonly used to specify dimensions for elements in a UI, such as a window or element. -#[derive(Refineable, Default, Clone, Copy, PartialEq, Div, Hash, Serialize, Deserialize)] +#[derive( + Add, Clone, Copy, Default, Deserialize, Div, Hash, Neg, PartialEq, Refineable, Serialize, Sub, +)] #[refineable(Debug, PartialEq, Serialize, Deserialize, JsonSchema)] #[repr(C)] pub struct Size { @@ -598,34 +595,6 @@ where } } -impl Sub for Size -where - T: Sub + Clone + Debug + Default + PartialEq, -{ - type Output = Size; - - fn sub(self, rhs: Self) -> Self::Output { - Size { - width: self.width - rhs.width, - height: self.height - rhs.height, - } - } -} - -impl Add for Size -where - T: Add + Clone + Debug + Default + PartialEq, -{ - type Output = Size; - - fn add(self, rhs: Self) -> Self::Output { - Size { - width: self.width + rhs.width, - height: self.height + rhs.height, - } - } -} - impl Mul for Size where T: Mul + Clone + Debug + Default + PartialEq, @@ -1245,6 +1214,15 @@ where } } +impl From> for Point { + fn from(size: Size) -> Self { + Self { + x: size.width, + y: size.height, + } + } +} + impl Bounds where T: Add + Clone + Debug + Default + PartialEq, @@ -3754,48 +3732,6 @@ impl Half for Rems { } } -/// Provides a trait for types that can negate their values. 
-pub trait Negate { - /// Returns the negation of the given value - fn negate(self) -> Self; -} - -impl Negate for i32 { - fn negate(self) -> Self { - -self - } -} - -impl Negate for f32 { - fn negate(self) -> Self { - -self - } -} - -impl Negate for DevicePixels { - fn negate(self) -> Self { - Self(-self.0) - } -} - -impl Negate for ScaledPixels { - fn negate(self) -> Self { - Self(-self.0) - } -} - -impl Negate for Pixels { - fn negate(self) -> Self { - Self(-self.0) - } -} - -impl Negate for Rems { - fn negate(self) -> Self { - Self(-self.0) - } -} - /// A trait for checking if a value is zero. /// /// This trait provides a method to determine if a value is considered to be zero. diff --git a/crates/miniprofiler_ui/src/miniprofiler_ui.rs b/crates/miniprofiler_ui/src/miniprofiler_ui.rs index 12b2bce77b5866e885483a847d40647f525207e6..9ae0a33471d31f32852b4b376bbc71ff0911c60b 100644 --- a/crates/miniprofiler_ui/src/miniprofiler_ui.rs +++ b/crates/miniprofiler_ui/src/miniprofiler_ui.rs @@ -464,7 +464,7 @@ impl Render for ProfilerWindow { let scroll_offset = self.scroll_handle.offset(); let max_offset = self.scroll_handle.max_offset(); - self.autoscroll = -scroll_offset.y >= (max_offset.height - px(24.)); + self.autoscroll = -scroll_offset.y >= (max_offset.y - px(24.)); if self.autoscroll { self.scroll_handle.scroll_to_bottom(); } diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 0dd19dddde7ab947cfe85a1fd9d96ad7b2d6f23d..082086d6a0a946e610be4c96e50d626b7000bda4 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -46,6 +46,7 @@ use settings::{ update_settings_file, }; use smallvec::SmallVec; +use std::ops::Neg; use std::{any::TypeId, time::Instant}; use std::{ cell::OnceCell, @@ -6691,6 +6692,24 @@ impl Render for ProjectPanel { .id("project-panel-blank-area") .block_mouse_except_scroll() .flex_grow() + .on_scroll_wheel({ + let scroll_handle = 
self.scroll_handle.clone(); + let entity_id = cx.entity().entity_id(); + move |event, window, cx| { + let state = scroll_handle.0.borrow(); + let base_handle = &state.base_handle; + let current_offset = base_handle.offset(); + let max_offset = base_handle.max_offset(); + let delta = event.delta.pixel_delta(window.line_height()); + let new_offset = (current_offset + delta) + .clamp(&max_offset.neg(), &Point::default()); + + if new_offset != current_offset { + base_handle.set_offset(new_offset); + cx.notify(entity_id); + } + } + }) .when( self.drag_target_entry.as_ref().is_some_and( |entry| match entry { diff --git a/crates/terminal_view/src/terminal_scrollbar.rs b/crates/terminal_view/src/terminal_scrollbar.rs index 82ca0b4097dad1be899879b0241aed50d8e60bfa..16dc580e877310b79501ca469b0351935dbb46f7 100644 --- a/crates/terminal_view/src/terminal_scrollbar.rs +++ b/crates/terminal_view/src/terminal_scrollbar.rs @@ -3,7 +3,7 @@ use std::{ rc::Rc, }; -use gpui::{Bounds, Point, Size, size}; +use gpui::{Bounds, Point, point, size}; use terminal::Terminal; use ui::{Pixels, ScrollableHandle, px}; @@ -46,9 +46,9 @@ impl TerminalScrollHandle { } impl ScrollableHandle for TerminalScrollHandle { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { let state = self.state.borrow(); - size( + point( Pixels::ZERO, state.total_lines.saturating_sub(state.viewport_lines) as f32 * state.line_height, ) diff --git a/crates/ui/src/components/scrollbar.rs b/crates/ui/src/components/scrollbar.rs index 8e8e89be9c0580a7820685b5690a996dfd2dade0..21d6aa46d0f90a0d48e267e935b00d9f263a30c5 100644 --- a/crates/ui/src/components/scrollbar.rs +++ b/crates/ui/src/components/scrollbar.rs @@ -9,8 +9,8 @@ use gpui::{ Along, App, AppContext as _, Axis as ScrollbarAxis, BorderStyle, Bounds, ContentMask, Context, Corner, Corners, CursorStyle, DispatchPhase, Div, Edges, Element, ElementId, Entity, EntityId, GlobalElementId, Hitbox, HitboxBehavior, Hsla, InteractiveElement, IntoElement, IsZero, - 
LayoutId, ListState, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Negate, - ParentElement, Pixels, Point, Position, Render, ScrollHandle, ScrollWheelEvent, Size, Stateful, + LayoutId, ListState, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, ParentElement, + Pixels, Point, Position, Render, ScrollHandle, ScrollWheelEvent, Size, Stateful, StatefulInteractiveElement, Style, Styled, Task, UniformListDecoration, UniformListScrollHandle, Window, ease_in_out, prelude::FluentBuilder as _, px, quad, relative, size, @@ -258,7 +258,7 @@ impl UniformListDecoration for ScrollbarStateWrapper { _cx: &mut App, ) -> gpui::AnyElement { ScrollbarElement { - origin: scroll_offset.negate(), + origin: -scroll_offset, state: self.0.clone(), } .into_any() @@ -911,7 +911,7 @@ impl ThumbState { } impl ScrollableHandle for UniformListScrollHandle { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { self.0.borrow().base_handle.max_offset() } @@ -929,7 +929,7 @@ impl ScrollableHandle for UniformListScrollHandle { } impl ScrollableHandle for ListState { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { self.max_offset_for_scrollbar() } @@ -955,7 +955,7 @@ impl ScrollableHandle for ListState { } impl ScrollableHandle for ScrollHandle { - fn max_offset(&self) -> Size { + fn max_offset(&self) -> Point { self.max_offset() } @@ -973,7 +973,7 @@ impl ScrollableHandle for ScrollHandle { } pub trait ScrollableHandle: 'static + Any + Sized + Clone { - fn max_offset(&self) -> Size; + fn max_offset(&self) -> Point; fn set_offset(&self, point: Point); fn offset(&self) -> Point; fn viewport(&self) -> Bounds; @@ -984,7 +984,7 @@ pub trait ScrollableHandle: 'static + Any + Sized + Clone { self.max_offset().along(axis) > Pixels::ZERO } fn content_size(&self) -> Size { - self.viewport().size + self.max_offset() + self.viewport().size + self.max_offset().into() } } @@ -1006,7 +1006,7 @@ impl ScrollbarLayout { fn compute_click_offset( &self, 
event_position: Point, - max_offset: Size, + max_offset: Point, event_type: ScrollbarMouseEvent, ) -> Pixels { let Self { diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index a39be125a5784b8c9d995bb750b9d7ff57a67191..81283427e83afb820b113250545d90f787030e25 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -3450,7 +3450,7 @@ impl Pane { cx, ) .children(pinned_tabs.len().ne(&0).then(|| { - let max_scroll = self.tab_bar_scroll_handle.max_offset().width; + let max_scroll = self.tab_bar_scroll_handle.max_offset().x; // We need to check both because offset returns delta values even when the scroll handle is not scrollable let is_scrolled = self.tab_bar_scroll_handle.offset().x < px(0.); // Avoid flickering when max_offset is very small (< 2px). @@ -7974,7 +7974,7 @@ mod tests { let scroll_handle = pane.update_in(cx, |pane, _window, _cx| pane.tab_bar_scroll_handle.clone()); assert!( - scroll_handle.max_offset().width > px(0.), + scroll_handle.max_offset().x > px(0.), "Test requires tab overflow to verify scrolling. Increase tab count or reduce window width." ); From 4af77fb33d29fac4aa36c6b9e5a9248d9951f2ee Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Wed, 4 Mar 2026 19:45:52 +0100 Subject: [PATCH 315/548] docs: Remove outdated reference to simple-completion-language-server (#50732) Closes #46811 Release Notes: - N/A --- docs/src/snippets.md | 22 +--------------------- 1 file changed, 1 insertion(+), 21 deletions(-) diff --git a/docs/src/snippets.md b/docs/src/snippets.md index 72cbec7b20ff694304a58a70cd9b142a60fc58a2..9f6b6c880be9edcace23f0e3fd0a02263549776a 100644 --- a/docs/src/snippets.md +++ b/docs/src/snippets.md @@ -42,24 +42,4 @@ To create JSX snippets you have to use `javascript.json` snippets file, instead ## Known Limitations - Only the first prefix is used when a list of prefixes is passed in. 
-- Currently only the `json` snippet file format is supported, even though the `simple-completion-language-server` supports both `json` and `toml` file formats. - -## See also - -The `feature_paths` option in `simple-completion-language-server` is disabled by default. - -If you want to enable it you can add the following to your `settings.json`: - -```json [settings] -{ - "lsp": { - "snippet-completion-server": { - "settings": { - "feature_paths": true - } - } - } -} -``` - -For more configuration information, see the [`simple-completion-language-server` instructions](https://github.com/zed-industries/simple-completion-language-server/tree/main). +- Currently only the `json` snippet file format is supported. From 5c91ebf1fe9f8716a945a669b2e9ebeb83cb6fbe Mon Sep 17 00:00:00 2001 From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com> Date: Wed, 4 Mar 2026 19:54:23 +0100 Subject: [PATCH 316/548] git: Move diff num stat calculation to repository snapshot layer (#50645) Follow up on: https://github.com/zed-industries/zed/pull/49519 This PR reworks how Zed calculates diff num stats by moving the calculation to the `RepositorySnapshot` layer, instead of the `GitPanel`. This has a couple of benefits: 1. Snapshot recalculations are already set up to recompute on file system changes and only update the affected files. This means that diff stats don't need to manage their own subscription or states anymore like they did in the original PR. 2. We're able to further separate the data layer from the UI. Before, the git panel owned all the subscriptions and tasks that refreshed the diff stat, now the repository does, which is more inline with the code base. 3. Integration tests are cleaner because `FakeRepository` can handle all the data and calculations of diff stat and make it accessible to more tests in the codebase. Because a lot of tests wouldn't initialize the git panel when they used the git repository. 4. 
This made implementing remote/collab support for this feature more streamlined. Remote clients wouldn't get the same buffer events as local clients, so they wouldn't know that the diff stat state has been updated and invalidate their data.
5. File system changes that happened outside of Zed now trigger the diff stat refresh because we're using the `RepositorySnapshot`.

I added some integration tests as well to make sure collab support is working this time.

Finally, adding the initial diff calculation to `compute_snapshot` didn't affect performance for me when checking against chromium's diff with HEAD~1000. So this should be a safe change to make.

I decided to add diff stats on the status entry struct because it made updating changed paths and the collab database much simpler than having two separate SumTrees. Also whenever the UI got a file's status it would check its diff stat as well, so this change makes that code more streamlined as well.

Before you mark this PR as ready for review, make sure that you have:

- [x] Added a solid test coverage and/or screenshots from doing manual testing.
- [x] Done a self-review taking into account security and performance aspects.
Release Notes: - N/A --- Cargo.lock | 1 + .../20221109000000_test_schema.sql | 2 + .../migrations/20251208000000_test_schema.sql | 2 + crates/collab/src/db.rs | 2 + crates/collab/src/db/queries/projects.rs | 149 +----------- crates/collab/src/db/queries/rooms.rs | 2 +- .../db/tables/project_repository_statuses.rs | 2 + crates/collab/tests/integration/git_tests.rs | 223 +++++++++++++++++- crates/fs/src/fake_git_repo.rs | 199 +++++++--------- crates/git/src/repository.rs | 63 ++--- crates/git/src/status.rs | 52 ++-- crates/git_ui/src/git_panel.rs | 153 ++++-------- crates/lsp/Cargo.toml | 4 +- crates/lsp/src/lsp.rs | 10 +- crates/project/src/git_store.rs | 196 +++++++-------- .../tests/integration/project_tests.rs | 39 ++- crates/proto/proto/git.proto | 25 +- crates/proto/proto/zed.proto | 5 +- crates/proto/src/proto.rs | 4 - .../remote_server/src/remote_editing_tests.rs | 124 ---------- 20 files changed, 562 insertions(+), 695 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d1b0a39869a44af1295235214836d446c509c360..c4ec49e50c3aa75e5e470414da470301e6f77e04 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10025,6 +10025,7 @@ dependencies = [ "ctor", "futures 0.3.31", "gpui", + "gpui_util", "log", "lsp-types", "parking_lot", diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 71e39fb595656e0dcdc53d97705b87a216ceb0f3..3e4b5c2ce211f68ef7e12895b542db5e6e3ea47c 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -122,6 +122,8 @@ CREATE TABLE "project_repository_statuses" ( "status_kind" INT4 NOT NULL, "first_status" INT4 NULL, "second_status" INT4 NULL, + "lines_added" INT4 NULL, + "lines_deleted" INT4 NULL, "scan_id" INT8 NOT NULL, "is_deleted" BOOL NOT NULL, PRIMARY KEY (project_id, repository_id, repo_path) diff --git a/crates/collab/migrations/20251208000000_test_schema.sql 
b/crates/collab/migrations/20251208000000_test_schema.sql index 493be3823e25a433d4a6a27a21c508f218dc68d1..0f4e4f2d2e3925ea1e4d2b964c5e4f159f393b4f 100644 --- a/crates/collab/migrations/20251208000000_test_schema.sql +++ b/crates/collab/migrations/20251208000000_test_schema.sql @@ -315,6 +315,8 @@ CREATE TABLE public.project_repository_statuses ( status_kind integer NOT NULL, first_status integer, second_status integer, + lines_added integer, + lines_deleted integer, scan_id bigint NOT NULL, is_deleted boolean NOT NULL ); diff --git a/crates/collab/src/db.rs b/crates/collab/src/db.rs index 57fb0df86495dc2013e7cd780c2e62e57298bd11..d8803c253f5feef8ef5e040f3ea112abcc688f52 100644 --- a/crates/collab/src/db.rs +++ b/crates/collab/src/db.rs @@ -732,6 +732,8 @@ fn db_status_to_proto( status: Some(proto::GitFileStatus { variant: Some(variant), }), + diff_stat_added: entry.lines_added.map(|v| v as u32), + diff_stat_deleted: entry.lines_deleted.map(|v| v as u32), }) } diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index fa3f99e1483e8a5d8410378493556b189eff78f1..24cf639a715aa9b88da80375b389debaea0c4295 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -334,147 +334,6 @@ impl Database { .await?; } - // Backward-compatibility for old Zed clients. - // - // Remove this block when Zed 1.80 stable has been out for a week. 
- { - if !update.updated_repositories.is_empty() { - project_repository::Entity::insert_many( - update.updated_repositories.iter().map(|repository| { - project_repository::ActiveModel { - project_id: ActiveValue::set(project_id), - legacy_worktree_id: ActiveValue::set(Some(worktree_id)), - id: ActiveValue::set(repository.repository_id as i64), - scan_id: ActiveValue::set(update.scan_id as i64), - is_deleted: ActiveValue::set(false), - branch_summary: ActiveValue::Set( - repository - .branch_summary - .as_ref() - .map(|summary| serde_json::to_string(summary).unwrap()), - ), - current_merge_conflicts: ActiveValue::Set(Some( - serde_json::to_string(&repository.current_merge_conflicts) - .unwrap(), - )), - // Old clients do not use abs path, entry ids, head_commit_details, or merge_message. - abs_path: ActiveValue::set(String::new()), - entry_ids: ActiveValue::set("[]".into()), - head_commit_details: ActiveValue::set(None), - merge_message: ActiveValue::set(None), - remote_upstream_url: ActiveValue::set(None), - remote_origin_url: ActiveValue::set(None), - } - }), - ) - .on_conflict( - OnConflict::columns([ - project_repository::Column::ProjectId, - project_repository::Column::Id, - ]) - .update_columns([ - project_repository::Column::ScanId, - project_repository::Column::BranchSummary, - project_repository::Column::CurrentMergeConflicts, - ]) - .to_owned(), - ) - .exec(&*tx) - .await?; - - let has_any_statuses = update - .updated_repositories - .iter() - .any(|repository| !repository.updated_statuses.is_empty()); - - if has_any_statuses { - project_repository_statuses::Entity::insert_many( - update.updated_repositories.iter().flat_map( - |repository: &proto::RepositoryEntry| { - repository.updated_statuses.iter().map(|status_entry| { - let (repo_path, status_kind, first_status, second_status) = - proto_status_to_db(status_entry.clone()); - project_repository_statuses::ActiveModel { - project_id: ActiveValue::set(project_id), - repository_id: ActiveValue::set( - 
repository.repository_id as i64, - ), - scan_id: ActiveValue::set(update.scan_id as i64), - is_deleted: ActiveValue::set(false), - repo_path: ActiveValue::set(repo_path), - status: ActiveValue::set(0), - status_kind: ActiveValue::set(status_kind), - first_status: ActiveValue::set(first_status), - second_status: ActiveValue::set(second_status), - } - }) - }, - ), - ) - .on_conflict( - OnConflict::columns([ - project_repository_statuses::Column::ProjectId, - project_repository_statuses::Column::RepositoryId, - project_repository_statuses::Column::RepoPath, - ]) - .update_columns([ - project_repository_statuses::Column::ScanId, - project_repository_statuses::Column::StatusKind, - project_repository_statuses::Column::FirstStatus, - project_repository_statuses::Column::SecondStatus, - ]) - .to_owned(), - ) - .exec(&*tx) - .await?; - } - - for repo in &update.updated_repositories { - if !repo.removed_statuses.is_empty() { - project_repository_statuses::Entity::update_many() - .filter( - project_repository_statuses::Column::ProjectId - .eq(project_id) - .and( - project_repository_statuses::Column::RepositoryId - .eq(repo.repository_id), - ) - .and( - project_repository_statuses::Column::RepoPath - .is_in(repo.removed_statuses.iter()), - ), - ) - .set(project_repository_statuses::ActiveModel { - is_deleted: ActiveValue::Set(true), - scan_id: ActiveValue::Set(update.scan_id as i64), - ..Default::default() - }) - .exec(&*tx) - .await?; - } - } - } - - if !update.removed_repositories.is_empty() { - project_repository::Entity::update_many() - .filter( - project_repository::Column::ProjectId - .eq(project_id) - .and(project_repository::Column::LegacyWorktreeId.eq(worktree_id)) - .and(project_repository::Column::Id.is_in( - update.removed_repositories.iter().map(|id| *id as i64), - )), - ) - .set(project_repository::ActiveModel { - is_deleted: ActiveValue::Set(true), - scan_id: ActiveValue::Set(update.scan_id as i64), - ..Default::default() - }) - .exec(&*tx) - .await?; - } - } 
- let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?; Ok(connection_ids) }) @@ -552,6 +411,12 @@ impl Database { status_kind: ActiveValue::set(status_kind), first_status: ActiveValue::set(first_status), second_status: ActiveValue::set(second_status), + lines_added: ActiveValue::set( + status_entry.diff_stat_added.map(|v| v as i32), + ), + lines_deleted: ActiveValue::set( + status_entry.diff_stat_deleted.map(|v| v as i32), + ), } }), ) @@ -566,6 +431,8 @@ impl Database { project_repository_statuses::Column::StatusKind, project_repository_statuses::Column::FirstStatus, project_repository_statuses::Column::SecondStatus, + project_repository_statuses::Column::LinesAdded, + project_repository_statuses::Column::LinesDeleted, ]) .to_owned(), ) diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index 7c007a570a0cb25c5302495d7342882eec0e1942..b4cbd83167b227542d8de1022b7e2cf49f5a7645 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -738,7 +738,7 @@ impl Database { while let Some(db_status) = db_statuses.next().await { let db_status: project_repository_statuses::Model = db_status?; if db_status.is_deleted { - removed_statuses.push(db_status.repo_path); + removed_statuses.push(db_status.repo_path.clone()); } else { updated_statuses.push(db_status_to_proto(db_status)?); } diff --git a/crates/collab/src/db/tables/project_repository_statuses.rs b/crates/collab/src/db/tables/project_repository_statuses.rs index 7bb903d45085467a3285a58f8afdd7a29339731a..8160d8a03c2a3b4dd0db7675489eeafcef020a9a 100644 --- a/crates/collab/src/db/tables/project_repository_statuses.rs +++ b/crates/collab/src/db/tables/project_repository_statuses.rs @@ -17,6 +17,8 @@ pub struct Model { pub first_status: Option, /// For unmerged entries, this is the `second_head` status. For tracked entries, this is the `worktree_status`. 
pub second_status: Option, + pub lines_added: Option, + pub lines_deleted: Option, pub scan_id: i64, pub is_deleted: bool, } diff --git a/crates/collab/tests/integration/git_tests.rs b/crates/collab/tests/integration/git_tests.rs index 6e50e41bade5f5dfdf124f5a6d659e81fc2ce0f6..dccc99a07769e66a3eb318a8201d8e14a29ef4f2 100644 --- a/crates/collab/tests/integration/git_tests.rs +++ b/crates/collab/tests/integration/git_tests.rs @@ -1,16 +1,40 @@ use std::path::{Path, PathBuf}; use call::ActiveCall; -use git::status::{FileStatus, StatusCode, TrackedStatus}; -use git_ui::project_diff::ProjectDiff; +use collections::HashMap; +use git::{ + repository::RepoPath, + status::{DiffStat, FileStatus, StatusCode, TrackedStatus}, +}; +use git_ui::{git_panel::GitPanel, project_diff::ProjectDiff}; use gpui::{AppContext as _, BackgroundExecutor, TestAppContext, VisualTestContext}; use project::ProjectPath; use serde_json::json; + use util::{path, rel_path::rel_path}; use workspace::{MultiWorkspace, Workspace}; use crate::TestServer; +fn collect_diff_stats( + panel: &gpui::Entity, + cx: &C, +) -> HashMap { + panel.read_with(cx, |panel, cx| { + let Some(repo) = panel.active_repository() else { + return HashMap::default(); + }; + let snapshot = repo.read(cx).snapshot(); + let mut stats = HashMap::default(); + for entry in snapshot.statuses_by_path.iter() { + if let Some(diff_stat) = entry.diff_stat { + stats.insert(entry.repo_path.clone(), diff_stat); + } + } + stats + }) +} + #[gpui::test] async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) { let mut server = TestServer::start(cx_a.background_executor.clone()).await; @@ -279,3 +303,198 @@ async fn test_remote_git_worktrees( ); assert_eq!(bugfix_worktree.sha.as_ref(), "fake-sha"); } + +#[gpui::test] +async fn test_diff_stat_sync_between_host_and_downstream_client( + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = 
TestServer::start(cx_a.background_executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + + let fs = client_a.fs(); + fs.insert_tree( + path!("/code"), + json!({ + "project1": { + ".git": {}, + "src": { + "lib.rs": "line1\nline2\nline3\n", + "new_file.rs": "added1\nadded2\n", + }, + "README.md": "# project 1", + } + }), + ) + .await; + + let dot_git = Path::new(path!("/code/project1/.git")); + fs.set_head_for_repo( + dot_git, + &[ + ("src/lib.rs", "line1\nold_line2\n".into()), + ("src/deleted.rs", "was_here\n".into()), + ], + "deadbeef", + ); + fs.set_index_for_repo( + dot_git, + &[ + ("src/lib.rs", "line1\nold_line2\nline3\nline4\n".into()), + ("src/staged_only.rs", "x\ny\n".into()), + ("src/new_file.rs", "added1\nadded2\n".into()), + ("README.md", "# project 1".into()), + ], + ); + + let (project_a, worktree_id) = client_a + .build_local_project(path!("/code/project1"), cx_a) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + let _project_c = client_c.join_remote_project(project_id, cx_c).await; + cx_a.run_until_parked(); + + let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a); + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + + let panel_a = workspace_a.update_in(cx_a, GitPanel::new_test); + workspace_a.update_in(cx_a, |workspace, window, cx| { + workspace.add_panel(panel_a.clone(), window, cx); + }); + + let panel_b = workspace_b.update_in(cx_b, GitPanel::new_test); + workspace_b.update_in(cx_b, |workspace, window, cx| { + workspace.add_panel(panel_b.clone(), 
window, cx); + }); + + cx_a.run_until_parked(); + + let stats_a = collect_diff_stats(&panel_a, cx_a); + let stats_b = collect_diff_stats(&panel_b, cx_b); + + let mut expected: HashMap = HashMap::default(); + expected.insert( + RepoPath::new("src/lib.rs").unwrap(), + DiffStat { + added: 3, + deleted: 2, + }, + ); + expected.insert( + RepoPath::new("src/deleted.rs").unwrap(), + DiffStat { + added: 0, + deleted: 1, + }, + ); + expected.insert( + RepoPath::new("src/new_file.rs").unwrap(), + DiffStat { + added: 2, + deleted: 0, + }, + ); + expected.insert( + RepoPath::new("README.md").unwrap(), + DiffStat { + added: 1, + deleted: 0, + }, + ); + assert_eq!(stats_a, expected, "host diff stats should match expected"); + assert_eq!(stats_a, stats_b, "host and remote should agree"); + + let buffer_a = project_a + .update(cx_a, |p, cx| { + p.open_buffer((worktree_id, rel_path("src/lib.rs")), cx) + }) + .await + .unwrap(); + + let _buffer_b = project_b + .update(cx_b, |p, cx| { + p.open_buffer((worktree_id, rel_path("src/lib.rs")), cx) + }) + .await + .unwrap(); + cx_a.run_until_parked(); + + buffer_a.update(cx_a, |buf, cx| { + buf.edit([(buf.len()..buf.len(), "line4\n")], None, cx); + }); + project_a + .update(cx_a, |project, cx| { + project.save_buffer(buffer_a.clone(), cx) + }) + .await + .unwrap(); + cx_a.run_until_parked(); + + let stats_a = collect_diff_stats(&panel_a, cx_a); + let stats_b = collect_diff_stats(&panel_b, cx_b); + + let mut expected_after_edit = expected.clone(); + expected_after_edit.insert( + RepoPath::new("src/lib.rs").unwrap(), + DiffStat { + added: 4, + deleted: 2, + }, + ); + assert_eq!( + stats_a, expected_after_edit, + "host diff stats should reflect the edit" + ); + assert_eq!( + stats_b, expected_after_edit, + "remote diff stats should reflect the host's edit" + ); + + let active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(cx_b, |call, cx| call.hang_up(cx)) + .await + .unwrap(); + cx_a.run_until_parked(); + + let user_id_b = 
client_b.current_user_id(cx_b).to_proto(); + active_call_a + .update(cx_a, |call, cx| call.invite(user_id_b, None, cx)) + .await + .unwrap(); + cx_b.run_until_parked(); + let active_call_b = cx_b.read(ActiveCall::global); + active_call_b + .update(cx_b, |call, cx| call.accept_incoming(cx)) + .await + .unwrap(); + cx_a.run_until_parked(); + + let project_b = client_b.join_remote_project(project_id, cx_b).await; + cx_a.run_until_parked(); + + let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b); + let panel_b = workspace_b.update_in(cx_b, GitPanel::new_test); + workspace_b.update_in(cx_b, |workspace, window, cx| { + workspace.add_panel(panel_b.clone(), window, cx); + }); + cx_b.run_until_parked(); + + let stats_b = collect_diff_stats(&panel_b, cx_b); + assert_eq!( + stats_b, expected_after_edit, + "remote diff stats should be restored from the database after rejoining the call" + ); +} diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 06ebea9157f97a0323297cd3ae142c4b306fe4ef..85489b6057cd8214ee512fb477428c93cdb32219 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -795,8 +795,8 @@ impl GitRepository for FakeGitRepository { fn diff_stat( &self, - diff_type: git::repository::DiffType, - ) -> BoxFuture<'_, Result>> { + path_prefixes: &[RepoPath], + ) -> BoxFuture<'_, Result> { fn count_lines(s: &str) -> u32 { if s.is_empty() { 0 @@ -805,122 +805,95 @@ impl GitRepository for FakeGitRepository { } } - match diff_type { - git::repository::DiffType::HeadToIndex => self - .with_state_async(false, |state| { - let mut result = HashMap::default(); - let all_paths: HashSet<&RepoPath> = state - .head_contents - .keys() - .chain(state.index_contents.keys()) - .collect(); - for path in all_paths { - let head = state.head_contents.get(path); - let index = state.index_contents.get(path); - match (head, index) { - (Some(old), Some(new)) if old != new => { - result.insert( - path.clone(), - 
git::status::DiffStat { - added: count_lines(new), - deleted: count_lines(old), - }, - ); - } - (Some(old), None) => { - result.insert( - path.clone(), - git::status::DiffStat { - added: 0, - deleted: count_lines(old), - }, - ); - } - (None, Some(new)) => { - result.insert( - path.clone(), - git::status::DiffStat { - added: count_lines(new), - deleted: 0, - }, - ); - } - _ => {} - } - } - Ok(result) - }) - .boxed(), - git::repository::DiffType::HeadToWorktree => { - let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf(); - let worktree_files: HashMap = self + fn matches_prefixes(path: &RepoPath, prefixes: &[RepoPath]) -> bool { + if prefixes.is_empty() { + return true; + } + prefixes.iter().any(|prefix| { + let prefix_str = prefix.as_unix_str(); + if prefix_str == "." { + return true; + } + path == prefix || path.starts_with(&prefix) + }) + } + + let path_prefixes = path_prefixes.to_vec(); + + let workdir_path = self.dot_git_path.parent().unwrap().to_path_buf(); + let worktree_files: HashMap = self + .fs + .files() + .iter() + .filter_map(|path| { + let repo_path = path.strip_prefix(&workdir_path).ok()?; + if repo_path.starts_with(".git") { + return None; + } + let content = self .fs - .files() - .iter() - .filter_map(|path| { - let repo_path = path.strip_prefix(&workdir_path).ok()?; - if repo_path.starts_with(".git") { - return None; - } - let content = self - .fs - .read_file_sync(path) - .ok() - .and_then(|bytes| String::from_utf8(bytes).ok())?; - let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?; - Some((RepoPath::from_rel_path(&repo_path), content)) - }) - .collect(); + .read_file_sync(path) + .ok() + .and_then(|bytes| String::from_utf8(bytes).ok())?; + let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?; + Some((RepoPath::from_rel_path(&repo_path), content)) + }) + .collect(); - self.with_state_async(false, move |state| { - let mut result = HashMap::default(); - let all_paths: HashSet<&RepoPath> = state - 
.head_contents + self.with_state_async(false, move |state| { + let mut entries = Vec::new(); + let all_paths: HashSet<&RepoPath> = state + .head_contents + .keys() + .chain( + worktree_files .keys() - .chain(worktree_files.keys()) - .collect(); - for path in all_paths { - let head = state.head_contents.get(path); - let worktree = worktree_files.get(path); - match (head, worktree) { - (Some(old), Some(new)) if old != new => { - result.insert( - path.clone(), - git::status::DiffStat { - added: count_lines(new), - deleted: count_lines(old), - }, - ); - } - (Some(old), None) => { - result.insert( - path.clone(), - git::status::DiffStat { - added: 0, - deleted: count_lines(old), - }, - ); - } - (None, Some(new)) => { - result.insert( - path.clone(), - git::status::DiffStat { - added: count_lines(new), - deleted: 0, - }, - ); - } - _ => {} - } + .filter(|p| state.index_contents.contains_key(*p)), + ) + .collect(); + for path in all_paths { + if !matches_prefixes(path, &path_prefixes) { + continue; + } + let head = state.head_contents.get(path); + let worktree = worktree_files.get(path); + match (head, worktree) { + (Some(old), Some(new)) if old != new => { + entries.push(( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: count_lines(old), + }, + )); } - Ok(result) - }) - .boxed() - } - git::repository::DiffType::MergeBase { .. 
} => { - future::ready(Ok(HashMap::default())).boxed() + (Some(old), None) => { + entries.push(( + path.clone(), + git::status::DiffStat { + added: 0, + deleted: count_lines(old), + }, + )); + } + (None, Some(new)) => { + entries.push(( + path.clone(), + git::status::DiffStat { + added: count_lines(new), + deleted: 0, + }, + )); + } + _ => {} + } } - } + entries.sort_by(|(a, _), (b, _)| a.cmp(b)); + Ok(git::status::GitDiffStat { + entries: entries.into(), + }) + }) + .boxed() } fn checkpoint(&self) -> BoxFuture<'static, Result> { diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index f5a856325cc80071f2c8ef500e7b07aa24035f59..c36c70935522836eeea4a83a889109dc807604c8 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -924,8 +924,8 @@ pub trait GitRepository: Send + Sync { fn diff_stat( &self, - diff: DiffType, - ) -> BoxFuture<'_, Result>>; + path_prefixes: &[RepoPath], + ) -> BoxFuture<'_, Result>; /// Creates a checkpoint for the repository. fn checkpoint(&self) -> BoxFuture<'static, Result>; @@ -1997,42 +1997,30 @@ impl GitRepository for RealGitRepository { fn diff_stat( &self, - diff: DiffType, - ) -> BoxFuture<'_, Result>> { + path_prefixes: &[RepoPath], + ) -> BoxFuture<'_, Result> { + let path_prefixes = path_prefixes.to_vec(); let git_binary = self.git_binary(); + self.executor .spawn(async move { - let git = git_binary?; - let output = match diff { - DiffType::HeadToIndex => { - git.build_command(["diff", "--numstat", "--staged"]) - .output() - .await? - } - DiffType::HeadToWorktree => { - git.build_command(["diff", "--numstat"]).output().await? - } - DiffType::MergeBase { base_ref } => { - git.build_command([ - "diff", - "--numstat", - "--merge-base", - base_ref.as_ref(), - "HEAD", - ]) - .output() - .await? 
- } - }; - - anyhow::ensure!( - output.status.success(), - "Failed to run git diff --numstat:\n{}", - String::from_utf8_lossy(&output.stderr) - ); - Ok(crate::status::parse_numstat(&String::from_utf8_lossy( - &output.stdout, - ))) + let git_binary = git_binary?; + let mut args: Vec = vec![ + "diff".into(), + "--numstat".into(), + "--no-renames".into(), + "HEAD".into(), + ]; + if !path_prefixes.is_empty() { + args.push("--".into()); + args.extend( + path_prefixes + .iter() + .map(|p| p.as_std_path().to_string_lossy().into_owned()), + ); + } + let output = git_binary.run(&args).await?; + Ok(crate::status::parse_numstat(&output)) }) .boxed() } @@ -2942,11 +2930,6 @@ fn git_status_args(path_prefixes: &[RepoPath]) -> Vec { OsString::from("--no-renames"), OsString::from("-z"), ]; - args.extend( - path_prefixes - .iter() - .map(|path_prefix| path_prefix.as_std_path().into()), - ); args.extend(path_prefixes.iter().map(|path_prefix| { if path_prefix.is_empty() { Path::new(".").into() diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index b20919e7ecf4748d0035a003ed5eadebae752dd7..e8b5caec505f7bf65cb4f5cd7d789207ccd8784f 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -586,13 +586,18 @@ pub struct DiffStat { pub deleted: u32, } +#[derive(Clone, Debug)] +pub struct GitDiffStat { + pub entries: Arc<[(RepoPath, DiffStat)]>, +} + /// Parses the output of `git diff --numstat` where output looks like: /// /// ```text /// 24 12 dir/file.txt /// ``` -pub fn parse_numstat(output: &str) -> HashMap { - let mut stats = HashMap::default(); +pub fn parse_numstat(output: &str) -> GitDiffStat { + let mut entries = Vec::new(); for line in output.lines() { let line = line.trim(); if line.is_empty() { @@ -613,10 +618,14 @@ pub fn parse_numstat(output: &str) -> HashMap { let Ok(path) = RepoPath::new(path_str) else { continue; }; - let stat = DiffStat { added, deleted }; - stats.insert(path, stat); + entries.push((path, DiffStat { added, deleted })); + } + 
entries.sort_by(|(a, _), (b, _)| a.cmp(b)); + entries.dedup_by(|(a, _), (b, _)| a == b); + + GitDiffStat { + entries: entries.into(), } - stats } #[cfg(test)] @@ -629,20 +638,25 @@ mod tests { use super::{DiffStat, parse_numstat}; + fn lookup<'a>(entries: &'a [(RepoPath, DiffStat)], path: &str) -> Option<&'a DiffStat> { + let path = RepoPath::new(path).unwrap(); + entries.iter().find(|(p, _)| p == &path).map(|(_, s)| s) + } + #[test] fn test_parse_numstat_normal() { let input = "10\t5\tsrc/main.rs\n3\t1\tREADME.md\n"; let result = parse_numstat(input); - assert_eq!(result.len(), 2); + assert_eq!(result.entries.len(), 2); assert_eq!( - result.get(&RepoPath::new("src/main.rs").unwrap()), + lookup(&result.entries, "src/main.rs"), Some(&DiffStat { added: 10, deleted: 5 }) ); assert_eq!( - result.get(&RepoPath::new("README.md").unwrap()), + lookup(&result.entries, "README.md"), Some(&DiffStat { added: 3, deleted: 1 @@ -655,10 +669,10 @@ mod tests { // git diff --numstat outputs "-\t-\tpath" for binary files let input = "-\t-\timage.png\n5\t2\tsrc/lib.rs\n"; let result = parse_numstat(input); - assert_eq!(result.len(), 1); - assert!(!result.contains_key(&RepoPath::new("image.png").unwrap())); + assert_eq!(result.entries.len(), 1); + assert!(lookup(&result.entries, "image.png").is_none()); assert_eq!( - result.get(&RepoPath::new("src/lib.rs").unwrap()), + lookup(&result.entries, "src/lib.rs"), Some(&DiffStat { added: 5, deleted: 2 @@ -668,18 +682,18 @@ mod tests { #[test] fn test_parse_numstat_empty_input() { - assert!(parse_numstat("").is_empty()); - assert!(parse_numstat("\n\n").is_empty()); - assert!(parse_numstat(" \n \n").is_empty()); + assert!(parse_numstat("").entries.is_empty()); + assert!(parse_numstat("\n\n").entries.is_empty()); + assert!(parse_numstat(" \n \n").entries.is_empty()); } #[test] fn test_parse_numstat_malformed_lines_skipped() { let input = "not_a_number\t5\tfile.rs\n10\t5\tvalid.rs\n"; let result = parse_numstat(input); - assert_eq!(result.len(), 
1); + assert_eq!(result.entries.len(), 1); assert_eq!( - result.get(&RepoPath::new("valid.rs").unwrap()), + lookup(&result.entries, "valid.rs"), Some(&DiffStat { added: 10, deleted: 5 @@ -692,9 +706,9 @@ mod tests { // Lines with fewer than 3 tab-separated fields are skipped let input = "10\t5\n7\t3\tok.rs\n"; let result = parse_numstat(input); - assert_eq!(result.len(), 1); + assert_eq!(result.entries.len(), 1); assert_eq!( - result.get(&RepoPath::new("ok.rs").unwrap()), + lookup(&result.entries, "ok.rs"), Some(&DiffStat { added: 7, deleted: 3 @@ -707,7 +721,7 @@ mod tests { let input = "0\t0\tunchanged_but_present.rs\n"; let result = parse_numstat(input); assert_eq!( - result.get(&RepoPath::new("unchanged_but_present.rs").unwrap()), + lookup(&result.entries, "unchanged_but_present.rs"), Some(&DiffStat { added: 0, deleted: 0 diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index 1fabc387247e3f0889749463e3aabd89ef0bff42..61d94b68a118525bd9b67217a929ce7462696dc7 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -41,7 +41,7 @@ use gpui::{ WeakEntity, actions, anchored, deferred, point, size, uniform_list, }; use itertools::Itertools; -use language::{Buffer, BufferEvent, File}; +use language::{Buffer, File}; use language_model::{ ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role, }; @@ -51,7 +51,6 @@ use notifications::status_toast::{StatusToast, ToastIcon}; use panel::{PanelHeader, panel_button, panel_filled_button, panel_icon_button}; use project::{ Fs, Project, ProjectPath, - buffer_store::BufferStoreEvent, git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId, pending_op}, project_settings::{GitPathStyle, ProjectSettings}, }; @@ -533,6 +532,7 @@ pub struct GitStatusEntry { pub(crate) repo_path: RepoPath, pub(crate) status: FileStatus, pub(crate) staging: StageStatus, + pub(crate) diff_stat: Option, } impl GitStatusEntry { @@ -653,8 +653,7 @@ 
pub struct GitPanel { local_committer_task: Option>, bulk_staging: Option, stash_entries: GitStash, - diff_stats: HashMap, - diff_stats_task: Task<()>, + _settings_subscription: Subscription, } @@ -723,18 +722,14 @@ impl GitPanel { if tree_view != was_tree_view { this.view_mode = GitPanelViewMode::from_settings(cx); } + + let mut update_entries = false; if sort_by_path != was_sort_by_path || tree_view != was_tree_view { this.bulk_staging.take(); - this.update_visible_entries(window, cx); + update_entries = true; } - if diff_stats != was_diff_stats { - if diff_stats { - this.fetch_diff_stats(cx); - } else { - this.diff_stats.clear(); - this.diff_stats_task = Task::ready(()); - cx.notify(); - } + if (diff_stats != was_diff_stats) || update_entries { + this.update_visible_entries(window, cx); } was_sort_by_path = sort_by_path; was_tree_view = tree_view; @@ -791,33 +786,6 @@ impl GitPanel { ) .detach(); - let buffer_store = project.read(cx).buffer_store().clone(); - - for buffer in project.read(cx).opened_buffers(cx) { - cx.subscribe(&buffer, |this, _buffer, event, cx| { - if matches!(event, BufferEvent::Saved) { - if GitPanelSettings::get_global(cx).diff_stats { - this.fetch_diff_stats(cx); - } - } - }) - .detach(); - } - - cx.subscribe(&buffer_store, |_this, _store, event, cx| { - if let BufferStoreEvent::BufferAdded(buffer) = event { - cx.subscribe(buffer, |this, _buffer, event, cx| { - if matches!(event, BufferEvent::Saved) { - if GitPanelSettings::get_global(cx).diff_stats { - this.fetch_diff_stats(cx); - } - } - }) - .detach(); - } - }) - .detach(); - let mut this = Self { active_repository, commit_editor, @@ -858,8 +826,6 @@ impl GitPanel { entry_count: 0, bulk_staging: None, stash_entries: Default::default(), - diff_stats: HashMap::default(), - diff_stats_task: Task::ready(()), _settings_subscription, }; @@ -3575,6 +3541,7 @@ impl GitPanel { repo_path: entry.repo_path.clone(), status: entry.status, staging, + diff_stat: entry.diff_stat, }; if 
staging.has_staged() { @@ -3611,6 +3578,7 @@ impl GitPanel { repo_path: ops.repo_path.clone(), status: status.status, staging: StageStatus::Staged, + diff_stat: status.diff_stat, }); } } @@ -3743,60 +3711,9 @@ impl GitPanel { editor.set_placeholder_text(&placeholder_text, window, cx) }); - if GitPanelSettings::get_global(cx).diff_stats { - self.fetch_diff_stats(cx); - } - cx.notify(); } - fn fetch_diff_stats(&mut self, cx: &mut Context) { - let Some(repo) = self.active_repository.clone() else { - self.diff_stats.clear(); - return; - }; - - let unstaged_rx = repo.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToWorktree, cx)); - let staged_rx = repo.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToIndex, cx)); - - self.diff_stats_task = cx.spawn(async move |this, cx| { - let (unstaged_result, staged_result) = - futures::future::join(unstaged_rx, staged_rx).await; - - let mut combined = match unstaged_result { - Ok(Ok(stats)) => stats, - Ok(Err(err)) => { - log::warn!("Failed to fetch unstaged diff stats: {err:?}"); - HashMap::default() - } - Err(_) => HashMap::default(), - }; - - let staged = match staged_result { - Ok(Ok(stats)) => Some(stats), - Ok(Err(err)) => { - log::warn!("Failed to fetch staged diff stats: {err:?}"); - None - } - Err(_) => None, - }; - - if let Some(staged) = staged { - for (path, stat) in staged { - let entry = combined.entry(path).or_default(); - entry.added += stat.added; - entry.deleted += stat.deleted; - } - } - - this.update(cx, |this, cx| { - this.diff_stats = combined; - cx.notify(); - }) - .ok(); - }); - } - fn header_state(&self, header_type: Section) -> ToggleState { let (staged_count, count) = match header_type { Section::New => (self.new_staged_count, self.new_count), @@ -5227,17 +5144,14 @@ impl GitPanel { .active(|s| s.bg(active_bg)) .child(name_row) .when(GitPanelSettings::get_global(cx).diff_stats, |el| { - el.when_some( - self.diff_stats.get(&entry.repo_path).copied(), - move |this, stat| { - let id = 
format!("diff-stat-{}", id_for_diff_stat); - this.child(ui::DiffStat::new( - id, - stat.added as usize, - stat.deleted as usize, - )) - }, - ) + el.when_some(entry.diff_stat, move |this, stat| { + let id = format!("diff-stat-{}", id_for_diff_stat); + this.child(ui::DiffStat::new( + id, + stat.added as usize, + stat.deleted as usize, + )) + }) }) .child( div() @@ -5629,6 +5543,21 @@ impl GitPanel { } } +#[cfg(any(test, feature = "test-support"))] +impl GitPanel { + pub fn new_test( + workspace: &mut Workspace, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + Self::new(workspace, window, cx) + } + + pub fn active_repository(&self) -> Option<&Entity> { + self.active_repository.as_ref() + } +} + impl Render for GitPanel { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let project = self.project.read(cx); @@ -6606,11 +6535,19 @@ mod tests { repo_path: repo_path("crates/gpui/gpui.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }), GitListEntry::Status(GitStatusEntry { repo_path: repo_path("crates/util/util.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), },), ], ); @@ -6631,11 +6568,19 @@ mod tests { repo_path: repo_path("crates/gpui/gpui.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }), GitListEntry::Status(GitStatusEntry { repo_path: repo_path("crates/util/util.rs"), status: StatusCode::Modified.worktree(), staging: StageStatus::Unstaged, + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), },), ], ); diff --git a/crates/lsp/Cargo.toml b/crates/lsp/Cargo.toml index 9533ddb600b18213de4d6e50599c62aa182b9b8a..2c48575a648a9eba12b16ce8edb2cf959d7cc8b3 100644 --- a/crates/lsp/Cargo.toml +++ b/crates/lsp/Cargo.toml @@ -13,12 +13,13 @@ path = 
"src/lsp.rs" doctest = false [features] -test-support = ["async-pipe"] +test-support = ["async-pipe", "gpui_util"] [dependencies] anyhow.workspace = true async-pipe = { workspace = true, optional = true } collections.workspace = true +gpui_util = { workspace = true, optional = true } futures.workspace = true gpui.workspace = true log.workspace = true @@ -34,6 +35,7 @@ release_channel.workspace = true [dev-dependencies] async-pipe.workspace = true +gpui_util.workspace = true ctor.workspace = true gpui = { workspace = true, features = ["test-support"] } semver.workspace = true diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index e552c21d701cefa8aa1f4b6e14e826892e3b25b6..2e2318065292ffdc2ac39b577afc7a264d36473d 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -1970,10 +1970,14 @@ impl FakeLanguageServer { let responded_tx = responded_tx.clone(); let executor = cx.background_executor().clone(); async move { + let _guard = gpui_util::defer({ + let responded_tx = responded_tx.clone(); + move || { + responded_tx.unbounded_send(()).ok(); + } + }); executor.simulate_random_delay().await; - let result = result.await; - responded_tx.unbounded_send(()).ok(); - result + result.await } }) .detach(); diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index b03c7d69ab05daf94254a9d47cb2ae23da3043d1..fdafea73fd0ca797616cc58fc9e4b6a3c2101224 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -24,7 +24,7 @@ use futures::{ mpsc, oneshot::{self, Canceled}, }, - future::{self, Shared}, + future::{self, BoxFuture, Shared}, stream::FuturesOrdered, }; use git::{ @@ -39,8 +39,8 @@ use git::{ }, stash::{GitStash, StashEntry}, status::{ - DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus, - UnmergedStatus, UnmergedStatusCode, + self, DiffStat, DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, + TreeDiffStatus, UnmergedStatus, 
UnmergedStatusCode, }, }; use gpui::{ @@ -195,6 +195,7 @@ pub struct GitStoreCheckpoint { pub struct StatusEntry { pub repo_path: RepoPath, pub status: FileStatus, + pub diff_stat: Option, } impl StatusEntry { @@ -216,6 +217,8 @@ impl StatusEntry { repo_path: self.repo_path.to_proto(), simple_status, status: Some(status_to_proto(self.status)), + diff_stat_added: self.diff_stat.map(|ds| ds.added), + diff_stat_deleted: self.diff_stat.map(|ds| ds.deleted), } } } @@ -226,7 +229,15 @@ impl TryFrom for StatusEntry { fn try_from(value: proto::StatusEntry) -> Result { let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?; let status = status_from_proto(value.simple_status, value.status)?; - Ok(Self { repo_path, status }) + let diff_stat = match (value.diff_stat_added, value.diff_stat_deleted) { + (Some(added), Some(deleted)) => Some(DiffStat { added, deleted }), + _ => None, + }; + Ok(Self { + repo_path, + status, + diff_stat, + }) } } @@ -555,7 +566,6 @@ impl GitStore { client.add_entity_request_handler(Self::handle_askpass); client.add_entity_request_handler(Self::handle_check_for_pushed_commits); client.add_entity_request_handler(Self::handle_git_diff); - client.add_entity_request_handler(Self::handle_git_diff_stat); client.add_entity_request_handler(Self::handle_tree_diff); client.add_entity_request_handler(Self::handle_get_blob_content); client.add_entity_request_handler(Self::handle_open_unstaged_diff); @@ -2761,45 +2771,6 @@ impl GitStore { Ok(proto::GitDiffResponse { diff }) } - async fn handle_git_diff_stat( - this: Entity, - envelope: TypedEnvelope, - mut cx: AsyncApp, - ) -> Result { - let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); - let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; - let diff_type = match envelope.payload.diff_type() { - proto::git_diff_stat::DiffType::HeadToIndex => DiffType::HeadToIndex, - proto::git_diff_stat::DiffType::HeadToWorktree => 
DiffType::HeadToWorktree, - proto::git_diff_stat::DiffType::MergeBase => { - let base_ref = envelope - .payload - .merge_base_ref - .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?; - DiffType::MergeBase { - base_ref: base_ref.into(), - } - } - }; - - let stats = repository_handle - .update(&mut cx, |repository_handle, cx| { - repository_handle.diff_stat(diff_type, cx) - }) - .await??; - - let entries = stats - .into_iter() - .map(|(path, stat)| proto::GitDiffStatEntry { - path: path.to_proto(), - added: stat.added, - deleted: stat.deleted, - }) - .collect(); - - Ok(proto::GitDiffStatResponse { entries }) - } - async fn handle_tree_diff( this: Entity, request: TypedEnvelope, @@ -3623,7 +3594,9 @@ impl RepositorySnapshot { current_new_entry = new_statuses.next(); } Ordering::Equal => { - if new_entry.status != old_entry.status { + if new_entry.status != old_entry.status + || new_entry.diff_stat != old_entry.diff_stat + { updated_statuses.push(new_entry.to_proto()); } current_old_entry = old_statuses.next(); @@ -3693,6 +3666,12 @@ impl RepositorySnapshot { .cloned() } + pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option { + self.statuses_by_path + .get(&PathKey(path.as_ref().clone()), ()) + .and_then(|entry| entry.diff_stat) + } + pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option { Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style) } @@ -4193,6 +4172,10 @@ impl Repository { self.snapshot.status() } + pub fn diff_stat_for_path(&self, path: &RepoPath) -> Option { + self.snapshot.diff_stat_for_path(path) + } + pub fn cached_stash(&self) -> GitStash { self.snapshot.stash_entries.clone() } @@ -5884,63 +5867,6 @@ impl Repository { }) } - /// Fetches per-line diff statistics (additions/deletions) via `git diff --numstat`. 
- pub fn diff_stat( - &mut self, - diff_type: DiffType, - _cx: &App, - ) -> oneshot::Receiver< - Result>, - > { - let id = self.id; - self.send_job(None, move |repo, _cx| async move { - match repo { - RepositoryState::Local(LocalRepositoryState { backend, .. }) => { - backend.diff_stat(diff_type).await - } - RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => { - let (proto_diff_type, merge_base_ref) = match &diff_type { - DiffType::HeadToIndex => { - (proto::git_diff_stat::DiffType::HeadToIndex.into(), None) - } - DiffType::HeadToWorktree => { - (proto::git_diff_stat::DiffType::HeadToWorktree.into(), None) - } - DiffType::MergeBase { base_ref } => ( - proto::git_diff_stat::DiffType::MergeBase.into(), - Some(base_ref.to_string()), - ), - }; - let response = client - .request(proto::GitDiffStat { - project_id: project_id.0, - repository_id: id.to_proto(), - diff_type: proto_diff_type, - merge_base_ref, - }) - .await?; - - let stats = response - .entries - .into_iter() - .filter_map(|entry| { - let path = RepoPath::from_proto(&entry.path).log_err()?; - Some(( - path, - git::status::DiffStat { - added: entry.added, - deleted: entry.deleted, - }, - )) - }) - .collect(); - - Ok(stats) - } - } - }) - } - pub fn create_branch( &mut self, branch_name: String, @@ -6165,6 +6091,7 @@ impl Repository { cx.emit(RepositoryEvent::StatusesChanged); } self.snapshot.statuses_by_path.edit(edits, ()); + if update.is_last_update { self.snapshot.scan_id = update.scan_id; } @@ -6479,22 +6406,43 @@ impl Repository { return Ok(()); } + let has_head = prev_snapshot.head_commit.is_some(); + let stash_entries = backend.stash_entries().await?; let changed_path_statuses = cx .background_spawn(async move { let mut changed_paths = changed_paths.into_iter().flatten().collect::>(); - let statuses = backend - .status(&changed_paths.iter().cloned().collect::>()) - .await?; + let changed_paths_vec = changed_paths.iter().cloned().collect::>(); + + let status_task = 
backend.status(&changed_paths_vec); + let diff_stat_future = if has_head { + backend.diff_stat(&changed_paths_vec) + } else { + future::ready(Ok(status::GitDiffStat { + entries: Arc::default(), + })) + .boxed() + }; + + let (statuses, diff_stats) = + futures::future::try_join(status_task, diff_stat_future).await?; + + let diff_stats: HashMap = + HashMap::from_iter(diff_stats.entries.into_iter().cloned()); + let mut changed_path_statuses = Vec::new(); let prev_statuses = prev_snapshot.statuses_by_path.clone(); let mut cursor = prev_statuses.cursor::(()); for (repo_path, status) in &*statuses.entries { + let current_diff_stat = diff_stats.get(repo_path).copied(); + changed_paths.remove(repo_path); if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) - && cursor.item().is_some_and(|entry| entry.status == *status) + && cursor.item().is_some_and(|entry| { + entry.status == *status && entry.diff_stat == current_diff_stat + }) { continue; } @@ -6502,6 +6450,7 @@ impl Repository { changed_path_statuses.push(Edit::Insert(StatusEntry { repo_path: repo_path.clone(), status: *status, + diff_stat: current_diff_stat, })); } let mut cursor = prev_statuses.cursor::(()); @@ -6859,11 +6808,31 @@ async fn compute_snapshot( let mut events = Vec::new(); let branches = backend.branches().await?; let branch = branches.into_iter().find(|branch| branch.is_head); - let statuses = backend - .status(&[RepoPath::from_rel_path( + + // Useful when branch is None in detached head state + let head_commit = match backend.head_sha().await { + Some(head_sha) => backend.show(head_sha).await.log_err(), + None => None, + }; + + let diff_stat_future: BoxFuture<'_, Result> = if head_commit.is_some() { + backend.diff_stat(&[]) + } else { + future::ready(Ok(status::GitDiffStat { + entries: Arc::default(), + })) + .boxed() + }; + let (statuses, diff_stats) = futures::future::try_join( + backend.status(&[RepoPath::from_rel_path( &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(), - )]) - 
.await?; + )]), + diff_stat_future, + ) + .await?; + + let diff_stat_map: HashMap<&RepoPath, DiffStat> = + diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect(); let stash_entries = backend.stash_entries().await?; let mut conflicted_paths = Vec::new(); let statuses_by_path = SumTree::from_iter( @@ -6874,6 +6843,7 @@ async fn compute_snapshot( StatusEntry { repo_path: repo_path.clone(), status: *status, + diff_stat: diff_stat_map.get(repo_path).copied(), } }), (), @@ -6886,12 +6856,6 @@ async fn compute_snapshot( events.push(RepositoryEvent::StatusesChanged) } - // Useful when branch is None in detached head state - let head_commit = match backend.head_sha().await { - Some(head_sha) => backend.show(head_sha).await.log_err(), - None => None, - }; - if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit { events.push(RepositoryEvent::BranchChanged); } diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs index d597377910a2a837e456ac4384b06c333887dfb3..d86b969e61ed173ee314cde6f584f2dbab6859f9 100644 --- a/crates/project/tests/integration/project_tests.rs +++ b/crates/project/tests/integration/project_tests.rs @@ -31,7 +31,7 @@ use futures::{StreamExt, future}; use git::{ GitHostingProviderRegistry, repository::{RepoPath, repo_path}, - status::{FileStatus, StatusCode, TrackedStatus}, + status::{DiffStat, FileStatus, StatusCode, TrackedStatus}, }; use git2::RepositoryInitOptions; use gpui::{ @@ -9253,14 +9253,23 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { StatusEntry { repo_path: repo_path("a.txt"), status: StatusCode::Modified.worktree(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }, StatusEntry { repo_path: repo_path("b.txt"), status: FileStatus::Untracked, + diff_stat: None, }, StatusEntry { repo_path: repo_path("d.txt"), status: StatusCode::Deleted.worktree(), + diff_stat: Some(DiffStat { + added: 0, + deleted: 1, + }), }, ] ); @@ 
-9282,18 +9291,31 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { StatusEntry { repo_path: repo_path("a.txt"), status: StatusCode::Modified.worktree(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }, StatusEntry { repo_path: repo_path("b.txt"), status: FileStatus::Untracked, + diff_stat: None, }, StatusEntry { repo_path: repo_path("c.txt"), status: StatusCode::Modified.worktree(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 1, + }), }, StatusEntry { repo_path: repo_path("d.txt"), status: StatusCode::Deleted.worktree(), + diff_stat: Some(DiffStat { + added: 0, + deleted: 1, + }), }, ] ); @@ -9327,6 +9349,10 @@ async fn test_git_repository_status(cx: &mut gpui::TestAppContext) { [StatusEntry { repo_path: repo_path("a.txt"), status: StatusCode::Deleted.worktree(), + diff_stat: Some(DiffStat { + added: 0, + deleted: 1, + }), }] ); }); @@ -9391,6 +9417,7 @@ async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) { worktree_status: StatusCode::Added } .into(), + diff_stat: None, }] ) }); @@ -9593,6 +9620,10 @@ async fn test_repository_pending_ops_staging( worktree_status: StatusCode::Unmodified } .into(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 0, + }), }] ); }); @@ -9699,6 +9730,10 @@ async fn test_repository_pending_ops_long_running_staging( worktree_status: StatusCode::Unmodified } .into(), + diff_stat: Some(DiffStat { + added: 1, + deleted: 0, + }), }] ); }); @@ -9823,10 +9858,12 @@ async fn test_repository_pending_ops_stage_all( StatusEntry { repo_path: repo_path("a.txt"), status: FileStatus::Untracked, + diff_stat: None, }, StatusEntry { repo_path: repo_path("b.txt"), status: FileStatus::Untracked, + diff_stat: None, }, ] ); diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index 6cb3acfcd878c8f970c4e99789939424a3835709..736abcdaa49f62d72582750a8a28ea785baee282 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -229,29 +229,6 @@ message 
GitDiffResponse { string diff = 1; } -message GitDiffStat { - uint64 project_id = 1; - uint64 repository_id = 2; - DiffType diff_type = 3; - optional string merge_base_ref = 4; - - enum DiffType { - HEAD_TO_WORKTREE = 0; - HEAD_TO_INDEX = 1; - MERGE_BASE = 2; - } -} - -message GitDiffStatResponse { - repeated GitDiffStatEntry entries = 1; -} - -message GitDiffStatEntry { - string path = 1; - uint32 added = 2; - uint32 deleted = 3; -} - message GitInit { uint64 project_id = 1; string abs_path = 2; @@ -360,6 +337,8 @@ message StatusEntry { // Can be removed once collab's min version is >=0.171.0. GitStatus simple_status = 2; GitFileStatus status = 3; + optional uint32 diff_stat_added = 4; + optional uint32 diff_stat_deleted = 5; } message StashEntry { diff --git a/crates/proto/proto/zed.proto b/crates/proto/proto/zed.proto index d6139f5342d153221d13917e26565a4c0eb5a707..c129b6eff26404b66b38439c29f0b83289b37172 100644 --- a/crates/proto/proto/zed.proto +++ b/crates/proto/proto/zed.proto @@ -474,9 +474,7 @@ message Envelope { SpawnKernel spawn_kernel = 426; SpawnKernelResponse spawn_kernel_response = 427; - KillKernel kill_kernel = 428; - GitDiffStat git_diff_stat = 429; - GitDiffStatResponse git_diff_stat_response = 430; // current max + KillKernel kill_kernel = 428; // current max } reserved 87 to 88; @@ -501,6 +499,7 @@ message Envelope { reserved 280 to 281; reserved 332 to 333; reserved 394 to 396; + reserved 429 to 430; } message Hello { diff --git a/crates/proto/src/proto.rs b/crates/proto/src/proto.rs index 3d30551557000c305a82b328828b566c9d78f75e..dd0a77beb29345021563b21bafd261d02b87e1ab 100644 --- a/crates/proto/src/proto.rs +++ b/crates/proto/src/proto.rs @@ -322,8 +322,6 @@ messages!( (CheckForPushedCommitsResponse, Background), (GitDiff, Background), (GitDiffResponse, Background), - (GitDiffStat, Background), - (GitDiffStatResponse, Background), (GitInit, Background), (GetDebugAdapterBinary, Background), (DebugAdapterBinary, Background), @@ -541,7 +539,6 
@@ request_messages!( (GitRenameBranch, Ack), (CheckForPushedCommits, CheckForPushedCommitsResponse), (GitDiff, GitDiffResponse), - (GitDiffStat, GitDiffStatResponse), (GitInit, Ack), (ToggleBreakpoint, Ack), (GetDebugAdapterBinary, DebugAdapterBinary), @@ -730,7 +727,6 @@ entity_messages!( GitRemoveRemote, CheckForPushedCommits, GitDiff, - GitDiffStat, GitInit, BreakpointsForFile, ToggleBreakpoint, diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index 9b9fe9948ace530d7e55d2843952ca5c9efb3749..7f9953c8a4e746d9586b663330badb38149cfb64 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -6,7 +6,6 @@ use agent::{AgentTool, ReadFileTool, ReadFileToolInput, ToolCallEventStream, Too use client::{Client, UserStore}; use clock::FakeSystemClock; use collections::{HashMap, HashSet}; -use git::repository::DiffType; use language_model::LanguageModelToolResultContent; use extension::ExtensionHostProxy; @@ -1917,129 +1916,6 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA assert_eq!(server_branch.name(), "totally-new-branch"); } -#[gpui::test] -async fn test_remote_git_diff_stat(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { - let fs = FakeFs::new(server_cx.executor()); - fs.insert_tree( - path!("/code"), - json!({ - "project1": { - ".git": {}, - "src": { - "lib.rs": "line1\nline2\nline3\n", - "new_file.rs": "added1\nadded2\n", - }, - "README.md": "# project 1", - }, - }), - ) - .await; - - let dot_git = Path::new(path!("/code/project1/.git")); - - // HEAD: lib.rs (2 lines), deleted.rs (1 line) - fs.set_head_for_repo( - dot_git, - &[ - ("src/lib.rs", "line1\nold_line2\n".into()), - ("src/deleted.rs", "was_here\n".into()), - ], - "deadbeef", - ); - // Index: lib.rs modified (4 lines), staged_only.rs new (2 lines) - fs.set_index_for_repo( - dot_git, - &[ - ("src/lib.rs", 
"line1\nold_line2\nline3\nline4\n".into()), - ("src/staged_only.rs", "x\ny\n".into()), - ], - ); - - let (project, _headless) = init_test(&fs, cx, server_cx).await; - let (_worktree, _) = project - .update(cx, |project, cx| { - project.find_or_create_worktree(path!("/code/project1"), true, cx) - }) - .await - .unwrap(); - cx.run_until_parked(); - - let repo_path = |s: &str| git::repository::RepoPath::new(s).unwrap(); - - let repository = project.update(cx, |project, cx| project.active_repository(cx).unwrap()); - - // --- HeadToWorktree --- - let stats = cx - .update(|cx| repository.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToWorktree, cx))) - .await - .unwrap() - .unwrap(); - - // src/lib.rs: worktree 3 lines vs HEAD 2 lines - let stat = stats.get(&repo_path("src/lib.rs")).expect("src/lib.rs"); - assert_eq!((stat.added, stat.deleted), (3, 2)); - - // src/new_file.rs: only in worktree (2 lines) - let stat = stats - .get(&repo_path("src/new_file.rs")) - .expect("src/new_file.rs"); - assert_eq!((stat.added, stat.deleted), (2, 0)); - - // src/deleted.rs: only in HEAD (1 line) - let stat = stats - .get(&repo_path("src/deleted.rs")) - .expect("src/deleted.rs"); - assert_eq!((stat.added, stat.deleted), (0, 1)); - - // README.md: only in worktree (1 line) - let stat = stats.get(&repo_path("README.md")).expect("README.md"); - assert_eq!((stat.added, stat.deleted), (1, 0)); - - // --- HeadToIndex --- - let stats = cx - .update(|cx| repository.update(cx, |repo, cx| repo.diff_stat(DiffType::HeadToIndex, cx))) - .await - .unwrap() - .unwrap(); - - // src/lib.rs: index 4 lines vs HEAD 2 lines - let stat = stats.get(&repo_path("src/lib.rs")).expect("src/lib.rs"); - assert_eq!((stat.added, stat.deleted), (4, 2)); - - // src/staged_only.rs: only in index (2 lines) - let stat = stats - .get(&repo_path("src/staged_only.rs")) - .expect("src/staged_only.rs"); - assert_eq!((stat.added, stat.deleted), (2, 0)); - - // src/deleted.rs: in HEAD but not in index - let stat = stats - 
.get(&repo_path("src/deleted.rs")) - .expect("src/deleted.rs"); - assert_eq!((stat.added, stat.deleted), (0, 1)); - - // --- MergeBase (not implemented in FakeGitRepository) --- - let stats = cx - .update(|cx| { - repository.update(cx, |repo, cx| { - repo.diff_stat( - DiffType::MergeBase { - base_ref: "main".into(), - }, - cx, - ) - }) - }) - .await - .unwrap() - .unwrap(); - - assert!( - stats.is_empty(), - "MergeBase diff_stat should return empty from FakeGitRepository" - ); -} - #[gpui::test] async fn test_remote_agent_fs_tool_calls(cx: &mut TestAppContext, server_cx: &mut TestAppContext) { let fs = FakeFs::new(server_cx.executor()); From 0fc5bc2e89389681cede09780fac1d5fa5155f07 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 4 Mar 2026 12:55:20 -0700 Subject: [PATCH 317/548] debugger: Reverse Python repr escaping (#50554) Closes #37168 Authored-By: @ngauder Release Notes: - debugger: Unescape Python strings Co-authored-by: Nikolas Gauder --- crates/project/src/debugger/session.rs | 32 +++++++++++++++++++++++++- 1 file changed, 31 insertions(+), 1 deletion(-) diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index 2430d6c1024c61bb9af984c914df9c308c4cb64f..a6c3f52b17a4a6cf241aa49329f3f14f0b5cefbc 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -2645,10 +2645,40 @@ impl Session { self.fetch( command, move |this, variables, cx| { - let Some(variables) = variables.log_err() else { + let Some(mut variables) = variables.log_err() else { return; }; + if this.adapter.0.as_ref() == "Debugpy" { + for variable in variables.iter_mut() { + if variable.type_ == Some("str".into()) { + // reverse Python repr() escaping + let mut unescaped = String::with_capacity(variable.value.len()); + let mut chars = variable.value.chars(); + while let Some(c) = chars.next() { + if c != '\\' { + unescaped.push(c); + } else { + match chars.next() { + Some('\\') => unescaped.push('\\'), + 
Some('n') => unescaped.push('\n'), + Some('t') => unescaped.push('\t'), + Some('r') => unescaped.push('\r'), + Some('\'') => unescaped.push('\''), + Some('"') => unescaped.push('"'), + Some(c) => { + unescaped.push('\\'); + unescaped.push(c); + } + None => {} + } + } + } + variable.value = unescaped; + } + } + } + this.active_snapshot .variables .insert(variables_reference, variables); From 9316c4aa555d520c91aed490f20f4235797d3504 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Wed, 4 Mar 2026 13:04:48 -0700 Subject: [PATCH 318/548] Fix crash metrics ID (#50728) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Before this change the crash handler uploaded crashes before sign-in had happened. Now we get the metrics_id correctly. This allows for us to tie crashes reported on Github to users who have opted into telemetry (users who have opted into crash reporting but not telemetry will not have a metrics_id). Release Notes: - Fixed crash reporter metadata collection --------- Co-authored-by: Miguel Raz Guzmán Macedo --- Cargo.lock | 3 +- crates/crashes/Cargo.toml | 3 +- crates/crashes/src/crashes.rs | 172 ++++++++++++++++++++-------------- crates/zed/Cargo.toml | 5 +- crates/zed/src/main.rs | 21 ++++- crates/zed/src/reliability.rs | 21 +++-- crates/zed/src/zed.rs | 11 +-- 7 files changed, 142 insertions(+), 94 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c4ec49e50c3aa75e5e470414da470301e6f77e04..d9c5c69e2d11d9e8f6d5cadd35ec342ebe202e57 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4126,13 +4126,13 @@ dependencies = [ name = "crashes" version = "0.1.0" dependencies = [ - "bincode", "cfg-if", "crash-handler", "futures 0.3.31", "log", "mach2 0.5.0", "minidumper", + "parking_lot", "paths", "release_channel", "serde", @@ -21735,7 +21735,6 @@ dependencies = [ "audio", "auto_update", "auto_update_ui", - "bincode", "breadcrumbs", "call", "channel", diff --git a/crates/crashes/Cargo.toml b/crates/crashes/Cargo.toml index 
5e451853a925d86ffcc1491a5c95af1f94e6ed05..2c13dc83c5a88c3504da6f8be48c1d75c8e43652 100644 --- a/crates/crashes/Cargo.toml +++ b/crates/crashes/Cargo.toml @@ -6,13 +6,12 @@ edition.workspace = true license = "GPL-3.0-or-later" [dependencies] -bincode.workspace = true cfg-if.workspace = true crash-handler.workspace = true futures.workspace = true log.workspace = true minidumper.workspace = true - +parking_lot.workspace = true paths.workspace = true release_channel.workspace = true smol.workspace = true diff --git a/crates/crashes/src/crashes.rs b/crates/crashes/src/crashes.rs index a1a43dbb88198b7afd4b89141f7578c0a5bc25ce..0c848d759cd444f3eb6e2a9838d3005254a25b19 100644 --- a/crates/crashes/src/crashes.rs +++ b/crates/crashes/src/crashes.rs @@ -2,12 +2,14 @@ use crash_handler::{CrashEventResult, CrashHandler}; use futures::future::BoxFuture; use log::info; use minidumper::{Client, LoopAction, MinidumpBinary}; +use parking_lot::Mutex; use release_channel::{RELEASE_CHANNEL, ReleaseChannel}; use serde::{Deserialize, Serialize}; use std::mem; #[cfg(not(target_os = "windows"))] use smol::process::Command; +use system_specs::GpuSpecs; #[cfg(target_os = "macos")] use std::sync::atomic::AtomicU32; @@ -27,12 +29,14 @@ use std::{ }; // set once the crash handler has initialized and the client has connected to it -pub static CRASH_HANDLER: OnceLock> = OnceLock::new(); +static CRASH_HANDLER: OnceLock> = OnceLock::new(); // set when the first minidump request is made to avoid generating duplicate crash reports pub static REQUESTED_MINIDUMP: AtomicBool = AtomicBool::new(false); const CRASH_HANDLER_PING_TIMEOUT: Duration = Duration::from_secs(60); const CRASH_HANDLER_CONNECT_TIMEOUT: Duration = Duration::from_secs(10); +static PENDING_CRASH_SERVER_MESSAGES: Mutex> = Mutex::new(Vec::new()); + #[cfg(target_os = "macos")] static PANIC_THREAD_ID: AtomicU32 = AtomicU32::new(0); @@ -118,6 +122,7 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl 
spawn_crash_handler_windows(&exe, &socket_name); info!("spawning crash handler process"); + send_crash_server_message(CrashServerMessage::Init(crash_init)); let mut elapsed = Duration::ZERO; let retry_frequency = Duration::from_millis(100); @@ -134,10 +139,6 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl smol::Timer::after(retry_frequency).await; } let client = maybe_client.unwrap(); - client - .send_message(1, serde_json::to_vec(&crash_init).unwrap()) - .unwrap(); - let client = Arc::new(client); #[cfg(target_os = "linux")] @@ -146,6 +147,10 @@ async fn connect_and_keepalive(crash_init: InitCrashHandler, handler: CrashHandl // Publishing the client to the OnceLock makes it visible to the signal // handler callback installed earlier. CRASH_HANDLER.set(client.clone()).ok(); + let messages: Vec<_> = mem::take(PENDING_CRASH_SERVER_MESSAGES.lock().as_mut()); + for message in messages.into_iter() { + send_crash_server_message(message); + } // mem::forget so that the drop is not called mem::forget(handler); info!("crash handler registered"); @@ -177,9 +182,10 @@ unsafe fn suspend_all_other_threads() { } pub struct CrashServer { - initialization_params: OnceLock, - panic_info: OnceLock, - active_gpu: OnceLock, + initialization_params: Mutex>, + panic_info: Mutex>, + active_gpu: Mutex>, + user_info: Mutex>, has_connection: Arc, } @@ -190,6 +196,7 @@ pub struct CrashInfo { pub minidump_error: Option, pub gpus: Vec, pub active_gpu: Option, + pub user_info: Option, } #[derive(Debug, Deserialize, Serialize, Clone)] @@ -207,15 +214,55 @@ pub struct CrashPanic { pub span: String, } +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct UserInfo { + pub metrics_id: Option, + pub is_staff: Option, +} + +fn send_crash_server_message(message: CrashServerMessage) { + let Some(crash_server) = CRASH_HANDLER.get() else { + PENDING_CRASH_SERVER_MESSAGES.lock().push(message); + return; + }; + let data = match serde_json::to_vec(&message) { + 
Ok(data) => data, + Err(err) => { + log::warn!("Failed to serialize crash server message: {:?}", err); + return; + } + }; + + if let Err(err) = crash_server.send_message(0, data) { + log::warn!("Failed to send data to crash server {:?}", err); + } +} + +pub fn set_gpu_info(specs: GpuSpecs) { + send_crash_server_message(CrashServerMessage::GPUInfo(specs)); +} + +pub fn set_user_info(info: UserInfo) { + send_crash_server_message(CrashServerMessage::UserInfo(info)); +} + +#[derive(Serialize, Deserialize, Debug)] +enum CrashServerMessage { + Init(InitCrashHandler), + Panic(CrashPanic), + GPUInfo(GpuSpecs), + UserInfo(UserInfo), +} + impl minidumper::ServerHandler for CrashServer { fn create_minidump_file(&self) -> Result<(File, PathBuf), io::Error> { - let err_message = "Missing initialization data"; let dump_path = paths::logs_dir() .join( &self .initialization_params - .get() - .expect(err_message) + .lock() + .as_ref() + .expect("Missing initialization data") .session_id, ) .with_extension("dmp"); @@ -255,13 +302,14 @@ impl minidumper::ServerHandler for CrashServer { let crash_info = CrashInfo { init: self .initialization_params - .get() - .expect("not initialized") - .clone(), - panic: self.panic_info.get().cloned(), + .lock() + .clone() + .expect("not initialized"), + panic: self.panic_info.lock().clone(), minidump_error, - active_gpu: self.active_gpu.get().cloned(), + active_gpu: self.active_gpu.lock().clone(), gpus, + user_info: self.user_info.lock().clone(), }; let crash_data_path = paths::logs_dir() @@ -273,30 +321,21 @@ impl minidumper::ServerHandler for CrashServer { LoopAction::Exit } - fn on_message(&self, kind: u32, buffer: Vec) { - match kind { - 1 => { - let init_data = - serde_json::from_slice::(&buffer).expect("invalid init data"); - self.initialization_params - .set(init_data) - .expect("already initialized"); + fn on_message(&self, _: u32, buffer: Vec) { + let message: CrashServerMessage = + serde_json::from_slice(&buffer).expect("invalid init 
data"); + match message { + CrashServerMessage::Init(init_data) => { + self.initialization_params.lock().replace(init_data); } - 2 => { - let panic_data = - serde_json::from_slice::(&buffer).expect("invalid panic data"); - self.panic_info.set(panic_data).expect("already panicked"); + CrashServerMessage::Panic(crash_panic) => { + self.panic_info.lock().replace(crash_panic); } - 3 => { - let gpu_specs: system_specs::GpuSpecs = - bincode::deserialize(&buffer).expect("gpu specs"); - // we ignore the case where it was already set because this message is sent - // on each new window. in theory all zed windows should be using the same - // GPU so this is fine. - self.active_gpu.set(gpu_specs).ok(); + CrashServerMessage::GPUInfo(gpu_specs) => { + self.active_gpu.lock().replace(gpu_specs); } - _ => { - panic!("invalid message kind"); + CrashServerMessage::UserInfo(user_info) => { + self.user_info.lock().replace(user_info); } } } @@ -326,37 +365,33 @@ pub fn panic_hook(info: &PanicHookInfo) { // if it's still not there just write panic info and no minidump let retry_frequency = Duration::from_millis(100); for _ in 0..5 { - if let Some(client) = CRASH_HANDLER.get() { - let location = info - .location() - .map_or_else(|| "".to_owned(), |location| location.to_string()); - log::error!("thread '{thread_name}' panicked at {location}:\n{message}..."); - client - .send_message( - 2, - serde_json::to_vec(&CrashPanic { message, span }).unwrap(), - ) - .ok(); - log::error!("triggering a crash to generate a minidump..."); - - #[cfg(target_os = "macos")] - PANIC_THREAD_ID.store( - unsafe { mach2::mach_init::mach_thread_self() }, - Ordering::SeqCst, - ); - - cfg_if::cfg_if! 
{ - if #[cfg(target_os = "windows")] { - // https://learn.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- - CrashHandler.simulate_exception(Some(234)); // (MORE_DATA_AVAILABLE) - break; - } else { - std::process::abort(); - } - } + if CRASH_HANDLER.get().is_some() { + break; } thread::sleep(retry_frequency); } + let location = info + .location() + .map_or_else(|| "".to_owned(), |location| location.to_string()); + log::error!("thread '{thread_name}' panicked at {location}:\n{message}..."); + + send_crash_server_message(CrashServerMessage::Panic(CrashPanic { message, span })); + log::error!("triggering a crash to generate a minidump..."); + + #[cfg(target_os = "macos")] + PANIC_THREAD_ID.store( + unsafe { mach2::mach_init::mach_thread_self() }, + Ordering::SeqCst, + ); + + cfg_if::cfg_if! { + if #[cfg(target_os = "windows")] { + // https://learn.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- + CrashHandler.simulate_exception(Some(234)); // (MORE_DATA_AVAILABLE) + } else { + std::process::abort(); + } + } } #[cfg(target_os = "windows")] @@ -436,10 +471,11 @@ pub fn crash_server(socket: &Path) { server .run( Box::new(CrashServer { - initialization_params: OnceLock::new(), - panic_info: OnceLock::new(), + initialization_params: Mutex::default(), + panic_info: Mutex::default(), + user_info: Mutex::default(), has_connection, - active_gpu: OnceLock::new(), + active_gpu: Mutex::default(), }), &shutdown, Some(CRASH_HANDLER_PING_TIMEOUT), diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 3d9e433d73dac7d79fc008c79b3ab2db5863a8db..6ea308db5a32cf82e48439c477c8bb81f02ab777 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -17,7 +17,6 @@ test-support = [ "gpui/test-support", "gpui_platform/screen-capture", "dep:image", - "dep:semver", "workspace/test-support", "project/test-support", "editor/test-support", @@ -32,7 +31,6 @@ visual-tests = [ "gpui_platform/screen-capture", "gpui_platform/test-support", 
"dep:image", - "dep:semver", "dep:tempfile", "dep:action_log", "dep:agent_servers", @@ -76,7 +74,6 @@ assets.workspace = true audio.workspace = true auto_update.workspace = true auto_update_ui.workspace = true -bincode.workspace = true breadcrumbs.workspace = true call.workspace = true chrono.workspace = true @@ -122,7 +119,7 @@ system_specs.workspace = true gpui.workspace = true gpui_platform = {workspace = true, features=["screen-capture", "font-kit", "wayland", "x11"]} image = { workspace = true, optional = true } -semver = { workspace = true, optional = true } +semver.workspace = true tempfile = { workspace = true, optional = true } clock = { workspace = true, optional = true } acp_thread.workspace = true diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index a3379a6017b7e3b7c26e2a98346e4926e90e0999..0d50339f6c9d42ffa653e5c7565ae6e22441bdca 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -335,7 +335,13 @@ fn main() { crashes::init( InitCrashHandler { session_id, - zed_version: app_version.to_string(), + // strip the build and channel information from the version string, we send them separately + zed_version: semver::Version::new( + app_version.major, + app_version.minor, + app_version.patch, + ) + .to_string(), binary: "zed".to_string(), release_channel: release_channel::RELEASE_CHANNEL_NAME.clone(), commit_sha: app_commit_sha @@ -573,6 +579,19 @@ fn main() { session.id().to_owned(), cx, ); + cx.subscribe(&user_store, { + let telemetry = telemetry.clone(); + move |_, evt: &client::user::Event, _| match evt { + client::user::Event::PrivateUserInfoUpdated => { + crashes::set_user_info(crashes::UserInfo { + metrics_id: telemetry.metrics_id().map(|s| s.to_string()), + is_staff: telemetry.is_staff(), + }); + } + _ => {} + } + }) + .detach(); // We should rename these in the future to `first app open`, `first app open for release channel`, and `app open` if let (Some(system_id), Some(installation_id)) = (&system_id, &installation_id) { 
diff --git a/crates/zed/src/reliability.rs b/crates/zed/src/reliability.rs index d8dc1c4f8fe412b5e8eeb6b09e482a9ed243aaa3..2f284027929b19e5b0d5ac084267cf5548cda667 100644 --- a/crates/zed/src/reliability.rs +++ b/crates/zed/src/reliability.rs @@ -288,16 +288,23 @@ async fn upload_minidump( form = form.text("minidump_error", minidump_error); } - if let Some(id) = client.telemetry().metrics_id() { - form = form.text("sentry[user][id]", id.to_string()); + if let Some(is_staff) = &metadata + .user_info + .as_ref() + .and_then(|user_info| user_info.is_staff) + { form = form.text( "sentry[user][is_staff]", - if client.telemetry().is_staff().unwrap_or_default() { - "true" - } else { - "false" - }, + if *is_staff { "true" } else { "false" }, ); + } + + if let Some(metrics_id) = metadata + .user_info + .as_ref() + .and_then(|user_info| user_info.metrics_id.as_ref()) + { + form = form.text("sentry[user][id]", metrics_id.clone()); } else if let Some(id) = client.telemetry().installation_id() { form = form.text("sentry[user][id]", format!("installation-{}", id)) } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 20629785c7172241f49a0e7a69f9dcc1953f6a95..aeb740c5ec05f5382e3b93527bb2191cb44f9d51 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -422,16 +422,7 @@ pub fn initialize_workspace( if let Some(specs) = window.gpu_specs() { log::info!("Using GPU: {:?}", specs); show_software_emulation_warning_if_needed(specs.clone(), window, cx); - if let Some((crash_server, message)) = crashes::CRASH_HANDLER - .get() - .zip(bincode::serialize(&specs).ok()) - && let Err(err) = crash_server.send_message(3, message) - { - log::warn!( - "Failed to store active gpu info for crash reporting: {}", - err - ); - } + crashes::set_gpu_info(specs); } let edit_prediction_menu_handle = PopoverMenuHandle::default(); From 4d42d3a6b043fe57e6935dc722e1b8a3bc8dcbdd Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Wed, 4 Mar 2026 15:10:13 -0500 Subject: [PATCH 319/548] docs: Remove Preview callouts for stable release (#50736) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR removes Preview callouts from documentation for features that are now in Stable. ## Files Updated • docs/src/collaboration/overview.md • docs/src/debugger.md • docs/src/configuring-languages.md • docs/src/troubleshooting.md • docs/src/outline-panel.md • docs/src/getting-started.md • docs/src/tasks.md • docs/src/ai/edit-prediction.md • docs/src/ai/llm-providers.md ## What This Does Removes callouts like: ```markdown > **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. ``` And: ```markdown > **Changed in Preview (v0.XXX).** See [release notes](/releases#0.XXX). ``` These features are now in Stable, so the callouts are no longer needed. Release Notes: - N/A --- docs/src/ai/edit-prediction.md | 2 -- docs/src/ai/llm-providers.md | 6 ------ docs/src/collaboration/overview.md | 2 -- docs/src/configuring-languages.md | 2 -- docs/src/debugger.md | 2 -- docs/src/getting-started.md | 2 -- docs/src/outline-panel.md | 2 -- docs/src/tasks.md | 2 -- docs/src/troubleshooting.md | 2 -- 9 files changed, 22 deletions(-) diff --git a/docs/src/ai/edit-prediction.md b/docs/src/ai/edit-prediction.md index 973dc9546a8b81ad58fc996102ff25aed2d241a9..92fde3eddd3be0a2dbfb1b6d37065b58cf2ad411 100644 --- a/docs/src/ai/edit-prediction.md +++ b/docs/src/ai/edit-prediction.md @@ -406,8 +406,6 @@ After adding your API key, Codestral will appear in the provider dropdown in the ### Self-Hosted OpenAI-compatible servers -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - You can use any self-hosted server that implements the OpenAI completion API format. This works with vLLM, llama.cpp server, LocalAI, and other compatible servers. 
#### Configuration diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index a4a6274af10d1aea20ed27160704136d9f0eb586..24501ab2d356b8dc4098808ed8e9193cf6e171c6 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -152,8 +152,6 @@ For the most up-to-date supported regions and models, refer to the [Supported Mo #### Extended Context Window {#bedrock-extended-context} -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - Anthropic models on Bedrock support a 1M token extended context window through the `anthropic_beta` API parameter. To enable this feature, set `"allow_extended_context": true` in your Bedrock configuration: ```json [settings] @@ -173,8 +171,6 @@ Zed enables extended context for supported models (Claude Sonnet 4.5 and Claude #### Image Support {#bedrock-image-support} -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - Bedrock models that support vision (Claude 3 and later, Amazon Nova Pro and Lite, Meta Llama 3.2 Vision models, Mistral Pixtral) can receive images in conversations and tool results. ### Anthropic {#anthropic} @@ -630,8 +626,6 @@ The OpenRouter API key will be saved in your keychain. Zed will also use the `OPENROUTER_API_KEY` environment variable if it's defined. -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - When using OpenRouter as your assistant provider, you must explicitly select a model in your settings. OpenRouter no longer provides a default model selection. 
Configure your preferred OpenRouter model in `settings.json`: diff --git a/docs/src/collaboration/overview.md b/docs/src/collaboration/overview.md index 97efdae088d1692ad5840e23c13bc50d4ecb75c7..1022ec683bf5eefab55b9aff939c568098fdda30 100644 --- a/docs/src/collaboration/overview.md +++ b/docs/src/collaboration/overview.md @@ -24,8 +24,6 @@ See the [Data and Privacy FAQs](https://zed.dev/faq#data-and-privacy) for more d ### Selecting Audio Devices -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - You can select specific input and output audio devices instead of using system defaults. To configure audio devices: 1. Open {#kb zed::OpenSettings} diff --git a/docs/src/configuring-languages.md b/docs/src/configuring-languages.md index 91775c3df137e38eb0b6b7b333b49d269b2f3a7c..485d843fd480177376cf4e5e990fc495e2bb60a7 100644 --- a/docs/src/configuring-languages.md +++ b/docs/src/configuring-languages.md @@ -165,8 +165,6 @@ Not all languages in Zed support toolchain discovery and selection, but for thos ### Configuring Language Servers -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - When configuring language servers in your `settings.json`, autocomplete suggestions include all available LSP adapters recognized by Zed, not only those currently active for loaded languages. This helps you discover and configure language servers before opening files that use them. Many language servers accept custom configuration options. You can set these in the `lsp` section of your `settings.json`: diff --git a/docs/src/debugger.md b/docs/src/debugger.md index c659c1410b38166cf11da0af728e18f8c9282054..bf05de0f6ccccff4e95fd622bab7130d655a1167 100644 --- a/docs/src/debugger.md +++ b/docs/src/debugger.md @@ -165,8 +165,6 @@ The debug adapter will then stop whenever an exception of a given kind occurs. W ## Working with Split Panes -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). 
- When debugging with multiple split panes open, Zed shows the active debug line in one pane and preserves your layout in others. If you have the same file open in multiple panes, the debugger picks a pane where the file is already the active tab—it won't switch tabs in panes where the file is inactive. Once the debugger picks a pane, it continues using that pane for subsequent breakpoints during the session. If you drag the tab with the active debug line to a different split, the debugger tracks the move and uses the new pane. diff --git a/docs/src/getting-started.md b/docs/src/getting-started.md index af6a41c26a6f70f073b2d7e45267871962bb1697..a87e1bea0f4c3eacaa330b34874283a0b61b5eb9 100644 --- a/docs/src/getting-started.md +++ b/docs/src/getting-started.md @@ -13,8 +13,6 @@ This guide covers the essential commands, environment setup, and navigation basi ### Welcome Page -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - When you open Zed without a folder, you see the welcome page in the main editor area. The welcome page offers quick actions to open a folder, clone a repository, or view documentation. Once you open a folder or file, the welcome page disappears. If you split the editor into multiple panes, the welcome page appears only in the center pane when empty—other panes show a standard empty state. To reopen the welcome page, close all items in the center pane or use the command palette to search for "Welcome". diff --git a/docs/src/outline-panel.md b/docs/src/outline-panel.md index 1bacc3cacf4f556c9c3a06e59d6f3fac9b8c74b0..7b31725bf2cec844881e0c5b0b41aac864e28fc9 100644 --- a/docs/src/outline-panel.md +++ b/docs/src/outline-panel.md @@ -7,8 +7,6 @@ description: Navigate code structure with Zed's outline panel. View symbols, jum In addition to the modal outline (`cmd-shift-o`), Zed offers an outline panel. 
The outline panel can be deployed via `cmd-shift-b` (`outline panel: toggle focus` via the command palette), or by clicking the `Outline Panel` button in the status bar. -> **Changed in Preview (v0.225).** See [release notes](/releases#0.225). - When viewing a "singleton" buffer (i.e., a single file on a tab), the outline panel works similarly to that of the outline modal-it displays the outline of the current buffer's symbols. Each symbol entry shows its type prefix (such as "struct", "fn", "mod", "impl") along with the symbol name, helping you quickly identify what kind of symbol you're looking at. Clicking on an entry allows you to jump to the associated section in the file. The outline view will also automatically scroll to the section associated with the current cursor position within the file. ![Using the outline panel in a singleton buffer](https://zed.dev/img/outline-panel/singleton.png) diff --git a/docs/src/tasks.md b/docs/src/tasks.md index 0fa659eb2cc58fe63536e721475b0093e0650618..482ca7b4d5779a4861756332ce2c0f25eaad4ad4 100644 --- a/docs/src/tasks.md +++ b/docs/src/tasks.md @@ -225,8 +225,6 @@ This could be useful for launching a terminal application that you want to use i ## VS Code Task Format -> **Preview:** This feature is available in Zed Preview. It will be included in the next Stable release. - When importing VS Code tasks from `.vscode/tasks.json`, you can omit the `label` field. Zed automatically generates labels based on the task type: - **npm tasks**: `npm: + + {{/if}} From 8d4913168c4ea3ac6a4d6cc1b70d3e7d006d8639 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 11 Mar 2026 13:52:03 +0100 Subject: [PATCH 485/548] acp: Update to `0.10.2` (#51280) Updates to `0.10.2`, most notable change is implementing `session/close`. 
Release Notes: - N/A --- Cargo.lock | 35 ++++++++++++++++++----- Cargo.toml | 2 +- crates/acp_thread/src/acp_thread.rs | 2 +- crates/acp_thread/src/connection.rs | 6 +++- crates/agent/src/agent.rs | 6 +++- crates/agent_servers/src/acp.rs | 39 +++++++++++++++++++++----- crates/agent_ui/src/connection_view.rs | 26 +++++++++++------ 7 files changed, 89 insertions(+), 27 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f11d2023b319501778768fdea39fb8dbb242a9e9..6fbffbcaff377bdf49d02afae172a19e72a2d188 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -227,9 +227,9 @@ dependencies = [ [[package]] name = "agent-client-protocol" -version = "0.9.4" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2659b1089101b15db31137710159421cb44785ecdb5ba784be3b4a6f8cb8a475" +checksum = "9c56a59cf6315e99f874d2c1f96c69d2da5ffe0087d211297fc4a41f849770a2" dependencies = [ "agent-client-protocol-schema", "anyhow", @@ -244,16 +244,16 @@ dependencies = [ [[package]] name = "agent-client-protocol-schema" -version = "0.10.8" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bc1fef9c32f03bce2ab44af35b6f483bfd169bf55cc59beeb2e3b1a00ae4d1" +checksum = "e0497b9a95a404e35799904835c57c6f8c69b9d08ccfd3cb5b7d746425cd6789" dependencies = [ "anyhow", "derive_more", "schemars", "serde", "serde_json", - "strum 0.27.2", + "strum 0.28.0", ] [[package]] @@ -7151,7 +7151,7 @@ dependencies = [ "serde", "serde_json", "serde_yaml", - "strum_macros", + "strum_macros 0.27.2", ] [[package]] @@ -16544,7 +16544,16 @@ version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" dependencies = [ - "strum_macros", + "strum_macros 0.27.2", +] + +[[package]] +name = "strum" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9628de9b8791db39ceda2b119bbe13134770b56c138ec1d3af810d045c04f9bd" +dependencies = [ + "strum_macros 0.28.0", ] [[package]] @@ -16559,6 +16568,18 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "strum_macros" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab85eea0270ee17587ed4156089e10b9e6880ee688791d45a905f5b1ca36f664" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "subtle" version = "2.6.1" diff --git a/Cargo.toml b/Cargo.toml index c184837bfd6a67490169b7a6908b17b4d61e121f..f650dace84b1b2e6491acf2806077f72000605f5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -473,7 +473,7 @@ ztracing_macro = { path = "crates/ztracing_macro" } # External crates # -agent-client-protocol = { version = "=0.9.4", features = ["unstable"] } +agent-client-protocol = { version = "=0.10.2", features = ["unstable"] } aho-corasick = "1.1" alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "9d9640d4" } any_vec = "0.14" diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 58252eaddca553eb1da4c960a829a88afb9eb497..95030443f642b019b27758f53fd413c5146857b1 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -4027,7 +4027,7 @@ mod tests { } fn authenticate(&self, method: acp::AuthMethodId, _cx: &mut App) -> Task> { - if self.auth_methods().iter().any(|m| m.id == method) { + if self.auth_methods().iter().any(|m| m.id() == &method) { Task::ready(Ok(())) } else { Task::ready(Err(anyhow!("Invalid Auth Method"))) diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index 1236058226eee840e1a36009df85291a774548dc..4f6aaf86bad68f919c2c5de30214b21ff851c3dd 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -60,7 +60,11 @@ pub trait AgentConnection { } /// Close an existing session. 
Allows the agent to free the session from memory. - fn close_session(&self, _session_id: &acp::SessionId, _cx: &mut App) -> Task> { + fn close_session( + self: Rc, + _session_id: &acp::SessionId, + _cx: &mut App, + ) -> Task> { Task::ready(Err(anyhow::Error::msg("Closing sessions is not supported"))) } diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index a62e219b2d075e10e074b55859fc6c366c25523d..95346d665732b40599b096d480178264601ce6d6 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -1418,7 +1418,11 @@ impl acp_thread::AgentConnection for NativeAgentConnection { true } - fn close_session(&self, session_id: &acp::SessionId, cx: &mut App) -> Task> { + fn close_session( + self: Rc, + session_id: &acp::SessionId, + cx: &mut App, + ) -> Task> { self.0.update(cx, |agent, _cx| { let project_id = agent.sessions.get(session_id).map(|s| s.project_id); agent.sessions.remove(session_id); diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index b9e4eba497ef1e01016a17e34d634fea20cab499..a661289f6221818c6f63c799b0593907bb665eb9 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -279,7 +279,7 @@ impl AcpConnection { acp::InitializeRequest::new(acp::ProtocolVersion::V1) .client_capabilities( acp::ClientCapabilities::new() - .fs(acp::FileSystemCapability::new() + .fs(acp::FileSystemCapabilities::new() .read_text_file(true) .write_text_file(true)) .terminal(true) @@ -331,11 +331,11 @@ impl AcpConnection { "env": command.env.clone().unwrap_or_default(), }); let meta = acp::Meta::from_iter([("terminal-auth".to_string(), value)]); - vec![ - acp::AuthMethod::new("spawn-gemini-cli", "Login") + vec![acp::AuthMethod::Agent( + acp::AuthMethodAgent::new("spawn-gemini-cli", "Login") .description("Login with your Google or Vertex AI account") .meta(meta), - ] + )] } else { response.auth_methods }; @@ -744,6 +744,31 @@ impl AgentConnection for AcpConnection { }) } + fn 
supports_close_session(&self) -> bool { + self.agent_capabilities.session_capabilities.close.is_some() + } + + fn close_session( + self: Rc, + session_id: &acp::SessionId, + cx: &mut App, + ) -> Task> { + if !self.agent_capabilities.session_capabilities.close.is_none() { + return Task::ready(Err(anyhow!(LoadError::Other( + "Closing sessions is not supported by this agent.".into() + )))); + } + + let conn = self.connection.clone(); + let session_id = session_id.clone(); + cx.foreground_executor().spawn(async move { + conn.close_session(acp::CloseSessionRequest::new(session_id.clone())) + .await?; + self.sessions.borrow_mut().remove(&session_id); + Ok(()) + }) + } + fn auth_methods(&self) -> &[acp::AuthMethod] { &self.auth_methods } @@ -1373,10 +1398,10 @@ impl acp::Client for ClientDelegate { Ok(acp::CreateTerminalResponse::new(terminal_id)) } - async fn kill_terminal_command( + async fn kill_terminal( &self, - args: acp::KillTerminalCommandRequest, - ) -> Result { + args: acp::KillTerminalRequest, + ) -> Result { self.session_thread(&args.session_id)? 
.update(&mut self.cx.clone(), |thread, cx| { thread.kill_terminal(args.terminal_id, cx) diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index 3f1f1fb164693e0bb9e0b6d8883b97ab5539ba4f..2fd86f9c9d91abb7d5b08bd7a779b93592f2011c 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -463,7 +463,7 @@ impl ConnectedServerState { let tasks = self .threads .keys() - .map(|id| self.connection.close_session(id, cx)); + .map(|id| self.connection.clone().close_session(id, cx)); let task = futures::future::join_all(tasks); cx.background_spawn(async move { task.await; @@ -1431,7 +1431,7 @@ impl ConnectionView { .connection() .auth_methods() .iter() - .any(|method| method.id.0.as_ref() == "claude-login") + .any(|method| method.id().0.as_ref() == "claude-login") { available_commands.push(acp::AvailableCommand::new("login", "Authenticate")); available_commands.push(acp::AvailableCommand::new("logout", "Authenticate")); @@ -1495,10 +1495,15 @@ impl ConnectionView { let agent_telemetry_id = connection.telemetry_id(); // Check for the experimental "terminal-auth" _meta field - let auth_method = connection.auth_methods().iter().find(|m| m.id == method); + let auth_method = connection.auth_methods().iter().find(|m| m.id() == &method); if let Some(terminal_auth) = auth_method - .and_then(|a| a.meta.as_ref()) + .and_then(|a| match a { + acp::AuthMethod::EnvVar(env_var) => env_var.meta.as_ref(), + acp::AuthMethod::Terminal(terminal) => terminal.meta.as_ref(), + acp::AuthMethod::Agent(agent) => agent.meta.as_ref(), + _ => None, + }) .and_then(|m| m.get("terminal-auth")) { // Extract terminal auth details from meta @@ -1882,7 +1887,7 @@ impl ConnectionView { .enumerate() .rev() .map(|(ix, method)| { - let (method_id, name) = (method.id.0.clone(), method.name.clone()); + let (method_id, name) = (method.id().0.clone(), method.name().to_string()); let agent_telemetry_id = connection.telemetry_id(); 
Button::new(method_id.clone(), name) @@ -1894,8 +1899,8 @@ impl ConnectionView { this.style(ButtonStyle::Outlined) } }) - .when_some(method.description.clone(), |this, description| { - this.tooltip(Tooltip::text(description)) + .when_some(method.description(), |this, description| { + this.tooltip(Tooltip::text(description.to_string())) }) .on_click({ cx.listener(move |this, _, window, cx| { @@ -4074,7 +4079,10 @@ pub(crate) mod tests { fn new() -> Self { Self { authenticated: Arc::new(Mutex::new(false)), - auth_method: acp::AuthMethod::new(Self::AUTH_METHOD_ID, "Test Login"), + auth_method: acp::AuthMethod::Agent(acp::AuthMethodAgent::new( + Self::AUTH_METHOD_ID, + "Test Login", + )), } } } @@ -4127,7 +4135,7 @@ pub(crate) mod tests { method_id: acp::AuthMethodId, _cx: &mut App, ) -> Task> { - if method_id == self.auth_method.id { + if &method_id == self.auth_method.id() { *self.authenticated.lock() = true; Task::ready(Ok(())) } else { From db9d9752c738158e3ded77aaf280ca3901d1ed52 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 11 Mar 2026 14:56:53 +0100 Subject: [PATCH 486/548] agent: Support providers streaming edits out of order (#51286) Release Notes: - N/A --- .../src/tools/streaming_edit_file_tool.rs | 127 +++++++++++++++++- 1 file changed, 124 insertions(+), 3 deletions(-) diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index c326ed3c10170d1c45517103ba02e178bec32c36..574fe078063b0b8e66ceb6cf0503ad139c23cdc4 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -118,7 +118,7 @@ pub struct Edit { pub new_text: String, } -#[derive(Default, Debug, Deserialize)] +#[derive(Clone, Default, Debug, Deserialize)] struct StreamingEditFileToolPartialInput { #[serde(default)] display_description: Option, @@ -132,7 +132,7 @@ struct StreamingEditFileToolPartialInput { edits: Option>, } -#[derive(Default, Debug, Deserialize)] 
+#[derive(Clone, Default, Debug, Deserialize)] pub struct PartialEdit { #[serde(default)] pub old_text: Option, @@ -314,12 +314,19 @@ impl AgentTool for StreamingEditFileTool { ) -> Task> { cx.spawn(async move |cx: &mut AsyncApp| { let mut state: Option = None; + let mut last_partial: Option = None; loop { futures::select! { partial = input.recv_partial().fuse() => { let Some(partial_value) = partial else { break }; if let Ok(parsed) = serde_json::from_value::(partial_value) { + let path_complete = parsed.path.is_some() + && parsed.path.as_ref() == last_partial.as_ref().and_then(|p| p.path.as_ref()); + + last_partial = Some(parsed.clone()); + if state.is_none() + && path_complete && let StreamingEditFileToolPartialInput { path: Some(path), display_description: Some(display_description), @@ -1907,6 +1914,13 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Setup + single edit that stays in-progress (no second edit to prove completion) + sender.send_partial(json!({ + "display_description": "Single edit", + "path": "root/file.txt", + "mode": "edit", + })); + cx.run_until_parked(); + sender.send_partial(json!({ "display_description": "Single edit", "path": "root/file.txt", @@ -3475,6 +3489,12 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + })); + cx.run_until_parked(); + sender.send_partial(json!({ "display_description": "Overwrite file", "path": "root/file.txt", @@ -3550,8 +3570,9 @@ mod tests { // Verify buffer still has old content (no content partial yet) let buffer = project.update(cx, |project, cx| { let path = project.find_project_path("root/file.txt", cx).unwrap(); - project.get_open_buffer(&path, cx).unwrap() + project.open_buffer(path, cx) }); + let buffer = buffer.await.unwrap(); assert_eq!( buffer.read_with(cx, |b, _| b.text()), "old line 1\nold line 
2\nold line 3\n" @@ -3735,6 +3756,106 @@ mod tests { ); } + #[gpui::test] + async fn test_streaming_edit_file_tool_fields_out_of_order_in_write_mode( + cx: &mut TestAppContext, + ) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old_content"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content", + "path": "root" + })); + cx.run_until_parked(); + + // Send final. + sender.send_final(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content", + "path": "root/file.txt" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "new_content"); + } + + #[gpui::test] + async fn test_streaming_edit_file_tool_fields_out_of_order_in_edit_mode( + cx: &mut TestAppContext, + ) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old_content"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content"}] + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}] + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}], + "path": "root" + })); + cx.run_until_parked(); + + // Send final. + sender.send_final(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}], + "path": "root/file.txt" + })); + cx.run_until_parked(); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "new_content"); + } + async fn setup_test_with_fs( cx: &mut TestAppContext, fs: Arc, From 3c82ddf261cd31d6150cd4aebf4ccbfc6518ea2e Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 11 Mar 2026 11:35:59 -0300 Subject: [PATCH 487/548] git_ui: Fix "resolve with agent" merge conflict notification (#51290) Follow up to https://github.com/zed-industries/zed/pull/49807 This PR fixes the merge conflict notification by making it appear only once per a given set of conflicted paths, as opposed to showing every time the `ConflictsUpdated` or `StatusesChanged` even would fire. Release Notes: - N/A --- crates/git_ui/src/conflict_view.rs | 15 +++++++++++---- crates/workspace/src/notifications.rs | 14 ++++++++------ 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/crates/git_ui/src/conflict_view.rs b/crates/git_ui/src/conflict_view.rs index 6c2c0b6f58696147da069b0aebdf55d396f7a388..7bb880abe6d1209aaf6b15d78979cc388bf37a36 100644 --- a/crates/git_ui/src/conflict_view.rs +++ b/crates/git_ui/src/conflict_view.rs @@ -15,7 +15,7 @@ use project::{ git_store::{GitStoreEvent, RepositoryEvent}, }; use settings::Settings; -use std::{ops::Range, sync::Arc}; +use std::{cell::RefCell, ops::Range, rc::Rc, sync::Arc}; use ui::{ActiveTheme, Divider, Element as _, Styled, Window, prelude::*}; use util::{ResultExt as _, debug_panic, maybe}; use workspace::{ @@ -534,7 +534,9 @@ pub(crate) fn register_conflict_notification( ) { let git_store = workspace.project().read(cx).git_store().clone(); - cx.subscribe(&git_store, |workspace, _git_store, event, cx| { + let last_shown_paths: Rc>> = Rc::new(RefCell::new(HashSet::default())); + + cx.subscribe(&git_store, move |workspace, _git_store, event, cx| { let conflicts_changed = matches!( event, GitStoreEvent::ConflictsUpdated @@ -546,10 +548,15 @@ pub(crate) fn register_conflict_notification( let paths = 
collect_conflicted_file_paths(workspace, cx); let notification_id = merge_conflict_notification_id(); + let current_paths_set: HashSet = paths.iter().cloned().collect(); if paths.is_empty() { + last_shown_paths.borrow_mut().clear(); workspace.dismiss_notification(¬ification_id, cx); - } else { + } else if *last_shown_paths.borrow() != current_paths_set { + // Only show the notification if the set of conflicted paths has changed. + // This prevents re-showing after the user dismisses it while working on the same conflicts. + *last_shown_paths.borrow_mut() = current_paths_set; let file_count = paths.len(); workspace.show_notification(notification_id, cx, |cx| { cx.new(|cx| { @@ -560,7 +567,7 @@ pub(crate) fn register_conflict_notification( }; MessageNotification::new(message, cx) - .primary_message("Resolve Conflicts with Agent") + .primary_message("Resolve with Agent") .primary_icon(IconName::ZedAssistant) .primary_icon_color(Color::Muted) .primary_on_click({ diff --git a/crates/workspace/src/notifications.rs b/crates/workspace/src/notifications.rs index 84f479b77e4f0274e0775353d3a7cd5579768f1c..9f4b5538ed67bde3f32969467828296485b7810f 100644 --- a/crates/workspace/src/notifications.rs +++ b/crates/workspace/src/notifications.rs @@ -657,15 +657,17 @@ impl RenderOnce for NotificationFrame { IconButton::new(close_id, close_icon) .tooltip(move |_window, cx| { if suppress { - Tooltip::for_action( - "Suppress.\nClose with click.", - &SuppressNotification, + Tooltip::with_meta( + "Suppress", + Some(&SuppressNotification), + "Click to Close", cx, ) } else if show_suppress_button { - Tooltip::for_action( - "Close.\nSuppress with shift-click.", - &menu::Cancel, + Tooltip::with_meta( + "Close", + Some(&menu::Cancel), + "Shift-click to Suppress", cx, ) } else { From bee94e73923267d83c11d0cbad66293388e3c380 Mon Sep 17 00:00:00 2001 From: "Joseph T. 
Lyons" Date: Wed, 11 Mar 2026 10:41:31 -0400 Subject: [PATCH 488/548] Bump Zed to v0.229 (#51292) Release Notes: - N/A --- Cargo.lock | 2 +- crates/zed/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6fbffbcaff377bdf49d02afae172a19e72a2d188..6570398f5b22f2248a9cd59f84d2cf70080c3591 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -21756,7 +21756,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.228.0" +version = "0.229.0" dependencies = [ "acp_thread", "acp_tools", diff --git a/crates/zed/Cargo.toml b/crates/zed/Cargo.toml index 2f61121d9c0aeb80a77d36bc4836b33c63936584..b38e5a774d7efe6e46642ed226515d7dff7275d3 100644 --- a/crates/zed/Cargo.toml +++ b/crates/zed/Cargo.toml @@ -2,7 +2,7 @@ description = "The fast, collaborative code editor." edition.workspace = true name = "zed" -version = "0.228.0" +version = "0.229.0" publish.workspace = true license = "GPL-3.0-or-later" authors = ["Zed Team "] From a8def21f53c18720d9c846434db9caae486890da Mon Sep 17 00:00:00 2001 From: Cameron Mcloughlin Date: Wed, 11 Mar 2026 14:52:57 +0000 Subject: [PATCH 489/548] agent: Add thread git stats to sidebar (#51287) --- crates/action_log/src/action_log.rs | 45 +++++++++++++++++++ crates/agent_ui/src/connection_view.rs | 4 +- .../src/connection_view/thread_view.rs | 37 --------------- crates/agent_ui/src/sidebar.rs | 19 ++++++++ crates/ui/src/components/ai/thread_item.rs | 45 +++++++++---------- 5 files changed, 86 insertions(+), 64 deletions(-) diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 28245944e39deca7fb2b3f86902f114420d31d20..3faf767c7020763eadc7db6c93af42f650a07434 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -1028,6 +1028,11 @@ impl ActionLog { .collect() } + /// Returns the total number of lines added and removed across all unreviewed buffers. 
+ pub fn diff_stats(&self, cx: &App) -> DiffStats { + DiffStats::all_files(&self.changed_buffers(cx), cx) + } + /// Iterate over buffers changed since last read or edited by the model pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator> { self.tracked_buffers @@ -1044,6 +1049,46 @@ impl ActionLog { } } +#[derive(Default, Debug, Clone, Copy)] +pub struct DiffStats { + pub lines_added: u32, + pub lines_removed: u32, +} + +impl DiffStats { + pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self { + let mut stats = DiffStats::default(); + let diff_snapshot = diff.snapshot(cx); + let buffer_snapshot = buffer.snapshot(); + let base_text = diff_snapshot.base_text(); + + for hunk in diff_snapshot.hunks(&buffer_snapshot) { + let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row); + stats.lines_added += added_rows; + + let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row; + let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row; + let removed_rows = base_end.saturating_sub(base_start); + stats.lines_removed += removed_rows; + } + + stats + } + + pub fn all_files( + changed_buffers: &BTreeMap, Entity>, + cx: &App, + ) -> Self { + let mut total = DiffStats::default(); + for (buffer, diff) in changed_buffers { + let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx); + total.lines_added += stats.lines_added; + total.lines_removed += stats.lines_removed; + } + total + } +} + #[derive(Clone)] pub struct ActionLogTelemetry { pub agent_telemetry_id: SharedString, diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index 2fd86f9c9d91abb7d5b08bd7a779b93592f2011c..b896741cee26e14ed372480f80d6cf8302db180b 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -5,7 +5,7 @@ use acp_thread::{ UserMessageId, }; use acp_thread::{AgentConnection, Plan}; -use action_log::{ActionLog, ActionLogTelemetry}; +use 
action_log::{ActionLog, ActionLogTelemetry, DiffStats}; use agent::{NativeAgentServer, NativeAgentSessionList, SharedThread, ThreadStore}; use agent_client_protocol::{self as acp, PromptCapabilities}; use agent_servers::AgentServer; @@ -46,7 +46,7 @@ use std::sync::Arc; use std::time::Instant; use std::{collections::BTreeMap, rc::Rc, time::Duration}; use terminal_view::terminal_panel::TerminalPanel; -use text::{Anchor, ToPoint as _}; +use text::Anchor; use theme::AgentFontSize; use ui::{ Callout, CircularProgress, CommonAnimationExt, ContextMenu, ContextMenuEntry, CopyButton, diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 771d80f08306838e756a2ea3dd8aa4b378cfd402..d4d23f5a0a0722afc5c588a355a6a9de1b59d194 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -156,43 +156,6 @@ impl ThreadFeedbackState { } } -#[derive(Default, Clone, Copy)] -struct DiffStats { - lines_added: u32, - lines_removed: u32, -} - -impl DiffStats { - fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self { - let mut stats = DiffStats::default(); - let diff_snapshot = diff.snapshot(cx); - let buffer_snapshot = buffer.snapshot(); - let base_text = diff_snapshot.base_text(); - - for hunk in diff_snapshot.hunks(&buffer_snapshot) { - let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row); - stats.lines_added += added_rows; - - let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row; - let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row; - let removed_rows = base_end.saturating_sub(base_start); - stats.lines_removed += removed_rows; - } - - stats - } - - fn all_files(changed_buffers: &BTreeMap, Entity>, cx: &App) -> Self { - let mut total = DiffStats::default(); - for (buffer, diff) in changed_buffers { - let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx); - 
total.lines_added += stats.lines_added; - total.lines_removed += stats.lines_removed; - } - total - } -} - pub enum AcpThreadViewEvent { FirstSendRequested { content: Vec }, } diff --git a/crates/agent_ui/src/sidebar.rs b/crates/agent_ui/src/sidebar.rs index 2679807388eb6261f9bc32be10c10ed500078b22..ae3a4f0ccb9df6073ae24a9c482b6c56de0ea968 100644 --- a/crates/agent_ui/src/sidebar.rs +++ b/crates/agent_ui/src/sidebar.rs @@ -1,5 +1,6 @@ use crate::{AgentPanel, AgentPanelEvent, NewThread}; use acp_thread::ThreadStatus; +use action_log::DiffStats; use agent::ThreadStore; use agent_client_protocol as acp; use agent_settings::AgentSettings; @@ -73,6 +74,7 @@ struct ActiveThreadInfo { icon: IconName, icon_from_external_svg: Option, is_background: bool, + diff_stats: DiffStats, } impl From<&ActiveThreadInfo> for acp_thread::AgentSessionInfo { @@ -98,6 +100,7 @@ struct ThreadEntry { is_live: bool, is_background: bool, highlight_positions: Vec, + diff_stats: DiffStats, } #[derive(Clone)] @@ -402,6 +405,8 @@ impl Sidebar { } }; + let diff_stats = thread.action_log().read(cx).diff_stats(cx); + ActiveThreadInfo { session_id, title, @@ -409,6 +414,7 @@ impl Sidebar { icon, icon_from_external_svg, is_background, + diff_stats, } }) .collect() @@ -472,6 +478,7 @@ impl Sidebar { is_live: false, is_background: false, highlight_positions: Vec::new(), + diff_stats: DiffStats::default(), }); } } @@ -497,6 +504,7 @@ impl Sidebar { thread.icon_from_external_svg = info.icon_from_external_svg.clone(); thread.is_live = true; thread.is_background = info.is_background; + thread.diff_stats = info.diff_stats; } } @@ -1171,6 +1179,12 @@ impl Sidebar { .highlight_positions(thread.highlight_positions.to_vec()) .status(thread.status) .notified(has_notification) + .when(thread.diff_stats.lines_added > 0, |this| { + this.added(thread.diff_stats.lines_added as usize) + }) + .when(thread.diff_stats.lines_removed > 0, |this| { + this.removed(thread.diff_stats.lines_removed as usize) + }) 
.selected(self.focused_thread.as_ref() == Some(&session_info.session_id)) .focused(is_selected) .on_click(cx.listener(move |this, _, window, cx| { @@ -1987,6 +2001,7 @@ mod tests { is_live: false, is_background: false, highlight_positions: Vec::new(), + diff_stats: DiffStats::default(), }), // Active thread with Running status ListEntry::Thread(ThreadEntry { @@ -2005,6 +2020,7 @@ mod tests { is_live: true, is_background: false, highlight_positions: Vec::new(), + diff_stats: DiffStats::default(), }), // Active thread with Error status ListEntry::Thread(ThreadEntry { @@ -2023,6 +2039,7 @@ mod tests { is_live: true, is_background: false, highlight_positions: Vec::new(), + diff_stats: DiffStats::default(), }), // Thread with WaitingForConfirmation status, not active ListEntry::Thread(ThreadEntry { @@ -2041,6 +2058,7 @@ mod tests { is_live: false, is_background: false, highlight_positions: Vec::new(), + diff_stats: DiffStats::default(), }), // Background thread that completed (should show notification) ListEntry::Thread(ThreadEntry { @@ -2059,6 +2077,7 @@ mod tests { is_live: true, is_background: true, highlight_positions: Vec::new(), + diff_stats: DiffStats::default(), }), // View More entry ListEntry::ViewMore { diff --git a/crates/ui/src/components/ai/thread_item.rs b/crates/ui/src/components/ai/thread_item.rs index 3c08bd946710f76ccf49f933b82091a3bcb06e08..edc685159f5c9edc5fa872e9d453d0b81fa9cb16 100644 --- a/crates/ui/src/components/ai/thread_item.rs +++ b/crates/ui/src/components/ai/thread_item.rs @@ -227,6 +227,12 @@ impl RenderOnce for ThreadItem { .gradient_stop(0.8) .group_name("thread-item"); + let has_diff_stats = self.added.is_some() || self.removed.is_some(); + let added_count = self.added.unwrap_or(0); + let removed_count = self.removed.unwrap_or(0); + let diff_stat_id = self.id.clone(); + let has_worktree = self.worktree.is_some(); + v_flex() .id(self.id.clone()) .group("thread-item") @@ -235,7 +241,7 @@ impl RenderOnce for ThreadItem { .cursor_pointer() 
.w_full() .map(|this| { - if self.worktree.is_some() { + if has_worktree || has_diff_stats { this.p_2() } else { this.px_2().py_1() @@ -300,35 +306,24 @@ impl RenderOnce for ThreadItem { .gap_1p5() .child(icon_container()) // Icon Spacing .child(worktree_label) - // TODO: Uncomment the elements below when we're ready to expose this data - // .child(dot_separator()) - // .child( - // Label::new(self.timestamp) - // .size(LabelSize::Small) - // .color(Color::Muted), - // ) - // .child( - // Label::new("•") - // .size(LabelSize::Small) - // .color(Color::Muted) - // .alpha(0.5), - // ) - // .when(has_no_changes, |this| { - // this.child( - // Label::new("No Changes") - // .size(LabelSize::Small) - // .color(Color::Muted), - // ) - // }) - .when(self.added.is_some() || self.removed.is_some(), |this| { + .when(has_diff_stats, |this| { this.child(DiffStat::new( - self.id, - self.added.unwrap_or(0), - self.removed.unwrap_or(0), + diff_stat_id.clone(), + added_count, + removed_count, )) }), ) }) + .when(!has_worktree && has_diff_stats, |this| { + this.child( + h_flex() + .min_w_0() + .gap_1p5() + .child(icon_container()) // Icon Spacing + .child(DiffStat::new(diff_stat_id, added_count, removed_count)), + ) + }) .when_some(self.on_click, |this, on_click| this.on_click(on_click)) } } From 2b425aa102142932a88275341913e9e3d99bbbec Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Wed, 11 Mar 2026 17:05:22 +0200 Subject: [PATCH 490/548] Limit editors' refresh runnables (#51299) Before, both rust-analyzer's LSPext tasks and tree-sitter tasks were queried on the entire multi buffer range. The PR moves all runnable-related logic into a submodule, and reworks the logic to consider only the visible buffers. Singleton buffers have their tasks resolved for the entire range still (same as LSPext tasks work), multi buffers have their buffers' data cached and reused. 
Release Notes: - Improved multi buffer's runnables resolution performance --- crates/editor/src/editor.rs | 526 ++--------------- crates/editor/src/editor_tests.rs | 17 +- crates/editor/src/element.rs | 6 +- crates/editor/src/runnables.rs | 915 ++++++++++++++++++++++++++++++ crates/editor/src/tasks.rs | 110 ---- crates/tasks_ui/src/tasks_ui.rs | 4 +- 6 files changed, 979 insertions(+), 599 deletions(-) create mode 100644 crates/editor/src/runnables.rs delete mode 100644 crates/editor/src/tasks.rs diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index a08ac3bbc466d159ce81a7aa3bebf82599914a0b..ca3dd81ab072d0e20389318515049793a8c827ef 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -35,13 +35,13 @@ mod lsp_ext; mod mouse_context_menu; pub mod movement; mod persistence; +mod runnables; mod rust_analyzer_ext; pub mod scroll; mod selections_collection; pub mod semantic_tokens; mod split; pub mod split_editor_view; -pub mod tasks; #[cfg(test)] mod code_completion_tests; @@ -133,8 +133,8 @@ use language::{ BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape, DiagnosticEntryRef, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind, IndentSize, Language, LanguageName, LanguageRegistry, LanguageScope, LocalFile, OffsetRangeExt, - OutlineItem, Point, Runnable, Selection, SelectionGoal, TextObject, TransactionId, - TreeSitterOptions, WordsQuery, + OutlineItem, Point, Selection, SelectionGoal, TextObject, TransactionId, TreeSitterOptions, + WordsQuery, language_settings::{ self, LanguageSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode, all_language_settings, language_settings, @@ -158,7 +158,7 @@ use project::{ BreakpointWithPosition, CodeAction, Completion, CompletionDisplayOptions, CompletionIntent, CompletionResponse, CompletionSource, DisableAiSettings, DocumentHighlight, InlayHint, InlayId, InvalidationStrategy, Location, LocationLink, LspAction, 
PrepareRenameResponse, Project, - ProjectItem, ProjectPath, ProjectTransaction, TaskSourceKind, + ProjectItem, ProjectPath, ProjectTransaction, debugger::{ breakpoint_store::{ Breakpoint, BreakpointEditAction, BreakpointSessionState, BreakpointState, @@ -200,7 +200,7 @@ use std::{ sync::Arc, time::{Duration, Instant}, }; -use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables}; +use task::TaskVariables; use text::{BufferId, FromAnchor, OffsetUtf16, Rope, ToOffset as _, ToPoint as _}; use theme::{ AccentColors, ActiveTheme, GlobalTheme, PlayerColor, StatusColors, SyntaxTheme, Theme, @@ -231,6 +231,7 @@ use crate::{ InlineValueCache, inlay_hints::{LspInlayHintData, inlay_hint_settings}, }, + runnables::{ResolvedTasks, RunnableData, RunnableTasks}, scroll::{ScrollOffset, ScrollPixelOffset}, selections_collection::resolve_selections_wrapping_blocks, semantic_tokens::SemanticTokenState, @@ -857,37 +858,6 @@ impl BufferSerialization { } } -#[derive(Clone, Debug)] -struct RunnableTasks { - templates: Vec<(TaskSourceKind, TaskTemplate)>, - offset: multi_buffer::Anchor, - // We need the column at which the task context evaluation should take place (when we're spawning it via gutter). - column: u32, - // Values of all named captures, including those starting with '_' - extra_variables: HashMap, - // Full range of the tagged region. We use it to determine which `extra_variables` to grab for context resolution in e.g. a modal. - context_range: Range, -} - -impl RunnableTasks { - fn resolve<'a>( - &'a self, - cx: &'a task::TaskContext, - ) -> impl Iterator + 'a { - self.templates.iter().filter_map(|(kind, template)| { - template - .resolve_task(&kind.to_id_base(), cx) - .map(|task| (kind.clone(), task)) - }) - } -} - -#[derive(Clone)] -pub struct ResolvedTasks { - templates: SmallVec<[(TaskSourceKind, ResolvedTask); 1]>, - position: Anchor, -} - /// Addons allow storing per-editor state in other crates (e.g. 
Vim) pub trait Addon: 'static { fn extend_key_context(&self, _: &mut KeyContext, _: &App) {} @@ -1295,8 +1265,7 @@ pub struct Editor { last_bounds: Option>, last_position_map: Option>, expect_bounds_change: Option>, - tasks: BTreeMap<(BufferId, BufferRow), RunnableTasks>, - tasks_update_task: Option>, + runnables: RunnableData, breakpoint_store: Option>, gutter_breakpoint_indicator: (Option, Option>), pub(crate) gutter_diff_review_indicator: (Option, Option>), @@ -2173,16 +2142,9 @@ impl Editor { editor.registered_buffers.clear(); editor.register_visible_buffers(cx); editor.invalidate_semantic_tokens(None); + editor.refresh_runnables(window, cx); editor.update_lsp_data(None, window, cx); editor.refresh_inlay_hints(InlayHintRefreshReason::ServerRemoved, cx); - if editor.tasks_update_task.is_none() { - editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); - } - } - project::Event::LanguageServerAdded(..) => { - if editor.tasks_update_task.is_none() { - editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); - } } project::Event::SnippetEdit(id, snippet_edits) => { // todo(lw): Non singletons @@ -2210,6 +2172,7 @@ impl Editor { let buffer_id = *buffer_id; if editor.buffer().read(cx).buffer(buffer_id).is_some() { editor.register_buffer(buffer_id, cx); + editor.refresh_runnables(window, cx); editor.update_lsp_data(Some(buffer_id), window, cx); editor.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); refresh_linked_ranges(editor, window, cx); @@ -2288,7 +2251,7 @@ impl Editor { &task_inventory, window, |editor, _, window, cx| { - editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); + editor.refresh_runnables(window, cx); }, )); }; @@ -2529,7 +2492,6 @@ impl Editor { }), blame: None, blame_subscription: None, - tasks: BTreeMap::default(), breakpoint_store, gutter_breakpoint_indicator: (None, None), @@ -2565,7 +2527,7 @@ impl Editor { ] }) .unwrap_or_default(), - tasks_update_task: None, + runnables: 
RunnableData::new(), pull_diagnostics_task: Task::ready(()), colors: None, refresh_colors_task: Task::ready(()), @@ -2632,7 +2594,6 @@ impl Editor { cx.notify(); })); } - editor.tasks_update_task = Some(editor.refresh_runnables(window, cx)); editor._subscriptions.extend(project_subscriptions); editor._subscriptions.push(cx.subscribe_in( @@ -2668,6 +2629,7 @@ impl Editor { ); if !editor.buffer().read(cx).is_singleton() { editor.update_lsp_data(None, window, cx); + editor.refresh_runnables(window, cx); } }) .ok(); @@ -5791,18 +5753,11 @@ impl Editor { let display_snapshot = self.display_map.update(cx, |map, cx| map.snapshot(cx)); let multi_buffer = self.buffer().read(cx); let multi_buffer_snapshot = multi_buffer.snapshot(cx); - let multi_buffer_visible_start = self - .scroll_manager - .native_anchor(&display_snapshot, cx) - .anchor - .to_point(&multi_buffer_snapshot); - let multi_buffer_visible_end = multi_buffer_snapshot.clip_point( - multi_buffer_visible_start - + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0), - Bias::Left, - ); multi_buffer_snapshot - .range_to_buffer_ranges(multi_buffer_visible_start..=multi_buffer_visible_end) + .range_to_buffer_ranges( + self.multi_buffer_visible_range(&display_snapshot, cx) + .to_inclusive(), + ) .into_iter() .filter(|(_, excerpt_visible_range, _)| !excerpt_visible_range.is_empty()) .filter_map(|(buffer, excerpt_visible_range, excerpt_id)| { @@ -6737,8 +6692,8 @@ impl Editor { }; let buffer_id = buffer.read(cx).remote_id(); let tasks = self - .tasks - .get(&(buffer_id, buffer_row)) + .runnables + .runnables((buffer_id, buffer_row)) .map(|t| Arc::new(t.to_owned())); if !self.focus_handle.is_focused(window) { @@ -7789,24 +7744,13 @@ impl Editor { self.debounced_selection_highlight_complete = false; } if on_buffer_edit || query_changed { - let multi_buffer_visible_start = self - .scroll_manager - .native_anchor(&display_snapshot, cx) - .anchor - .to_point(&multi_buffer_snapshot); - let 
multi_buffer_visible_end = multi_buffer_snapshot.clip_point( - multi_buffer_visible_start - + Point::new(self.visible_line_count().unwrap_or(0.).ceil() as u32, 0), - Bias::Left, - ); - let multi_buffer_visible_range = multi_buffer_visible_start..multi_buffer_visible_end; self.quick_selection_highlight_task = Some(( query_range.clone(), self.update_selection_occurrence_highlights( snapshot.buffer.clone(), query_text.clone(), query_range.clone(), - multi_buffer_visible_range, + self.multi_buffer_visible_range(&display_snapshot, cx), false, window, cx, @@ -7841,6 +7785,27 @@ impl Editor { } } + pub fn multi_buffer_visible_range( + &self, + display_snapshot: &DisplaySnapshot, + cx: &App, + ) -> Range { + let visible_start = self + .scroll_manager + .native_anchor(display_snapshot, cx) + .anchor + .to_point(display_snapshot.buffer_snapshot()) + .to_display_point(display_snapshot); + + let mut target_end = visible_start; + *target_end.row_mut() += self.visible_line_count().unwrap_or(0.).ceil() as u32; + + visible_start.to_point(display_snapshot) + ..display_snapshot + .clip_point(target_end, Bias::Right) + .to_point(display_snapshot) + } + pub fn refresh_edit_prediction( &mut self, debounce: bool, @@ -8809,19 +8774,6 @@ impl Editor { Some(self.edit_prediction_provider.as_ref()?.provider.clone()) } - fn clear_tasks(&mut self) { - self.tasks.clear() - } - - fn insert_tasks(&mut self, key: (BufferId, BufferRow), value: RunnableTasks) { - if self.tasks.insert(key, value).is_some() { - // This case should hopefully be rare, but just in case... - log::error!( - "multiple different run targets found on a single line, only the last target will be rendered" - ) - } - } - /// Get all display points of breakpoints that will be rendered within editor /// /// This function is used to handle overlaps between breakpoints and Code action/runner symbol. 
@@ -9199,156 +9151,6 @@ impl Editor { }) } - pub fn spawn_nearest_task( - &mut self, - action: &SpawnNearestTask, - window: &mut Window, - cx: &mut Context, - ) { - let Some((workspace, _)) = self.workspace.clone() else { - return; - }; - let Some(project) = self.project.clone() else { - return; - }; - - // Try to find a closest, enclosing node using tree-sitter that has a task - let Some((buffer, buffer_row, tasks)) = self - .find_enclosing_node_task(cx) - // Or find the task that's closest in row-distance. - .or_else(|| self.find_closest_task(cx)) - else { - return; - }; - - let reveal_strategy = action.reveal; - let task_context = Self::build_tasks_context(&project, &buffer, buffer_row, &tasks, cx); - cx.spawn_in(window, async move |_, cx| { - let context = task_context.await?; - let (task_source_kind, mut resolved_task) = tasks.resolve(&context).next()?; - - let resolved = &mut resolved_task.resolved; - resolved.reveal = reveal_strategy; - - workspace - .update_in(cx, |workspace, window, cx| { - workspace.schedule_resolved_task( - task_source_kind, - resolved_task, - false, - window, - cx, - ); - }) - .ok() - }) - .detach(); - } - - fn find_closest_task( - &mut self, - cx: &mut Context, - ) -> Option<(Entity, u32, Arc)> { - let cursor_row = self - .selections - .newest_adjusted(&self.display_snapshot(cx)) - .head() - .row; - - let ((buffer_id, row), tasks) = self - .tasks - .iter() - .min_by_key(|((_, row), _)| cursor_row.abs_diff(*row))?; - - let buffer = self.buffer.read(cx).buffer(*buffer_id)?; - let tasks = Arc::new(tasks.to_owned()); - Some((buffer, *row, tasks)) - } - - fn find_enclosing_node_task( - &mut self, - cx: &mut Context, - ) -> Option<(Entity, u32, Arc)> { - let snapshot = self.buffer.read(cx).snapshot(cx); - let offset = self - .selections - .newest::(&self.display_snapshot(cx)) - .head(); - let mut excerpt = snapshot.excerpt_containing(offset..offset)?; - let offset = excerpt.map_offset_to_buffer(offset); - let buffer_id = 
excerpt.buffer().remote_id(); - - let layer = excerpt.buffer().syntax_layer_at(offset)?; - let mut cursor = layer.node().walk(); - - while cursor.goto_first_child_for_byte(offset.0).is_some() { - if cursor.node().end_byte() == offset.0 { - cursor.goto_next_sibling(); - } - } - - // Ascend to the smallest ancestor that contains the range and has a task. - loop { - let node = cursor.node(); - let node_range = node.byte_range(); - let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row; - - // Check if this node contains our offset - if node_range.start <= offset.0 && node_range.end >= offset.0 { - // If it contains offset, check for task - if let Some(tasks) = self.tasks.get(&(buffer_id, symbol_start_row)) { - let buffer = self.buffer.read(cx).buffer(buffer_id)?; - return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned()))); - } - } - - if !cursor.goto_parent() { - break; - } - } - None - } - - fn render_run_indicator( - &self, - _style: &EditorStyle, - is_active: bool, - row: DisplayRow, - breakpoint: Option<(Anchor, Breakpoint, Option)>, - cx: &mut Context, - ) -> IconButton { - let color = Color::Muted; - let position = breakpoint.as_ref().map(|(anchor, _, _)| *anchor); - - IconButton::new( - ("run_indicator", row.0 as usize), - ui::IconName::PlayOutlined, - ) - .shape(ui::IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(color) - .toggle_state(is_active) - .on_click(cx.listener(move |editor, e: &ClickEvent, window, cx| { - let quick_launch = match e { - ClickEvent::Keyboard(_) => true, - ClickEvent::Mouse(e) => e.down.button == MouseButton::Left, - }; - - window.focus(&editor.focus_handle(cx), cx); - editor.toggle_code_actions( - &ToggleCodeActions { - deployed_from: Some(CodeActionSource::RunMenu(row)), - quick_launch, - }, - window, - cx, - ); - })) - .on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| { - editor.set_breakpoint_context_menu(row, position, event.position(), window, cx); - 
})) - } - pub fn context_menu_visible(&self) -> bool { !self.edit_prediction_preview_is_active() && self @@ -17153,241 +16955,6 @@ impl Editor { }); } - fn refresh_runnables(&mut self, window: &mut Window, cx: &mut Context) -> Task<()> { - if !self.mode().is_full() - || !EditorSettings::get_global(cx).gutter.runnables - || !self.enable_runnables - { - self.clear_tasks(); - return Task::ready(()); - } - let project = self.project().map(Entity::downgrade); - let task_sources = self.lsp_task_sources(cx); - let multi_buffer = self.buffer.downgrade(); - let lsp_data_enabled = self.lsp_data_enabled(); - cx.spawn_in(window, async move |editor, cx| { - cx.background_executor().timer(UPDATE_DEBOUNCE).await; - let Some(project) = project.and_then(|p| p.upgrade()) else { - return; - }; - let Ok(display_snapshot) = editor.update(cx, |this, cx| { - this.display_map.update(cx, |map, cx| map.snapshot(cx)) - }) else { - return; - }; - - let hide_runnables = project.update(cx, |project, _| project.is_via_collab()); - if hide_runnables { - return; - } - let new_rows = cx - .background_spawn({ - let snapshot = display_snapshot.clone(); - async move { - snapshot - .buffer_snapshot() - .runnable_ranges(Anchor::min()..Anchor::max()) - .collect() - } - }) - .await; - let lsp_tasks = if lsp_data_enabled { - let Ok(lsp_tasks) = - cx.update(|_, cx| crate::lsp_tasks(project.clone(), &task_sources, None, cx)) - else { - return; - }; - lsp_tasks.await - } else { - Vec::new() - }; - - let Ok(mut lsp_tasks_by_rows) = cx.update(|_, cx| { - lsp_tasks - .into_iter() - .flat_map(|(kind, tasks)| { - tasks.into_iter().filter_map(move |(location, task)| { - Some((kind.clone(), location?, task)) - }) - }) - .fold(HashMap::default(), |mut acc, (kind, location, task)| { - let buffer = location.target.buffer; - let buffer_snapshot = buffer.read(cx).snapshot(); - let offset = display_snapshot.buffer_snapshot().excerpts().find_map( - |(excerpt_id, snapshot, _)| { - if snapshot.remote_id() == 
buffer_snapshot.remote_id() { - display_snapshot - .buffer_snapshot() - .anchor_in_excerpt(excerpt_id, location.target.range.start) - } else { - None - } - }, - ); - if let Some(offset) = offset { - let task_buffer_range = - location.target.range.to_point(&buffer_snapshot); - let context_buffer_range = - task_buffer_range.to_offset(&buffer_snapshot); - let context_range = BufferOffset(context_buffer_range.start) - ..BufferOffset(context_buffer_range.end); - - acc.entry((buffer_snapshot.remote_id(), task_buffer_range.start.row)) - .or_insert_with(|| RunnableTasks { - templates: Vec::new(), - offset, - column: task_buffer_range.start.column, - extra_variables: HashMap::default(), - context_range, - }) - .templates - .push((kind, task.original_task().clone())); - } - - acc - }) - }) else { - return; - }; - - let Ok(prefer_lsp) = multi_buffer.update(cx, |buffer, cx| { - buffer.language_settings(cx).tasks.prefer_lsp - }) else { - return; - }; - - let rows = Self::runnable_rows( - project, - display_snapshot, - prefer_lsp && !lsp_tasks_by_rows.is_empty(), - new_rows, - cx.clone(), - ) - .await; - editor - .update(cx, |editor, _| { - editor.clear_tasks(); - for (key, mut value) in rows { - if let Some(lsp_tasks) = lsp_tasks_by_rows.remove(&key) { - value.templates.extend(lsp_tasks.templates); - } - - editor.insert_tasks(key, value); - } - for (key, value) in lsp_tasks_by_rows { - editor.insert_tasks(key, value); - } - }) - .ok(); - }) - } - - fn runnable_rows( - project: Entity, - snapshot: DisplaySnapshot, - prefer_lsp: bool, - runnable_ranges: Vec<(Range, language::RunnableRange)>, - cx: AsyncWindowContext, - ) -> Task> { - cx.spawn(async move |cx| { - let mut runnable_rows = Vec::with_capacity(runnable_ranges.len()); - for (run_range, mut runnable) in runnable_ranges { - let Some(tasks) = cx - .update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx)) - .ok() - else { - continue; - }; - let mut tasks = tasks.await; - - if prefer_lsp { - 
tasks.retain(|(task_kind, _)| { - !matches!(task_kind, TaskSourceKind::Language { .. }) - }); - } - if tasks.is_empty() { - continue; - } - - let point = run_range.start.to_point(&snapshot.buffer_snapshot()); - let Some(row) = snapshot - .buffer_snapshot() - .buffer_line_for_row(MultiBufferRow(point.row)) - .map(|(_, range)| range.start.row) - else { - continue; - }; - - let context_range = - BufferOffset(runnable.full_range.start)..BufferOffset(runnable.full_range.end); - runnable_rows.push(( - (runnable.buffer_id, row), - RunnableTasks { - templates: tasks, - offset: snapshot.buffer_snapshot().anchor_before(run_range.start), - context_range, - column: point.column, - extra_variables: runnable.extra_captures, - }, - )); - } - runnable_rows - }) - } - - fn templates_with_tags( - project: &Entity, - runnable: &mut Runnable, - cx: &mut App, - ) -> Task> { - let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| { - let (worktree_id, file) = project - .buffer_for_id(runnable.buffer, cx) - .and_then(|buffer| buffer.read(cx).file()) - .map(|file| (file.worktree_id(cx), file.clone())) - .unzip(); - - ( - project.task_store().read(cx).task_inventory().cloned(), - worktree_id, - file, - ) - }); - - let tags = mem::take(&mut runnable.tags); - let language = runnable.language.clone(); - cx.spawn(async move |cx| { - let mut templates_with_tags = Vec::new(); - if let Some(inventory) = inventory { - for RunnableTag(tag) in tags { - let new_tasks = inventory.update(cx, |inventory, cx| { - inventory.list_tasks(file.clone(), Some(language.clone()), worktree_id, cx) - }); - templates_with_tags.extend(new_tasks.await.into_iter().filter( - move |(_, template)| { - template.tags.iter().any(|source_tag| source_tag == &tag) - }, - )); - } - } - templates_with_tags.sort_by_key(|(kind, _)| kind.to_owned()); - - if let Some((leading_tag_source, _)) = templates_with_tags.first() { - // Strongest source wins; if we have worktree tag binding, prefer that to - // global and 
language bindings; - // if we have a global binding, prefer that to language binding. - let first_mismatch = templates_with_tags - .iter() - .position(|(tag_source, _)| tag_source != leading_tag_source); - if let Some(index) = first_mismatch { - templates_with_tags.truncate(index); - } - } - - templates_with_tags - }) - } - pub fn move_to_enclosing_bracket( &mut self, _: &MoveToEnclosingBracket, @@ -24184,7 +23751,6 @@ impl Editor { predecessor, excerpts, } => { - self.tasks_update_task = Some(self.refresh_runnables(window, cx)); let buffer_id = buffer.read(cx).remote_id(); if self.buffer.read(cx).diff_for(buffer_id).is_none() && let Some(project) = &self.project @@ -24202,6 +23768,7 @@ impl Editor { .invalidate_buffer(&buffer.read(cx).remote_id()); self.update_lsp_data(Some(buffer_id), window, cx); self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + self.refresh_runnables(window, cx); self.colorize_brackets(false, cx); self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); cx.emit(EditorEvent::ExcerptsAdded { @@ -24220,8 +23787,7 @@ impl Editor { self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx); for buffer_id in removed_buffer_ids { self.registered_buffers.remove(buffer_id); - self.tasks - .retain(|(task_buffer_id, _), _| task_buffer_id != buffer_id); + self.clear_runnables(Some(*buffer_id)); self.semantic_token_state.invalidate_buffer(buffer_id); self.display_map.update(cx, |display_map, cx| { display_map.invalidate_semantic_highlights(*buffer_id); @@ -24263,10 +23829,12 @@ impl Editor { } self.colorize_brackets(false, cx); self.update_lsp_data(None, window, cx); + self.refresh_runnables(window, cx); cx.emit(EditorEvent::ExcerptsExpanded { ids: ids.clone() }) } multi_buffer::Event::Reparsed(buffer_id) => { - self.tasks_update_task = Some(self.refresh_runnables(window, cx)); + self.clear_runnables(Some(*buffer_id)); + self.refresh_runnables(window, cx); 
self.refresh_selected_text_highlights(&self.display_snapshot(cx), true, window, cx); self.colorize_brackets(true, cx); jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx); @@ -24274,7 +23842,7 @@ impl Editor { cx.emit(EditorEvent::Reparsed(*buffer_id)); } multi_buffer::Event::DiffHunksToggled => { - self.tasks_update_task = Some(self.refresh_runnables(window, cx)); + self.refresh_runnables(window, cx); } multi_buffer::Event::LanguageChanged(buffer_id, is_fresh_language) => { if !is_fresh_language { @@ -24410,7 +23978,7 @@ impl Editor { .unwrap_or(DiagnosticSeverity::Hint); self.set_max_diagnostics_severity(new_severity, cx); } - self.tasks_update_task = Some(self.refresh_runnables(window, cx)); + self.refresh_runnables(window, cx); self.update_edit_prediction_settings(cx); self.refresh_edit_prediction(true, false, window, cx); self.refresh_inline_values(cx); diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index d3da58733dd0a24622a6dcde87f638069e206cf4..fe71cb76f0f16dc7a928ccff725585c0e857c62e 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -5,6 +5,7 @@ use crate::{ edit_prediction_tests::FakeEditPredictionDelegate, element::StickyHeader, linked_editing_ranges::LinkedEditingRanges, + runnables::RunnableTasks, scroll::scroll_amount::ScrollAmount, test::{ assert_text_with_selections, build_editor, editor_content_with_blocks, @@ -24403,20 +24404,24 @@ async fn test_find_enclosing_node_with_task(cx: &mut TestAppContext) { editor.update_in(cx, |editor, window, cx| { let snapshot = editor.buffer().read(cx).snapshot(cx); - editor.tasks.insert( - (buffer.read(cx).remote_id(), 3), + editor.runnables.insert( + buffer.read(cx).remote_id(), + 3, + buffer.read(cx).version(), RunnableTasks { - templates: vec![], + templates: Vec::new(), offset: snapshot.anchor_before(MultiBufferOffset(43)), column: 0, extra_variables: HashMap::default(), context_range: 
BufferOffset(43)..BufferOffset(85), }, ); - editor.tasks.insert( - (buffer.read(cx).remote_id(), 8), + editor.runnables.insert( + buffer.read(cx).remote_id(), + 8, + buffer.read(cx).version(), RunnableTasks { - templates: vec![], + templates: Vec::new(), offset: snapshot.anchor_before(MultiBufferOffset(86)), column: 0, extra_variables: HashMap::default(), diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index b7207fce71bc71c5bdd5962ca3328030935238ca..3b1356525960654ea88c6cfa84115f1e67ac2e5b 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -3275,9 +3275,9 @@ impl EditorElement { snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right); editor - .tasks - .iter() - .filter_map(|(_, tasks)| { + .runnables + .all_runnables() + .filter_map(|tasks| { let multibuffer_point = tasks.offset.to_point(&snapshot.buffer_snapshot()); if multibuffer_point < offset_range_start || multibuffer_point > offset_range_end diff --git a/crates/editor/src/runnables.rs b/crates/editor/src/runnables.rs new file mode 100644 index 0000000000000000000000000000000000000000..9fa6b89ec130e74f388c5e82b9b346197bb13abb --- /dev/null +++ b/crates/editor/src/runnables.rs @@ -0,0 +1,915 @@ +use std::{collections::BTreeMap, mem, ops::Range, sync::Arc}; + +use clock::Global; +use collections::HashMap; +use gpui::{ + App, AppContext as _, AsyncWindowContext, ClickEvent, Context, Entity, Focusable as _, + MouseButton, Task, Window, +}; +use language::{Buffer, BufferRow, Runnable}; +use lsp::LanguageServerName; +use multi_buffer::{ + Anchor, BufferOffset, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot, ToPoint as _, +}; +use project::{ + Location, Project, TaskSourceKind, + debugger::breakpoint_store::{Breakpoint, BreakpointSessionState}, + project_settings::ProjectSettings, +}; +use settings::Settings as _; +use smallvec::SmallVec; +use task::{ResolvedTask, RunnableTag, TaskContext, TaskTemplate, TaskVariables, VariableName}; 
+use text::{BufferId, OffsetRangeExt as _, ToOffset as _, ToPoint as _}; +use ui::{Clickable as _, Color, IconButton, IconSize, Toggleable as _}; + +use crate::{ + CodeActionSource, Editor, EditorSettings, EditorStyle, RangeToAnchorExt, SpawnNearestTask, + ToggleCodeActions, UPDATE_DEBOUNCE, display_map::DisplayRow, +}; + +#[derive(Debug)] +pub(super) struct RunnableData { + runnables: HashMap)>, + runnables_update_task: Task<()>, +} + +impl RunnableData { + pub fn new() -> Self { + Self { + runnables: HashMap::default(), + runnables_update_task: Task::ready(()), + } + } + + pub fn runnables( + &self, + (buffer_id, buffer_row): (BufferId, BufferRow), + ) -> Option<&RunnableTasks> { + self.runnables.get(&buffer_id)?.1.get(&buffer_row) + } + + pub fn all_runnables(&self) -> impl Iterator { + self.runnables + .values() + .flat_map(|(_, tasks)| tasks.values()) + } + + pub fn has_cached(&self, buffer_id: BufferId, version: &Global) -> bool { + self.runnables + .get(&buffer_id) + .is_some_and(|(cached_version, _)| !version.changed_since(cached_version)) + } + + #[cfg(test)] + pub fn insert( + &mut self, + buffer_id: BufferId, + buffer_row: BufferRow, + version: Global, + tasks: RunnableTasks, + ) { + self.runnables + .entry(buffer_id) + .or_insert_with(|| (version, BTreeMap::default())) + .1 + .insert(buffer_row, tasks); + } +} + +#[derive(Clone, Debug)] +pub struct RunnableTasks { + pub templates: Vec<(TaskSourceKind, TaskTemplate)>, + pub offset: multi_buffer::Anchor, + // We need the column at which the task context evaluation should take place (when we're spawning it via gutter). + pub column: u32, + // Values of all named captures, including those starting with '_' + pub extra_variables: HashMap, + // Full range of the tagged region. We use it to determine which `extra_variables` to grab for context resolution in e.g. a modal. 
+ pub context_range: Range, +} + +impl RunnableTasks { + pub fn resolve<'a>( + &'a self, + cx: &'a task::TaskContext, + ) -> impl Iterator + 'a { + self.templates.iter().filter_map(|(kind, template)| { + template + .resolve_task(&kind.to_id_base(), cx) + .map(|task| (kind.clone(), task)) + }) + } +} + +#[derive(Clone)] +pub struct ResolvedTasks { + pub templates: SmallVec<[(TaskSourceKind, ResolvedTask); 1]>, + pub position: Anchor, +} + +impl Editor { + pub fn refresh_runnables(&mut self, window: &mut Window, cx: &mut Context) { + if !self.mode().is_full() + || !EditorSettings::get_global(cx).gutter.runnables + || !self.enable_runnables + { + self.clear_runnables(None); + return; + } + if let Some(buffer) = self.buffer().read(cx).as_singleton() { + if self + .runnables + .has_cached(buffer.read(cx).remote_id(), &buffer.read(cx).version()) + { + return; + } + } + + let project = self.project().map(Entity::downgrade); + let lsp_task_sources = self.lsp_task_sources(true, true, cx); + let multi_buffer = self.buffer.downgrade(); + self.runnables.runnables_update_task = cx.spawn_in(window, async move |editor, cx| { + cx.background_executor().timer(UPDATE_DEBOUNCE).await; + let Some(project) = project.and_then(|p| p.upgrade()) else { + return; + }; + + let hide_runnables = project.update(cx, |project, _| project.is_via_collab()); + if hide_runnables { + return; + } + let lsp_tasks = if lsp_task_sources.is_empty() { + Vec::new() + } else { + let Ok(lsp_tasks) = cx + .update(|_, cx| crate::lsp_tasks(project.clone(), &lsp_task_sources, None, cx)) + else { + return; + }; + lsp_tasks.await + }; + let new_rows = { + let Some((multi_buffer_snapshot, multi_buffer_query_range)) = editor + .update(cx, |editor, cx| { + let multi_buffer = editor.buffer().read(cx); + if multi_buffer.is_singleton() { + Some((multi_buffer.snapshot(cx), Anchor::min()..Anchor::max())) + } else { + let display_snapshot = + editor.display_map.update(cx, |map, cx| map.snapshot(cx)); + let 
multi_buffer_query_range = + editor.multi_buffer_visible_range(&display_snapshot, cx); + let multi_buffer_snapshot = display_snapshot.buffer(); + Some(( + multi_buffer_snapshot.clone(), + multi_buffer_query_range.to_anchors(&multi_buffer_snapshot), + )) + } + }) + .ok() + .flatten() + else { + return; + }; + cx.background_spawn({ + async move { + multi_buffer_snapshot + .runnable_ranges(multi_buffer_query_range) + .collect() + } + }) + .await + }; + + let Ok(multi_buffer_snapshot) = + editor.update(cx, |editor, cx| editor.buffer().read(cx).snapshot(cx)) + else { + return; + }; + let Ok(mut lsp_tasks_by_rows) = cx.update(|_, cx| { + lsp_tasks + .into_iter() + .flat_map(|(kind, tasks)| { + tasks.into_iter().filter_map(move |(location, task)| { + Some((kind.clone(), location?, task)) + }) + }) + .fold(HashMap::default(), |mut acc, (kind, location, task)| { + let buffer = location.target.buffer; + let buffer_snapshot = buffer.read(cx).snapshot(); + let offset = multi_buffer_snapshot.excerpts().find_map( + |(excerpt_id, snapshot, _)| { + if snapshot.remote_id() == buffer_snapshot.remote_id() { + multi_buffer_snapshot + .anchor_in_excerpt(excerpt_id, location.target.range.start) + } else { + None + } + }, + ); + if let Some(offset) = offset { + let task_buffer_range = + location.target.range.to_point(&buffer_snapshot); + let context_buffer_range = + task_buffer_range.to_offset(&buffer_snapshot); + let context_range = BufferOffset(context_buffer_range.start) + ..BufferOffset(context_buffer_range.end); + + acc.entry((buffer_snapshot.remote_id(), task_buffer_range.start.row)) + .or_insert_with(|| RunnableTasks { + templates: Vec::new(), + offset, + column: task_buffer_range.start.column, + extra_variables: HashMap::default(), + context_range, + }) + .templates + .push((kind, task.original_task().clone())); + } + + acc + }) + }) else { + return; + }; + + let Ok(prefer_lsp) = multi_buffer.update(cx, |buffer, cx| { + buffer.language_settings(cx).tasks.prefer_lsp + }) else { + 
return; + }; + + let rows = Self::runnable_rows( + project, + multi_buffer_snapshot, + prefer_lsp && !lsp_tasks_by_rows.is_empty(), + new_rows, + cx.clone(), + ) + .await; + editor + .update(cx, |editor, cx| { + for ((buffer_id, row), mut new_tasks) in rows { + let Some(buffer) = editor.buffer().read(cx).buffer(buffer_id) else { + continue; + }; + + if let Some(lsp_tasks) = lsp_tasks_by_rows.remove(&(buffer_id, row)) { + new_tasks.templates.extend(lsp_tasks.templates); + } + editor.insert_runnables( + buffer_id, + buffer.read(cx).version(), + row, + new_tasks, + ); + } + for ((buffer_id, row), new_tasks) in lsp_tasks_by_rows { + let Some(buffer) = editor.buffer().read(cx).buffer(buffer_id) else { + continue; + }; + editor.insert_runnables( + buffer_id, + buffer.read(cx).version(), + row, + new_tasks, + ); + } + }) + .ok(); + }); + } + + pub fn spawn_nearest_task( + &mut self, + action: &SpawnNearestTask, + window: &mut Window, + cx: &mut Context, + ) { + let Some((workspace, _)) = self.workspace.clone() else { + return; + }; + let Some(project) = self.project.clone() else { + return; + }; + + // Try to find a closest, enclosing node using tree-sitter that has a task + let Some((buffer, buffer_row, tasks)) = self + .find_enclosing_node_task(cx) + // Or find the task that's closest in row-distance. 
+ .or_else(|| self.find_closest_task(cx)) + else { + return; + }; + + let reveal_strategy = action.reveal; + let task_context = Self::build_tasks_context(&project, &buffer, buffer_row, &tasks, cx); + cx.spawn_in(window, async move |_, cx| { + let context = task_context.await?; + let (task_source_kind, mut resolved_task) = tasks.resolve(&context).next()?; + + let resolved = &mut resolved_task.resolved; + resolved.reveal = reveal_strategy; + + workspace + .update_in(cx, |workspace, window, cx| { + workspace.schedule_resolved_task( + task_source_kind, + resolved_task, + false, + window, + cx, + ); + }) + .ok() + }) + .detach(); + } + + pub fn clear_runnables(&mut self, for_buffer: Option) { + if let Some(buffer_id) = for_buffer { + self.runnables.runnables.remove(&buffer_id); + } else { + self.runnables.runnables.clear(); + } + self.runnables.runnables_update_task = Task::ready(()); + } + + pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task> { + let Some(project) = self.project.clone() else { + return Task::ready(None); + }; + let (selection, buffer, editor_snapshot) = { + let selection = self.selections.newest_adjusted(&self.display_snapshot(cx)); + let Some((buffer, _)) = self + .buffer() + .read(cx) + .point_to_buffer_offset(selection.start, cx) + else { + return Task::ready(None); + }; + let snapshot = self.snapshot(window, cx); + (selection, buffer, snapshot) + }; + let selection_range = selection.range(); + let start = editor_snapshot + .display_snapshot + .buffer_snapshot() + .anchor_after(selection_range.start) + .text_anchor; + let end = editor_snapshot + .display_snapshot + .buffer_snapshot() + .anchor_after(selection_range.end) + .text_anchor; + let location = Location { + buffer, + range: start..end, + }; + let captured_variables = { + let mut variables = TaskVariables::default(); + let buffer = location.buffer.read(cx); + let buffer_id = buffer.remote_id(); + let snapshot = buffer.snapshot(); + let starting_point = 
location.range.start.to_point(&snapshot); + let starting_offset = starting_point.to_offset(&snapshot); + for (_, tasks) in self + .runnables + .runnables + .get(&buffer_id) + .into_iter() + .flat_map(|(_, tasks)| tasks.range(0..starting_point.row + 1)) + { + if !tasks + .context_range + .contains(&crate::BufferOffset(starting_offset)) + { + continue; + } + for (capture_name, value) in tasks.extra_variables.iter() { + variables.insert( + VariableName::Custom(capture_name.to_owned().into()), + value.clone(), + ); + } + } + variables + }; + + project.update(cx, |project, cx| { + project.task_store().update(cx, |task_store, cx| { + task_store.task_context_for_location(captured_variables, location, cx) + }) + }) + } + + pub fn lsp_task_sources( + &self, + visible_only: bool, + skip_cached: bool, + cx: &mut Context, + ) -> HashMap> { + if !self.lsp_data_enabled() { + return HashMap::default(); + } + let buffers = if visible_only { + self.visible_excerpts(true, cx) + .into_values() + .map(|(buffer, _, _)| buffer) + .collect() + } else { + self.buffer().read(cx).all_buffers() + }; + + let lsp_settings = &ProjectSettings::get_global(cx).lsp; + + buffers + .into_iter() + .filter_map(|buffer| { + let lsp_tasks_source = buffer + .read(cx) + .language()? + .context_provider()? 
+ .lsp_task_source()?; + if lsp_settings + .get(&lsp_tasks_source) + .is_none_or(|s| s.enable_lsp_tasks) + { + let buffer_id = buffer.read(cx).remote_id(); + if skip_cached + && self + .runnables + .has_cached(buffer_id, &buffer.read(cx).version()) + { + None + } else { + Some((lsp_tasks_source, buffer_id)) + } + } else { + None + } + }) + .fold( + HashMap::default(), + |mut acc, (lsp_task_source, buffer_id)| { + acc.entry(lsp_task_source) + .or_insert_with(Vec::new) + .push(buffer_id); + acc + }, + ) + } + + pub fn find_enclosing_node_task( + &mut self, + cx: &mut Context, + ) -> Option<(Entity, u32, Arc)> { + let snapshot = self.buffer.read(cx).snapshot(cx); + let offset = self + .selections + .newest::(&self.display_snapshot(cx)) + .head(); + let mut excerpt = snapshot.excerpt_containing(offset..offset)?; + let offset = excerpt.map_offset_to_buffer(offset); + let buffer_id = excerpt.buffer().remote_id(); + + let layer = excerpt.buffer().syntax_layer_at(offset)?; + let mut cursor = layer.node().walk(); + + while cursor.goto_first_child_for_byte(offset.0).is_some() { + if cursor.node().end_byte() == offset.0 { + cursor.goto_next_sibling(); + } + } + + // Ascend to the smallest ancestor that contains the range and has a task. 
+ loop { + let node = cursor.node(); + let node_range = node.byte_range(); + let symbol_start_row = excerpt.buffer().offset_to_point(node.start_byte()).row; + + // Check if this node contains our offset + if node_range.start <= offset.0 && node_range.end >= offset.0 { + // If it contains offset, check for task + if let Some(tasks) = self + .runnables + .runnables + .get(&buffer_id) + .and_then(|(_, tasks)| tasks.get(&symbol_start_row)) + { + let buffer = self.buffer.read(cx).buffer(buffer_id)?; + return Some((buffer, symbol_start_row, Arc::new(tasks.to_owned()))); + } + } + + if !cursor.goto_parent() { + break; + } + } + None + } + + pub fn render_run_indicator( + &self, + _style: &EditorStyle, + is_active: bool, + row: DisplayRow, + breakpoint: Option<(Anchor, Breakpoint, Option)>, + cx: &mut Context, + ) -> IconButton { + let color = Color::Muted; + let position = breakpoint.as_ref().map(|(anchor, _, _)| *anchor); + + IconButton::new( + ("run_indicator", row.0 as usize), + ui::IconName::PlayOutlined, + ) + .shape(ui::IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(color) + .toggle_state(is_active) + .on_click(cx.listener(move |editor, e: &ClickEvent, window, cx| { + let quick_launch = match e { + ClickEvent::Keyboard(_) => true, + ClickEvent::Mouse(e) => e.down.button == MouseButton::Left, + }; + + window.focus(&editor.focus_handle(cx), cx); + editor.toggle_code_actions( + &ToggleCodeActions { + deployed_from: Some(CodeActionSource::RunMenu(row)), + quick_launch, + }, + window, + cx, + ); + })) + .on_right_click(cx.listener(move |editor, event: &ClickEvent, window, cx| { + editor.set_breakpoint_context_menu(row, position, event.position(), window, cx); + })) + } + + fn insert_runnables( + &mut self, + buffer: BufferId, + version: Global, + row: BufferRow, + new_tasks: RunnableTasks, + ) { + let (old_version, tasks) = self.runnables.runnables.entry(buffer).or_default(); + if !old_version.changed_since(&version) { + *old_version = version; + 
tasks.insert(row, new_tasks); + } + } + + fn runnable_rows( + project: Entity, + snapshot: MultiBufferSnapshot, + prefer_lsp: bool, + runnable_ranges: Vec<(Range, language::RunnableRange)>, + cx: AsyncWindowContext, + ) -> Task> { + cx.spawn(async move |cx| { + let mut runnable_rows = Vec::with_capacity(runnable_ranges.len()); + for (run_range, mut runnable) in runnable_ranges { + let Some(tasks) = cx + .update(|_, cx| Self::templates_with_tags(&project, &mut runnable.runnable, cx)) + .ok() + else { + continue; + }; + let mut tasks = tasks.await; + + if prefer_lsp { + tasks.retain(|(task_kind, _)| { + !matches!(task_kind, TaskSourceKind::Language { .. }) + }); + } + if tasks.is_empty() { + continue; + } + + let point = run_range.start.to_point(&snapshot); + let Some(row) = snapshot + .buffer_line_for_row(MultiBufferRow(point.row)) + .map(|(_, range)| range.start.row) + else { + continue; + }; + + let context_range = + BufferOffset(runnable.full_range.start)..BufferOffset(runnable.full_range.end); + runnable_rows.push(( + (runnable.buffer_id, row), + RunnableTasks { + templates: tasks, + offset: snapshot.anchor_before(run_range.start), + context_range, + column: point.column, + extra_variables: runnable.extra_captures, + }, + )); + } + runnable_rows + }) + } + + fn templates_with_tags( + project: &Entity, + runnable: &mut Runnable, + cx: &mut App, + ) -> Task> { + let (inventory, worktree_id, file) = project.read_with(cx, |project, cx| { + let (worktree_id, file) = project + .buffer_for_id(runnable.buffer, cx) + .and_then(|buffer| buffer.read(cx).file()) + .map(|file| (file.worktree_id(cx), file.clone())) + .unzip(); + + ( + project.task_store().read(cx).task_inventory().cloned(), + worktree_id, + file, + ) + }); + + let tags = mem::take(&mut runnable.tags); + let language = runnable.language.clone(); + cx.spawn(async move |cx| { + let mut templates_with_tags = Vec::new(); + if let Some(inventory) = inventory { + for RunnableTag(tag) in tags { + let new_tasks = 
inventory.update(cx, |inventory, cx| { + inventory.list_tasks(file.clone(), Some(language.clone()), worktree_id, cx) + }); + templates_with_tags.extend(new_tasks.await.into_iter().filter( + move |(_, template)| { + template.tags.iter().any(|source_tag| source_tag == &tag) + }, + )); + } + } + templates_with_tags.sort_by_key(|(kind, _)| kind.to_owned()); + + if let Some((leading_tag_source, _)) = templates_with_tags.first() { + // Strongest source wins; if we have worktree tag binding, prefer that to + // global and language bindings; + // if we have a global binding, prefer that to language binding. + let first_mismatch = templates_with_tags + .iter() + .position(|(tag_source, _)| tag_source != leading_tag_source); + if let Some(index) = first_mismatch { + templates_with_tags.truncate(index); + } + } + + templates_with_tags + }) + } + + fn find_closest_task( + &mut self, + cx: &mut Context, + ) -> Option<(Entity, u32, Arc)> { + let cursor_row = self + .selections + .newest_adjusted(&self.display_snapshot(cx)) + .head() + .row; + + let ((buffer_id, row), tasks) = self + .runnables + .runnables + .iter() + .flat_map(|(buffer_id, (_, tasks))| { + tasks.iter().map(|(row, tasks)| ((*buffer_id, *row), tasks)) + }) + .min_by_key(|((_, row), _)| cursor_row.abs_diff(*row))?; + + let buffer = self.buffer.read(cx).buffer(buffer_id)?; + let tasks = Arc::new(tasks.to_owned()); + Some((buffer, row, tasks)) + } +} + +#[cfg(test)] +mod tests { + use std::{sync::Arc, time::Duration}; + + use gpui::{AppContext as _, Task, TestAppContext}; + use indoc::indoc; + use language::ContextProvider; + use languages::rust_lang; + use multi_buffer::{MultiBuffer, PathKey}; + use project::{FakeFs, Project}; + use serde_json::json; + use task::{TaskTemplate, TaskTemplates}; + use text::Point; + use util::path; + + use crate::{ + Editor, UPDATE_DEBOUNCE, editor_tests::init_test, scroll::scroll_amount::ScrollAmount, + }; + + struct TestRustContextProvider; + + impl ContextProvider for 
TestRustContextProvider { + fn associated_tasks( + &self, + _: Option>, + _: &gpui::App, + ) -> Task> { + Task::ready(Some(TaskTemplates(vec![ + TaskTemplate { + label: "Run main".into(), + command: "cargo".into(), + args: vec!["run".into()], + tags: vec!["rust-main".into()], + ..TaskTemplate::default() + }, + TaskTemplate { + label: "Run test".into(), + command: "cargo".into(), + args: vec!["test".into()], + tags: vec!["rust-test".into()], + ..TaskTemplate::default() + }, + ]))) + } + } + + fn rust_lang_with_task_context() -> Arc { + Arc::new( + Arc::try_unwrap(rust_lang()) + .unwrap() + .with_context_provider(Some(Arc::new(TestRustContextProvider))), + ) + } + + fn collect_runnable_labels( + editor: &Editor, + ) -> Vec<(text::BufferId, language::BufferRow, Vec)> { + let mut result = editor + .runnables + .runnables + .iter() + .flat_map(|(buffer_id, (_, tasks))| { + tasks.iter().map(move |(row, runnable_tasks)| { + let mut labels: Vec = runnable_tasks + .templates + .iter() + .map(|(_, template)| template.label.clone()) + .collect(); + labels.sort(); + (*buffer_id, *row, labels) + }) + }) + .collect::>(); + result.sort_by_key(|(id, row, _)| (*id, *row)); + result + } + + #[gpui::test] + async fn test_multi_buffer_runnables_on_scroll(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let padding_lines = 50; + let mut first_rs = String::from("fn main() {\n println!(\"hello\");\n}\n"); + for _ in 0..padding_lines { + first_rs.push_str("//\n"); + } + let test_one_row = 3 + padding_lines as u32 + 1; + first_rs.push_str("#[test]\nfn test_one() {\n assert!(true);\n}\n"); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/project"), + json!({ + "first.rs": first_rs, + "second.rs": indoc! 
{" + #[test] + fn test_two() { + assert!(true); + } + + #[test] + fn test_three() { + assert!(true); + } + "}, + }), + ) + .await; + + let project = Project::test(fs, [path!("/project").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang_with_task_context()); + + let buffer_1 = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/project/first.rs"), cx) + }) + .await + .unwrap(); + let buffer_2 = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/project/second.rs"), cx) + }) + .await + .unwrap(); + + let buffer_1_id = buffer_1.read_with(cx, |buffer, _| buffer.remote_id()); + let buffer_2_id = buffer_2.read_with(cx, |buffer, _| buffer.remote_id()); + + let multi_buffer = cx.new(|cx| { + let mut multi_buffer = MultiBuffer::new(language::Capability::ReadWrite); + let end = buffer_1.read(cx).max_point(); + multi_buffer.set_excerpts_for_path( + PathKey::sorted(0), + buffer_1.clone(), + [Point::new(0, 0)..end], + 0, + cx, + ); + multi_buffer.set_excerpts_for_path( + PathKey::sorted(1), + buffer_2.clone(), + [Point::new(0, 0)..Point::new(8, 1)], + 0, + cx, + ); + multi_buffer + }); + + let editor = cx.add_window(|window, cx| { + Editor::for_multibuffer(multi_buffer, Some(project.clone()), window, cx) + }); + cx.executor().advance_clock(Duration::from_millis(500)); + cx.executor().run_until_parked(); + + // Clear stale data from startup events, then refresh. + // first.rs is long enough that second.rs is below the ~47-line viewport. 
+ editor + .update(cx, |editor, window, cx| { + editor.clear_runnables(None); + editor.refresh_runnables(window, cx); + }) + .unwrap(); + cx.executor().advance_clock(UPDATE_DEBOUNCE); + cx.executor().run_until_parked(); + assert_eq!( + editor + .update(cx, |editor, _, _| collect_runnable_labels(editor)) + .unwrap(), + vec![(buffer_1_id, 0, vec!["Run main".to_string()])], + "Only fn main from first.rs should be visible before scrolling" + ); + + // Scroll down to bring second.rs excerpts into view. + editor + .update(cx, |editor, window, cx| { + editor.scroll_screen(&ScrollAmount::Page(1.0), window, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(200)); + cx.executor().run_until_parked(); + + let after_scroll = editor + .update(cx, |editor, _, _| collect_runnable_labels(editor)) + .unwrap(); + assert_eq!( + after_scroll, + vec![ + (buffer_1_id, 0, vec!["Run main".to_string()]), + (buffer_1_id, test_one_row, vec!["Run test".to_string()]), + (buffer_2_id, 1, vec!["Run test".to_string()]), + (buffer_2_id, 6, vec!["Run test".to_string()]), + ], + "Tree-sitter should detect both #[test] fns in second.rs after scroll" + ); + + // Edit second.rs to invalidate its cache; first.rs data should persist. 
+ buffer_2.update(cx, |buffer, cx| { + buffer.edit([(0..0, "// added comment\n")], None, cx); + }); + editor + .update(cx, |editor, window, cx| { + editor.scroll_screen(&ScrollAmount::Page(-1.0), window, cx); + }) + .unwrap(); + cx.executor().advance_clock(Duration::from_millis(200)); + cx.executor().run_until_parked(); + + assert_eq!( + editor + .update(cx, |editor, _, _| collect_runnable_labels(editor)) + .unwrap(), + vec![ + (buffer_1_id, 0, vec!["Run main".to_string()]), + (buffer_1_id, test_one_row, vec!["Run test".to_string()]), + ], + "first.rs runnables should survive an edit to second.rs" + ); + } +} diff --git a/crates/editor/src/tasks.rs b/crates/editor/src/tasks.rs deleted file mode 100644 index e39880ddc1f575a7b12f40c5496c75c1f473c6e9..0000000000000000000000000000000000000000 --- a/crates/editor/src/tasks.rs +++ /dev/null @@ -1,110 +0,0 @@ -use crate::Editor; - -use collections::HashMap; -use gpui::{App, Task, Window}; -use lsp::LanguageServerName; -use project::{Location, project_settings::ProjectSettings}; -use settings::Settings as _; -use task::{TaskContext, TaskVariables, VariableName}; -use text::{BufferId, ToOffset, ToPoint}; - -impl Editor { - pub fn task_context(&self, window: &mut Window, cx: &mut App) -> Task> { - let Some(project) = self.project.clone() else { - return Task::ready(None); - }; - let (selection, buffer, editor_snapshot) = { - let selection = self.selections.newest_adjusted(&self.display_snapshot(cx)); - let Some((buffer, _)) = self - .buffer() - .read(cx) - .point_to_buffer_offset(selection.start, cx) - else { - return Task::ready(None); - }; - let snapshot = self.snapshot(window, cx); - (selection, buffer, snapshot) - }; - let selection_range = selection.range(); - let start = editor_snapshot - .display_snapshot - .buffer_snapshot() - .anchor_after(selection_range.start) - .text_anchor; - let end = editor_snapshot - .display_snapshot - .buffer_snapshot() - .anchor_after(selection_range.end) - .text_anchor; - let location = 
Location { - buffer, - range: start..end, - }; - let captured_variables = { - let mut variables = TaskVariables::default(); - let buffer = location.buffer.read(cx); - let buffer_id = buffer.remote_id(); - let snapshot = buffer.snapshot(); - let starting_point = location.range.start.to_point(&snapshot); - let starting_offset = starting_point.to_offset(&snapshot); - for (_, tasks) in self - .tasks - .range((buffer_id, 0)..(buffer_id, starting_point.row + 1)) - { - if !tasks - .context_range - .contains(&crate::BufferOffset(starting_offset)) - { - continue; - } - for (capture_name, value) in tasks.extra_variables.iter() { - variables.insert( - VariableName::Custom(capture_name.to_owned().into()), - value.clone(), - ); - } - } - variables - }; - - project.update(cx, |project, cx| { - project.task_store().update(cx, |task_store, cx| { - task_store.task_context_for_location(captured_variables, location, cx) - }) - }) - } - - pub fn lsp_task_sources(&self, cx: &App) -> HashMap> { - let lsp_settings = &ProjectSettings::get_global(cx).lsp; - - self.buffer() - .read(cx) - .all_buffers() - .into_iter() - .filter_map(|buffer| { - let lsp_tasks_source = buffer - .read(cx) - .language()? - .context_provider()? 
- .lsp_task_source()?; - if lsp_settings - .get(&lsp_tasks_source) - .is_none_or(|s| s.enable_lsp_tasks) - { - let buffer_id = buffer.read(cx).remote_id(); - Some((lsp_tasks_source, buffer_id)) - } else { - None - } - }) - .fold( - HashMap::default(), - |mut acc, (lsp_task_source, buffer_id)| { - acc.entry(lsp_task_source) - .or_insert_with(Vec::new) - .push(buffer_id); - acc - }, - ) - } -} diff --git a/crates/tasks_ui/src/tasks_ui.rs b/crates/tasks_ui/src/tasks_ui.rs index 29e6a9de7fab9b5421fe38fee0fd24fd43b12ccc..fdacef3b193beb8a656916edb61fbff1a200385b 100644 --- a/crates/tasks_ui/src/tasks_ui.rs +++ b/crates/tasks_ui/src/tasks_ui.rs @@ -316,7 +316,9 @@ pub fn task_contexts( let lsp_task_sources = active_editor .as_ref() - .map(|active_editor| active_editor.update(cx, |editor, cx| editor.lsp_task_sources(cx))) + .map(|active_editor| { + active_editor.update(cx, |editor, cx| editor.lsp_task_sources(false, false, cx)) + }) .unwrap_or_default(); let latest_selection = active_editor.as_ref().map(|active_editor| { From dfc3a7c6e87cdc0465e152f216f2d9c561116af1 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Wed, 11 Mar 2026 16:57:22 +0100 Subject: [PATCH 491/548] agent_ui: Move UI logic from `ThreadHistory` to separate component (#51301) This is just a re-factor. We'll make use of this once we make thread history non-global (storing one history per ACP connection). 
Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 15 +- crates/agent_ui/src/agent_ui.rs | 4 +- crates/agent_ui/src/connection_view.rs | 16 +- .../src/connection_view/thread_view.rs | 2 +- crates/agent_ui/src/entry_view_state.rs | 3 +- crates/agent_ui/src/inline_assistant.rs | 2 +- crates/agent_ui/src/message_editor.rs | 48 +- crates/agent_ui/src/thread_history.rs | 959 +----------------- crates/agent_ui/src/thread_history_view.rs | 878 ++++++++++++++++ 9 files changed, 944 insertions(+), 983 deletions(-) create mode 100644 crates/agent_ui/src/thread_history_view.rs diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 630411c2400ee925f980b5d3a410cb3574e81cd6..1537c05096ec81f1b3f354cac236bfdda52c9f6f 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -48,7 +48,7 @@ use crate::{ NewNativeAgentThreadFromSummary, }; use crate::{ - ExpandMessageEditor, ThreadHistory, ThreadHistoryEvent, + ExpandMessageEditor, ThreadHistory, ThreadHistoryView, ThreadHistoryViewEvent, text_thread_history::{TextThreadHistory, TextThreadHistoryEvent}, }; use agent_settings::AgentSettings; @@ -863,6 +863,7 @@ pub struct AgentPanel { fs: Arc, language_registry: Arc, acp_history: Entity, + acp_history_view: Entity, text_thread_history: Entity, thread_store: Entity, text_thread_store: Entity, @@ -1072,14 +1073,15 @@ impl AgentPanel { cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let thread_store = ThreadStore::global(cx); - let acp_history = cx.new(|cx| ThreadHistory::new(None, window, cx)); + let acp_history = cx.new(|cx| ThreadHistory::new(None, cx)); + let acp_history_view = cx.new(|cx| ThreadHistoryView::new(acp_history.clone(), window, cx)); let text_thread_history = cx.new(|cx| TextThreadHistory::new(text_thread_store.clone(), window, cx)); cx.subscribe_in( - &acp_history, + &acp_history_view, window, |this, _, event, window, cx| match event { - 
ThreadHistoryEvent::Open(thread) => { + ThreadHistoryViewEvent::Open(thread) => { this.load_agent_thread( thread.session_id.clone(), thread.cwd.clone(), @@ -1213,6 +1215,7 @@ impl AgentPanel { pending_serialization: None, onboarding, acp_history, + acp_history_view, text_thread_history, thread_store, selected_agent: AgentType::default(), @@ -3046,7 +3049,7 @@ impl Focusable for AgentPanel { ActiveView::Uninitialized => self.focus_handle.clone(), ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx), ActiveView::History { kind } => match kind { - HistoryKind::AgentThreads => self.acp_history.focus_handle(cx), + HistoryKind::AgentThreads => self.acp_history_view.focus_handle(cx), HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx), }, ActiveView::TextThread { @@ -4763,7 +4766,7 @@ impl Render for AgentPanel { .child(server_view.clone()) .child(self.render_drag_target(cx)), ActiveView::History { kind } => match kind { - HistoryKind::AgentThreads => parent.child(self.acp_history.clone()), + HistoryKind::AgentThreads => parent.child(self.acp_history_view.clone()), HistoryKind::TextThreads => parent.child(self.text_thread_history.clone()), }, ActiveView::TextThread { diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index 292db8fc7c0398fdd8c8800b8acc2b3c6df22740..52ce6f0bd7a312966b6602fb43be4074d7f3e620 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -33,6 +33,7 @@ pub mod test_support; mod text_thread_editor; mod text_thread_history; mod thread_history; +mod thread_history_view; mod ui; use std::rc::Rc; @@ -74,7 +75,8 @@ pub(crate) use mode_selector::ModeSelector; pub(crate) use model_selector::ModelSelector; pub(crate) use model_selector_popover::ModelSelectorPopover; pub use text_thread_editor::{AgentPanelDelegate, TextThreadEditor}; -pub(crate) use thread_history::*; +pub(crate) use thread_history::ThreadHistory; +pub(crate) use thread_history_view::*; use 
zed_actions; actions!( diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index b896741cee26e14ed372480f80d6cf8302db180b..b562688a83b75b75a1b95c065b14d0484daef055 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -2901,7 +2901,7 @@ pub(crate) mod tests { let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); // Create history without an initial session list - it will be set after connection - let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -3007,7 +3007,7 @@ pub(crate) mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -3066,7 +3066,7 @@ pub(crate) mod tests { let captured_cwd = connection.captured_cwd.clone(); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -3123,7 +3123,7 @@ pub(crate) mod tests { let captured_cwd = connection.captured_cwd.clone(); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, 
window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -3180,7 +3180,7 @@ pub(crate) mod tests { let captured_cwd = connection.captured_cwd.clone(); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -3498,7 +3498,7 @@ pub(crate) mod tests { // Set up thread view in workspace 1 let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project1.clone(), cx))); @@ -3718,7 +3718,7 @@ pub(crate) mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -4454,7 +4454,7 @@ pub(crate) mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|window, cx| cx.new(|cx| ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store 
= cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index d4d23f5a0a0722afc5c588a355a6a9de1b59d194..44f9e78a2bb47af6cb171194fbd5a34de7383f1b 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -7409,7 +7409,7 @@ impl ThreadView { // TODO: Add keyboard navigation. let is_hovered = self.hovered_recent_history_item == Some(index); - crate::thread_history::HistoryEntryElement::new( + crate::thread_history_view::HistoryEntryElement::new( entry, self.server_view.clone(), ) diff --git a/crates/agent_ui/src/entry_view_state.rs b/crates/agent_ui/src/entry_view_state.rs index aef7f1f335eff7d092f924b9883ab0d64bbf65a8..17769335a1cc7e514bad15862d20d4048a089b7b 100644 --- a/crates/agent_ui/src/entry_view_state.rs +++ b/crates/agent_ui/src/entry_view_state.rs @@ -508,8 +508,7 @@ mod tests { }); let thread_store = None; - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let view_state = cx.new(|_cx| { EntryViewState::new( diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 4e7eecfe07aac84269cb1d325cc5a95943578863..2aee2b4601e126b25a977cf92d314970049026da 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -2155,7 +2155,7 @@ pub mod test { }); let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let history = cx.new(|cx| crate::ThreadHistory::new(None, window, cx)); + let history = cx.new(|cx| crate::ThreadHistory::new(None, cx)); // Add editor to workspace workspace.update(cx, |workspace, cx| { diff --git a/crates/agent_ui/src/message_editor.rs b/crates/agent_ui/src/message_editor.rs index 
6c2628f9d37efd0531d5663ac4b1d27d9ae5ae0f..c9067d4ec261261e66c7718b36ebcb96b2099fed 100644 --- a/crates/agent_ui/src/message_editor.rs +++ b/crates/agent_ui/src/message_editor.rs @@ -1708,8 +1708,7 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = None; - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -1822,8 +1821,7 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let workspace_handle = workspace.downgrade(); let message_editor = workspace.update_in(cx, |_, window, cx| { cx.new(|cx| { @@ -1978,8 +1976,7 @@ mod tests { let mut cx = VisualTestContext::from_window(window.into(), cx); let thread_store = None; - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); let available_commands = Rc::new(RefCell::new(vec![ acp::AvailableCommand::new("quick-math", "2 + 2 = 4 - 1 = 3"), @@ -2213,8 +2210,7 @@ mod tests { } let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); let 
(message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { @@ -2709,8 +2705,7 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -2810,8 +2805,7 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let session_id = acp::SessionId::new("thread-123"); let title = Some("Previous Conversation".into()); @@ -2886,8 +2880,7 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = None; - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -2943,8 +2936,7 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = None; - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -2998,8 +2990,7 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = - cx.update(|window, cx| cx.new(|cx| 
crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -3054,8 +3045,7 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -3119,8 +3109,7 @@ mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let (message_editor, editor) = workspace.update_in(cx, |workspace, window, cx| { let workspace_handle = cx.weak_entity(); @@ -3279,8 +3268,7 @@ mod tests { }); let thread_store = Some(cx.new(|cx| ThreadStore::new(cx))); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); // Create a new `MessageEditor`. 
The `EditorMode::full()` has to be used // to ensure we have a fixed viewport, so we can eventually actually @@ -3400,8 +3388,7 @@ mod tests { let mut cx = VisualTestContext::from_window(window.into(), cx); let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { let workspace_handle = cx.weak_entity(); @@ -3483,8 +3470,7 @@ mod tests { let mut cx = VisualTestContext::from_window(window.into(), cx); let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let (message_editor, editor) = workspace.update_in(&mut cx, |workspace, window, cx| { let workspace_handle = cx.weak_entity(); @@ -3568,8 +3554,7 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let message_editor = cx.update(|window, cx| { cx.new(|cx| { @@ -3721,8 +3706,7 @@ mod tests { let (multi_workspace, cx) = cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); - let history = - cx.update(|window, cx| cx.new(|cx| crate::ThreadHistory::new(None, window, cx))); + let history = cx.update(|_window, cx| cx.new(|cx| crate::ThreadHistory::new(None, cx))); let message_editor = cx.update(|window, cx| { 
cx.new(|cx| { diff --git a/crates/agent_ui/src/thread_history.rs b/crates/agent_ui/src/thread_history.rs index 01536b00e98d13a699457377a6ebf8e9e87a59b4..5e66d4468767e7002b8b5f6c79ffe8aaecf77127 100644 --- a/crates/agent_ui/src/thread_history.rs +++ b/crates/agent_ui/src/thread_history.rs @@ -1,118 +1,21 @@ -use crate::ConnectionView; -use crate::{AgentPanel, RemoveHistory, RemoveSelectedThread}; use acp_thread::{AgentSessionInfo, AgentSessionList, AgentSessionListRequest, SessionListUpdate}; use agent_client_protocol as acp; -use chrono::{Datelike as _, Local, NaiveDate, TimeDelta, Utc}; -use editor::{Editor, EditorEvent}; -use fuzzy::StringMatchCandidate; -use gpui::{ - App, Entity, EventEmitter, FocusHandle, Focusable, ScrollStrategy, Task, - UniformListScrollHandle, WeakEntity, Window, uniform_list, -}; -use std::{fmt::Display, ops::Range, rc::Rc}; -use text::Bias; -use time::{OffsetDateTime, UtcOffset}; -use ui::{ - ElementId, HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Tab, Tooltip, - WithScrollbar, prelude::*, -}; - -const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread"); - -fn thread_title(entry: &AgentSessionInfo) -> &SharedString { - entry - .title - .as_ref() - .filter(|title| !title.is_empty()) - .unwrap_or(DEFAULT_TITLE) -} +use gpui::{App, Task}; +use std::rc::Rc; +use ui::prelude::*; pub struct ThreadHistory { session_list: Option>, sessions: Vec, - scroll_handle: UniformListScrollHandle, - selected_index: usize, - hovered_index: Option, - search_editor: Entity, - search_query: SharedString, - visible_items: Vec, - local_timezone: UtcOffset, - confirming_delete_history: bool, - _visible_items_task: Task<()>, _refresh_task: Task<()>, _watch_task: Option>, - _subscriptions: Vec, -} - -enum ListItemType { - BucketSeparator(TimeBucket), - Entry { - entry: AgentSessionInfo, - format: EntryTimeFormat, - }, - SearchResult { - entry: AgentSessionInfo, - positions: Vec, - }, -} - -impl ListItemType { - fn 
history_entry(&self) -> Option<&AgentSessionInfo> { - match self { - ListItemType::Entry { entry, .. } => Some(entry), - ListItemType::SearchResult { entry, .. } => Some(entry), - _ => None, - } - } } -pub enum ThreadHistoryEvent { - Open(AgentSessionInfo), -} - -impl EventEmitter for ThreadHistory {} - impl ThreadHistory { - pub fn new( - session_list: Option>, - window: &mut Window, - cx: &mut Context, - ) -> Self { - let search_editor = cx.new(|cx| { - let mut editor = Editor::single_line(window, cx); - editor.set_placeholder_text("Search threads...", window, cx); - editor - }); - - let search_editor_subscription = - cx.subscribe(&search_editor, |this, search_editor, event, cx| { - if let EditorEvent::BufferEdited = event { - let query = search_editor.read(cx).text(cx); - if this.search_query != query { - this.search_query = query.into(); - this.update_visible_items(false, cx); - } - } - }); - - let scroll_handle = UniformListScrollHandle::default(); - + pub fn new(session_list: Option>, cx: &mut Context) -> Self { let mut this = Self { session_list: None, sessions: Vec::new(), - scroll_handle, - selected_index: 0, - hovered_index: None, - visible_items: Default::default(), - search_editor, - local_timezone: UtcOffset::from_whole_seconds( - chrono::Local::now().offset().local_minus_utc(), - ) - .unwrap(), - search_query: SharedString::default(), - confirming_delete_history: false, - _subscriptions: vec![search_editor_subscription], - _visible_items_task: Task::ready(()), _refresh_task: Task::ready(()), _watch_task: None, }; @@ -120,43 +23,6 @@ impl ThreadHistory { this } - fn update_visible_items(&mut self, preserve_selected_item: bool, cx: &mut Context) { - let entries = self.sessions.clone(); - let new_list_items = if self.search_query.is_empty() { - self.add_list_separators(entries, cx) - } else { - self.filter_search_results(entries, cx) - }; - let selected_history_entry = if preserve_selected_item { - self.selected_history_entry().cloned() - } else { - None 
- }; - - self._visible_items_task = cx.spawn(async move |this, cx| { - let new_visible_items = new_list_items.await; - this.update(cx, |this, cx| { - let new_selected_index = if let Some(history_entry) = selected_history_entry { - new_visible_items - .iter() - .position(|visible_entry| { - visible_entry - .history_entry() - .is_some_and(|entry| entry.session_id == history_entry.session_id) - }) - .unwrap_or(0) - } else { - 0 - }; - - this.visible_items = new_visible_items; - this.set_selected_index(new_selected_index, Bias::Right, cx); - cx.notify(); - }) - .ok(); - }); - } - pub fn set_session_list( &mut self, session_list: Option>, @@ -170,9 +36,6 @@ impl ThreadHistory { self.session_list = session_list; self.sessions.clear(); - self.visible_items.clear(); - self.selected_index = 0; - self._visible_items_task = Task::ready(()); self._refresh_task = Task::ready(()); let Some(session_list) = self.session_list.as_ref() else { @@ -181,9 +44,8 @@ impl ThreadHistory { return; }; let Some(rx) = session_list.watch(cx) else { - // No watch support - do a one-time refresh self._watch_task = None; - self.refresh_sessions(false, false, cx); + self.refresh_sessions(false, cx); return; }; session_list.notify_refresh(); @@ -191,7 +53,6 @@ impl ThreadHistory { self._watch_task = Some(cx.spawn(async move |this, cx| { while let Ok(first_update) = rx.recv().await { let mut updates = vec![first_update]; - // Collect any additional updates that are already in the channel while let Ok(update) = rx.try_recv() { updates.push(update); } @@ -202,7 +63,7 @@ impl ThreadHistory { .any(|u| matches!(u, SessionListUpdate::Refresh)); if needs_refresh { - this.refresh_sessions(true, false, cx); + this.refresh_sessions(false, cx); } else { for update in updates { if let SessionListUpdate::SessionInfo { session_id, update } = update { @@ -217,7 +78,7 @@ impl ThreadHistory { } pub(crate) fn refresh_full_history(&mut self, cx: &mut Context) { - self.refresh_sessions(true, true, cx); + 
self.refresh_sessions(true, cx); } fn apply_info_update( @@ -258,23 +119,15 @@ impl ThreadHistory { session.meta = Some(meta); } - self.update_visible_items(true, cx); + cx.notify(); } - fn refresh_sessions( - &mut self, - preserve_selected_item: bool, - load_all_pages: bool, - cx: &mut Context, - ) { + fn refresh_sessions(&mut self, load_all_pages: bool, cx: &mut Context) { let Some(session_list) = self.session_list.clone() else { - self.update_visible_items(preserve_selected_item, cx); + cx.notify(); return; }; - // If a new refresh arrives while pagination is in progress, the previous - // `_refresh_task` is cancelled. This is intentional (latest refresh wins), - // but means sessions may be in a partial state until the new refresh completes. self._refresh_task = cx.spawn(async move |this, cx| { let mut cursor: Option = None; let mut is_first_page = true; @@ -305,7 +158,7 @@ impl ThreadHistory { } else { this.sessions.extend(page_sessions); } - this.update_visible_items(preserve_selected_item, cx); + cx.notify(); }) .ok(); @@ -378,693 +231,11 @@ impl ThreadHistory { } } - fn add_list_separators( - &self, - entries: Vec, - cx: &App, - ) -> Task> { - cx.background_spawn(async move { - let mut items = Vec::with_capacity(entries.len() + 1); - let mut bucket = None; - let today = Local::now().naive_local().date(); - - for entry in entries.into_iter() { - let entry_bucket = entry - .updated_at - .map(|timestamp| { - let entry_date = timestamp.with_timezone(&Local).naive_local().date(); - TimeBucket::from_dates(today, entry_date) - }) - .unwrap_or(TimeBucket::All); - - if Some(entry_bucket) != bucket { - bucket = Some(entry_bucket); - items.push(ListItemType::BucketSeparator(entry_bucket)); - } - - items.push(ListItemType::Entry { - entry, - format: entry_bucket.into(), - }); - } - items - }) - } - - fn filter_search_results( - &self, - entries: Vec, - cx: &App, - ) -> Task> { - let query = self.search_query.clone(); - cx.background_spawn({ - let executor = 
cx.background_executor().clone(); - async move { - let mut candidates = Vec::with_capacity(entries.len()); - - for (idx, entry) in entries.iter().enumerate() { - candidates.push(StringMatchCandidate::new(idx, thread_title(entry))); - } - - const MAX_MATCHES: usize = 100; - - let matches = fuzzy::match_strings( - &candidates, - &query, - false, - true, - MAX_MATCHES, - &Default::default(), - executor, - ) - .await; - - matches - .into_iter() - .map(|search_match| ListItemType::SearchResult { - entry: entries[search_match.candidate_id].clone(), - positions: search_match.positions, - }) - .collect() - } - }) - } - - fn search_produced_no_matches(&self) -> bool { - self.visible_items.is_empty() && !self.search_query.is_empty() - } - - fn selected_history_entry(&self) -> Option<&AgentSessionInfo> { - self.get_history_entry(self.selected_index) - } - - fn get_history_entry(&self, visible_items_ix: usize) -> Option<&AgentSessionInfo> { - self.visible_items.get(visible_items_ix)?.history_entry() - } - - fn set_selected_index(&mut self, mut index: usize, bias: Bias, cx: &mut Context) { - if self.visible_items.len() == 0 { - self.selected_index = 0; - return; - } - while matches!( - self.visible_items.get(index), - None | Some(ListItemType::BucketSeparator(..)) - ) { - index = match bias { - Bias::Left => { - if index == 0 { - self.visible_items.len() - 1 - } else { - index - 1 - } - } - Bias::Right => { - if index >= self.visible_items.len() - 1 { - 0 - } else { - index + 1 - } - } - }; - } - self.selected_index = index; - self.scroll_handle - .scroll_to_item(index, ScrollStrategy::Top); - cx.notify() - } - - pub fn select_previous( - &mut self, - _: &menu::SelectPrevious, - _window: &mut Window, - cx: &mut Context, - ) { - if self.selected_index == 0 { - self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx); - } else { - self.set_selected_index(self.selected_index - 1, Bias::Left, cx); - } - } - - pub fn select_next( - &mut self, - _: &menu::SelectNext, - 
_window: &mut Window, - cx: &mut Context, - ) { - if self.selected_index == self.visible_items.len() - 1 { - self.set_selected_index(0, Bias::Right, cx); + pub(crate) fn delete_sessions(&self, cx: &mut App) -> Task> { + if let Some(session_list) = self.session_list.as_ref() { + session_list.delete_sessions(cx) } else { - self.set_selected_index(self.selected_index + 1, Bias::Right, cx); - } - } - - fn select_first( - &mut self, - _: &menu::SelectFirst, - _window: &mut Window, - cx: &mut Context, - ) { - self.set_selected_index(0, Bias::Right, cx); - } - - fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { - self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx); - } - - fn confirm(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context) { - self.confirm_entry(self.selected_index, cx); - } - - fn confirm_entry(&mut self, ix: usize, cx: &mut Context) { - let Some(entry) = self.get_history_entry(ix) else { - return; - }; - cx.emit(ThreadHistoryEvent::Open(entry.clone())); - } - - fn remove_selected_thread( - &mut self, - _: &RemoveSelectedThread, - _window: &mut Window, - cx: &mut Context, - ) { - self.remove_thread(self.selected_index, cx) - } - - fn remove_thread(&mut self, visible_item_ix: usize, cx: &mut Context) { - let Some(entry) = self.get_history_entry(visible_item_ix) else { - return; - }; - let Some(session_list) = self.session_list.as_ref() else { - return; - }; - if !session_list.supports_delete() { - return; - } - let task = session_list.delete_session(&entry.session_id, cx); - task.detach_and_log_err(cx); - } - - fn remove_history(&mut self, _window: &mut Window, cx: &mut Context) { - let Some(session_list) = self.session_list.as_ref() else { - return; - }; - if !session_list.supports_delete() { - return; - } - session_list.delete_sessions(cx).detach_and_log_err(cx); - self.confirming_delete_history = false; - cx.notify(); - } - - fn prompt_delete_history(&mut self, _window: &mut Window, 
cx: &mut Context) { - self.confirming_delete_history = true; - cx.notify(); - } - - fn cancel_delete_history(&mut self, _window: &mut Window, cx: &mut Context) { - self.confirming_delete_history = false; - cx.notify(); - } - - fn render_list_items( - &mut self, - range: Range, - _window: &mut Window, - cx: &mut Context, - ) -> Vec { - self.visible_items - .get(range.clone()) - .into_iter() - .flatten() - .enumerate() - .map(|(ix, item)| self.render_list_item(item, range.start + ix, cx)) - .collect() - } - - fn render_list_item(&self, item: &ListItemType, ix: usize, cx: &Context) -> AnyElement { - match item { - ListItemType::Entry { entry, format } => self - .render_history_entry(entry, *format, ix, Vec::default(), cx) - .into_any(), - ListItemType::SearchResult { entry, positions } => self.render_history_entry( - entry, - EntryTimeFormat::DateAndTime, - ix, - positions.clone(), - cx, - ), - ListItemType::BucketSeparator(bucket) => div() - .px(DynamicSpacing::Base06.rems(cx)) - .pt_2() - .pb_1() - .child( - Label::new(bucket.to_string()) - .size(LabelSize::XSmall) - .color(Color::Muted), - ) - .into_any_element(), - } - } - - fn render_history_entry( - &self, - entry: &AgentSessionInfo, - format: EntryTimeFormat, - ix: usize, - highlight_positions: Vec, - cx: &Context, - ) -> AnyElement { - let selected = ix == self.selected_index; - let hovered = Some(ix) == self.hovered_index; - let entry_time = entry.updated_at; - let display_text = match (format, entry_time) { - (EntryTimeFormat::DateAndTime, Some(entry_time)) => { - let now = Utc::now(); - let duration = now.signed_duration_since(entry_time); - let days = duration.num_days(); - - format!("{}d", days) - } - (EntryTimeFormat::TimeOnly, Some(entry_time)) => { - format.format_timestamp(entry_time.timestamp(), self.local_timezone) - } - (_, None) => "—".to_string(), - }; - - let title = thread_title(entry).clone(); - let full_date = entry_time - .map(|time| { - 
EntryTimeFormat::DateAndTime.format_timestamp(time.timestamp(), self.local_timezone) - }) - .unwrap_or_else(|| "Unknown".to_string()); - - h_flex() - .w_full() - .pb_1() - .child( - ListItem::new(ix) - .rounded() - .toggle_state(selected) - .spacing(ListItemSpacing::Sparse) - .start_slot( - h_flex() - .w_full() - .gap_2() - .justify_between() - .child( - HighlightedLabel::new(thread_title(entry), highlight_positions) - .size(LabelSize::Small) - .truncate(), - ) - .child( - Label::new(display_text) - .color(Color::Muted) - .size(LabelSize::XSmall), - ), - ) - .tooltip(move |_, cx| { - Tooltip::with_meta(title.clone(), None, full_date.clone(), cx) - }) - .on_hover(cx.listener(move |this, is_hovered, _window, cx| { - if *is_hovered { - this.hovered_index = Some(ix); - } else if this.hovered_index == Some(ix) { - this.hovered_index = None; - } - - cx.notify(); - })) - .end_slot::(if hovered && self.supports_delete() { - Some( - IconButton::new("delete", IconName::Trash) - .shape(IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .tooltip(move |_window, cx| { - Tooltip::for_action("Delete", &RemoveSelectedThread, cx) - }) - .on_click(cx.listener(move |this, _, _, cx| { - this.remove_thread(ix, cx); - cx.stop_propagation() - })), - ) - } else { - None - }) - .on_click(cx.listener(move |this, _, _, cx| this.confirm_entry(ix, cx))), - ) - .into_any_element() - } -} - -impl Focusable for ThreadHistory { - fn focus_handle(&self, cx: &App) -> FocusHandle { - self.search_editor.focus_handle(cx) - } -} - -impl Render for ThreadHistory { - fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { - let has_no_history = self.is_empty(); - - v_flex() - .key_context("ThreadHistory") - .size_full() - .bg(cx.theme().colors().panel_background) - .on_action(cx.listener(Self::select_previous)) - .on_action(cx.listener(Self::select_next)) - .on_action(cx.listener(Self::select_first)) - .on_action(cx.listener(Self::select_last)) - 
.on_action(cx.listener(Self::confirm)) - .on_action(cx.listener(Self::remove_selected_thread)) - .on_action(cx.listener(|this, _: &RemoveHistory, window, cx| { - this.remove_history(window, cx); - })) - .child( - h_flex() - .h(Tab::container_height(cx)) - .w_full() - .py_1() - .px_2() - .gap_2() - .justify_between() - .border_b_1() - .border_color(cx.theme().colors().border) - .child( - Icon::new(IconName::MagnifyingGlass) - .color(Color::Muted) - .size(IconSize::Small), - ) - .child(self.search_editor.clone()), - ) - .child({ - let view = v_flex() - .id("list-container") - .relative() - .overflow_hidden() - .flex_grow(); - - if has_no_history { - view.justify_center().items_center().child( - Label::new("You don't have any past threads yet.") - .size(LabelSize::Small) - .color(Color::Muted), - ) - } else if self.search_produced_no_matches() { - view.justify_center() - .items_center() - .child(Label::new("No threads match your search.").size(LabelSize::Small)) - } else { - view.child( - uniform_list( - "thread-history", - self.visible_items.len(), - cx.processor(|this, range: Range, window, cx| { - this.render_list_items(range, window, cx) - }), - ) - .p_1() - .pr_4() - .track_scroll(&self.scroll_handle) - .flex_grow(), - ) - .vertical_scrollbar_for(&self.scroll_handle, window, cx) - } - }) - .when(!has_no_history && self.supports_delete(), |this| { - this.child( - h_flex() - .p_2() - .border_t_1() - .border_color(cx.theme().colors().border_variant) - .when(!self.confirming_delete_history, |this| { - this.child( - Button::new("delete_history", "Delete All History") - .full_width() - .style(ButtonStyle::Outlined) - .label_size(LabelSize::Small) - .on_click(cx.listener(|this, _, window, cx| { - this.prompt_delete_history(window, cx); - })), - ) - }) - .when(self.confirming_delete_history, |this| { - this.w_full() - .gap_2() - .flex_wrap() - .justify_between() - .child( - h_flex() - .flex_wrap() - .gap_1() - .child( - Label::new("Delete all threads?") - 
.size(LabelSize::Small), - ) - .child( - Label::new("You won't be able to recover them later.") - .size(LabelSize::Small) - .color(Color::Muted), - ), - ) - .child( - h_flex() - .gap_1() - .child( - Button::new("cancel_delete", "Cancel") - .label_size(LabelSize::Small) - .on_click(cx.listener(|this, _, window, cx| { - this.cancel_delete_history(window, cx); - })), - ) - .child( - Button::new("confirm_delete", "Delete") - .style(ButtonStyle::Tinted(ui::TintColor::Error)) - .color(Color::Error) - .label_size(LabelSize::Small) - .on_click(cx.listener(|_, _, window, cx| { - window.dispatch_action( - Box::new(RemoveHistory), - cx, - ); - })), - ), - ) - }), - ) - }) - } -} - -#[derive(IntoElement)] -pub struct HistoryEntryElement { - entry: AgentSessionInfo, - thread_view: WeakEntity, - selected: bool, - hovered: bool, - supports_delete: bool, - on_hover: Box, -} - -impl HistoryEntryElement { - pub fn new(entry: AgentSessionInfo, thread_view: WeakEntity) -> Self { - Self { - entry, - thread_view, - selected: false, - hovered: false, - supports_delete: false, - on_hover: Box::new(|_, _, _| {}), - } - } - - pub fn supports_delete(mut self, supports_delete: bool) -> Self { - self.supports_delete = supports_delete; - self - } - - pub fn hovered(mut self, hovered: bool) -> Self { - self.hovered = hovered; - self - } - - pub fn on_hover(mut self, on_hover: impl Fn(&bool, &mut Window, &mut App) + 'static) -> Self { - self.on_hover = Box::new(on_hover); - self - } -} - -impl RenderOnce for HistoryEntryElement { - fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { - let id = ElementId::Name(self.entry.session_id.0.clone().into()); - let title = thread_title(&self.entry).clone(); - let formatted_time = self - .entry - .updated_at - .map(|timestamp| { - let now = chrono::Utc::now(); - let duration = now.signed_duration_since(timestamp); - - if duration.num_days() > 0 { - format!("{}d", duration.num_days()) - } else if duration.num_hours() > 0 { - 
format!("{}h ago", duration.num_hours()) - } else if duration.num_minutes() > 0 { - format!("{}m ago", duration.num_minutes()) - } else { - "Just now".to_string() - } - }) - .unwrap_or_else(|| "Unknown".to_string()); - - ListItem::new(id) - .rounded() - .toggle_state(self.selected) - .spacing(ListItemSpacing::Sparse) - .start_slot( - h_flex() - .w_full() - .gap_2() - .justify_between() - .child(Label::new(title).size(LabelSize::Small).truncate()) - .child( - Label::new(formatted_time) - .color(Color::Muted) - .size(LabelSize::XSmall), - ), - ) - .on_hover(self.on_hover) - .end_slot::(if (self.hovered || self.selected) && self.supports_delete { - Some( - IconButton::new("delete", IconName::Trash) - .shape(IconButtonShape::Square) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) - .tooltip(move |_window, cx| { - Tooltip::for_action("Delete", &RemoveSelectedThread, cx) - }) - .on_click({ - let thread_view = self.thread_view.clone(); - let session_id = self.entry.session_id.clone(); - - move |_event, _window, cx| { - if let Some(thread_view) = thread_view.upgrade() { - thread_view.update(cx, |thread_view, cx| { - thread_view.delete_history_entry(&session_id, cx); - }); - } - } - }), - ) - } else { - None - }) - .on_click({ - let thread_view = self.thread_view.clone(); - let entry = self.entry; - - move |_event, window, cx| { - if let Some(workspace) = thread_view - .upgrade() - .and_then(|view| view.read(cx).workspace().upgrade()) - { - if let Some(panel) = workspace.read(cx).panel::(cx) { - panel.update(cx, |panel, cx| { - panel.load_agent_thread( - entry.session_id.clone(), - entry.cwd.clone(), - entry.title.clone(), - window, - cx, - ); - }); - } - } - } - }) - } -} - -#[derive(Clone, Copy)] -pub enum EntryTimeFormat { - DateAndTime, - TimeOnly, -} - -impl EntryTimeFormat { - fn format_timestamp(&self, timestamp: i64, timezone: UtcOffset) -> String { - let timestamp = OffsetDateTime::from_unix_timestamp(timestamp).unwrap(); - - match self { - 
EntryTimeFormat::DateAndTime => time_format::format_localized_timestamp( - timestamp, - OffsetDateTime::now_utc(), - timezone, - time_format::TimestampFormat::EnhancedAbsolute, - ), - EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)), - } - } -} - -impl From for EntryTimeFormat { - fn from(bucket: TimeBucket) -> Self { - match bucket { - TimeBucket::Today => EntryTimeFormat::TimeOnly, - TimeBucket::Yesterday => EntryTimeFormat::TimeOnly, - TimeBucket::ThisWeek => EntryTimeFormat::DateAndTime, - TimeBucket::PastWeek => EntryTimeFormat::DateAndTime, - TimeBucket::All => EntryTimeFormat::DateAndTime, - } - } -} - -#[derive(PartialEq, Eq, Clone, Copy, Debug)] -enum TimeBucket { - Today, - Yesterday, - ThisWeek, - PastWeek, - All, -} - -impl TimeBucket { - fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self { - if date == reference { - return TimeBucket::Today; - } - - if date == reference - TimeDelta::days(1) { - return TimeBucket::Yesterday; - } - - let week = date.iso_week(); - - if reference.iso_week() == week { - return TimeBucket::ThisWeek; - } - - let last_week = (reference - TimeDelta::days(7)).iso_week(); - - if week == last_week { - return TimeBucket::PastWeek; - } - - TimeBucket::All - } -} - -impl Display for TimeBucket { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - TimeBucket::Today => write!(f, "Today"), - TimeBucket::Yesterday => write!(f, "Yesterday"), - TimeBucket::ThisWeek => write!(f, "This Week"), - TimeBucket::PastWeek => write!(f, "Past Week"), - TimeBucket::All => write!(f, "All"), + Task::ready(Ok(())) } } } @@ -1073,7 +244,6 @@ impl Display for TimeBucket { mod tests { use super::*; use acp_thread::AgentSessionListResponse; - use chrono::NaiveDate; use gpui::TestAppContext; use std::{ any::Any, @@ -1246,9 +416,7 @@ mod tests { vec![test_session("session-2", "Second")], )); - let (history, cx) = cx.add_window_view(|window, cx| { - 
ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); history.update(cx, |history, _cx| { @@ -1270,9 +438,7 @@ mod tests { vec![test_session("session-2", "Second")], )); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); session_list.clear_requested_cursors(); @@ -1307,9 +473,7 @@ mod tests { vec![test_session("session-2", "Second")], )); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); history.update(cx, |history, cx| history.refresh_full_history(cx)); @@ -1340,9 +504,7 @@ mod tests { vec![test_session("session-2", "Second")], )); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); history.update(cx, |history, cx| history.refresh_full_history(cx)); @@ -1371,9 +533,7 @@ mod tests { vec![test_session("session-2", "Second")], )); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); history.update(cx, |history, cx| history.refresh_full_history(cx)); @@ -1416,9 +576,7 @@ mod tests { .with_async_responses(), ); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); 
session_list.clear_requested_cursors(); @@ -1449,19 +607,15 @@ mod tests { }]; let session_list = Rc::new(TestSessionList::new(sessions)); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); - // Send a title update session_list.send_update(SessionListUpdate::SessionInfo { session_id: session_id.clone(), update: acp::SessionInfoUpdate::new().title("New Title"), }); cx.run_until_parked(); - // Check that the title was updated history.update(cx, |history, _cx| { let session = history.sessions.iter().find(|s| s.session_id == session_id); assert_eq!( @@ -1486,19 +640,15 @@ mod tests { }]; let session_list = Rc::new(TestSessionList::new(sessions)); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); - // Send an update that clears the title (null) session_list.send_update(SessionListUpdate::SessionInfo { session_id: session_id.clone(), update: acp::SessionInfoUpdate::new().title(None::), }); cx.run_until_parked(); - // Check that the title was cleared history.update(cx, |history, _cx| { let session = history.sessions.iter().find(|s| s.session_id == session_id); assert_eq!(session.unwrap().title, None); @@ -1520,19 +670,15 @@ mod tests { }]; let session_list = Rc::new(TestSessionList::new(sessions)); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); - // Send an update with no fields set (all undefined) session_list.send_update(SessionListUpdate::SessionInfo { session_id: session_id.clone(), update: acp::SessionInfoUpdate::new(), }); 
cx.run_until_parked(); - // Check that the title is unchanged history.update(cx, |history, _cx| { let session = history.sessions.iter().find(|s| s.session_id == session_id); assert_eq!( @@ -1557,12 +703,9 @@ mod tests { }]; let session_list = Rc::new(TestSessionList::new(sessions)); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); - // Send multiple updates before the executor runs session_list.send_update(SessionListUpdate::SessionInfo { session_id: session_id.clone(), update: acp::SessionInfoUpdate::new().title("First Title"), @@ -1573,7 +716,6 @@ mod tests { }); cx.run_until_parked(); - // Check that the final title is "Second Title" (both applied in order) history.update(cx, |history, _cx| { let session = history.sessions.iter().find(|s| s.session_id == session_id); assert_eq!( @@ -1598,12 +740,9 @@ mod tests { }]; let session_list = Rc::new(TestSessionList::new(sessions)); - let (history, cx) = cx.add_window_view(|window, cx| { - ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); - // Send an info update followed by a refresh session_list.send_update(SessionListUpdate::SessionInfo { session_id: session_id.clone(), update: acp::SessionInfoUpdate::new().title("Local Update"), @@ -1611,7 +750,6 @@ mod tests { session_list.send_update(SessionListUpdate::Refresh); cx.run_until_parked(); - // The refresh should have fetched from server, getting "Server Title" history.update(cx, |history, _cx| { let session = history.sessions.iter().find(|s| s.session_id == session_id); assert_eq!( @@ -1636,19 +774,15 @@ mod tests { }]; let session_list = Rc::new(TestSessionList::new(sessions)); - let (history, cx) = cx.add_window_view(|window, cx| { - 
ThreadHistory::new(Some(session_list.clone()), window, cx) - }); + let history = cx.new(|cx| ThreadHistory::new(Some(session_list.clone()), cx)); cx.run_until_parked(); - // Send an update for an unknown session session_list.send_update(SessionListUpdate::SessionInfo { session_id: acp::SessionId::new("unknown-session"), update: acp::SessionInfoUpdate::new().title("Should Be Ignored"), }); cx.run_until_parked(); - // Check that the known session is unchanged and no crash occurred history.update(cx, |history, _cx| { assert_eq!(history.sessions.len(), 1); assert_eq!( @@ -1657,43 +791,4 @@ mod tests { ); }); } - - #[test] - fn test_time_bucket_from_dates() { - let today = NaiveDate::from_ymd_opt(2023, 1, 15).unwrap(); - - let date = today; - assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::Today); - - let date = NaiveDate::from_ymd_opt(2023, 1, 14).unwrap(); - assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::Yesterday); - - let date = NaiveDate::from_ymd_opt(2023, 1, 13).unwrap(); - assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::ThisWeek); - - let date = NaiveDate::from_ymd_opt(2023, 1, 11).unwrap(); - assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::ThisWeek); - - let date = NaiveDate::from_ymd_opt(2023, 1, 8).unwrap(); - assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::PastWeek); - - let date = NaiveDate::from_ymd_opt(2023, 1, 5).unwrap(); - assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::PastWeek); - - // All: not in this week or last week - let date = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); - assert_eq!(TimeBucket::from_dates(today, date), TimeBucket::All); - - // Test year boundary cases - let new_year = NaiveDate::from_ymd_opt(2023, 1, 1).unwrap(); - - let date = NaiveDate::from_ymd_opt(2022, 12, 31).unwrap(); - assert_eq!( - TimeBucket::from_dates(new_year, date), - TimeBucket::Yesterday - ); - - let date = NaiveDate::from_ymd_opt(2022, 12, 28).unwrap(); - 
assert_eq!(TimeBucket::from_dates(new_year, date), TimeBucket::ThisWeek); - } } diff --git a/crates/agent_ui/src/thread_history_view.rs b/crates/agent_ui/src/thread_history_view.rs new file mode 100644 index 0000000000000000000000000000000000000000..1756fc46ed48e86dc4bf9c78f2c2ef79618ed43b --- /dev/null +++ b/crates/agent_ui/src/thread_history_view.rs @@ -0,0 +1,878 @@ +use crate::thread_history::ThreadHistory; +use crate::{AgentPanel, ConnectionView, RemoveHistory, RemoveSelectedThread}; +use acp_thread::AgentSessionInfo; +use chrono::{Datelike as _, Local, NaiveDate, TimeDelta, Utc}; +use editor::{Editor, EditorEvent}; +use fuzzy::StringMatchCandidate; +use gpui::{ + AnyElement, App, Entity, EventEmitter, FocusHandle, Focusable, ScrollStrategy, Task, + UniformListScrollHandle, WeakEntity, Window, uniform_list, +}; +use std::{fmt::Display, ops::Range}; +use text::Bias; +use time::{OffsetDateTime, UtcOffset}; +use ui::{ + ElementId, HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Tab, Tooltip, + WithScrollbar, prelude::*, +}; + +const DEFAULT_TITLE: &SharedString = &SharedString::new_static("New Thread"); + +pub(crate) fn thread_title(entry: &AgentSessionInfo) -> &SharedString { + entry + .title + .as_ref() + .filter(|title| !title.is_empty()) + .unwrap_or(DEFAULT_TITLE) +} + +pub struct ThreadHistoryView { + history: Entity, + scroll_handle: UniformListScrollHandle, + selected_index: usize, + hovered_index: Option, + search_editor: Entity, + search_query: SharedString, + visible_items: Vec, + local_timezone: UtcOffset, + confirming_delete_history: bool, + _visible_items_task: Task<()>, + _subscriptions: Vec, +} + +enum ListItemType { + BucketSeparator(TimeBucket), + Entry { + entry: AgentSessionInfo, + format: EntryTimeFormat, + }, + SearchResult { + entry: AgentSessionInfo, + positions: Vec, + }, +} + +impl ListItemType { + fn history_entry(&self) -> Option<&AgentSessionInfo> { + match self { + ListItemType::Entry { entry, .. 
} => Some(entry), + ListItemType::SearchResult { entry, .. } => Some(entry), + _ => None, + } + } +} + +pub enum ThreadHistoryViewEvent { + Open(AgentSessionInfo), +} + +impl EventEmitter for ThreadHistoryView {} + +impl ThreadHistoryView { + pub fn new( + history: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let search_editor = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_placeholder_text("Search threads...", window, cx); + editor + }); + + let search_editor_subscription = + cx.subscribe(&search_editor, |this, search_editor, event, cx| { + if let EditorEvent::BufferEdited = event { + let query = search_editor.read(cx).text(cx); + if this.search_query != query { + this.search_query = query.into(); + this.update_visible_items(false, cx); + } + } + }); + + let history_subscription = cx.observe(&history, |this, _, cx| { + this.update_visible_items(true, cx); + }); + + let scroll_handle = UniformListScrollHandle::default(); + + let mut this = Self { + history, + scroll_handle, + selected_index: 0, + hovered_index: None, + visible_items: Default::default(), + search_editor, + local_timezone: UtcOffset::from_whole_seconds( + chrono::Local::now().offset().local_minus_utc(), + ) + .unwrap(), + search_query: SharedString::default(), + confirming_delete_history: false, + _subscriptions: vec![search_editor_subscription, history_subscription], + _visible_items_task: Task::ready(()), + }; + this.update_visible_items(false, cx); + this + } + + fn update_visible_items(&mut self, preserve_selected_item: bool, cx: &mut Context) { + let entries = self.history.read(cx).sessions().to_vec(); + let new_list_items = if self.search_query.is_empty() { + self.add_list_separators(entries, cx) + } else { + self.filter_search_results(entries, cx) + }; + let selected_history_entry = if preserve_selected_item { + self.selected_history_entry().cloned() + } else { + None + }; + + self._visible_items_task = cx.spawn(async move |this, cx| { + 
let new_visible_items = new_list_items.await; + this.update(cx, |this, cx| { + let new_selected_index = if let Some(history_entry) = selected_history_entry { + new_visible_items + .iter() + .position(|visible_entry| { + visible_entry + .history_entry() + .is_some_and(|entry| entry.session_id == history_entry.session_id) + }) + .unwrap_or(0) + } else { + 0 + }; + + this.visible_items = new_visible_items; + this.set_selected_index(new_selected_index, Bias::Right, cx); + cx.notify(); + }) + .ok(); + }); + } + + fn add_list_separators( + &self, + entries: Vec, + cx: &App, + ) -> Task> { + cx.background_spawn(async move { + let mut items = Vec::with_capacity(entries.len() + 1); + let mut bucket = None; + let today = Local::now().naive_local().date(); + + for entry in entries.into_iter() { + let entry_bucket = entry + .updated_at + .map(|timestamp| { + let entry_date = timestamp.with_timezone(&Local).naive_local().date(); + TimeBucket::from_dates(today, entry_date) + }) + .unwrap_or(TimeBucket::All); + + if Some(entry_bucket) != bucket { + bucket = Some(entry_bucket); + items.push(ListItemType::BucketSeparator(entry_bucket)); + } + + items.push(ListItemType::Entry { + entry, + format: entry_bucket.into(), + }); + } + items + }) + } + + fn filter_search_results( + &self, + entries: Vec, + cx: &App, + ) -> Task> { + let query = self.search_query.clone(); + cx.background_spawn({ + let executor = cx.background_executor().clone(); + async move { + let mut candidates = Vec::with_capacity(entries.len()); + + for (idx, entry) in entries.iter().enumerate() { + candidates.push(StringMatchCandidate::new(idx, thread_title(entry))); + } + + const MAX_MATCHES: usize = 100; + + let matches = fuzzy::match_strings( + &candidates, + &query, + false, + true, + MAX_MATCHES, + &Default::default(), + executor, + ) + .await; + + matches + .into_iter() + .map(|search_match| ListItemType::SearchResult { + entry: entries[search_match.candidate_id].clone(), + positions: search_match.positions, + 
}) + .collect() + } + }) + } + + fn search_produced_no_matches(&self) -> bool { + self.visible_items.is_empty() && !self.search_query.is_empty() + } + + fn selected_history_entry(&self) -> Option<&AgentSessionInfo> { + self.get_history_entry(self.selected_index) + } + + fn get_history_entry(&self, visible_items_ix: usize) -> Option<&AgentSessionInfo> { + self.visible_items.get(visible_items_ix)?.history_entry() + } + + fn set_selected_index(&mut self, mut index: usize, bias: Bias, cx: &mut Context) { + if self.visible_items.len() == 0 { + self.selected_index = 0; + return; + } + while matches!( + self.visible_items.get(index), + None | Some(ListItemType::BucketSeparator(..)) + ) { + index = match bias { + Bias::Left => { + if index == 0 { + self.visible_items.len() - 1 + } else { + index - 1 + } + } + Bias::Right => { + if index >= self.visible_items.len() - 1 { + 0 + } else { + index + 1 + } + } + }; + } + self.selected_index = index; + self.scroll_handle + .scroll_to_item(index, ScrollStrategy::Top); + cx.notify() + } + + fn select_previous( + &mut self, + _: &menu::SelectPrevious, + _window: &mut Window, + cx: &mut Context, + ) { + if self.selected_index == 0 { + self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx); + } else { + self.set_selected_index(self.selected_index - 1, Bias::Left, cx); + } + } + + fn select_next(&mut self, _: &menu::SelectNext, _window: &mut Window, cx: &mut Context) { + if self.selected_index == self.visible_items.len() - 1 { + self.set_selected_index(0, Bias::Right, cx); + } else { + self.set_selected_index(self.selected_index + 1, Bias::Right, cx); + } + } + + fn select_first( + &mut self, + _: &menu::SelectFirst, + _window: &mut Window, + cx: &mut Context, + ) { + self.set_selected_index(0, Bias::Right, cx); + } + + fn select_last(&mut self, _: &menu::SelectLast, _window: &mut Window, cx: &mut Context) { + self.set_selected_index(self.visible_items.len() - 1, Bias::Left, cx); + } + + fn confirm(&mut self, _: 
&menu::Confirm, _window: &mut Window, cx: &mut Context) { + self.confirm_entry(self.selected_index, cx); + } + + fn confirm_entry(&mut self, ix: usize, cx: &mut Context) { + let Some(entry) = self.get_history_entry(ix) else { + return; + }; + cx.emit(ThreadHistoryViewEvent::Open(entry.clone())); + } + + fn remove_selected_thread( + &mut self, + _: &RemoveSelectedThread, + _window: &mut Window, + cx: &mut Context, + ) { + self.remove_thread(self.selected_index, cx) + } + + fn remove_thread(&mut self, visible_item_ix: usize, cx: &mut Context) { + let Some(entry) = self.get_history_entry(visible_item_ix) else { + return; + }; + if !self.history.read(cx).supports_delete() { + return; + } + let session_id = entry.session_id.clone(); + self.history.update(cx, |history, cx| { + history + .delete_session(&session_id, cx) + .detach_and_log_err(cx); + }); + } + + fn remove_history(&mut self, _window: &mut Window, cx: &mut Context) { + if !self.history.read(cx).supports_delete() { + return; + } + self.history.update(cx, |history, cx| { + history.delete_sessions(cx).detach_and_log_err(cx); + }); + self.confirming_delete_history = false; + cx.notify(); + } + + fn prompt_delete_history(&mut self, _window: &mut Window, cx: &mut Context) { + self.confirming_delete_history = true; + cx.notify(); + } + + fn cancel_delete_history(&mut self, _window: &mut Window, cx: &mut Context) { + self.confirming_delete_history = false; + cx.notify(); + } + + fn render_list_items( + &mut self, + range: Range, + _window: &mut Window, + cx: &mut Context, + ) -> Vec { + self.visible_items + .get(range.clone()) + .into_iter() + .flatten() + .enumerate() + .map(|(ix, item)| self.render_list_item(item, range.start + ix, cx)) + .collect() + } + + fn render_list_item(&self, item: &ListItemType, ix: usize, cx: &Context) -> AnyElement { + match item { + ListItemType::Entry { entry, format } => self + .render_history_entry(entry, *format, ix, Vec::default(), cx) + .into_any(), + ListItemType::SearchResult { 
entry, positions } => self.render_history_entry( + entry, + EntryTimeFormat::DateAndTime, + ix, + positions.clone(), + cx, + ), + ListItemType::BucketSeparator(bucket) => div() + .px(DynamicSpacing::Base06.rems(cx)) + .pt_2() + .pb_1() + .child( + Label::new(bucket.to_string()) + .size(LabelSize::XSmall) + .color(Color::Muted), + ) + .into_any_element(), + } + } + + fn render_history_entry( + &self, + entry: &AgentSessionInfo, + format: EntryTimeFormat, + ix: usize, + highlight_positions: Vec, + cx: &Context, + ) -> AnyElement { + let selected = ix == self.selected_index; + let hovered = Some(ix) == self.hovered_index; + let entry_time = entry.updated_at; + let display_text = match (format, entry_time) { + (EntryTimeFormat::DateAndTime, Some(entry_time)) => { + let now = Utc::now(); + let duration = now.signed_duration_since(entry_time); + let days = duration.num_days(); + + format!("{}d", days) + } + (EntryTimeFormat::TimeOnly, Some(entry_time)) => { + format.format_timestamp(entry_time.timestamp(), self.local_timezone) + } + (_, None) => "—".to_string(), + }; + + let title = thread_title(entry).clone(); + let full_date = entry_time + .map(|time| { + EntryTimeFormat::DateAndTime.format_timestamp(time.timestamp(), self.local_timezone) + }) + .unwrap_or_else(|| "Unknown".to_string()); + + let supports_delete = self.history.read(cx).supports_delete(); + + h_flex() + .w_full() + .pb_1() + .child( + ListItem::new(ix) + .rounded() + .toggle_state(selected) + .spacing(ListItemSpacing::Sparse) + .start_slot( + h_flex() + .w_full() + .gap_2() + .justify_between() + .child( + HighlightedLabel::new(thread_title(entry), highlight_positions) + .size(LabelSize::Small) + .truncate(), + ) + .child( + Label::new(display_text) + .color(Color::Muted) + .size(LabelSize::XSmall), + ), + ) + .tooltip(move |_, cx| { + Tooltip::with_meta(title.clone(), None, full_date.clone(), cx) + }) + .on_hover(cx.listener(move |this, is_hovered, _window, cx| { + if *is_hovered { + this.hovered_index 
= Some(ix); + } else if this.hovered_index == Some(ix) { + this.hovered_index = None; + } + + cx.notify(); + })) + .end_slot::(if hovered && supports_delete { + Some( + IconButton::new("delete", IconName::Trash) + .shape(IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .tooltip(move |_window, cx| { + Tooltip::for_action("Delete", &RemoveSelectedThread, cx) + }) + .on_click(cx.listener(move |this, _, _, cx| { + this.remove_thread(ix, cx); + cx.stop_propagation() + })), + ) + } else { + None + }) + .on_click(cx.listener(move |this, _, _, cx| this.confirm_entry(ix, cx))), + ) + .into_any_element() + } +} + +impl Focusable for ThreadHistoryView { + fn focus_handle(&self, cx: &App) -> FocusHandle { + self.search_editor.focus_handle(cx) + } +} + +impl Render for ThreadHistoryView { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let has_no_history = self.history.read(cx).is_empty(); + let supports_delete = self.history.read(cx).supports_delete(); + + v_flex() + .key_context("ThreadHistory") + .size_full() + .bg(cx.theme().colors().panel_background) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::confirm)) + .on_action(cx.listener(Self::remove_selected_thread)) + .on_action(cx.listener(|this, _: &RemoveHistory, window, cx| { + this.remove_history(window, cx); + })) + .child( + h_flex() + .h(Tab::container_height(cx)) + .w_full() + .py_1() + .px_2() + .gap_2() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + Icon::new(IconName::MagnifyingGlass) + .color(Color::Muted) + .size(IconSize::Small), + ) + .child(self.search_editor.clone()), + ) + .child({ + let view = v_flex() + .id("list-container") + .relative() + .overflow_hidden() + .flex_grow(); + + if has_no_history { + 
view.justify_center().items_center().child( + Label::new("You don't have any past threads yet.") + .size(LabelSize::Small) + .color(Color::Muted), + ) + } else if self.search_produced_no_matches() { + view.justify_center() + .items_center() + .child(Label::new("No threads match your search.").size(LabelSize::Small)) + } else { + view.child( + uniform_list( + "thread-history", + self.visible_items.len(), + cx.processor(|this, range: Range, window, cx| { + this.render_list_items(range, window, cx) + }), + ) + .p_1() + .pr_4() + .track_scroll(&self.scroll_handle) + .flex_grow(), + ) + .vertical_scrollbar_for(&self.scroll_handle, window, cx) + } + }) + .when(!has_no_history && supports_delete, |this| { + this.child( + h_flex() + .p_2() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + .when(!self.confirming_delete_history, |this| { + this.child( + Button::new("delete_history", "Delete All History") + .full_width() + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + this.prompt_delete_history(window, cx); + })), + ) + }) + .when(self.confirming_delete_history, |this| { + this.w_full() + .gap_2() + .flex_wrap() + .justify_between() + .child( + h_flex() + .flex_wrap() + .gap_1() + .child( + Label::new("Delete all threads?") + .size(LabelSize::Small), + ) + .child( + Label::new("You won't be able to recover them later.") + .size(LabelSize::Small) + .color(Color::Muted), + ), + ) + .child( + h_flex() + .gap_1() + .child( + Button::new("cancel_delete", "Cancel") + .label_size(LabelSize::Small) + .on_click(cx.listener(|this, _, window, cx| { + this.cancel_delete_history(window, cx); + })), + ) + .child( + Button::new("confirm_delete", "Delete") + .style(ButtonStyle::Tinted(ui::TintColor::Error)) + .color(Color::Error) + .label_size(LabelSize::Small) + .on_click(cx.listener(|_, _, window, cx| { + window.dispatch_action( + Box::new(RemoveHistory), + cx, + ); + })), + ), + ) + }), + ) + }) + } +} + 
+#[derive(IntoElement)] +pub struct HistoryEntryElement { + entry: AgentSessionInfo, + thread_view: WeakEntity, + selected: bool, + hovered: bool, + supports_delete: bool, + on_hover: Box, +} + +impl HistoryEntryElement { + pub fn new(entry: AgentSessionInfo, thread_view: WeakEntity) -> Self { + Self { + entry, + thread_view, + selected: false, + hovered: false, + supports_delete: false, + on_hover: Box::new(|_, _, _| {}), + } + } + + pub fn supports_delete(mut self, supports_delete: bool) -> Self { + self.supports_delete = supports_delete; + self + } + + pub fn hovered(mut self, hovered: bool) -> Self { + self.hovered = hovered; + self + } + + pub fn on_hover(mut self, on_hover: impl Fn(&bool, &mut Window, &mut App) + 'static) -> Self { + self.on_hover = Box::new(on_hover); + self + } +} + +impl RenderOnce for HistoryEntryElement { + fn render(self, _window: &mut Window, _cx: &mut App) -> impl IntoElement { + let id = ElementId::Name(self.entry.session_id.0.clone().into()); + let title = thread_title(&self.entry).clone(); + let formatted_time = self + .entry + .updated_at + .map(|timestamp| { + let now = chrono::Utc::now(); + let duration = now.signed_duration_since(timestamp); + + if duration.num_days() > 0 { + format!("{}d", duration.num_days()) + } else if duration.num_hours() > 0 { + format!("{}h ago", duration.num_hours()) + } else if duration.num_minutes() > 0 { + format!("{}m ago", duration.num_minutes()) + } else { + "Just now".to_string() + } + }) + .unwrap_or_else(|| "Unknown".to_string()); + + ListItem::new(id) + .rounded() + .toggle_state(self.selected) + .spacing(ListItemSpacing::Sparse) + .start_slot( + h_flex() + .w_full() + .gap_2() + .justify_between() + .child(Label::new(title).size(LabelSize::Small).truncate()) + .child( + Label::new(formatted_time) + .color(Color::Muted) + .size(LabelSize::XSmall), + ), + ) + .on_hover(self.on_hover) + .end_slot::(if (self.hovered || self.selected) && self.supports_delete { + Some( + IconButton::new("delete", 
IconName::Trash) + .shape(IconButtonShape::Square) + .icon_size(IconSize::XSmall) + .icon_color(Color::Muted) + .tooltip(move |_window, cx| { + Tooltip::for_action("Delete", &RemoveSelectedThread, cx) + }) + .on_click({ + let thread_view = self.thread_view.clone(); + let session_id = self.entry.session_id.clone(); + + move |_event, _window, cx| { + if let Some(thread_view) = thread_view.upgrade() { + thread_view.update(cx, |thread_view, cx| { + thread_view.delete_history_entry(&session_id, cx); + }); + } + } + }), + ) + } else { + None + }) + .on_click({ + let thread_view = self.thread_view.clone(); + let entry = self.entry; + + move |_event, window, cx| { + if let Some(workspace) = thread_view + .upgrade() + .and_then(|view| view.read(cx).workspace().upgrade()) + { + if let Some(panel) = workspace.read(cx).panel::(cx) { + panel.update(cx, |panel, cx| { + panel.load_agent_thread( + entry.session_id.clone(), + entry.cwd.clone(), + entry.title.clone(), + window, + cx, + ); + }); + } + } + } + }) + } +} + +#[derive(Clone, Copy)] +pub enum EntryTimeFormat { + DateAndTime, + TimeOnly, +} + +impl EntryTimeFormat { + fn format_timestamp(&self, timestamp: i64, timezone: UtcOffset) -> String { + let timestamp = OffsetDateTime::from_unix_timestamp(timestamp).unwrap(); + + match self { + EntryTimeFormat::DateAndTime => time_format::format_localized_timestamp( + timestamp, + OffsetDateTime::now_utc(), + timezone, + time_format::TimestampFormat::EnhancedAbsolute, + ), + EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)), + } + } +} + +impl From for EntryTimeFormat { + fn from(bucket: TimeBucket) -> Self { + match bucket { + TimeBucket::Today => EntryTimeFormat::TimeOnly, + TimeBucket::Yesterday => EntryTimeFormat::TimeOnly, + TimeBucket::ThisWeek => EntryTimeFormat::DateAndTime, + TimeBucket::PastWeek => EntryTimeFormat::DateAndTime, + TimeBucket::All => EntryTimeFormat::DateAndTime, + } + } +} + +#[derive(PartialEq, Eq, Clone, Copy, Debug)] 
+enum TimeBucket { + Today, + Yesterday, + ThisWeek, + PastWeek, + All, +} + +impl TimeBucket { + fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self { + if date == reference { + return TimeBucket::Today; + } + + if date == reference - TimeDelta::days(1) { + return TimeBucket::Yesterday; + } + + let week = date.iso_week(); + + if reference.iso_week() == week { + return TimeBucket::ThisWeek; + } + + let last_week = (reference - TimeDelta::days(7)).iso_week(); + + if week == last_week { + return TimeBucket::PastWeek; + } + + TimeBucket::All + } +} + +impl Display for TimeBucket { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + TimeBucket::Today => write!(f, "Today"), + TimeBucket::Yesterday => write!(f, "Yesterday"), + TimeBucket::ThisWeek => write!(f, "This Week"), + TimeBucket::PastWeek => write!(f, "Past Week"), + TimeBucket::All => write!(f, "All"), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use chrono::NaiveDate; + + #[test] + fn test_time_bucket_from_dates() { + let today = NaiveDate::from_ymd_opt(2025, 1, 15).unwrap(); + + assert_eq!(TimeBucket::from_dates(today, today), TimeBucket::Today); + + let yesterday = NaiveDate::from_ymd_opt(2025, 1, 14).unwrap(); + assert_eq!( + TimeBucket::from_dates(today, yesterday), + TimeBucket::Yesterday + ); + + let this_week = NaiveDate::from_ymd_opt(2025, 1, 13).unwrap(); + assert_eq!( + TimeBucket::from_dates(today, this_week), + TimeBucket::ThisWeek + ); + + let past_week = NaiveDate::from_ymd_opt(2025, 1, 7).unwrap(); + assert_eq!( + TimeBucket::from_dates(today, past_week), + TimeBucket::PastWeek + ); + + let old = NaiveDate::from_ymd_opt(2024, 12, 1).unwrap(); + assert_eq!(TimeBucket::from_dates(today, old), TimeBucket::All); + } +} From 34407d62eaeda4b87c109ad5497328a70553cfae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Wed, 11 Mar 2026 17:31:46 +0100 Subject: [PATCH 492/548] Delete unused 
workspace dependencies (#51285) Just a small opportunistic cleanup. Release Notes: - N/A --- Cargo.toml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f650dace84b1b2e6491acf2806077f72000605f5..36e7ca8cc7129af0ed7ab29dc5db338cdf33f7d4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -511,7 +511,6 @@ aws-smithy-runtime-api = { version = "1.9.2", features = ["http-1x", "client"] } aws-smithy-types = { version = "1.3.4", features = ["http-body-1-x"] } backtrace = "0.3" base64 = "0.22" -bincode = "1.2.1" bitflags = "2.6.0" brotli = "8.0.2" bytes = "1.0" @@ -570,7 +569,6 @@ human_bytes = "0.4.1" html5ever = "0.27.0" http = "1.1" http-body = "1.0" -hyper = "0.14" ignore = "0.4.22" image = "0.25.1" imara-diff = "0.1.8" @@ -688,7 +686,6 @@ serde_json_lenient = { version = "0.2", features = [ "raw_value", ] } serde_path_to_error = "0.1.17" -serde_repr = "0.1" serde_urlencoded = "0.7" sha2 = "0.10" shellexpand = "2.1.0" @@ -719,7 +716,6 @@ time = { version = "0.3", features = [ ] } tiny_http = "0.8" tokio = { version = "1" } -tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] } tokio-socks = { version = "0.5.2", default-features = false, features = [ "futures-io", "tokio", From f713373d1026a4e7fa7caf289b33b5009e885e0c Mon Sep 17 00:00:00 2001 From: daydalek <90121301+daydalek@users.noreply.github.com> Date: Thu, 12 Mar 2026 01:06:22 +0800 Subject: [PATCH 493/548] editor: Persist multi-line diagnostic hovers in whitespace areas (#47471) When the mouse cursor moves into the whitespace of a line within a multi-line diagnostic range, the hover popover would previously disappear. This change adds a check to keep the diagnostic hover visible if the mouse row intersects with the active diagnostic's range. Fixes #46841 Release Notes: - Improved hover behavior for multi-line diagnostics to persist when hovering over whitespace. 
https://github.com/user-attachments/assets/0965cb25-6207-4d4a-9165-0d51157fc6e4 --- crates/editor/src/editor.rs | 1 + crates/editor/src/element.rs | 6 +- crates/editor/src/hover_links.rs | 4 +- crates/editor/src/hover_popover.rs | 174 ++++++++++++++++++++++-- crates/editor/src/inlays/inlay_hints.rs | 5 +- 5 files changed, 173 insertions(+), 17 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index ca3dd81ab072d0e20389318515049793a8c827ef..dc2696eb2ca83999934cab6cdee82e364657c70e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -8389,6 +8389,7 @@ impl Editor { self.update_hovered_link( position_map.point_for_position(mouse_position), + Some(mouse_position), &position_map.snapshot, modifiers, window, diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 3b1356525960654ea88c6cfa84115f1e67ac2e5b..5de14d80681ca1ad07534e8764217ef75cc90305 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1462,6 +1462,7 @@ impl EditorElement { if text_hovered { editor.update_hovered_link( point_for_position, + Some(event.position), &position_map.snapshot, modifiers, window, @@ -1473,12 +1474,13 @@ impl EditorElement { .snapshot .buffer_snapshot() .anchor_before(point.to_offset(&position_map.snapshot, Bias::Left)); - hover_at(editor, Some(anchor), window, cx); + hover_at(editor, Some(anchor), Some(event.position), window, cx); Self::update_visible_cursor(editor, point, position_map, window, cx); } else { editor.update_inlay_link_and_hover_points( &position_map.snapshot, point_for_position, + Some(event.position), modifiers.secondary(), modifiers.shift, window, @@ -1487,7 +1489,7 @@ impl EditorElement { } } else { editor.hide_hovered_link(cx); - hover_at(editor, None, window, cx); + hover_at(editor, None, Some(event.position), window, cx); } } diff --git a/crates/editor/src/hover_links.rs b/crates/editor/src/hover_links.rs index 
659a383d6b20129909b4c3f2d7bdbfbe5e580f4e..3a6ff4ec0e4fc53d19bfb51a10b1f7790933b175 100644 --- a/crates/editor/src/hover_links.rs +++ b/crates/editor/src/hover_links.rs @@ -4,7 +4,7 @@ use crate::{ HighlightKey, Navigated, PointForPosition, SelectPhase, editor_settings::GoToDefinitionFallback, scroll::ScrollAmount, }; -use gpui::{App, AsyncWindowContext, Context, Entity, Modifiers, Task, Window, px}; +use gpui::{App, AsyncWindowContext, Context, Entity, Modifiers, Pixels, Task, Window, px}; use language::{Bias, ToOffset}; use linkify::{LinkFinder, LinkKind}; use lsp::LanguageServerId; @@ -113,6 +113,7 @@ impl Editor { pub(crate) fn update_hovered_link( &mut self, point_for_position: PointForPosition, + mouse_position: Option>, snapshot: &EditorSnapshot, modifiers: Modifiers, window: &mut Window, @@ -138,6 +139,7 @@ impl Editor { self.update_inlay_link_and_hover_points( snapshot, point_for_position, + mouse_position, hovered_link_modifier, modifiers.shift, window, diff --git a/crates/editor/src/hover_popover.rs b/crates/editor/src/hover_popover.rs index f5d5e6d5ab69d690bd5f3aee29bf9aa493cf0059..ad54d6105ca3896d21857d548d80f991a1a76ecc 100644 --- a/crates/editor/src/hover_popover.rs +++ b/crates/editor/src/hover_popover.rs @@ -8,10 +8,10 @@ use crate::{ }; use anyhow::Context as _; use gpui::{ - AnyElement, AsyncWindowContext, Context, Entity, Focusable as _, FontWeight, Hsla, - InteractiveElement, IntoElement, MouseButton, ParentElement, Pixels, ScrollHandle, Size, - StatefulInteractiveElement, StyleRefinement, Styled, Subscription, Task, TextStyleRefinement, - Window, div, px, + AnyElement, App, AsyncApp, AsyncWindowContext, Bounds, Context, Entity, Focusable as _, + FontWeight, Hsla, InteractiveElement, IntoElement, MouseButton, ParentElement, Pixels, + ScrollHandle, Size, StatefulInteractiveElement, StyleRefinement, Styled, Subscription, Task, + TextStyleRefinement, WeakEntity, Window, canvas, div, px, }; use itertools::Itertools; use language::{DiagnosticEntry, 
Language, LanguageRegistry}; @@ -20,7 +20,10 @@ use markdown::{Markdown, MarkdownElement, MarkdownStyle}; use multi_buffer::{MultiBufferOffset, ToOffset, ToPoint}; use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart}; use settings::Settings; -use std::{borrow::Cow, cell::RefCell}; +use std::{ + borrow::Cow, + cell::{Cell, RefCell}, +}; use std::{ops::Range, sync::Arc, time::Duration}; use std::{path::PathBuf, rc::Rc}; use theme::ThemeSettings; @@ -45,6 +48,7 @@ pub fn hover(editor: &mut Editor, _: &Hover, window: &mut Window, cx: &mut Conte pub fn hover_at( editor: &mut Editor, anchor: Option, + mouse_position: Option>, window: &mut Window, cx: &mut Context, ) { @@ -52,10 +56,37 @@ pub fn hover_at( if show_keyboard_hover(editor, window, cx) { return; } + if let Some(anchor) = anchor { + editor.hover_state.hiding_delay_task = None; + editor.hover_state.closest_mouse_distance = None; show_hover(editor, anchor, false, window, cx); } else { - hide_hover(editor, cx); + let mut getting_closer = false; + if let Some(mouse_position) = mouse_position { + getting_closer = editor.hover_state.is_mouse_getting_closer(mouse_position); + } + + // If we are moving away and a timer is already running, just let it count down. + if !getting_closer && editor.hover_state.hiding_delay_task.is_some() { + return; + } + + // If we are moving closer, or if no timer is running at all, start/restart the 300ms timer. 
+ let delay = 300u64; + let task = cx.spawn(move |this: WeakEntity, cx: &mut AsyncApp| { + let mut cx = cx.clone(); + async move { + cx.background_executor() + .timer(Duration::from_millis(delay)) + .await; + this.update(&mut cx, |editor, cx| { + hide_hover(editor, cx); + }) + .ok(); + } + }); + editor.hover_state.hiding_delay_task = Some(task); } } } @@ -156,6 +187,9 @@ pub fn hover_at_inlay( let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay.0; + editor.hover_state.hiding_delay_task = None; + editor.hover_state.closest_mouse_distance = None; + let task = cx.spawn_in(window, async move |this, cx| { async move { cx.background_executor() @@ -187,6 +221,7 @@ pub fn hover_at_inlay( scroll_handle, keyboard_grace: Rc::new(RefCell::new(false)), anchor: None, + last_bounds: Rc::new(Cell::new(None)), _subscription: subscription, }; @@ -216,6 +251,8 @@ pub fn hide_hover(editor: &mut Editor, cx: &mut Context) -> bool { editor.hover_state.info_task = None; editor.hover_state.triggered_from = None; + editor.hover_state.hiding_delay_task = None; + editor.hover_state.closest_mouse_distance = None; editor.clear_background_highlights(HighlightKey::HoverState, cx); @@ -254,6 +291,9 @@ fn show_hover( .map(|project| project.read(cx).languages().clone()); let provider = editor.semantics_provider.clone()?; + editor.hover_state.hiding_delay_task = None; + editor.hover_state.closest_mouse_distance = None; + if !ignore_timeout { if same_info_hover(editor, &snapshot, anchor) || same_diagnostic_hover(editor, &snapshot, anchor) @@ -398,6 +438,7 @@ fn show_hover( background_color, keyboard_grace: Rc::new(RefCell::new(ignore_timeout)), anchor, + last_bounds: Rc::new(Cell::new(None)), _subscription: subscription, }) } else { @@ -466,6 +507,7 @@ fn show_hover( scroll_handle, keyboard_grace: Rc::new(RefCell::new(ignore_timeout)), anchor: Some(anchor), + last_bounds: Rc::new(Cell::new(None)), _subscription: subscription, }) } @@ -507,6 +549,7 @@ fn show_hover( 
scroll_handle, keyboard_grace: Rc::new(RefCell::new(ignore_timeout)), anchor: Some(anchor), + last_bounds: Rc::new(Cell::new(None)), _subscription: subscription, }); } @@ -778,6 +821,8 @@ pub struct HoverState { pub diagnostic_popover: Option, pub triggered_from: Option, pub info_task: Option>>, + pub closest_mouse_distance: Option, + pub hiding_delay_task: Option>, } impl HoverState { @@ -785,6 +830,60 @@ impl HoverState { !self.info_popovers.is_empty() || self.diagnostic_popover.is_some() } + pub fn is_mouse_getting_closer(&mut self, mouse_position: gpui::Point) -> bool { + if !self.visible() { + return false; + } + + let mut popover_bounds = Vec::new(); + for info_popover in &self.info_popovers { + if let Some(bounds) = info_popover.last_bounds.get() { + popover_bounds.push(bounds); + } + } + if let Some(diagnostic_popover) = &self.diagnostic_popover { + if let Some(bounds) = diagnostic_popover.last_bounds.get() { + popover_bounds.push(bounds); + } + } + + if popover_bounds.is_empty() { + return false; + } + + let distance = popover_bounds + .iter() + .map(|bounds| self.distance_from_point_to_bounds(mouse_position, *bounds)) + .min_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)) + .unwrap_or(px(f32::MAX)); + + if let Some(closest_distance) = self.closest_mouse_distance { + if distance > closest_distance + px(4.0) { + return false; + } + } + + self.closest_mouse_distance = + Some(distance.min(self.closest_mouse_distance.unwrap_or(distance))); + true + } + + fn distance_from_point_to_bounds( + &self, + point: gpui::Point, + bounds: Bounds, + ) -> Pixels { + let center_x = bounds.origin.x + bounds.size.width / 2.; + let center_y = bounds.origin.y + bounds.size.height / 2.; + let dx: f32 = ((point.x - center_x).abs() - bounds.size.width / 2.) + .max(px(0.0)) + .into(); + let dy: f32 = ((point.y - center_y).abs() - bounds.size.height / 2.) 
+ .max(px(0.0)) + .into(); + px((dx.powi(2) + dy.powi(2)).sqrt()) + } + pub(crate) fn render( &mut self, snapshot: &EditorSnapshot, @@ -887,6 +986,7 @@ pub struct InfoPopover { pub scroll_handle: ScrollHandle, pub keyboard_grace: Rc>, pub anchor: Option, + pub last_bounds: Rc>>>, _subscription: Option, } @@ -898,13 +998,36 @@ impl InfoPopover { cx: &mut Context, ) -> AnyElement { let keyboard_grace = Rc::clone(&self.keyboard_grace); + let this = cx.entity().downgrade(); + let bounds_cell = self.last_bounds.clone(); div() .id("info_popover") .occlude() .elevation_2(cx) + .child( + canvas( + { + move |bounds, _window, _cx| { + bounds_cell.set(Some(bounds)); + } + }, + |_, _, _, _| {}, + ) + .absolute() + .size_full(), + ) // Prevent a mouse down/move on the popover from being propagated to the editor, // because that would dismiss the popover. - .on_mouse_move(|_, _, cx| cx.stop_propagation()) + .on_mouse_move({ + move |_, _, cx: &mut App| { + this.update(cx, |editor, _| { + editor.hover_state.closest_mouse_distance = Some(px(0.0)); + editor.hover_state.hiding_delay_task = None; + }) + .ok(); + cx.stop_propagation() + } + }) .on_mouse_down(MouseButton::Left, move |_, _, cx| { let mut keyboard_grace = keyboard_grace.borrow_mut(); *keyboard_grace = false; @@ -957,6 +1080,7 @@ pub struct DiagnosticPopover { background_color: Hsla, pub keyboard_grace: Rc>, pub anchor: Anchor, + pub last_bounds: Rc>>>, _subscription: Subscription, pub scroll_handle: ScrollHandle, } @@ -970,10 +1094,23 @@ impl DiagnosticPopover { ) -> AnyElement { let keyboard_grace = Rc::clone(&self.keyboard_grace); let this = cx.entity().downgrade(); + let bounds_cell = self.last_bounds.clone(); div() .id("diagnostic") .occlude() .elevation_2_borderless(cx) + .child( + canvas( + { + move |bounds, _window, _cx| { + bounds_cell.set(Some(bounds)); + } + }, + |_, _, _, _| {}, + ) + .absolute() + .size_full(), + ) // Don't draw the background color if the theme // allows transparent surfaces. 
.when(theme_is_transparent(cx), |this| { @@ -981,7 +1118,17 @@ impl DiagnosticPopover { }) // Prevent a mouse move on the popover from being propagated to the editor, // because that would dismiss the popover. - .on_mouse_move(|_, _, cx| cx.stop_propagation()) + .on_mouse_move({ + let this = this.clone(); + move |_, _, cx: &mut App| { + this.update(cx, |editor, _| { + editor.hover_state.closest_mouse_distance = Some(px(0.0)); + editor.hover_state.hiding_delay_task = None; + }) + .ok(); + cx.stop_propagation() + } + }) // Prevent a mouse down on the popover from being propagated to the editor, // because that would move the cursor. .on_mouse_down(MouseButton::Left, move |_, _, cx| { @@ -1151,7 +1298,7 @@ mod tests { let anchor = snapshot .buffer_snapshot() .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); - hover_at(editor, Some(anchor), window, cx) + hover_at(editor, Some(anchor), None, window, cx) }); assert!(!cx.editor(|editor, _window, _cx| editor.hover_state.visible())); @@ -1251,7 +1398,7 @@ mod tests { let anchor = snapshot .buffer_snapshot() .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); - hover_at(editor, Some(anchor), window, cx) + hover_at(editor, Some(anchor), None, window, cx) }); cx.background_executor .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); @@ -1289,7 +1436,7 @@ mod tests { let anchor = snapshot .buffer_snapshot() .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); - hover_at(editor, Some(anchor), window, cx) + hover_at(editor, Some(anchor), None, window, cx) }); assert!(!cx.editor(|editor, _window, _cx| editor.hover_state.visible())); @@ -1343,7 +1490,7 @@ mod tests { let anchor = snapshot .buffer_snapshot() .anchor_before(hover_point.to_offset(&snapshot, Bias::Left)); - hover_at(editor, Some(anchor), window, cx) + hover_at(editor, Some(anchor), None, window, cx) }); cx.background_executor .advance_clock(Duration::from_millis(get_hover_popover_delay(&cx) + 100)); @@ -1752,6 
+1899,7 @@ mod tests { editor.update_inlay_link_and_hover_points( &editor.snapshot(window, cx), new_type_hint_part_hover_position, + None, true, false, window, @@ -1822,6 +1970,7 @@ mod tests { editor.update_inlay_link_and_hover_points( &editor.snapshot(window, cx), new_type_hint_part_hover_position, + None, true, false, window, @@ -1877,6 +2026,7 @@ mod tests { editor.update_inlay_link_and_hover_points( &editor.snapshot(window, cx), struct_hint_part_hover_position, + None, true, false, window, diff --git a/crates/editor/src/inlays/inlay_hints.rs b/crates/editor/src/inlays/inlay_hints.rs index 62eb35f1ac85227c9b52737660da0d1834e1bbfa..414829dc3bbcd89f5f4e4337a955cfff5bb57fca 100644 --- a/crates/editor/src/inlays/inlay_hints.rs +++ b/crates/editor/src/inlays/inlay_hints.rs @@ -7,7 +7,7 @@ use std::{ use clock::Global; use collections::{HashMap, HashSet}; use futures::future::join_all; -use gpui::{App, Entity, Task}; +use gpui::{App, Entity, Pixels, Task}; use itertools::Itertools; use language::{ BufferRow, @@ -569,6 +569,7 @@ impl Editor { &mut self, snapshot: &EditorSnapshot, point_for_position: PointForPosition, + mouse_position: Option>, secondary_held: bool, shift_held: bool, window: &mut Window, @@ -748,7 +749,7 @@ impl Editor { self.hide_hovered_link(cx) } if !hover_updated { - hover_popover::hover_at(self, None, window, cx); + hover_popover::hover_at(self, None, mouse_position, window, cx); } } From ac2f097559ecbaab6f55ca5a519f53b80ac54afb Mon Sep 17 00:00:00 2001 From: MostlyK <135974627+MostlyKIGuess@users.noreply.github.com> Date: Wed, 11 Mar 2026 22:42:48 +0530 Subject: [PATCH 494/548] image_viewer: Add pinch event support (#47351) This change implements pinch / magnification gesture handling. This uses the following wayland [protocol](https://wayland.app/protocols/pointer-gestures-unstable-v1). And the following [API](https://developer.apple.com/documentation/appkit/nsevent/magnification) for mac. 
- Original: https://github.com/gpui-ce/gpui-ce/pull/11 Release Notes: - Zooming works with pinching in and out inside Image Viewer --- crates/gpui/src/elements/div.rs | 92 ++++++++++++++++ crates/gpui/src/interactive.rs | 55 ++++++++++ crates/gpui/src/window.rs | 6 ++ crates/gpui_linux/src/linux/wayland/client.rs | 100 ++++++++++++++++++ crates/gpui_macos/src/events.rs | 25 ++++- crates/gpui_macos/src/window.rs | 4 + crates/image_viewer/src/image_viewer.rs | 38 ++++++- 7 files changed, 314 insertions(+), 6 deletions(-) diff --git a/crates/gpui/src/elements/div.rs b/crates/gpui/src/elements/div.rs index 3599affc3c792f3c93b3b94cfc44740d7c38caf7..bf185b1b6cc20e0f0f484fd0029c78a6211e6a3a 100644 --- a/crates/gpui/src/elements/div.rs +++ b/crates/gpui/src/elements/div.rs @@ -15,6 +15,8 @@ //! and Tailwind-like styling that you can use to build your own custom elements. Div is //! constructed by combining these two systems into an all-in-one element. +#[cfg(any(target_os = "linux", target_os = "macos"))] +use crate::PinchEvent; use crate::{ AbsoluteLength, Action, AnyDrag, AnyElement, AnyTooltip, AnyView, App, Bounds, ClickEvent, DispatchPhase, Display, Element, ElementId, Entity, FocusHandle, Global, GlobalElementId, @@ -353,6 +355,43 @@ impl Interactivity { })); } + /// Bind the given callback to pinch gesture events during the bubble phase. + /// + /// Note: This event is only available on macOS and Wayland (Linux). + /// On Windows, pinch gestures are simulated as scroll wheel events with Ctrl held. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. 
+ #[cfg(any(target_os = "linux", target_os = "macos"))] + pub fn on_pinch(&mut self, listener: impl Fn(&PinchEvent, &mut Window, &mut App) + 'static) { + self.pinch_listeners + .push(Box::new(move |event, phase, hitbox, window, cx| { + if phase == DispatchPhase::Bubble && hitbox.is_hovered(window) { + (listener)(event, window, cx); + } + })); + } + + /// Bind the given callback to pinch gesture events during the capture phase. + /// + /// Note: This event is only available on macOS and Wayland (Linux). + /// On Windows, pinch gestures are simulated as scroll wheel events with Ctrl held. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + #[cfg(any(target_os = "linux", target_os = "macos"))] + pub fn capture_pinch( + &mut self, + listener: impl Fn(&PinchEvent, &mut Window, &mut App) + 'static, + ) { + self.pinch_listeners + .push(Box::new(move |event, phase, _hitbox, window, cx| { + if phase == DispatchPhase::Capture { + (listener)(event, window, cx); + } else { + cx.propagate(); + } + })); + } + /// Bind the given callback to an action dispatch during the capture phase. /// The imperative API equivalent to [`InteractiveElement::capture_action`]. /// @@ -635,6 +674,16 @@ impl Interactivity { pub fn block_mouse_except_scroll(&mut self) { self.hitbox_behavior = HitboxBehavior::BlockMouseExceptScroll; } + + #[cfg(any(target_os = "linux", target_os = "macos"))] + fn has_pinch_listeners(&self) -> bool { + !self.pinch_listeners.is_empty() + } + + #[cfg(not(any(target_os = "linux", target_os = "macos")))] + fn has_pinch_listeners(&self) -> bool { + false + } } /// A trait for elements that want to use the standard GPUI event handlers that don't @@ -905,6 +954,34 @@ pub trait InteractiveElement: Sized { self } + /// Bind the given callback to pinch gesture events during the bubble phase. + /// The fluent API equivalent to [`Interactivity::on_pinch`]. 
+ /// + /// Note: This event is only available on macOS and Wayland (Linux). + /// On Windows, pinch gestures are simulated as scroll wheel events with Ctrl held. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + #[cfg(any(target_os = "linux", target_os = "macos"))] + fn on_pinch(mut self, listener: impl Fn(&PinchEvent, &mut Window, &mut App) + 'static) -> Self { + self.interactivity().on_pinch(listener); + self + } + + /// Bind the given callback to pinch gesture events during the capture phase. + /// The fluent API equivalent to [`Interactivity::capture_pinch`]. + /// + /// Note: This event is only available on macOS and Wayland (Linux). + /// On Windows, pinch gestures are simulated as scroll wheel events with Ctrl held. + /// + /// See [`Context::listener`](crate::Context::listener) to get access to a view's state from this callback. + #[cfg(any(target_os = "linux", target_os = "macos"))] + fn capture_pinch( + mut self, + listener: impl Fn(&PinchEvent, &mut Window, &mut App) + 'static, + ) -> Self { + self.interactivity().capture_pinch(listener); + self + } /// Capture the given action, before normal action dispatch can fire. /// The fluent API equivalent to [`Interactivity::capture_action`]. 
/// @@ -1290,6 +1367,10 @@ pub(crate) type MouseMoveListener = pub(crate) type ScrollWheelListener = Box; +#[cfg(any(target_os = "linux", target_os = "macos"))] +pub(crate) type PinchListener = + Box; + pub(crate) type ClickListener = Rc; pub(crate) type DragListener = @@ -1644,6 +1725,8 @@ pub struct Interactivity { pub(crate) mouse_pressure_listeners: Vec, pub(crate) mouse_move_listeners: Vec, pub(crate) scroll_wheel_listeners: Vec, + #[cfg(any(target_os = "linux", target_os = "macos"))] + pub(crate) pinch_listeners: Vec, pub(crate) key_down_listeners: Vec, pub(crate) key_up_listeners: Vec, pub(crate) modifiers_changed_listeners: Vec, @@ -1847,6 +1930,7 @@ impl Interactivity { || !self.click_listeners.is_empty() || !self.aux_click_listeners.is_empty() || !self.scroll_wheel_listeners.is_empty() + || self.has_pinch_listeners() || self.drag_listener.is_some() || !self.drop_listeners.is_empty() || self.tooltip_builder.is_some() @@ -2213,6 +2297,14 @@ impl Interactivity { }) } + #[cfg(any(target_os = "linux", target_os = "macos"))] + for listener in self.pinch_listeners.drain(..) { + let hitbox = hitbox.clone(); + window.on_mouse_event(move |event: &PinchEvent, phase, window, cx| { + listener(event, phase, &hitbox, window, cx); + }) + } + if self.hover_style.is_some() || self.base_style.mouse_cursor.is_some() || cx.active_drag.is_some() && !self.drag_over_styles.is_empty() diff --git a/crates/gpui/src/interactive.rs b/crates/gpui/src/interactive.rs index 5316a5992bb41d11ef5b6518555a9a20795f894c..3d3ddb49f70b2f96772627d085c93ce31b6dc0b5 100644 --- a/crates/gpui/src/interactive.rs +++ b/crates/gpui/src/interactive.rs @@ -17,6 +17,9 @@ pub trait KeyEvent: InputEvent {} /// A mouse event from the platform. pub trait MouseEvent: InputEvent {} +/// A gesture event from the platform. +pub trait GestureEvent: InputEvent {} + /// The key down event equivalent for the platform. 
#[derive(Clone, Debug, Eq, PartialEq)] pub struct KeyDownEvent { @@ -467,6 +470,51 @@ impl Default for ScrollDelta { } } +/// A pinch gesture event from the platform, generated when the user performs +/// a pinch-to-zoom gesture (typically on a trackpad). +/// +/// Note: This event is only available on macOS and Wayland (Linux). +/// On Windows, pinch gestures are simulated as scroll wheel events with Ctrl held. +#[derive(Clone, Debug, Default)] +#[cfg(any(target_os = "linux", target_os = "macos"))] +pub struct PinchEvent { + /// The position of the pinch center on the window. + pub position: Point, + + /// The zoom delta for this event. + /// Positive values indicate zooming in, negative values indicate zooming out. + /// For example, 0.1 represents a 10% zoom increase. + pub delta: f32, + + /// The modifiers that were held down during the pinch gesture. + pub modifiers: Modifiers, + + /// The phase of the pinch gesture. + pub phase: TouchPhase, +} + +#[cfg(any(target_os = "linux", target_os = "macos"))] +impl Sealed for PinchEvent {} +#[cfg(any(target_os = "linux", target_os = "macos"))] +impl InputEvent for PinchEvent { + fn to_platform_input(self) -> PlatformInput { + PlatformInput::Pinch(self) + } +} +#[cfg(any(target_os = "linux", target_os = "macos"))] +impl GestureEvent for PinchEvent {} +#[cfg(any(target_os = "linux", target_os = "macos"))] +impl MouseEvent for PinchEvent {} + +#[cfg(any(target_os = "linux", target_os = "macos"))] +impl Deref for PinchEvent { + type Target = Modifiers; + + fn deref(&self) -> &Self::Target { + &self.modifiers + } +} + impl ScrollDelta { /// Returns true if this is a precise scroll delta in pixels. pub fn precise(&self) -> bool { @@ -626,6 +674,9 @@ pub enum PlatformInput { MouseExited(MouseExitEvent), /// The scroll wheel was used. ScrollWheel(ScrollWheelEvent), + /// A pinch gesture was performed. 
+ #[cfg(any(target_os = "linux", target_os = "macos"))] + Pinch(PinchEvent), /// Files were dragged and dropped onto the window. FileDrop(FileDropEvent), } @@ -642,6 +693,8 @@ impl PlatformInput { PlatformInput::MousePressure(event) => Some(event), PlatformInput::MouseExited(event) => Some(event), PlatformInput::ScrollWheel(event) => Some(event), + #[cfg(any(target_os = "linux", target_os = "macos"))] + PlatformInput::Pinch(event) => Some(event), PlatformInput::FileDrop(event) => Some(event), } } @@ -657,6 +710,8 @@ impl PlatformInput { PlatformInput::MousePressure(_) => None, PlatformInput::MouseExited(_) => None, PlatformInput::ScrollWheel(_) => None, + #[cfg(any(target_os = "linux", target_os = "macos"))] + PlatformInput::Pinch(_) => None, PlatformInput::FileDrop(_) => None, } } diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index 3fcb911d2c58f8968bc6b0c66f26ed2de365dd53..e3c61a4fd31f35df591f20075221907270e352c8 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -3945,6 +3945,12 @@ impl Window { self.modifiers = scroll_wheel.modifiers; PlatformInput::ScrollWheel(scroll_wheel) } + #[cfg(any(target_os = "linux", target_os = "macos"))] + PlatformInput::Pinch(pinch) => { + self.mouse_position = pinch.position; + self.modifiers = pinch.modifiers; + PlatformInput::Pinch(pinch) + } // Translate dragging and dropping of external files from the operating system // to internal drag and drop events. 
PlatformInput::FileDrop(file_drop) => match file_drop { diff --git a/crates/gpui_linux/src/linux/wayland/client.rs b/crates/gpui_linux/src/linux/wayland/client.rs index 8dd48b878cc1ffcb87201e9b1b252966bfce5efb..ce49fca37232f256e570f584272519d8d6f34dd8 100644 --- a/crates/gpui_linux/src/linux/wayland/client.rs +++ b/crates/gpui_linux/src/linux/wayland/client.rs @@ -36,6 +36,9 @@ use wayland_client::{ wl_shm_pool, wl_surface, }, }; +use wayland_protocols::wp::pointer_gestures::zv1::client::{ + zwp_pointer_gesture_pinch_v1, zwp_pointer_gestures_v1, +}; use wayland_protocols::wp::primary_selection::zv1::client::zwp_primary_selection_offer_v1::{ self, ZwpPrimarySelectionOfferV1, }; @@ -124,6 +127,7 @@ pub struct Globals { pub layer_shell: Option, pub blur_manager: Option, pub text_input_manager: Option, + pub gesture_manager: Option, pub dialog: Option, pub executor: ForegroundExecutor, } @@ -164,6 +168,7 @@ impl Globals { layer_shell: globals.bind(&qh, 1..=5, ()).ok(), blur_manager: globals.bind(&qh, 1..=1, ()).ok(), text_input_manager: globals.bind(&qh, 1..=1, ()).ok(), + gesture_manager: globals.bind(&qh, 1..=3, ()).ok(), dialog: globals.bind(&qh, dialog_v..=dialog_v, ()).ok(), executor, qh, @@ -208,6 +213,8 @@ pub(crate) struct WaylandClientState { pub compositor_gpu: Option, wl_seat: wl_seat::WlSeat, // TODO: Multi seat support wl_pointer: Option, + pinch_gesture: Option, + pinch_scale: f32, wl_keyboard: Option, cursor_shape_device: Option, data_device: Option, @@ -584,6 +591,8 @@ impl WaylandClient { wl_seat: seat, wl_pointer: None, wl_keyboard: None, + pinch_gesture: None, + pinch_scale: 1.0, cursor_shape_device: None, data_device, primary_selection, @@ -1325,6 +1334,12 @@ impl Dispatch for WaylandClientStatePtr { .as_ref() .map(|cursor_shape_manager| cursor_shape_manager.get_pointer(&pointer, qh, ())); + state.pinch_gesture = state.globals.gesture_manager.as_ref().map( + |gesture_manager: &zwp_pointer_gestures_v1::ZwpPointerGesturesV1| { + 
gesture_manager.get_pinch_gesture(&pointer, qh, ()) + }, + ); + if let Some(wl_pointer) = &state.wl_pointer { wl_pointer.release(); } @@ -1998,6 +2013,91 @@ impl Dispatch for WaylandClientStatePtr { } } +impl Dispatch for WaylandClientStatePtr { + fn event( + _this: &mut Self, + _: &zwp_pointer_gestures_v1::ZwpPointerGesturesV1, + _: ::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + // The gesture manager doesn't generate events + } +} + +impl Dispatch + for WaylandClientStatePtr +{ + fn event( + this: &mut Self, + _: &zwp_pointer_gesture_pinch_v1::ZwpPointerGesturePinchV1, + event: ::Event, + _: &(), + _: &Connection, + _: &QueueHandle, + ) { + use gpui::PinchEvent; + + let client = this.get_client(); + let mut state = client.borrow_mut(); + + let Some(window) = state.mouse_focused_window.clone() else { + return; + }; + + match event { + zwp_pointer_gesture_pinch_v1::Event::Begin { + serial: _, + time: _, + surface: _, + fingers: _, + } => { + state.pinch_scale = 1.0; + let input = PlatformInput::Pinch(PinchEvent { + position: state.mouse_location.unwrap_or(point(px(0.0), px(0.0))), + delta: 0.0, + modifiers: state.modifiers, + phase: TouchPhase::Started, + }); + drop(state); + window.handle_input(input); + } + zwp_pointer_gesture_pinch_v1::Event::Update { time: _, scale, .. 
} => { + let new_absolute_scale = scale as f32; + let previous_scale = state.pinch_scale; + let zoom_delta = new_absolute_scale - previous_scale; + state.pinch_scale = new_absolute_scale; + + let input = PlatformInput::Pinch(PinchEvent { + position: state.mouse_location.unwrap_or(point(px(0.0), px(0.0))), + delta: zoom_delta, + modifiers: state.modifiers, + phase: TouchPhase::Moved, + }); + drop(state); + window.handle_input(input); + } + zwp_pointer_gesture_pinch_v1::Event::End { + serial: _, + time: _, + cancelled: _, + } => { + state.pinch_scale = 1.0; + let input = PlatformInput::Pinch(PinchEvent { + position: state.mouse_location.unwrap_or(point(px(0.0), px(0.0))), + delta: 0.0, + modifiers: state.modifiers, + phase: TouchPhase::Ended, + }); + drop(state); + window.handle_input(input); + } + _ => {} + } + } +} + impl Dispatch for WaylandClientStatePtr { fn event( this: &mut Self, diff --git a/crates/gpui_macos/src/events.rs b/crates/gpui_macos/src/events.rs index 5970488a17fbf9395f4ba29f5b98a135f6d55f7f..71bcb105e8aa8c6c43fd5b7864881535454c5ec3 100644 --- a/crates/gpui_macos/src/events.rs +++ b/crates/gpui_macos/src/events.rs @@ -1,8 +1,8 @@ use gpui::{ Capslock, KeyDownEvent, KeyUpEvent, Keystroke, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseExitEvent, MouseMoveEvent, MousePressureEvent, MouseUpEvent, - NavigationDirection, Pixels, PlatformInput, PressureStage, ScrollDelta, ScrollWheelEvent, - TouchPhase, point, px, + NavigationDirection, PinchEvent, Pixels, PlatformInput, PressureStage, ScrollDelta, + ScrollWheelEvent, TouchPhase, point, px, }; use crate::{ @@ -234,6 +234,27 @@ pub(crate) unsafe fn platform_input_from_native( _ => None, } } + NSEventType::NSEventTypeMagnify => window_height.map(|window_height| { + let phase = match native_event.phase() { + NSEventPhase::NSEventPhaseMayBegin | NSEventPhase::NSEventPhaseBegan => { + TouchPhase::Started + } + NSEventPhase::NSEventPhaseEnded => TouchPhase::Ended, + _ => TouchPhase::Moved, 
+ }; + + let magnification = native_event.magnification() as f32; + + PlatformInput::Pinch(PinchEvent { + position: point( + px(native_event.locationInWindow().x as f32), + window_height - px(native_event.locationInWindow().y as f32), + ), + delta: magnification, + modifiers: read_modifiers(native_event), + phase, + }) + }), NSEventType::NSScrollWheel => window_height.map(|window_height| { let phase = match native_event.phase() { NSEventPhase::NSEventPhaseMayBegin | NSEventPhase::NSEventPhaseBegan => { diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index 456ee31ac3b03780e68267621d66435b1ceab4a9..c20c86026a102464343fc7c8cfb03b69b19b7641 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -172,6 +172,10 @@ unsafe fn build_classes() { sel!(mouseExited:), handle_view_event as extern "C" fn(&Object, Sel, id), ); + decl.add_method( + sel!(magnifyWithEvent:), + handle_view_event as extern "C" fn(&Object, Sel, id), + ); decl.add_method( sel!(mouseDragged:), handle_view_event as extern "C" fn(&Object, Sel, id), diff --git a/crates/image_viewer/src/image_viewer.rs b/crates/image_viewer/src/image_viewer.rs index c223494bd709217439bdff9f6a7ba17e1a65494e..291603b2b3f1544f6c60f9c3bdbbb87d3f77c424 100644 --- a/crates/image_viewer/src/image_viewer.rs +++ b/crates/image_viewer/src/image_viewer.rs @@ -6,6 +6,8 @@ use std::path::Path; use anyhow::Context as _; use editor::{EditorSettings, items::entry_git_aware_label_color}; use file_icons::FileIcons; +#[cfg(any(target_os = "linux", target_os = "macos"))] +use gpui::PinchEvent; use gpui::{ AnyElement, App, Bounds, Context, DispatchPhase, Element, ElementId, Entity, EventEmitter, FocusHandle, Focusable, GlobalElementId, InspectorElementId, InteractiveElement, IntoElement, @@ -260,6 +262,12 @@ impl ImageView { cx.notify(); } } + + #[cfg(any(target_os = "linux", target_os = "macos"))] + fn handle_pinch(&mut self, event: &PinchEvent, _window: &mut Window, cx: &mut Context) 
{ + let zoom_factor = 1.0 + event.delta; + self.set_zoom(self.zoom_level * zoom_factor, Some(event.position), cx); + } } struct ImageContentElement { @@ -679,8 +687,9 @@ impl Render for ImageView { .size_full() .relative() .bg(cx.theme().colors().editor_background) - .child( - div() + .child({ + #[cfg(any(target_os = "linux", target_os = "macos"))] + let container = div() .id("image-container") .size_full() .overflow_hidden() @@ -690,13 +699,34 @@ impl Render for ImageView { gpui::CursorStyle::OpenHand }) .on_scroll_wheel(cx.listener(Self::handle_scroll_wheel)) + .on_pinch(cx.listener(Self::handle_pinch)) .on_mouse_down(MouseButton::Left, cx.listener(Self::handle_mouse_down)) .on_mouse_down(MouseButton::Middle, cx.listener(Self::handle_mouse_down)) .on_mouse_up(MouseButton::Left, cx.listener(Self::handle_mouse_up)) .on_mouse_up(MouseButton::Middle, cx.listener(Self::handle_mouse_up)) .on_mouse_move(cx.listener(Self::handle_mouse_move)) - .child(ImageContentElement::new(cx.entity())), - ) + .child(ImageContentElement::new(cx.entity())); + + #[cfg(not(any(target_os = "linux", target_os = "macos")))] + let container = div() + .id("image-container") + .size_full() + .overflow_hidden() + .cursor(if self.is_dragging() { + gpui::CursorStyle::ClosedHand + } else { + gpui::CursorStyle::OpenHand + }) + .on_scroll_wheel(cx.listener(Self::handle_scroll_wheel)) + .on_mouse_down(MouseButton::Left, cx.listener(Self::handle_mouse_down)) + .on_mouse_down(MouseButton::Middle, cx.listener(Self::handle_mouse_down)) + .on_mouse_up(MouseButton::Left, cx.listener(Self::handle_mouse_up)) + .on_mouse_up(MouseButton::Middle, cx.listener(Self::handle_mouse_up)) + .on_mouse_move(cx.listener(Self::handle_mouse_move)) + .child(ImageContentElement::new(cx.entity())); + + container + }) } } From 480e269097a55b8250b20e1550f3139df6c0d3f1 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 11 Mar 2026 14:21:28 -0300 Subject: [PATCH 495/548] agent_ui: 
Add UI refinements to the sidebar (#51307) Release Notes: - N/A --- assets/icons/threads_sidebar_left_closed.svg | 5 + assets/icons/threads_sidebar_left_open.svg | 5 + assets/icons/threads_sidebar_right_closed.svg | 5 + assets/icons/threads_sidebar_right_open.svg | 5 + assets/icons/workspace_nav_closed.svg | 5 - assets/icons/workspace_nav_open.svg | 5 - crates/agent_ui/src/agent_panel.rs | 133 +++++++++++------- crates/agent_ui/src/sidebar.rs | 87 ++++++++---- crates/icons/src/icons.rs | 6 +- crates/ui/src/components/ai/thread_item.rs | 67 +++++---- crates/ui/src/components/list/list_item.rs | 8 ++ 11 files changed, 211 insertions(+), 120 deletions(-) create mode 100644 assets/icons/threads_sidebar_left_closed.svg create mode 100644 assets/icons/threads_sidebar_left_open.svg create mode 100644 assets/icons/threads_sidebar_right_closed.svg create mode 100644 assets/icons/threads_sidebar_right_open.svg delete mode 100644 assets/icons/workspace_nav_closed.svg delete mode 100644 assets/icons/workspace_nav_open.svg diff --git a/assets/icons/threads_sidebar_left_closed.svg b/assets/icons/threads_sidebar_left_closed.svg new file mode 100644 index 0000000000000000000000000000000000000000..feb1015254635ef65f90f2c9ea38efab74d01d60 --- /dev/null +++ b/assets/icons/threads_sidebar_left_closed.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_left_open.svg b/assets/icons/threads_sidebar_left_open.svg new file mode 100644 index 0000000000000000000000000000000000000000..8057b060a84d7d7ffcf29aff1c0c79a8764edc22 --- /dev/null +++ b/assets/icons/threads_sidebar_left_open.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_right_closed.svg b/assets/icons/threads_sidebar_right_closed.svg new file mode 100644 index 0000000000000000000000000000000000000000..10fa4b792fd65b5875dcf2cadab1fc12a123ab47 --- /dev/null +++ b/assets/icons/threads_sidebar_right_closed.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_right_open.svg 
b/assets/icons/threads_sidebar_right_open.svg new file mode 100644 index 0000000000000000000000000000000000000000..23a01eb3f82a5866157220172c868ed9ded46033 --- /dev/null +++ b/assets/icons/threads_sidebar_right_open.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/workspace_nav_closed.svg b/assets/icons/workspace_nav_closed.svg deleted file mode 100644 index ed1fce52d6826a4d10299f331358ff84e4caa973..0000000000000000000000000000000000000000 --- a/assets/icons/workspace_nav_closed.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/assets/icons/workspace_nav_open.svg b/assets/icons/workspace_nav_open.svg deleted file mode 100644 index 464b6aac73c2aeaa9463a805aabc4559377bbfd3..0000000000000000000000000000000000000000 --- a/assets/icons/workspace_nav_open.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 1537c05096ec81f1b3f354cac236bfdda52c9f6f..50346bd752cec4432fb5a87e4df7cb4ce09aca83 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -483,9 +483,17 @@ pub fn init(cx: &mut App) { } if let Some(panel) = workspace.panel::(cx) { if let Some(sidebar) = panel.read(cx).sidebar.clone() { + let was_open = sidebar.read(cx).is_open(); sidebar.update(cx, |sidebar, cx| { sidebar.toggle(window, cx); }); + // When closing the sidebar, restore focus to the active pane + // to avoid "zombie focus" on the now-hidden sidebar elements + if was_open { + let active_pane = workspace.active_pane().clone(); + let pane_focus = active_pane.read(cx).focus_handle(cx); + window.focus(&pane_focus, cx); + } } } }) @@ -3623,7 +3631,7 @@ impl AgentPanel { Some((view, width, is_open)) } - fn render_sidebar_toggle(&self, cx: &Context) -> Option { + fn render_sidebar_toggle(&self, docked_right: bool, cx: &Context) -> Option { if !multi_workspace_enabled(cx) { return None; } @@ -3634,20 +3642,41 @@ impl AgentPanel { } let has_notifications = 
sidebar_read.has_notifications(cx); + let icon = if docked_right { + IconName::ThreadsSidebarRightClosed + } else { + IconName::ThreadsSidebarLeftClosed + }; + Some( - IconButton::new("toggle-workspace-sidebar", IconName::WorkspaceNavClosed) - .icon_size(IconSize::Small) - .when(has_notifications, |button| { - button - .indicator(Indicator::dot().color(Color::Accent)) - .indicator_border_color(Some(cx.theme().colors().tab_bar_background)) - }) - .tooltip(move |_, cx| { - Tooltip::for_action("Open Threads Sidebar", &ToggleWorkspaceSidebar, cx) - }) - .on_click(|_, window, cx| { - window.dispatch_action(ToggleWorkspaceSidebar.boxed_clone(), cx); + h_flex() + .h_full() + .px_1() + .map(|this| { + if docked_right { + this.border_l_1() + } else { + this.border_r_1() + } }) + .border_color(cx.theme().colors().border_variant) + .child( + IconButton::new("toggle-workspace-sidebar", icon) + .icon_size(IconSize::Small) + .when(has_notifications, |button| { + button + .indicator(Indicator::dot().color(Color::Accent)) + .indicator_border_color(Some( + cx.theme().colors().tab_bar_background, + )) + }) + .tooltip(move |_, cx| { + Tooltip::for_action("Open Threads Sidebar", &ToggleWorkspaceSidebar, cx) + }) + .on_click(|_, window, cx| { + window.dispatch_action(ToggleWorkspaceSidebar.boxed_clone(), cx); + }), + ) .into_any_element(), ) } @@ -4104,6 +4133,23 @@ impl AgentPanel { let use_v2_empty_toolbar = has_v2_flag && is_empty_state && !is_in_history_or_config; + let is_sidebar_open = self + .sidebar + .as_ref() + .map(|s| s.read(cx).is_open()) + .unwrap_or(false); + + let base_container = h_flex() + .id("agent-panel-toolbar") + .h(Tab::container_height(cx)) + .max_w_full() + .flex_none() + .justify_between() + .gap_2() + .bg(cx.theme().colors().tab_bar_background) + .border_b_1() + .border_color(cx.theme().colors().border); + if use_v2_empty_toolbar { let (chevron_icon, icon_color, label_color) = if self.new_thread_menu_handle.is_deployed() { @@ -4162,34 +4208,26 @@ impl 
AgentPanel { y: px(1.0), }); - h_flex() - .id("agent-panel-toolbar") - .h(Tab::container_height(cx)) - .max_w_full() - .flex_none() - .justify_between() - .gap_2() - .bg(cx.theme().colors().tab_bar_background) - .border_b_1() - .border_color(cx.theme().colors().border) + base_container .child( h_flex() .size_full() - .gap(DynamicSpacing::Base04.rems(cx)) - .pl(DynamicSpacing::Base04.rems(cx)) + .gap_1() + .when(is_sidebar_open || docked_right, |this| this.pl_1()) .when(!docked_right, |this| { - this.children(self.render_sidebar_toggle(cx)) + this.children(self.render_sidebar_toggle(false, cx)) }) .child(agent_selector_menu) .child(self.render_start_thread_in_selector(cx)), ) .child( h_flex() + .h_full() .flex_none() - .gap(DynamicSpacing::Base02.rems(cx)) - .pl(DynamicSpacing::Base04.rems(cx)) - .pr(DynamicSpacing::Base06.rems(cx)) - .when(show_history_menu, |this| { + .gap_1() + .pl_1() + .pr_1() + .when(show_history_menu && !has_v2_flag, |this| { this.child(self.render_recent_entries_menu( IconName::MenuAltTemp, Corner::TopRight, @@ -4198,7 +4236,7 @@ impl AgentPanel { }) .child(self.render_panel_options_menu(window, cx)) .when(docked_right, |this| { - this.children(self.render_sidebar_toggle(cx)) + this.children(self.render_sidebar_toggle(true, cx)) }), ) .into_any_element() @@ -4222,23 +4260,19 @@ impl AgentPanel { .with_handle(self.new_thread_menu_handle.clone()) .menu(move |window, cx| new_thread_menu_builder(window, cx)); - h_flex() - .id("agent-panel-toolbar") - .h(Tab::container_height(cx)) - .max_w_full() - .flex_none() - .justify_between() - .gap_2() - .bg(cx.theme().colors().tab_bar_background) - .border_b_1() - .border_color(cx.theme().colors().border) + base_container .child( h_flex() .size_full() - .gap(DynamicSpacing::Base04.rems(cx)) - .pl(DynamicSpacing::Base04.rems(cx)) + .map(|this| { + if is_sidebar_open || docked_right { + this.pl_1().gap_1() + } else { + this.pl_0().gap_0p5() + } + }) .when(!docked_right, |this| { - 
this.children(self.render_sidebar_toggle(cx)) + this.children(self.render_sidebar_toggle(false, cx)) }) .child(match &self.active_view { ActiveView::History { .. } | ActiveView::Configuration => { @@ -4250,12 +4284,13 @@ impl AgentPanel { ) .child( h_flex() + .h_full() .flex_none() - .gap(DynamicSpacing::Base02.rems(cx)) - .pl(DynamicSpacing::Base04.rems(cx)) - .pr(DynamicSpacing::Base06.rems(cx)) + .gap_1() + .pl_1() + .pr_1() .child(new_thread_menu) - .when(show_history_menu, |this| { + .when(show_history_menu && !has_v2_flag, |this| { this.child(self.render_recent_entries_menu( IconName::MenuAltTemp, Corner::TopRight, @@ -4264,7 +4299,7 @@ impl AgentPanel { }) .child(self.render_panel_options_menu(window, cx)) .when(docked_right, |this| { - this.children(self.render_sidebar_toggle(cx)) + this.children(self.render_sidebar_toggle(true, cx)) }), ) .into_any_element() diff --git a/crates/agent_ui/src/sidebar.rs b/crates/agent_ui/src/sidebar.rs index ae3a4f0ccb9df6073ae24a9c482b6c56de0ea968..e36cb750b4a74dc8d749501eed07941cd30c7b6f 100644 --- a/crates/agent_ui/src/sidebar.rs +++ b/crates/agent_ui/src/sidebar.rs @@ -713,6 +713,8 @@ impl Sidebar { let is_group_header_after_first = ix > 0 && matches!(entry, ListEntry::ProjectHeader { .. 
}); + let docked_right = AgentSettings::get_global(cx).dock == settings::DockPosition::Right; + let rendered = match entry { ListEntry::ProjectHeader { path_list, @@ -728,9 +730,12 @@ impl Sidebar { highlight_positions, *has_threads, is_selected, + docked_right, cx, ), - ListEntry::Thread(thread) => self.render_thread(ix, thread, is_selected, cx), + ListEntry::Thread(thread) => { + self.render_thread(ix, thread, is_selected, docked_right, cx) + } ListEntry::ViewMore { path_list, remaining_count, @@ -770,6 +775,7 @@ impl Sidebar { highlight_positions: &[usize], has_threads: bool, is_selected: bool, + docked_right: bool, cx: &mut Context, ) -> AnyElement { let id = SharedString::from(format!("project-header-{}", ix)); @@ -815,12 +821,13 @@ impl Sidebar { .group_name(group_name) .toggle_state(is_active_workspace) .focused(is_selected) + .docked_right(docked_right) .child( h_flex() .relative() .min_w_0() .w_full() - .p_1() + .py_1() .gap_1p5() .child( Icon::new(disclosure_icon) @@ -969,7 +976,7 @@ impl Sidebar { } fn has_filter_query(&self, cx: &App) -> bool { - self.filter_editor.read(cx).buffer().read(cx).is_empty() + !self.filter_editor.read(cx).text(cx).is_empty() } fn editor_move_down(&mut self, _: &MoveDown, window: &mut Window, cx: &mut Context) { @@ -1156,6 +1163,7 @@ impl Sidebar { ix: usize, thread: &ThreadEntry, is_selected: bool, + docked_right: bool, cx: &mut Context, ) -> AnyElement { let has_notification = self @@ -1171,6 +1179,7 @@ impl Sidebar { let workspace = thread.workspace.clone(); let id = SharedString::from(format!("thread-entry-{}", ix)); + ThreadItem::new(id, title) .icon(thread.icon) .when_some(thread.icon_from_external_svg.clone(), |this, svg| { @@ -1187,6 +1196,7 @@ impl Sidebar { }) .selected(self.focused_thread.as_ref() == Some(&session_info.session_id)) .focused(is_selected) + .docked_right(docked_right) .on_click(cx.listener(move |this, _, window, cx| { this.selection = None; this.activate_thread(session_info.clone(), &workspace, 
window, cx); @@ -1301,6 +1311,7 @@ impl Sidebar { div() .w_full() .p_2() + .pt_1p5() .child( Button::new( SharedString::from(format!("new-thread-btn-{}", ix)), @@ -1320,6 +1331,40 @@ impl Sidebar { ) .into_any_element() } + + fn render_sidebar_toggle_button( + &self, + docked_right: bool, + cx: &mut Context, + ) -> impl IntoElement { + let icon = if docked_right { + IconName::ThreadsSidebarRightOpen + } else { + IconName::ThreadsSidebarLeftOpen + }; + + h_flex() + .h_full() + .px_1() + .map(|this| { + if docked_right { + this.pr_1p5().border_l_1() + } else { + this.border_r_1() + } + }) + .border_color(cx.theme().colors().border_variant) + .child( + IconButton::new("sidebar-close-toggle", icon) + .icon_size(IconSize::Small) + .tooltip(move |_, cx| { + Tooltip::for_action("Close Threads Sidebar", &ToggleWorkspaceSidebar, cx) + }) + .on_click(|_, window, cx| { + window.dispatch_action(ToggleWorkspaceSidebar.boxed_clone(), cx); + }), + ) + } } impl Sidebar { @@ -1416,37 +1461,19 @@ impl Render for Sidebar { .child({ let docked_right = AgentSettings::get_global(cx).dock == settings::DockPosition::Right; - let render_close_button = || { - IconButton::new("sidebar-close-toggle", IconName::WorkspaceNavOpen) - .icon_size(IconSize::Small) - .tooltip(move |_, cx| { - Tooltip::for_action( - "Close Threads Sidebar", - &ToggleWorkspaceSidebar, - cx, - ) - }) - .on_click(|_, window, cx| { - window.dispatch_action(ToggleWorkspaceSidebar.boxed_clone(), cx); - }) - }; h_flex() - .flex_none() - .px_2p5() .h(Tab::container_height(cx)) - .gap_2() + .flex_none() + .gap_1p5() .border_b_1() .border_color(cx.theme().colors().border) - .when(!docked_right, |this| this.child(render_close_button())) - .child( - Icon::new(IconName::MagnifyingGlass) - .size(IconSize::Small) - .color(Color::Muted), - ) + .when(!docked_right, |this| { + this.child(self.render_sidebar_toggle_button(false, cx)) + }) .child(self.render_filter_input(cx)) .when(has_query, |this| { - this.pr_1().child( + 
this.when(!docked_right, |this| this.pr_1p5()).child( IconButton::new("clear_filter", IconName::Close) .shape(IconButtonShape::Square) .tooltip(Tooltip::text("Clear Search")) @@ -1456,7 +1483,11 @@ impl Render for Sidebar { })), ) }) - .when(docked_right, |this| this.child(render_close_button())) + .when(docked_right, |this| { + this.pl_2() + .pr_0p5() + .child(self.render_sidebar_toggle_button(true, cx)) + }) }) .child( v_flex() diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index 7c06eaef92ece60e8b4a9ad78976b68aee854226..94fed7f03f46e64ef0ac929e60cf6ae848145e72 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -244,6 +244,10 @@ pub enum IconName { ThinkingModeOff, Thread, ThreadFromSummary, + ThreadsSidebarLeftClosed, + ThreadsSidebarLeftOpen, + ThreadsSidebarRightClosed, + ThreadsSidebarRightOpen, ThumbsDown, ThumbsUp, TodoComplete, @@ -272,8 +276,6 @@ pub enum IconName { UserRoundPen, Warning, WholeWord, - WorkspaceNavClosed, - WorkspaceNavOpen, XCircle, XCircleFilled, ZedAgent, diff --git a/crates/ui/src/components/ai/thread_item.rs b/crates/ui/src/components/ai/thread_item.rs index edc685159f5c9edc5fa872e9d453d0b81fa9cb16..1ab516b0cbbcb20c98bf61525779d2bd760ef260 100644 --- a/crates/ui/src/components/ai/thread_item.rs +++ b/crates/ui/src/components/ai/thread_item.rs @@ -1,6 +1,6 @@ use crate::{ - DecoratedIcon, DiffStat, GradientFade, HighlightedLabel, IconDecoration, IconDecorationKind, - SpinnerLabel, prelude::*, + CommonAnimationExt, DecoratedIcon, DiffStat, GradientFade, HighlightedLabel, IconDecoration, + IconDecorationKind, prelude::*, }; use gpui::{AnyView, ClickEvent, Hsla, SharedString}; @@ -26,6 +26,7 @@ pub struct ThreadItem { selected: bool, focused: bool, hovered: bool, + docked_right: bool, added: Option, removed: Option, worktree: Option, @@ -50,6 +51,7 @@ impl ThreadItem { selected: false, focused: false, hovered: false, + docked_right: false, added: None, removed: None, worktree: None, @@ -107,6 
+109,11 @@ impl ThreadItem { self } + pub fn docked_right(mut self, docked_right: bool) -> Self { + self.docked_right = docked_right; + self + } + pub fn worktree(mut self, worktree: impl Into) -> Self { self.worktree = Some(worktree.into()); self @@ -154,12 +161,12 @@ impl ThreadItem { impl RenderOnce for ThreadItem { fn render(self, _: &mut Window, cx: &mut App) -> impl IntoElement { let color = cx.theme().colors(); - // let dot_separator = || { - // Label::new("•") - // .size(LabelSize::Small) - // .color(Color::Muted) - // .alpha(0.5) - // }; + let dot_separator = || { + Label::new("•") + .size(LabelSize::Small) + .color(Color::Muted) + .alpha(0.5) + }; let icon_container = || h_flex().size_4().flex_none().justify_center(); let agent_icon = if let Some(custom_svg) = self.custom_icon_from_external_svg { @@ -194,17 +201,23 @@ impl RenderOnce for ThreadItem { None }; - let icon = if let Some(decoration) = decoration { - icon_container().child(DecoratedIcon::new(agent_icon, Some(decoration))) - } else { - icon_container().child(agent_icon) - }; - let is_running = matches!( self.status, AgentThreadStatus::Running | AgentThreadStatus::WaitingForConfirmation ); - let running_or_action = is_running || (self.hovered && self.action_slot.is_some()); + + let icon = if is_running { + icon_container().child( + Icon::new(IconName::LoadCircle) + .size(IconSize::Small) + .color(Color::Muted) + .with_rotate_animation(2), + ) + } else if let Some(decoration) = decoration { + icon_container().child(DecoratedIcon::new(agent_icon, Some(decoration))) + } else { + icon_container().child(agent_icon) + }; let title = self.title; let highlight_positions = self.highlight_positions; @@ -244,13 +257,16 @@ impl RenderOnce for ThreadItem { if has_worktree || has_diff_stats { this.p_2() } else { - this.px_2().py_1() + this.p_1() } }) .when(self.selected, |s| s.bg(color.element_active)) .border_1() .border_color(gpui::transparent_black()) - .when(self.focused, |s| 
s.border_color(color.panel_focused_border)) + .when(self.focused, |s| { + s.when(self.docked_right, |s| s.border_r_2()) + .border_color(color.border_focused) + }) .hover(|s| s.bg(color.element_hover)) .on_hover(self.on_hover) .child( @@ -270,20 +286,8 @@ impl RenderOnce for ThreadItem { .when_some(self.tooltip, |this, tooltip| this.tooltip(tooltip)), ) .child(gradient_overlay) - .when(running_or_action, |this| { - this.child( - h_flex() - .gap_1() - .when(is_running, |this| { - this.child( - icon_container() - .child(SpinnerLabel::new().color(Color::Accent)), - ) - }) - .when(self.hovered, |this| { - this.when_some(self.action_slot, |this, slot| this.child(slot)) - }), - ) + .when(self.hovered, |this| { + this.when_some(self.action_slot, |this, slot| this.child(slot)) }), ) .when_some(self.worktree, |this, worktree| { @@ -306,6 +310,7 @@ impl RenderOnce for ThreadItem { .gap_1p5() .child(icon_container()) // Icon Spacing .child(worktree_label) + .child(dot_separator()) .when(has_diff_stats, |this| { this.child(DiffStat::new( diff_stat_id.clone(), diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs index 01e88e1fe666fa2038b05af055a0e02b195e9bac..d707df82f4d19b0a3f519e9d6ac9ccdb22965e27 100644 --- a/crates/ui/src/components/list/list_item.rs +++ b/crates/ui/src/components/list/list_item.rs @@ -48,6 +48,7 @@ pub struct ListItem { rounded: bool, overflow_x: bool, focused: Option, + docked_right: bool, } impl ListItem { @@ -78,6 +79,7 @@ impl ListItem { rounded: false, overflow_x: false, focused: None, + docked_right: false, } } @@ -194,6 +196,11 @@ impl ListItem { self.focused = Some(focused); self } + + pub fn docked_right(mut self, docked_right: bool) -> Self { + self.docked_right = docked_right; + self + } } impl Disableable for ListItem { @@ -247,6 +254,7 @@ impl RenderOnce for ListItem { this.when_some(self.focused, |this, focused| { if focused { this.border_1() + .when(self.docked_right, |this| this.border_r_2()) 
.border_color(cx.theme().colors().border_focused) } else { this.border_1() From 45072109221790776b03b686b6973dd7f8227cb0 Mon Sep 17 00:00:00 2001 From: Jack Date: Thu, 12 Mar 2026 01:55:15 +0800 Subject: [PATCH 496/548] languages: Exclude angle brackets from rainbow bracket colorization for TSX (#51311) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Angle brackets in TSX (`<`, `>`, `/>`, ` Screenshots: I don't have a built copy of Zed handy to attach — happy to add one if a maintainer needs it before merging. Release Notes: - Removed rainbow bracket colorization for angled brackets within TSX. --- crates/languages/src/tsx/brackets.scm | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/crates/languages/src/tsx/brackets.scm b/crates/languages/src/tsx/brackets.scm index d72fcb26005a0021907558bbbee7471cfeaec603..cd59d553783f685775e45ba883210272b168c3b8 100644 --- a/crates/languages/src/tsx/brackets.scm +++ b/crates/languages/src/tsx/brackets.scm @@ -7,14 +7,17 @@ ("{" @open "}" @close) -("<" @open +(("<" @open ">" @close) + (#set! rainbow.exclude)) -("<" @open +(("<" @open "/>" @close) + (#set! rainbow.exclude)) -("" @close) + (#set! rainbow.exclude)) (("\"" @open "\"" @close) From 546dacc29bf9edd75cff4083ba7ac7d203947fb3 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Wed, 11 Mar 2026 19:48:12 +0100 Subject: [PATCH 497/548] nix: Correctly handle commitSha == null in nix devshell (#51319) Release Notes: - N/A --- nix/build.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nix/build.nix b/nix/build.nix index d96a7e51ca08d23572b01f0c387d6ef9e4f2dd70..a5ced61bbbfd145c1e3f9fc9909ae69779ba133a 100644 --- a/nix/build.nix +++ b/nix/build.nix @@ -224,7 +224,7 @@ let }; ZED_UPDATE_EXPLANATION = "Zed has been installed using Nix. 
Auto-updates have thus been disabled."; RELEASE_VERSION = version; - ZED_COMMIT_SHA = commitSha; + ZED_COMMIT_SHA = lib.optionalString (commitSha != null) "${commitSha}"; LK_CUSTOM_WEBRTC = pkgs.callPackage ./livekit-libwebrtc/package.nix { }; PROTOC = "${protobuf}/bin/protoc"; From 56b2eae745d3ae8317810fa129a633b3268be1d0 Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Wed, 11 Mar 2026 20:26:19 +0100 Subject: [PATCH 498/548] audio: Run webrtc receiver task on the realtime-priority thread as well (#51315) Co-authored-by: Jakub Konka Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed issues with tremendous audio latency in long-running collab calls. 
Co-authored-by: Jakub Konka --- crates/livekit_client/src/livekit_client/playback.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/livekit_client/src/livekit_client/playback.rs b/crates/livekit_client/src/livekit_client/playback.rs index f62de78b4f9fb702f03943b06270abb41aa68e34..88ebdfd389498ae00ad434eb22726a84a5fe1e01 100644 --- a/crates/livekit_client/src/livekit_client/playback.rs +++ b/crates/livekit_client/src/livekit_client/playback.rs @@ -111,7 +111,7 @@ impl AudioStack { source.num_channels as i32, ); - let receive_task = self.executor.spawn({ + let receive_task = self.executor.spawn_with_priority(Priority::RealtimeAudio, { let source = source.clone(); async move { while let Some(frame) = stream.next().await { @@ -202,7 +202,7 @@ impl AudioStack { let apm = self.apm.clone(); let (frame_tx, mut frame_rx) = futures::channel::mpsc::unbounded(); - let transmit_task = self.executor.spawn({ + let transmit_task = self.executor.spawn_with_priority(Priority::RealtimeAudio, { async move { while let Some(frame) = frame_rx.next().await { source.capture_frame(&frame).await.log_err(); From 9fb57b0daf1933e965b22543257b78bc4f22d376 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Wed, 11 Mar 2026 20:54:51 +0100 Subject: [PATCH 499/548] language_model: Centralize LlmApiToken to a singleton (#51225) The edit prediction, web search and completions endpoints in Cloud all use tokens called LlmApiToken. These were independently created, cached, and refreshed in three places: the cloud language model provider, the edit prediction store, and the cloud web search provider. Each held its own LlmApiToken instance, meaning three separate requests to get these tokens at startup / login and three redundant refreshes whenever the server signaled a token update was needed. We already had a global singleton reacting to the refresh signals: RefreshLlmTokenListener. 
It now holds a single LlmApiToken that all three services use, performs the refresh itself, and emits RefreshLlmTokenEvent only after the token is fresh. That event is used by the language model provider to re-fetch models after a refresh. The singleton is accessed only through `LlmApiToken::global()`. I have tested this manually, and token acquisition and usage appear to be working fine. Edit: I've tested it with a long running session, and refresh seems to be working fine too. Release Notes: - N/A --------- Co-authored-by: Marshall Bowers --- crates/edit_prediction/src/edit_prediction.rs | 25 ++---------- .../src/edit_prediction_tests.rs | 1 + .../src/{model/mod.rs => model.rs} | 0 crates/language_model/src/model/cloud_model.rs | 38 ++++++++++++++++--- crates/language_models/src/provider/cloud.rs | 6 +-- crates/web_search_providers/src/cloud.rs | 26 ++----------- 6 files changed, 43 insertions(+), 53 deletions(-) rename crates/language_model/src/{model/mod.rs => model.rs} (100%) diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 5e1c9f9a03ec0c4bff0bbd60a9aefc6a06fa5368..63240ddd53108f0b2450386150958e23f975d7ed 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -23,14 +23,14 @@ use futures::{ use gpui::BackgroundExecutor; use gpui::http_client::Url; use gpui::{ - App, AsyncApp, Entity, EntityId, Global, SharedString, Subscription, Task, WeakEntity, actions, + App, AsyncApp, Entity, EntityId, Global, SharedString, Task, WeakEntity, actions, http_client::{self, AsyncBody, Method}, prelude::*, }; use language::language_settings::all_language_settings; use language::{Anchor, Buffer, File, Point, TextBufferSnapshot, ToOffset, ToPoint}; use language::{BufferSnapshot, OffsetRangeExt}; -use language_model::{LlmApiToken, NeedsLlmTokenRefresh, RefreshLlmTokenListener}; +use language_model::{LlmApiToken, NeedsLlmTokenRefresh}; use 
project::{DisableAiSettings, Project, ProjectPath, WorktreeId}; use release_channel::AppVersion; use semver::Version; @@ -133,7 +133,6 @@ pub struct EditPredictionStore { client: Arc, user_store: Entity, llm_token: LlmApiToken, - _llm_token_subscription: Subscription, _fetch_experiments_task: Task<()>, projects: HashMap, update_required: bool, @@ -674,10 +673,9 @@ impl EditPredictionStore { } pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { - let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); let data_collection_choice = Self::load_data_collection_choice(); - let llm_token = LlmApiToken::default(); + let llm_token = LlmApiToken::global(cx); let (reject_tx, reject_rx) = mpsc::unbounded(); cx.background_spawn({ @@ -721,23 +719,6 @@ impl EditPredictionStore { user_store, llm_token, _fetch_experiments_task: fetch_experiments_task, - _llm_token_subscription: cx.subscribe( - &refresh_llm_token_listener, - |this, _listener, _event, cx| { - let client = this.client.clone(); - let llm_token = this.llm_token.clone(); - let organization_id = this - .user_store - .read(cx) - .current_organization() - .map(|organization| organization.id.clone()); - cx.spawn(async move |_this, _cx| { - llm_token.refresh(&client, organization_id).await?; - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - }, - ), update_required: false, edit_prediction_model: EditPredictionModel::Zeta, zeta2_raw_config: Self::zeta2_raw_config_from_env(), diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index ad237e6f8fb31708dbabc6e8332ce0c164877004..8f97df2c308980e1c2c89838609b30e1aedb1917 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -21,6 +21,7 @@ use language::{ Anchor, Buffer, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSeverity, Operation, Point, Selection, SelectionGoal, }; +use 
language_model::RefreshLlmTokenListener; use lsp::LanguageServerId; use parking_lot::Mutex; use pretty_assertions::{assert_eq, assert_matches}; diff --git a/crates/language_model/src/model/mod.rs b/crates/language_model/src/model.rs similarity index 100% rename from crates/language_model/src/model/mod.rs rename to crates/language_model/src/model.rs diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index e64cc43edd8eef6cfaf0c6c966365c81d37b611c..e384ce05fa390677529235442c4cb91186520a02 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -30,6 +30,13 @@ impl fmt::Display for PaymentRequiredError { pub struct LlmApiToken(Arc>>); impl LlmApiToken { + pub fn global(cx: &App) -> Self { + RefreshLlmTokenListener::global(cx) + .read(cx) + .llm_api_token + .clone() + } + pub async fn acquire( &self, client: &Arc, @@ -102,13 +109,16 @@ struct GlobalRefreshLlmTokenListener(Entity); impl Global for GlobalRefreshLlmTokenListener {} -pub struct RefreshLlmTokenEvent; +pub struct LlmTokenRefreshedEvent; pub struct RefreshLlmTokenListener { + client: Arc, + user_store: Entity, + llm_api_token: LlmApiToken, _subscription: Subscription, } -impl EventEmitter for RefreshLlmTokenListener {} +impl EventEmitter for RefreshLlmTokenListener {} impl RefreshLlmTokenListener { pub fn register(client: Arc, user_store: Entity, cx: &mut App) { @@ -128,21 +138,39 @@ impl RefreshLlmTokenListener { } }); - let subscription = cx.subscribe(&user_store, |_this, _user_store, event, cx| { + let subscription = cx.subscribe(&user_store, |this, _user_store, event, cx| { if matches!(event, client::user::Event::OrganizationChanged) { - cx.emit(RefreshLlmTokenEvent); + this.refresh(cx); } }); Self { + client, + user_store, + llm_api_token: LlmApiToken::default(), _subscription: subscription, } } + fn refresh(&self, cx: &mut Context) { + let client = self.client.clone(); + let llm_api_token 
= self.llm_api_token.clone(); + let organization_id = self + .user_store + .read(cx) + .current_organization() + .map(|o| o.id.clone()); + cx.spawn(async move |this, cx| { + llm_api_token.refresh(&client, organization_id).await?; + this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent)) + }) + .detach_and_log_err(cx); + } + fn handle_refresh_llm_token(this: Entity, message: &MessageToClient, cx: &mut App) { match message { MessageToClient::UserUpdated => { - this.update(cx, |_this, cx| cx.emit(RefreshLlmTokenEvent)); + this.update(cx, |this, cx| this.refresh(cx)); } } } diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 4e705a8d62a5446b17bcc95a7dc75152b0c3269c..610b0167b86f8bf4426b671cedad45a28c3fdc6d 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -109,9 +109,10 @@ impl State { cx: &mut Context, ) -> Self { let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); + let llm_api_token = LlmApiToken::global(cx); Self { client: client.clone(), - llm_api_token: LlmApiToken::default(), + llm_api_token, user_store: user_store.clone(), status, models: Vec::new(), @@ -158,9 +159,6 @@ impl State { .current_organization() .map(|o| o.id.clone()); cx.spawn(async move |this, cx| { - llm_api_token - .refresh(&client, organization_id.clone()) - .await?; let response = Self::fetch_models(client, llm_api_token, organization_id).await?; this.update(cx, |this, cx| { diff --git a/crates/web_search_providers/src/cloud.rs b/crates/web_search_providers/src/cloud.rs index c8bc89953f2b2d3ec62bac07e80f2737522824f7..51be6c9ddff01a956eebabe3e44166ae15de4515 100644 --- a/crates/web_search_providers/src/cloud.rs +++ b/crates/web_search_providers/src/cloud.rs @@ -5,9 +5,9 @@ use client::{Client, UserStore}; use cloud_api_types::OrganizationId; use cloud_llm_client::{WebSearchBody, WebSearchResponse}; use futures::AsyncReadExt as _; -use gpui::{App, 
AppContext, Context, Entity, Subscription, Task}; +use gpui::{App, AppContext, Context, Entity, Task}; use http_client::{HttpClient, Method}; -use language_model::{LlmApiToken, NeedsLlmTokenRefresh, RefreshLlmTokenListener}; +use language_model::{LlmApiToken, NeedsLlmTokenRefresh}; use web_search::{WebSearchProvider, WebSearchProviderId}; pub struct CloudWebSearchProvider { @@ -26,34 +26,16 @@ pub struct State { client: Arc, user_store: Entity, llm_api_token: LlmApiToken, - _llm_token_subscription: Subscription, } impl State { pub fn new(client: Arc, user_store: Entity, cx: &mut Context) -> Self { - let refresh_llm_token_listener = RefreshLlmTokenListener::global(cx); + let llm_api_token = LlmApiToken::global(cx); Self { client, user_store, - llm_api_token: LlmApiToken::default(), - _llm_token_subscription: cx.subscribe( - &refresh_llm_token_listener, - |this, _, _event, cx| { - let client = this.client.clone(); - let llm_api_token = this.llm_api_token.clone(); - let organization_id = this - .user_store - .read(cx) - .current_organization() - .map(|o| o.id.clone()); - cx.spawn(async move |_this, _cx| { - llm_api_token.refresh(&client, organization_id).await?; - anyhow::Ok(()) - }) - .detach_and_log_err(cx); - }, - ), + llm_api_token, } } } From 9d2e2c859b9af0821f2c65a26845b49b571c2433 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 11 Mar 2026 17:34:03 -0300 Subject: [PATCH 500/548] agent_ui: Add more UI refinements to the sidebar (#51325) Adjust the settings view and removes the new empty state from text threads. 
Release Notes: - N/A --- crates/agent_ui/src/agent_configuration.rs | 13 ++++++++-- crates/agent_ui/src/agent_panel.rs | 5 +++- crates/agent_ui/src/sidebar.rs | 2 +- crates/ui/src/components/chip.rs | 3 +-- crates/ui/src/components/label/label.rs | 28 ++++++++++++++++++++++ 5 files changed, 45 insertions(+), 6 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index aa316ba7c5efe5f679764cd7d4626a1f1310e4c6..46f92bfb2cfd60158bfb7c7aae9c16f3d9184695 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -228,6 +228,7 @@ impl AgentConfiguration { .unwrap_or(false); v_flex() + .min_w_0() .w_full() .when(is_expanded, |this| this.mb_2()) .child( @@ -312,6 +313,7 @@ impl AgentConfiguration { ) .child( v_flex() + .min_w_0() .w_full() .px_2() .gap_1() @@ -459,6 +461,7 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .w_full() .child(self.render_section_title( "LLM Providers", @@ -559,6 +562,7 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .border_b_1() .border_color(cx.theme().colors().border) .child(self.render_section_title( @@ -802,9 +806,12 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .id(item_id.clone()) .child( h_flex() + .min_w_0() + .w_full() .justify_between() .child( h_flex() @@ -820,13 +827,13 @@ impl AgentConfiguration { .tooltip(Tooltip::text(tooltip_text)) .child(status_indicator), ) - .child(Label::new(item_id).truncate()) + .child(Label::new(item_id).flex_shrink_0().truncate()) .child( div() .id("extension-source") + .min_w_0() .mt_0p5() .mx_1() - .flex_none() .tooltip(Tooltip::text(source_tooltip)) .child( Icon::new(source_icon) @@ -1019,6 +1026,7 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .border_b_1() .border_color(cx.theme().colors().border) .child( @@ -1217,6 +1225,7 @@ impl Render for AgentConfiguration { .id("assistant-configuration-content") .track_scroll(&self.scroll_handle) .size_full() + .min_w_0() 
.overflow_y_scroll() .child(self.render_agent_servers_section(cx)) .child(self.render_context_servers_section(window, cx)) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 50346bd752cec4432fb5a87e4df7cb4ce09aca83..1aefc99c020409a764ad2c44fe8477665f73c4bc 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -4131,7 +4131,10 @@ impl AgentPanel { ActiveView::History { .. } | ActiveView::Configuration ); - let use_v2_empty_toolbar = has_v2_flag && is_empty_state && !is_in_history_or_config; + let is_text_thread = matches!(&self.active_view, ActiveView::TextThread { .. }); + + let use_v2_empty_toolbar = + has_v2_flag && is_empty_state && !is_in_history_or_config && !is_text_thread; let is_sidebar_open = self .sidebar diff --git a/crates/agent_ui/src/sidebar.rs b/crates/agent_ui/src/sidebar.rs index e36cb750b4a74dc8d749501eed07941cd30c7b6f..595366dd0484254ed641e69713b519199547e8e3 100644 --- a/crates/agent_ui/src/sidebar.rs +++ b/crates/agent_ui/src/sidebar.rs @@ -1250,7 +1250,7 @@ impl Sidebar { .focused(is_selected) .child( h_flex() - .p_1() + .py_1() .gap_1p5() .child(Icon::new(icon).size(IconSize::Small).color(Color::Muted)) .child(Label::new(label).color(Color::Muted)) diff --git a/crates/ui/src/components/chip.rs b/crates/ui/src/components/chip.rs index ce709fe3962f742f5208808315f3bdac09c1f513..06dc7e6afa6fa8723985913dfece4205e360511e 100644 --- a/crates/ui/src/components/chip.rs +++ b/crates/ui/src/components/chip.rs @@ -81,8 +81,7 @@ impl RenderOnce for Chip { h_flex() .when_some(self.height, |this, h| this.h(h)) - .min_w_0() - .flex_initial() + .flex_none() .px_1() .border_1() .rounded_sm() diff --git a/crates/ui/src/components/label/label.rs b/crates/ui/src/components/label/label.rs index d0f50c00336eb971621e2da7bbaf53cf09569caa..405948ea06c7e86fcb3dec217186596bdaaf0aeb 100644 --- a/crates/ui/src/components/label/label.rs +++ b/crates/ui/src/components/label/label.rs @@ -73,6 
+73,34 @@ impl Label { gpui::margin_style_methods!({ visibility: pub }); + + pub fn flex_1(mut self) -> Self { + self.style().flex_grow = Some(1.); + self.style().flex_shrink = Some(1.); + self.style().flex_basis = Some(gpui::relative(0.).into()); + self + } + + pub fn flex_none(mut self) -> Self { + self.style().flex_grow = Some(0.); + self.style().flex_shrink = Some(0.); + self + } + + pub fn flex_grow(mut self) -> Self { + self.style().flex_grow = Some(1.); + self + } + + pub fn flex_shrink(mut self) -> Self { + self.style().flex_shrink = Some(1.); + self + } + + pub fn flex_shrink_0(mut self) -> Self { + self.style().flex_shrink = Some(0.); + self + } } impl LabelCommon for Label { From 3dff4c57877c8a1b6bc2f6e2444b3b58ab9e637d Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Wed, 11 Mar 2026 18:15:43 -0300 Subject: [PATCH 501/548] agent_ui: Add timestamp to thread item in the sidebar (#51327) Release Notes: - N/A --- crates/agent_ui/src/sidebar.rs | 26 ++++ crates/ui/src/components/ai/thread_item.rs | 140 +++++++++++++++++---- 2 files changed, 143 insertions(+), 23 deletions(-) diff --git a/crates/agent_ui/src/sidebar.rs b/crates/agent_ui/src/sidebar.rs index 595366dd0484254ed641e69713b519199547e8e3..3804e3f63678bcf771b27b2f05929a958531ab39 100644 --- a/crates/agent_ui/src/sidebar.rs +++ b/crates/agent_ui/src/sidebar.rs @@ -1180,11 +1180,37 @@ impl Sidebar { let id = SharedString::from(format!("thread-entry-{}", ix)); + let timestamp = thread + .session_info + .created_at + .or(thread.session_info.updated_at) + .map(|entry_time| { + let now = Utc::now(); + let duration = now.signed_duration_since(entry_time); + + let minutes = duration.num_minutes(); + let hours = duration.num_hours(); + let days = duration.num_days(); + let weeks = days / 7; + let months = days / 30; + + if minutes < 60 { + format!("{}m", minutes.max(1)) + } else if hours < 24 { + format!("{}h", hours) + } else if weeks < 4 { + format!("{}w", 
weeks.max(1)) + } else { + format!("{}mo", months.max(1)) + } + }); + ThreadItem::new(id, title) .icon(thread.icon) .when_some(thread.icon_from_external_svg.clone(), |this, svg| { this.custom_icon_from_external_svg(svg) }) + .when_some(timestamp, |this, ts| this.timestamp(ts)) .highlight_positions(thread.highlight_positions.to_vec()) .status(thread.status) .notified(has_notification) diff --git a/crates/ui/src/components/ai/thread_item.rs b/crates/ui/src/components/ai/thread_item.rs index 1ab516b0cbbcb20c98bf61525779d2bd760ef260..5be91e9d98a1219dcfbbba70a5541ba7b827cfc5 100644 --- a/crates/ui/src/components/ai/thread_item.rs +++ b/crates/ui/src/components/ai/thread_item.rs @@ -245,6 +245,8 @@ impl RenderOnce for ThreadItem { let removed_count = self.removed.unwrap_or(0); let diff_stat_id = self.id.clone(); let has_worktree = self.worktree.is_some(); + let has_timestamp = !self.timestamp.is_empty(); + let timestamp = self.timestamp; v_flex() .id(self.id.clone()) @@ -253,13 +255,7 @@ impl RenderOnce for ThreadItem { .overflow_hidden() .cursor_pointer() .w_full() - .map(|this| { - if has_worktree || has_diff_stats { - this.p_2() - } else { - this.p_1() - } - }) + .p_1() .when(self.selected, |s| s.bg(color.element_active)) .border_1() .border_color(gpui::transparent_black()) @@ -310,23 +306,47 @@ impl RenderOnce for ThreadItem { .gap_1p5() .child(icon_container()) // Icon Spacing .child(worktree_label) - .child(dot_separator()) + .when(has_diff_stats || has_timestamp, |this| { + this.child(dot_separator()) + }) .when(has_diff_stats, |this| { this.child(DiffStat::new( diff_stat_id.clone(), added_count, removed_count, )) + }) + .when(has_diff_stats && has_timestamp, |this| { + this.child(dot_separator()) + }) + .when(has_timestamp, |this| { + this.child( + Label::new(timestamp.clone()) + .size(LabelSize::Small) + .color(Color::Muted), + ) }), ) }) - .when(!has_worktree && has_diff_stats, |this| { + .when(!has_worktree && (has_diff_stats || has_timestamp), |this| { 
this.child( h_flex() .min_w_0() .gap_1p5() .child(icon_container()) // Icon Spacing - .child(DiffStat::new(diff_stat_id, added_count, removed_count)), + .when(has_diff_stats, |this| { + this.child(DiffStat::new(diff_stat_id, added_count, removed_count)) + }) + .when(has_diff_stats && has_timestamp, |this| { + this.child(dot_separator()) + }) + .when(has_timestamp, |this| { + this.child( + Label::new(timestamp.clone()) + .size(LabelSize::Small) + .color(Color::Muted), + ) + }), ) }) .when_some(self.on_click, |this, on_click| this.on_click(on_click)) @@ -349,21 +369,31 @@ impl Component for ThreadItem { let thread_item_examples = vec![ single_example( - "Default", + "Default (minutes)", container() .child( ThreadItem::new("ti-1", "Linking to the Agent Panel Depending on Settings") .icon(IconName::AiOpenAi) - .timestamp("1:33 AM"), + .timestamp("15m"), + ) + .into_any_element(), + ), + single_example( + "Timestamp Only (hours)", + container() + .child( + ThreadItem::new("ti-1b", "Thread with just a timestamp") + .icon(IconName::AiClaude) + .timestamp("3h"), ) .into_any_element(), ), single_example( - "Notified", + "Notified (weeks)", container() .child( ThreadItem::new("ti-2", "Refine thread view scrolling behavior") - .timestamp("12:12 AM") + .timestamp("1w") .notified(true), ) .into_any_element(), @@ -373,7 +403,7 @@ impl Component for ThreadItem { container() .child( ThreadItem::new("ti-2b", "Execute shell command in terminal") - .timestamp("12:15 AM") + .timestamp("2h") .status(AgentThreadStatus::WaitingForConfirmation), ) .into_any_element(), @@ -383,7 +413,7 @@ impl Component for ThreadItem { container() .child( ThreadItem::new("ti-2c", "Failed to connect to language server") - .timestamp("12:20 AM") + .timestamp("5h") .status(AgentThreadStatus::Error), ) .into_any_element(), @@ -394,7 +424,7 @@ impl Component for ThreadItem { .child( ThreadItem::new("ti-3", "Add line numbers option to FileEditBlock") .icon(IconName::AiClaude) - .timestamp("7:30 PM") + 
.timestamp("23h") .status(AgentThreadStatus::Running), ) .into_any_element(), @@ -405,30 +435,43 @@ impl Component for ThreadItem { .child( ThreadItem::new("ti-4", "Add line numbers option to FileEditBlock") .icon(IconName::AiClaude) - .timestamp("7:37 PM") + .timestamp("2w") .worktree("link-agent-panel"), ) .into_any_element(), ), single_example( - "With Changes", + "With Changes (months)", container() .child( ThreadItem::new("ti-5", "Managing user and project settings interactions") .icon(IconName::AiClaude) - .timestamp("7:37 PM") + .timestamp("1mo") .added(10) .removed(3), ) .into_any_element(), ), + single_example( + "Worktree + Changes + Timestamp", + container() + .child( + ThreadItem::new("ti-5b", "Full metadata example") + .icon(IconName::AiClaude) + .worktree("my-project") + .added(42) + .removed(17) + .timestamp("3w"), + ) + .into_any_element(), + ), single_example( "Selected Item", container() .child( ThreadItem::new("ti-6", "Refine textarea interaction behavior") .icon(IconName::AiGemini) - .timestamp("3:00 PM") + .timestamp("45m") .selected(true), ) .into_any_element(), @@ -439,23 +482,74 @@ impl Component for ThreadItem { .child( ThreadItem::new("ti-7", "Implement keyboard navigation") .icon(IconName::AiClaude) - .timestamp("4:00 PM") + .timestamp("12h") .focused(true), ) .into_any_element(), ), + single_example( + "Focused + Docked Right", + container() + .child( + ThreadItem::new("ti-7b", "Focused with right dock border") + .icon(IconName::AiClaude) + .timestamp("1w") + .focused(true) + .docked_right(true), + ) + .into_any_element(), + ), single_example( "Selected + Focused", container() .child( ThreadItem::new("ti-8", "Active and keyboard-focused thread") .icon(IconName::AiGemini) - .timestamp("5:00 PM") + .timestamp("2mo") .selected(true) .focused(true), ) .into_any_element(), ), + single_example( + "Hovered with Action Slot", + container() + .child( + ThreadItem::new("ti-9", "Hover to see action button") + .icon(IconName::AiClaude) + 
.timestamp("6h") + .hovered(true) + .action_slot( + IconButton::new("delete", IconName::Trash) + .icon_size(IconSize::Small) + .icon_color(Color::Muted), + ), + ) + .into_any_element(), + ), + single_example( + "Search Highlight", + container() + .child( + ThreadItem::new("ti-10", "Implement keyboard navigation") + .icon(IconName::AiClaude) + .timestamp("4w") + .highlight_positions(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]), + ) + .into_any_element(), + ), + single_example( + "Worktree Search Highlight", + container() + .child( + ThreadItem::new("ti-11", "Search in worktree name") + .icon(IconName::AiClaude) + .timestamp("3mo") + .worktree("my-project-name") + .worktree_highlight_positions(vec![3, 4, 5, 6, 7, 8, 9, 10, 11]), + ) + .into_any_element(), + ), ]; Some( From becb24cd19405b6842cb4f0fb656a1a3853a0137 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 11 Mar 2026 17:28:29 -0400 Subject: [PATCH 502/548] cloud_api_types: Add `ZedBusiness` variant to `Plan` (#51329) This PR adds a `ZedBusiness` variant to the `Plan` enum. Closes CLO-480. 
Release Notes: - N/A --- crates/agent_ui/src/agent_configuration.rs | 1 + crates/ai_onboarding/src/ai_onboarding.rs | 19 +++++++++++++++++++ crates/ai_onboarding/src/ai_upsell_card.rs | 20 ++++++++++++++++++++ crates/ai_onboarding/src/plan_definitions.rs | 6 ++++++ crates/cloud_api_types/src/plan.rs | 1 + crates/title_bar/src/plan_chip.rs | 1 + 6 files changed, 48 insertions(+) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 46f92bfb2cfd60158bfb7c7aae9c16f3d9184695..ef3f3fdacc3d155554f3e2576ed1ed27c1d9ff0d 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -501,6 +501,7 @@ impl AgentConfiguration { Plan::ZedFree => ("Free", Color::Default, free_chip_bg), Plan::ZedProTrial => ("Pro Trial", Color::Accent, pro_chip_bg), Plan::ZedPro => ("Pro", Color::Accent, pro_chip_bg), + Plan::ZedBusiness => ("Business", Color::Accent, pro_chip_bg), Plan::ZedStudent => ("Student", Color::Accent, pro_chip_bg), }; diff --git a/crates/ai_onboarding/src/ai_onboarding.rs b/crates/ai_onboarding/src/ai_onboarding.rs index 0b1ccb4088e67de332c2bd2940ca5bdf77f1d3df..8b578d2e7f00a4f0dd139e074259d28e09932908 100644 --- a/crates/ai_onboarding/src/ai_onboarding.rs +++ b/crates/ai_onboarding/src/ai_onboarding.rs @@ -266,6 +266,20 @@ impl ZedAiOnboarding { .into_any_element() } + fn render_business_plan_state(&self, _cx: &mut App) -> AnyElement { + v_flex() + .gap_1() + .child(Headline::new("Welcome to Zed Business")) + .child( + Label::new("Here's what you get:") + .color(Color::Muted) + .mb_2(), + ) + .child(PlanDefinitions.business_plan()) + .children(self.render_dismiss_button()) + .into_any_element() + } + fn render_student_plan_state(&self, _cx: &mut App) -> AnyElement { v_flex() .gap_1() @@ -289,6 +303,7 @@ impl RenderOnce for ZedAiOnboarding { Some(Plan::ZedFree) => self.render_free_plan_state(cx), Some(Plan::ZedProTrial) => self.render_trial_state(cx), Some(Plan::ZedPro) => 
self.render_pro_plan_state(cx), + Some(Plan::ZedBusiness) => self.render_business_plan_state(cx), Some(Plan::ZedStudent) => self.render_student_plan_state(cx), } } else { @@ -353,6 +368,10 @@ impl Component for ZedAiOnboarding { "Pro Plan", onboarding(SignInStatus::SignedIn, Some(Plan::ZedPro), false), ), + single_example( + "Business Plan", + onboarding(SignInStatus::SignedIn, Some(Plan::ZedBusiness), false), + ), ]) .into_any_element(), ) diff --git a/crates/ai_onboarding/src/ai_upsell_card.rs b/crates/ai_onboarding/src/ai_upsell_card.rs index f1a1c4310def0b9b4dbabbc6a59eae940396fbb9..40a35f590d87a9928d4299199a99f223264e5ef3 100644 --- a/crates/ai_onboarding/src/ai_upsell_card.rs +++ b/crates/ai_onboarding/src/ai_upsell_card.rs @@ -250,6 +250,15 @@ impl RenderOnce for AiUpsellCard { .mb_2(), ) .child(PlanDefinitions.pro_plan()), + Some(Plan::ZedBusiness) => card + .child(certified_user_stamp) + .child(Label::new("You're in the Zed Business plan").size(LabelSize::Large)) + .child( + Label::new("Here's what you get:") + .color(Color::Muted) + .mb_2(), + ) + .child(PlanDefinitions.business_plan()), Some(Plan::ZedStudent) => card .child(certified_user_stamp) .child(Label::new("You're in the Zed Student plan").size(LabelSize::Large)) @@ -368,6 +377,17 @@ impl Component for AiUpsellCard { } .into_any_element(), ), + single_example( + "Business Plan", + AiUpsellCard { + sign_in_status: SignInStatus::SignedIn, + sign_in: Arc::new(|_, _| {}), + account_too_young: false, + user_plan: Some(Plan::ZedBusiness), + tab_index: Some(1), + } + .into_any_element(), + ), ], )) .into_any_element(), diff --git a/crates/ai_onboarding/src/plan_definitions.rs b/crates/ai_onboarding/src/plan_definitions.rs index 6d46a598c385b300fa579c69b0c58cfe51610c68..184815bcad9babb1892335c6207a79e1fe193c04 100644 --- a/crates/ai_onboarding/src/plan_definitions.rs +++ b/crates/ai_onboarding/src/plan_definitions.rs @@ -36,6 +36,12 @@ impl PlanDefinitions { .child(ListBulletItem::new("Usage-based billing 
beyond $5")) } + pub fn business_plan(&self) -> impl IntoElement { + List::new() + .child(ListBulletItem::new("Unlimited edit predictions")) + .child(ListBulletItem::new("Usage-based billing")) + } + pub fn student_plan(&self) -> impl IntoElement { List::new() .child(ListBulletItem::new("Unlimited edit predictions")) diff --git a/crates/cloud_api_types/src/plan.rs b/crates/cloud_api_types/src/plan.rs index e4a33e3c1933717f642848acc13dcf19b173e902..1f40d1ddb5f0e72871d5ecaee62b884132c158e4 100644 --- a/crates/cloud_api_types/src/plan.rs +++ b/crates/cloud_api_types/src/plan.rs @@ -9,6 +9,7 @@ pub enum Plan { ZedFree, ZedPro, ZedProTrial, + ZedBusiness, ZedStudent, } diff --git a/crates/title_bar/src/plan_chip.rs b/crates/title_bar/src/plan_chip.rs index edec0da2dea317bd122ece14d6afb90a31990c96..237e507ed8e4d1a5f63a7df116bf08fd69086bc2 100644 --- a/crates/title_bar/src/plan_chip.rs +++ b/crates/title_bar/src/plan_chip.rs @@ -33,6 +33,7 @@ impl RenderOnce for PlanChip { Plan::ZedFree => ("Free", Color::Default, free_chip_bg), Plan::ZedProTrial => ("Pro Trial", Color::Accent, pro_chip_bg), Plan::ZedPro => ("Pro", Color::Accent, pro_chip_bg), + Plan::ZedBusiness => ("Business", Color::Accent, pro_chip_bg), Plan::ZedStudent => ("Student", Color::Accent, pro_chip_bg), }; From bb4f771f0e28a07d980edf8ca8fa6a6f596d1512 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 11 Mar 2026 18:22:17 -0400 Subject: [PATCH 503/548] client: Populate plans for organizations (#51334) This PR makes it so we populate the `plans_by_organization` collection with the plans returned from the server. 
Release Notes: - N/A --- crates/client/src/test.rs | 8 ++++++-- crates/client/src/user.rs | 17 ++++++++++++++++- crates/cloud_api_types/src/cloud_api_types.rs | 5 ++++- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/crates/client/src/test.rs b/crates/client/src/test.rs index 5102664a8c08ba336f3ae506aadb68eb2a537935..b506cee822ff9c2e4e31f262886a26ac1acbd134 100644 --- a/crates/client/src/test.rs +++ b/crates/client/src/test.rs @@ -1,4 +1,6 @@ -use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore}; +use std::collections::BTreeMap; +use std::sync::Arc; + use anyhow::{Context as _, Result, anyhow}; use cloud_api_client::{ AuthenticatedUser, GetAuthenticatedUserResponse, KnownOrUnknown, Plan, PlanInfo, @@ -9,7 +11,8 @@ use gpui::{AppContext as _, Entity, TestAppContext}; use http_client::{AsyncBody, Method, Request, http}; use parking_lot::Mutex; use rpc::{ConnectionId, Peer, Receipt, TypedEnvelope, proto}; -use std::sync::Arc; + +use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore}; pub struct FakeServer { peer: Arc, @@ -266,6 +269,7 @@ pub fn make_get_authenticated_user_response( }, feature_flags: vec![], organizations: vec![], + plans_by_organization: BTreeMap::new(), plan: PlanInfo { plan: KnownOrUnknown::Known(Plan::ZedPro), subscription_period: None, diff --git a/crates/client/src/user.rs b/crates/client/src/user.rs index 5d38569cfd86c38e5b4780621db40d1f2a3b745c..71b05dc58f54379f8dfb2ec46d4c280926a56bea 100644 --- a/crates/client/src/user.rs +++ b/crates/client/src/user.rs @@ -3,7 +3,7 @@ use anyhow::{Context as _, Result}; use chrono::{DateTime, Utc}; use cloud_api_client::websocket_protocol::MessageToClient; use cloud_api_client::{ - GetAuthenticatedUserResponse, Organization, OrganizationId, Plan, PlanInfo, + GetAuthenticatedUserResponse, KnownOrUnknown, Organization, OrganizationId, Plan, PlanInfo, }; use cloud_llm_client::{ EDIT_PREDICTIONS_USAGE_AMOUNT_HEADER_NAME, 
EDIT_PREDICTIONS_USAGE_LIMIT_HEADER_NAME, UsageLimit, @@ -817,6 +817,21 @@ impl UserStore { self.organizations = response.organizations.into_iter().map(Arc::new).collect(); self.current_organization = self.organizations.first().cloned(); + self.plans_by_organization = response + .plans_by_organization + .into_iter() + .map(|(organization_id, plan)| { + let plan = match plan { + KnownOrUnknown::Known(plan) => plan, + KnownOrUnknown::Unknown(_) => { + // If we get a plan that we don't recognize, fall back to the Free plan. + Plan::ZedFree + } + }; + + (organization_id, plan) + }) + .collect(); self.edit_prediction_usage = Some(EditPredictionUsage(RequestUsage { limit: response.plan.usage.edit_predictions.limit, diff --git a/crates/cloud_api_types/src/cloud_api_types.rs b/crates/cloud_api_types/src/cloud_api_types.rs index 42d3442bfc016f5cb1a39ba421ccdfe386bcbc65..e2c517edcc78e37bc2eab7055c5ac8d79c9db5b2 100644 --- a/crates/cloud_api_types/src/cloud_api_types.rs +++ b/crates/cloud_api_types/src/cloud_api_types.rs @@ -4,6 +4,7 @@ mod plan; mod timestamp; pub mod websocket_protocol; +use std::collections::BTreeMap; use std::sync::Arc; use serde::{Deserialize, Serialize}; @@ -21,6 +22,8 @@ pub struct GetAuthenticatedUserResponse { pub feature_flags: Vec, #[serde(default)] pub organizations: Vec, + #[serde(default)] + pub plans_by_organization: BTreeMap>, pub plan: PlanInfo, } @@ -35,7 +38,7 @@ pub struct AuthenticatedUser { pub accepted_tos_at: Option, } -#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize, Deserialize)] +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize)] pub struct OrganizationId(pub Arc); #[derive(Debug, PartialEq, Serialize, Deserialize)] From 6034961499c180c56c41e9647f8f5950b2a808ef Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Wed, 11 Mar 2026 19:21:57 -0400 Subject: [PATCH 504/548] ai_onboarding: Add student plan examples to component preview (#51338) This PR adds examples for the student plan to the 
component preview. Release Notes: - N/A --- crates/ai_onboarding/src/ai_onboarding.rs | 4 ++++ crates/ai_onboarding/src/ai_upsell_card.rs | 11 +++++++++++ 2 files changed, 15 insertions(+) diff --git a/crates/ai_onboarding/src/ai_onboarding.rs b/crates/ai_onboarding/src/ai_onboarding.rs index 8b578d2e7f00a4f0dd139e074259d28e09932908..e05853fa167267c505d4424365c29844e0ce08db 100644 --- a/crates/ai_onboarding/src/ai_onboarding.rs +++ b/crates/ai_onboarding/src/ai_onboarding.rs @@ -372,6 +372,10 @@ impl Component for ZedAiOnboarding { "Business Plan", onboarding(SignInStatus::SignedIn, Some(Plan::ZedBusiness), false), ), + single_example( + "Student Plan", + onboarding(SignInStatus::SignedIn, Some(Plan::ZedStudent), false), + ), ]) .into_any_element(), ) diff --git a/crates/ai_onboarding/src/ai_upsell_card.rs b/crates/ai_onboarding/src/ai_upsell_card.rs index 40a35f590d87a9928d4299199a99f223264e5ef3..cbaa9785db9e5471dd76a3add2cb9f19ca1b7ae1 100644 --- a/crates/ai_onboarding/src/ai_upsell_card.rs +++ b/crates/ai_onboarding/src/ai_upsell_card.rs @@ -388,6 +388,17 @@ impl Component for AiUpsellCard { } .into_any_element(), ), + single_example( + "Student Plan", + AiUpsellCard { + sign_in_status: SignInStatus::SignedIn, + sign_in: Arc::new(|_, _| {}), + account_too_young: false, + user_plan: Some(Plan::ZedStudent), + tab_index: Some(1), + } + .into_any_element(), + ), ], )) .into_any_element(), From f627c43ea1e4a8dc5788b2136b7c78aedb6b87d3 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Thu, 12 Mar 2026 11:49:32 +0530 Subject: [PATCH 505/548] languages: Prevent `bsn` macro from injecting rust layer (#51353) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/zed-industries/zed/issues/51240 We don’t parse bsn as embedded Rust anymore. We expect bsn to get its own Tree-sitter implementation in the future, which should improve this. This fixes broken syntax highlighting for string literals. 
See line 66 in the comparison below. image Release Notes: - N/A Co-authored-by: Christopher Biscardi --- crates/languages/src/rust/injections.scm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/languages/src/rust/injections.scm b/crates/languages/src/rust/injections.scm index 89d839282d3388f450f9ebdb923167f0986f349c..c50694dc9e0b90d3e31bc1147e59eea7ff402efa 100644 --- a/crates/languages/src/rust/injections.scm +++ b/crates/languages/src/rust/injections.scm @@ -10,7 +10,7 @@ (scoped_identifier (identifier) @_macro_name .) ] - (#not-any-of? @_macro_name "view" "html") + (#not-any-of? @_macro_name "view" "html" "bsn") (token_tree) @injection.content (#set! injection.language "rust")) From 81da953acfdcba1951875abb9664cd371d4c7f86 Mon Sep 17 00:00:00 2001 From: Josh Robson Chase Date: Thu, 12 Mar 2026 05:07:58 -0400 Subject: [PATCH 506/548] helix: Always offset cursor on selection (#46311) https://github.com/zed-industries/zed/pull/42837 added the `cursor_offset_on_selection` field, which displays the cursor *after* the end of the selection unless a vim visual mode is enabled, in which case it gets displayed *at* the end of the selection. However, the real helix is effectively *always* in select mode, and will always display the cursor at the end of the selection, whether that selection is made via its visual mode, a movement key, or with the mouse. This makes it so that the helix mode setting is taken into account regardless of the visual-ness of the vim mode in the `sync_vim_settings` method. I also considered simply moving `Mode::HelixNormal` up to the `true` arm of the match in the `is_visual` method since helix is kinda *always* in visual mode, but I figured that could have some unintended consequences and chose to err on the side of caution. 
Possibly related to #20121 Closes #46998 Release Notes: - Fixed the cursor offset in non-visual helix selections Co-authored-by: Nils Koch --- crates/vim/src/state.rs | 4 ++++ crates/vim/src/vim.rs | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs index 69b2816cc0bdc5aeed2af787b9a92166e2c93956..4e71a698ff0789a462e5ec2e83d673421621c884 100644 --- a/crates/vim/src/state.rs +++ b/crates/vim/src/state.rs @@ -73,6 +73,10 @@ impl Mode { Self::Normal | Self::Insert | Self::Replace | Self::HelixNormal => false, } } + + pub fn is_helix(&self) -> bool { + matches!(self, Self::HelixNormal | Self::HelixSelect) + } } #[derive(Clone, Debug, PartialEq)] diff --git a/crates/vim/src/vim.rs b/crates/vim/src/vim.rs index 8c551bcd2768043ae416157c80d4d2f9faa19092..3085dc5b3763222eb4b06d2ee551e026feba0002 100644 --- a/crates/vim/src/vim.rs +++ b/crates/vim/src/vim.rs @@ -2070,7 +2070,7 @@ impl Vim { input_enabled: self.editor_input_enabled(), expects_character_input: self.expects_character_input(), autoindent: self.should_autoindent(), - cursor_offset_on_selection: self.mode.is_visual(), + cursor_offset_on_selection: self.mode.is_visual() || self.mode.is_helix(), line_mode: matches!(self.mode, Mode::VisualLine), hide_edit_predictions: !matches!(self.mode, Mode::Insert | Mode::Replace), } From 5ebdbe2aacfe7d21a96c3bd1ca759ac4101ffb5c Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 12 Mar 2026 10:22:05 +0100 Subject: [PATCH 507/548] agent_ui: No global thread history (#51362) Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/agent_ui/src/agent_connection_store.rs | 73 +++-- 
crates/agent_ui/src/agent_panel.rs | 293 +++++++++++------- crates/agent_ui/src/connection_view.rs | 110 ++++--- crates/agent_ui/src/inline_assistant.rs | 31 +- crates/agent_ui/src/text_thread_history.rs | 4 + crates/agent_ui/src/thread_history.rs | 11 +- crates/agent_ui/src/thread_history_view.rs | 4 + 7 files changed, 332 insertions(+), 194 deletions(-) diff --git a/crates/agent_ui/src/agent_connection_store.rs b/crates/agent_ui/src/agent_connection_store.rs index c0c4519bcc64d53690dd782a55e6b9da4f498fe0..936b9b7a2de984f20f59c8f050ecb3bff1386595 100644 --- a/crates/agent_ui/src/agent_connection_store.rs +++ b/crates/agent_ui/src/agent_connection_store.rs @@ -9,42 +9,51 @@ use gpui::{AppContext, Context, Entity, EventEmitter, SharedString, Subscription use project::{AgentServerStore, AgentServersUpdated, Project}; use watch::Receiver; -use crate::ExternalAgent; +use crate::{ExternalAgent, ThreadHistory}; use project::ExternalAgentServerName; -pub enum ConnectionEntry { +pub enum AgentConnectionEntry { Connecting { - connect_task: Shared, LoadError>>>, - }, - Connected { - connection: Rc, + connect_task: Shared>>, }, + Connected(AgentConnectedState), Error { error: LoadError, }, } -impl ConnectionEntry { - pub fn wait_for_connection(&self) -> Shared, LoadError>>> { +#[derive(Clone)] +pub struct AgentConnectedState { + pub connection: Rc, + pub history: Entity, +} + +impl AgentConnectionEntry { + pub fn wait_for_connection(&self) -> Shared>> { match self { - ConnectionEntry::Connecting { connect_task } => connect_task.clone(), - ConnectionEntry::Connected { connection } => { - Task::ready(Ok(connection.clone())).shared() - } - ConnectionEntry::Error { error } => Task::ready(Err(error.clone())).shared(), + AgentConnectionEntry::Connecting { connect_task } => connect_task.clone(), + AgentConnectionEntry::Connected(state) => Task::ready(Ok(state.clone())).shared(), + AgentConnectionEntry::Error { error } => Task::ready(Err(error.clone())).shared(), + } + } + + pub fn 
history(&self) -> Option<&Entity> { + match self { + AgentConnectionEntry::Connected(state) => Some(&state.history), + _ => None, } } } -pub enum ConnectionEntryEvent { +pub enum AgentConnectionEntryEvent { NewVersionAvailable(SharedString), } -impl EventEmitter for ConnectionEntry {} +impl EventEmitter for AgentConnectionEntry {} pub struct AgentConnectionStore { project: Entity, - entries: HashMap>, + entries: HashMap>, _subscriptions: Vec, } @@ -59,17 +68,21 @@ impl AgentConnectionStore { } } + pub fn entry(&self, key: &ExternalAgent) -> Option<&Entity> { + self.entries.get(key) + } + pub fn request_connection( &mut self, key: ExternalAgent, server: Rc, cx: &mut Context, - ) -> Entity { + ) -> Entity { self.entries.get(&key).cloned().unwrap_or_else(|| { let (mut new_version_rx, connect_task) = self.start_connection(server.clone(), cx); let connect_task = connect_task.shared(); - let entry = cx.new(|_cx| ConnectionEntry::Connecting { + let entry = cx.new(|_cx| AgentConnectionEntry::Connecting { connect_task: connect_task.clone(), }); @@ -79,18 +92,18 @@ impl AgentConnectionStore { let key = key.clone(); let entry = entry.clone(); async move |this, cx| match connect_task.await { - Ok(connection) => { + Ok(connected_state) => { entry.update(cx, |entry, cx| { - if let ConnectionEntry::Connecting { .. } = entry { - *entry = ConnectionEntry::Connected { connection }; + if let AgentConnectionEntry::Connecting { .. } = entry { + *entry = AgentConnectionEntry::Connected(connected_state); cx.notify(); } }); } Err(error) => { entry.update(cx, |entry, cx| { - if let ConnectionEntry::Connecting { .. } = entry { - *entry = ConnectionEntry::Error { error }; + if let AgentConnectionEntry::Connecting { .. 
} = entry { + *entry = AgentConnectionEntry::Error { error }; cx.notify(); } }); @@ -106,7 +119,7 @@ impl AgentConnectionStore { while let Ok(version) = new_version_rx.recv().await { if let Some(version) = version { entry.update(cx, |_entry, cx| { - cx.emit(ConnectionEntryEvent::NewVersionAvailable( + cx.emit(AgentConnectionEntryEvent::NewVersionAvailable( version.clone().into(), )); }); @@ -143,7 +156,7 @@ impl AgentConnectionStore { cx: &mut Context, ) -> ( Receiver>, - Task, LoadError>>, + Task>, ) { let (new_version_tx, new_version_rx) = watch::channel::>(None); @@ -151,8 +164,14 @@ impl AgentConnectionStore { let delegate = AgentServerDelegate::new(agent_server_store, Some(new_version_tx)); let connect_task = server.connect(delegate, cx); - let connect_task = cx.spawn(async move |_this, _cx| match connect_task.await { - Ok(connection) => Ok(connection), + let connect_task = cx.spawn(async move |_this, cx| match connect_task.await { + Ok(connection) => cx.update(|cx| { + let history = cx.new(|cx| ThreadHistory::new(connection.session_list(cx), cx)); + Ok(AgentConnectedState { + connection, + history, + }) + }), Err(err) => match err.downcast::() { Ok(load_error) => Err(load_error), Err(err) => Err(LoadError::Other(SharedString::from(err.to_string()))), diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 1aefc99c020409a764ad2c44fe8477665f73c4bc..741e995c8f1b2e44677ec7c7de7bef22a3421f3c 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -29,8 +29,6 @@ use zed_actions::agent::{ ResolveConflictedFilesWithAgent, ResolveConflictsWithAgent, ReviewBranchDiff, }; -use crate::ManageProfiles; -use crate::agent_connection_store::AgentConnectionStore; use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal}; use crate::{ AddContextServer, AgentDiffPane, ConnectionView, CopyThreadToClipboard, Follow, @@ -48,12 +46,14 @@ use crate::{ NewNativeAgentThreadFromSummary, }; use crate::{ - 
ExpandMessageEditor, ThreadHistory, ThreadHistoryView, ThreadHistoryViewEvent, + ExpandMessageEditor, ThreadHistoryView, text_thread_history::{TextThreadHistory, TextThreadHistoryEvent}, }; +use crate::{ManageProfiles, ThreadHistoryViewEvent}; +use crate::{ThreadHistory, agent_connection_store::AgentConnectionStore}; use agent_settings::AgentSettings; use ai_onboarding::AgentPanelOnboarding; -use anyhow::{Result, anyhow}; +use anyhow::{Context as _, Result, anyhow}; use assistant_slash_command::SlashCommandWorkingSet; use assistant_text_thread::{TextThread, TextThreadEvent, TextThreadSummary}; use client::UserStore; @@ -621,9 +621,9 @@ fn build_conflicted_files_resolution_prompt( content } -#[derive(Clone, Copy, Debug, PartialEq, Eq)] -enum HistoryKind { - AgentThreads, +#[derive(Clone, Debug, PartialEq, Eq)] +enum History { + AgentThreads { view: Entity }, TextThreads, } @@ -639,7 +639,7 @@ enum ActiveView { _subscriptions: Vec, }, History { - kind: HistoryKind, + history: History, }, Configuration, } @@ -870,8 +870,6 @@ pub struct AgentPanel { project: Entity, fs: Arc, language_registry: Arc, - acp_history: Entity, - acp_history_view: Entity, text_thread_history: Entity, thread_store: Entity, text_thread_store: Entity, @@ -1081,26 +1079,9 @@ impl AgentPanel { cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx)); let thread_store = ThreadStore::global(cx); - let acp_history = cx.new(|cx| ThreadHistory::new(None, cx)); - let acp_history_view = cx.new(|cx| ThreadHistoryView::new(acp_history.clone(), window, cx)); let text_thread_history = cx.new(|cx| TextThreadHistory::new(text_thread_store.clone(), window, cx)); - cx.subscribe_in( - &acp_history_view, - window, - |this, _, event, window, cx| match event { - ThreadHistoryViewEvent::Open(thread) => { - this.load_agent_thread( - thread.session_id.clone(), - thread.cwd.clone(), - thread.title.clone(), - window, - cx, - ); - } - }, - ) - .detach(); + cx.subscribe_in( &text_thread_history, 
window, @@ -1120,15 +1101,18 @@ impl AgentPanel { window.defer(cx, move |window, cx| { let panel = weak_panel.clone(); let agent_navigation_menu = - ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| { + ContextMenu::build_persistent(window, cx, move |mut menu, window, cx| { if let Some(panel) = panel.upgrade() { - if let Some(kind) = panel.read(cx).history_kind_for_selected_agent(cx) { - menu = - Self::populate_recently_updated_menu_section(menu, panel, kind, cx); - let view_all_label = match kind { - HistoryKind::AgentThreads => "View All", - HistoryKind::TextThreads => "View All Text Threads", + if let Some(history) = panel + .update(cx, |panel, cx| panel.history_for_selected_agent(window, cx)) + { + let view_all_label = match history { + History::AgentThreads { .. } => "View All", + History::TextThreads => "View All Text Threads", }; + menu = Self::populate_recently_updated_menu_section( + menu, panel, history, cx, + ); menu = menu.action(view_all_label, Box::new(OpenHistory)); } } @@ -1222,8 +1206,6 @@ impl AgentPanel { zoomed: false, pending_serialization: None, onboarding, - acp_history, - acp_history_view, text_thread_history, thread_store, selected_agent: AgentType::default(), @@ -1288,8 +1270,8 @@ impl AgentPanel { &self.thread_store } - pub fn history(&self) -> &Entity { - &self.acp_history + pub fn connection_store(&self) -> &Entity { + &self.connection_store } pub fn open_thread( @@ -1353,27 +1335,41 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { - let Some(thread) = self - .acp_history - .read(cx) - .session_for_id(&action.from_session_id) - else { - return; - }; + let agent = ExternalAgent::NativeAgent; - self.external_thread( - Some(ExternalAgent::NativeAgent), - None, - None, - None, - Some(AgentInitialContent::ThreadSummary { - session_id: thread.session_id, - title: thread.title, - }), - true, - window, - cx, - ); + let server = agent.server(self.fs.clone(), self.thread_store.clone()); + let session_id = 
action.from_session_id.clone(); + + let entry = self.connection_store.update(cx, |store, cx| { + store.request_connection(agent.clone(), server, cx) + }); + let connect_task = entry.read(cx).wait_for_connection(); + + cx.spawn_in(window, async move |this, cx| { + let history = connect_task.await?.history; + this.update_in(cx, |this, window, cx| { + let thread = history + .read(cx) + .session_for_id(&session_id) + .context("Session not found")?; + + this.external_thread( + Some(agent), + None, + None, + None, + Some(AgentInitialContent::ThreadSummary { + session_id: thread.session_id, + title: thread.title, + }), + true, + window, + cx, + ); + anyhow::Ok(()) + }) + }) + .detach_and_log_err(cx); } fn new_text_thread(&mut self, window: &mut Window, cx: &mut Context) { @@ -1554,13 +1550,52 @@ impl AgentPanel { }) } - fn history_kind_for_selected_agent(&self, cx: &App) -> Option { - match self.selected_agent { - AgentType::NativeAgent => Some(HistoryKind::AgentThreads), - AgentType::TextThread => Some(HistoryKind::TextThreads), - AgentType::Custom { .. } => { - if self.acp_history.read(cx).has_session_list() { - Some(HistoryKind::AgentThreads) + fn has_history_for_selected_agent(&self, cx: &App) -> bool { + match &self.selected_agent { + AgentType::TextThread | AgentType::NativeAgent => true, + AgentType::Custom { name } => { + let agent = ExternalAgent::Custom { name: name.clone() }; + self.connection_store + .read(cx) + .entry(&agent) + .map_or(false, |entry| entry.read(cx).history().is_some()) + } + } + } + + fn history_for_selected_agent( + &self, + window: &mut Window, + cx: &mut Context, + ) -> Option { + match &self.selected_agent { + AgentType::TextThread => Some(History::TextThreads), + AgentType::NativeAgent => { + let history = self + .connection_store + .read(cx) + .entry(&ExternalAgent::NativeAgent)? + .read(cx) + .history()? 
+ .clone(); + + Some(History::AgentThreads { + view: self.create_thread_history_view(history, window, cx), + }) + } + AgentType::Custom { name } => { + let agent = ExternalAgent::Custom { name: name.clone() }; + let history = self + .connection_store + .read(cx) + .entry(&agent)? + .read(cx) + .history()? + .clone(); + if history.read(cx).has_session_list() { + Some(History::AgentThreads { + view: self.create_thread_history_view(history, window, cx), + }) } else { None } @@ -1568,13 +1603,38 @@ impl AgentPanel { } } + fn create_thread_history_view( + &self, + history: Entity, + window: &mut Window, + cx: &mut Context, + ) -> Entity { + let view = cx.new(|cx| ThreadHistoryView::new(history.clone(), window, cx)); + cx.subscribe_in(&view, window, |this, _, event, window, cx| match event { + ThreadHistoryViewEvent::Open(thread) => { + this.load_agent_thread( + thread.session_id.clone(), + thread.cwd.clone(), + thread.title.clone(), + window, + cx, + ); + } + }) + .detach(); + view + } + fn open_history(&mut self, window: &mut Window, cx: &mut Context) { - let Some(kind) = self.history_kind_for_selected_agent(cx) else { + let Some(history) = self.history_for_selected_agent(window, cx) else { return; }; - if let ActiveView::History { kind: active_kind } = self.active_view { - if active_kind == kind { + if let ActiveView::History { + history: active_history, + } = &self.active_view + { + if active_history == &history { if let Some(previous_view) = self.previous_view.take() { self.set_active_view(previous_view, true, window, cx); } @@ -1582,7 +1642,7 @@ impl AgentPanel { } } - self.set_active_view(ActiveView::History { kind }, true, window, cx); + self.set_active_view(ActiveView::History { history }, true, window, cx); cx.notify(); } @@ -1655,7 +1715,7 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { - if self.history_kind_for_selected_agent(cx).is_none() { + if !self.has_history_for_selected_agent(cx) { return; } 
self.agent_navigation_menu_handle.toggle(window, cx); @@ -2096,7 +2156,7 @@ impl AgentPanel { let was_in_agent_history = matches!( self.active_view, ActiveView::History { - kind: HistoryKind::AgentThreads + history: History::AgentThreads { .. } } ); let current_is_uninitialized = matches!(self.active_view, ActiveView::Uninitialized); @@ -2154,16 +2214,13 @@ impl AgentPanel { } }; - let is_in_agent_history = matches!( - self.active_view, - ActiveView::History { - kind: HistoryKind::AgentThreads + if let ActiveView::History { history } = &self.active_view { + if !was_in_agent_history && let History::AgentThreads { view } = history { + view.update(cx, |view, cx| { + view.history() + .update(cx, |history, cx| history.refresh_full_history(cx)) + }); } - ); - - if !was_in_agent_history && is_in_agent_history { - self.acp_history - .update(cx, |history, cx| history.refresh_full_history(cx)); } if focus { @@ -2175,14 +2232,14 @@ impl AgentPanel { fn populate_recently_updated_menu_section( mut menu: ContextMenu, panel: Entity, - kind: HistoryKind, + history: History, cx: &mut Context, ) -> ContextMenu { - match kind { - HistoryKind::AgentThreads => { - let entries = panel + match history { + History::AgentThreads { view } => { + let entries = view .read(cx) - .acp_history + .history() .read(cx) .sessions() .iter() @@ -2224,7 +2281,7 @@ impl AgentPanel { }); } } - HistoryKind::TextThreads => { + History::TextThreads => { let entries = panel .read(cx) .text_thread_store @@ -2518,7 +2575,6 @@ impl AgentPanel { project, thread_store, self.prompt_store.clone(), - self.acp_history.clone(), window, cx, ) @@ -3056,9 +3112,9 @@ impl Focusable for AgentPanel { match &self.active_view { ActiveView::Uninitialized => self.focus_handle.clone(), ActiveView::AgentThread { server_view, .. 
} => server_view.focus_handle(cx), - ActiveView::History { kind } => match kind { - HistoryKind::AgentThreads => self.acp_history_view.focus_handle(cx), - HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx), + ActiveView::History { history: kind } => match kind { + History::AgentThreads { view } => view.read(cx).focus_handle(cx), + History::TextThreads => self.text_thread_history.focus_handle(cx), }, ActiveView::TextThread { text_thread_editor, .. @@ -3292,10 +3348,10 @@ impl AgentPanel { .into_any_element(), } } - ActiveView::History { kind } => { + ActiveView::History { history: kind } => { let title = match kind { - HistoryKind::AgentThreads => "History", - HistoryKind::TextThreads => "Text Thread History", + History::AgentThreads { .. } => "History", + History::TextThreads => "Text Thread History", }; Label::new(title).truncate().into_any_element() } @@ -4122,7 +4178,7 @@ impl AgentPanel { selected_agent.into_any_element() }; - let show_history_menu = self.history_kind_for_selected_agent(cx).is_some(); + let show_history_menu = self.has_history_for_selected_agent(cx); let has_v2_flag = cx.has_flag::(); let is_empty_state = !self.active_thread_has_messages(cx); @@ -4402,6 +4458,14 @@ impl AgentPanel { return false; } + let has_configured_non_zed_providers = LanguageModelRegistry::read_global(cx) + .visible_providers() + .iter() + .any(|provider| { + provider.is_authenticated(cx) + && provider.id() != language_model::ZED_CLOUD_PROVIDER_ID + }); + match &self.active_view { ActiveView::Uninitialized | ActiveView::History { .. 
} | ActiveView::Configuration => { false @@ -4411,17 +4475,15 @@ impl AgentPanel { { false } - _ => { - let history_is_empty = self.acp_history.read(cx).is_empty(); - - let has_configured_non_zed_providers = LanguageModelRegistry::read_global(cx) - .visible_providers() - .iter() - .any(|provider| { - provider.is_authenticated(cx) - && provider.id() != language_model::ZED_CLOUD_PROVIDER_ID - }); - + ActiveView::AgentThread { server_view } => { + let history_is_empty = server_view + .read(cx) + .history() + .is_none_or(|h| h.read(cx).is_empty()); + history_is_empty || !has_configured_non_zed_providers + } + ActiveView::TextThread { .. } => { + let history_is_empty = self.text_thread_history.read(cx).is_empty(); history_is_empty || !has_configured_non_zed_providers } } @@ -4803,9 +4865,9 @@ impl Render for AgentPanel { ActiveView::AgentThread { server_view, .. } => parent .child(server_view.clone()) .child(self.render_drag_target(cx)), - ActiveView::History { kind } => match kind { - HistoryKind::AgentThreads => parent.child(self.acp_history_view.clone()), - HistoryKind::TextThreads => parent.child(self.text_thread_history.clone()), + ActiveView::History { history: kind } => match kind { + History::AgentThreads { view } => parent.child(view.clone()), + History::TextThreads => parent.child(self.text_thread_history.clone()), }, ActiveView::TextThread { text_thread_editor, @@ -4910,17 +4972,26 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist { let Some(panel) = workspace.read(cx).panel::(cx) else { return; }; + let Some(history) = panel + .read(cx) + .connection_store() + .read(cx) + .entry(&crate::ExternalAgent::NativeAgent) + .and_then(|s| s.read(cx).history()) + else { + log::error!("No connection entry found for native agent"); + return; + }; let project = workspace.read(cx).project().downgrade(); let panel = panel.read(cx); let thread_store = panel.thread_store().clone(); - let history = panel.history().downgrade(); assistant.assist( 
prompt_editor, self.workspace.clone(), project, thread_store, None, - history, + history.downgrade(), initial_prompt, window, cx, diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index b562688a83b75b75a1b95c065b14d0484daef055..8aeacbd61ad404f94c39efbd14a846a3b52150d9 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -67,7 +67,9 @@ use super::entry_view_state::EntryViewState; use super::thread_history::ThreadHistory; use crate::ModeSelector; use crate::ModelSelectorPopover; -use crate::agent_connection_store::{AgentConnectionStore, ConnectionEntryEvent}; +use crate::agent_connection_store::{ + AgentConnectedState, AgentConnectionEntryEvent, AgentConnectionStore, +}; use crate::agent_diff::AgentDiff; use crate::entry_view_state::{EntryViewEvent, ViewEvent}; use crate::message_editor::{MessageEditor, MessageEditorEvent}; @@ -314,7 +316,6 @@ pub struct ConnectionView { thread_store: Option>, prompt_store: Option>, server_state: ServerState, - history: Entity, focus_handle: FocusHandle, notifications: Vec>, notification_subscriptions: HashMap, Vec>, @@ -418,6 +419,7 @@ pub struct ConnectedServerState { active_id: Option, threads: HashMap>, connection: Rc, + history: Entity, conversation: Entity, _connection_entry_subscription: Subscription, } @@ -484,7 +486,6 @@ impl ConnectionView { project: Entity, thread_store: Option>, prompt_store: Option>, - history: Entity, window: &mut Window, cx: &mut Context, ) -> Self { @@ -537,7 +538,6 @@ impl ConnectionView { notifications: Vec::new(), notification_subscriptions: HashMap::default(), auth_task: None, - history, _subscriptions: subscriptions, focus_handle: cx.focus_handle(), } @@ -660,7 +660,7 @@ impl ConnectionView { let connection_entry_subscription = cx.subscribe(&connection_entry, |this, _entry, event, cx| match event { - ConnectionEntryEvent::NewVersionAvailable(version) => { + 
AgentConnectionEntryEvent::NewVersionAvailable(version) => { if let Some(thread) = this.active_thread() { thread.update(cx, |thread, cx| { thread.new_server_version_available = Some(version.clone()); @@ -674,8 +674,11 @@ impl ConnectionView { let load_session_id = resume_session_id.clone(); let load_task = cx.spawn_in(window, async move |this, cx| { - let connection = match connect_result.await { - Ok(connection) => connection, + let (connection, history) = match connect_result.await { + Ok(AgentConnectedState { + connection, + history, + }) => (connection, history), Err(err) => { this.update_in(cx, |this, window, cx| { this.handle_load_error(load_session_id.clone(), err, window, cx); @@ -764,6 +767,7 @@ impl ConnectionView { conversation.clone(), resumed_without_history, initial_content, + history.clone(), window, cx, ); @@ -777,14 +781,6 @@ impl ConnectionView { } let id = current.read(cx).thread.read(cx).session_id().clone(); - let session_list = if connection.supports_session_history() { - connection.session_list(cx) - } else { - None - }; - this.history.update(cx, |history, cx| { - history.set_session_list(session_list, cx); - }); this.set_server_state( ServerState::Connected(ConnectedServerState { connection, @@ -792,6 +788,7 @@ impl ConnectionView { active_id: Some(id.clone()), threads: HashMap::from_iter([(id, current)]), conversation, + history, _connection_entry_subscription: connection_entry_subscription, }), cx, @@ -825,6 +822,7 @@ impl ConnectionView { conversation: Entity, resumed_without_history: bool, initial_content: Option, + history: Entity, window: &mut Window, cx: &mut Context, ) -> Entity { @@ -841,7 +839,7 @@ impl ConnectionView { self.workspace.clone(), self.project.downgrade(), self.thread_store.clone(), - self.history.downgrade(), + history.downgrade(), self.prompt_store.clone(), prompt_capabilities.clone(), available_commands.clone(), @@ -1008,7 +1006,7 @@ impl ConnectionView { resumed_without_history, self.project.downgrade(), 
self.thread_store.clone(), - self.history.clone(), + history, self.prompt_store.clone(), initial_content, subscriptions, @@ -1090,6 +1088,7 @@ impl ConnectionView { threads: HashMap::default(), connection, conversation: cx.new(|_cx| Conversation::default()), + history: cx.new(|cx| ThreadHistory::new(None, cx)), _connection_entry_subscription: Subscription::new(|| {}), }), cx, @@ -1694,10 +1693,10 @@ impl ConnectionView { cx.spawn_in(window, async move |this, cx| { let subagent_thread = subagent_thread_task.await?; this.update_in(cx, |this, window, cx| { - let conversation = this + let Some((conversation, history)) = this .as_connected() - .map(|connected| connected.conversation.clone()); - let Some(conversation) = conversation else { + .map(|connected| (connected.conversation.clone(), connected.history.clone())) + else { return; }; conversation.update(cx, |conversation, cx| { @@ -1709,6 +1708,7 @@ impl ConnectionView { conversation, false, None, + history, window, cx, ); @@ -2215,9 +2215,11 @@ impl ConnectionView { let agent_name = self.agent.name(); let workspace = self.workspace.clone(); let project = self.project.downgrade(); - let history = self.history.downgrade(); - - let Some(thread) = self.active_thread() else { + let Some(connected) = self.as_connected() else { + return; + }; + let history = connected.history.downgrade(); + let Some(thread) = connected.active_view() else { return; }; let prompt_capabilities = thread.read(cx).prompt_capabilities.clone(); @@ -2610,8 +2612,16 @@ impl ConnectionView { }) } + pub fn history(&self) -> Option<&Entity> { + self.as_connected().map(|c| &c.history) + } + pub fn delete_history_entry(&mut self, session_id: &acp::SessionId, cx: &mut Context) { - let task = self + let Some(connected) = self.as_connected() else { + return; + }; + + let task = connected .history .update(cx, |history, cx| history.delete_session(&session_id, cx)); task.detach_and_log_err(cx); @@ -2900,8 +2910,6 @@ pub(crate) mod tests { let workspace = 
multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - // Create history without an initial session list - it will be set after connection - let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -2921,7 +2929,6 @@ pub(crate) mod tests { project, Some(thread_store), None, - history.clone(), window, cx, ) @@ -2931,6 +2938,14 @@ pub(crate) mod tests { // Wait for connection to establish cx.run_until_parked(); + let history = cx.update(|_window, cx| { + thread_view + .read(cx) + .history() + .expect("Missing history") + .clone() + }); + // Initially empty because StubAgentConnection.session_list() returns None active_thread(&thread_view, cx).read_with(cx, |view, _cx| { assert_eq!(view.recent_history_entries.len(), 0); @@ -3007,7 +3022,6 @@ pub(crate) mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -3027,7 +3041,6 @@ pub(crate) mod tests { project, Some(thread_store), None, - history, window, cx, ) @@ -3066,7 +3079,6 @@ pub(crate) mod tests { let captured_cwd = connection.captured_cwd.clone(); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -3086,7 +3098,6 @@ pub(crate) mod tests { project, Some(thread_store), None, - history, window, cx, ) @@ -3123,7 +3134,6 @@ pub(crate) mod tests { let captured_cwd = 
connection.captured_cwd.clone(); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -3143,7 +3153,6 @@ pub(crate) mod tests { project, Some(thread_store), None, - history, window, cx, ) @@ -3180,7 +3189,6 @@ pub(crate) mod tests { let captured_cwd = connection.captured_cwd.clone(); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -3200,7 +3208,6 @@ pub(crate) mod tests { project, Some(thread_store), None, - history, window, cx, ) @@ -3498,7 +3505,6 @@ pub(crate) mod tests { // Set up thread view in workspace 1 let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project1.clone(), cx))); @@ -3519,7 +3525,6 @@ pub(crate) mod tests { project1.clone(), Some(thread_store), None, - history, window, cx, ) @@ -3676,7 +3681,8 @@ pub(crate) mod tests { agent: impl AgentServer + 'static, cx: &mut TestAppContext, ) -> (Entity, &mut VisualTestContext) { - let (thread_view, _history, cx) = setup_thread_view_with_history(agent, cx).await; + let (thread_view, _history, cx) = + setup_thread_view_with_history_and_initial_content(agent, None, cx).await; (thread_view, cx) } @@ -3688,7 +3694,9 @@ pub(crate) mod tests { Entity, &mut VisualTestContext, ) { - setup_thread_view_with_history_and_initial_content(agent, None, cx).await + let (thread_view, history, cx) = + setup_thread_view_with_history_and_initial_content(agent, None, cx).await; + 
(thread_view, history.expect("Missing history"), cx) } async fn setup_thread_view_with_initial_content( @@ -3708,7 +3716,7 @@ pub(crate) mod tests { cx: &mut TestAppContext, ) -> ( Entity, - Entity, + Option>, &mut VisualTestContext, ) { let fs = FakeFs::new(cx.executor()); @@ -3718,18 +3726,19 @@ pub(crate) mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); + let agent_key = ExternalAgent::Custom { + name: "Test".into(), + }; + let thread_view = cx.update(|window, cx| { cx.new(|cx| { ConnectionView::new( Rc::new(agent), - connection_store, - ExternalAgent::Custom { - name: "Test".into(), - }, + connection_store.clone(), + agent_key.clone(), None, None, None, @@ -3738,13 +3747,20 @@ pub(crate) mod tests { project, Some(thread_store), None, - history.clone(), window, cx, ) }) }); cx.run_until_parked(); + + let history = cx.update(|_window, cx| { + connection_store + .read(cx) + .entry(&agent_key) + .and_then(|e| e.read(cx).history().cloned()) + }); + (thread_view, history, cx) } @@ -4454,7 +4470,6 @@ pub(crate) mod tests { let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone()); let thread_store = cx.update(|_window, cx| cx.new(|cx| ThreadStore::new(cx))); - let history = cx.update(|_window, cx| cx.new(|cx| ThreadHistory::new(None, cx))); let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); @@ -4475,7 +4490,6 @@ pub(crate) mod tests { project.clone(), Some(thread_store.clone()), None, - history, window, cx, ) diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 
2aee2b4601e126b25a977cf92d314970049026da..8fde876183db385c019e6ccb1f2e5a0d4b121892 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -266,7 +266,7 @@ impl InlineAssistant { return; }; - let configuration_error = || { + let configuration_error = |cx| { let model_registry = LanguageModelRegistry::read_global(cx); model_registry.configuration_error(model_registry.inline_assistant_model(), cx) }; @@ -278,7 +278,15 @@ impl InlineAssistant { let prompt_store = agent_panel.prompt_store().as_ref().cloned(); let thread_store = agent_panel.thread_store().clone(); - let history = agent_panel.history().downgrade(); + let Some(history) = agent_panel + .connection_store() + .read(cx) + .entry(&crate::ExternalAgent::NativeAgent) + .and_then(|s| s.read(cx).history().cloned()) + else { + log::error!("No connection entry found for native agent"); + return; + }; let handle_assist = |window: &mut Window, cx: &mut Context| match inline_assist_target { @@ -290,7 +298,7 @@ impl InlineAssistant { workspace.project().downgrade(), thread_store, prompt_store, - history, + history.downgrade(), action.prompt.clone(), window, cx, @@ -305,7 +313,7 @@ impl InlineAssistant { workspace.project().downgrade(), thread_store, prompt_store, - history, + history.downgrade(), action.prompt.clone(), window, cx, @@ -314,7 +322,7 @@ impl InlineAssistant { } }; - if let Some(error) = configuration_error() { + if let Some(error) = configuration_error(cx) { if let ConfigurationError::ProviderNotAuthenticated(provider) = error { cx.spawn(async move |_, cx| { cx.update(|cx| provider.authenticate(cx)).await?; @@ -322,7 +330,7 @@ impl InlineAssistant { }) .detach_and_log_err(cx); - if configuration_error().is_none() { + if configuration_error(cx).is_none() { handle_assist(window, cx); } } else { @@ -1969,7 +1977,16 @@ impl CodeActionProvider for AssistantCodeActionProvider { .panel::(cx) .context("missing agent panel")? 
.read(cx); - anyhow::Ok((panel.thread_store().clone(), panel.history().downgrade())) + + let history = panel + .connection_store() + .read(cx) + .entry(&crate::ExternalAgent::NativeAgent) + .and_then(|e| e.read(cx).history()) + .context("no history found for native agent")? + .downgrade(); + + anyhow::Ok((panel.thread_store().clone(), history)) })??; let editor = editor.upgrade().context("editor was released")?; let range = editor diff --git a/crates/agent_ui/src/text_thread_history.rs b/crates/agent_ui/src/text_thread_history.rs index c19f64bc3503ab38c83dc9534d64fae5c23cc21c..7a2a4ff91ddae0531df200118b55151a8dbb4499 100644 --- a/crates/agent_ui/src/text_thread_history.rs +++ b/crates/agent_ui/src/text_thread_history.rs @@ -116,6 +116,10 @@ impl TextThreadHistory { this } + pub fn is_empty(&self) -> bool { + self.visible_items.is_empty() + } + fn update_visible_items(&mut self, preserve_selected_item: bool, cx: &mut Context) { let entries = self.text_thread_store.update(cx, |store, _| { store.ordered_text_threads().cloned().collect::>() diff --git a/crates/agent_ui/src/thread_history.rs b/crates/agent_ui/src/thread_history.rs index 5e66d4468767e7002b8b5f6c79ffe8aaecf77127..1ca763cb6a64f1d1b680e31c1ac55a4717762157 100644 --- a/crates/agent_ui/src/thread_history.rs +++ b/crates/agent_ui/src/thread_history.rs @@ -19,14 +19,23 @@ impl ThreadHistory { _refresh_task: Task::ready(()), _watch_task: None, }; - this.set_session_list(session_list, cx); + this.set_session_list_impl(session_list, cx); this } + #[cfg(any(test, feature = "test-support"))] pub fn set_session_list( &mut self, session_list: Option>, cx: &mut Context, + ) { + self.set_session_list_impl(session_list, cx); + } + + fn set_session_list_impl( + &mut self, + session_list: Option>, + cx: &mut Context, ) { if let (Some(current), Some(next)) = (&self.session_list, &session_list) && Rc::ptr_eq(current, next) diff --git a/crates/agent_ui/src/thread_history_view.rs b/crates/agent_ui/src/thread_history_view.rs 
index 1756fc46ed48e86dc4bf9c78f2c2ef79618ed43b..4e43748911ba0559485e7a4d991e5dc9d2d4c524 100644 --- a/crates/agent_ui/src/thread_history_view.rs +++ b/crates/agent_ui/src/thread_history_view.rs @@ -117,6 +117,10 @@ impl ThreadHistoryView { this } + pub fn history(&self) -> &Entity { + &self.history + } + fn update_visible_items(&mut self, preserve_selected_item: bool, cx: &mut Context) { let entries = self.history.read(cx).sessions().to_vec(); let new_list_items = if self.search_query.is_empty() { From 4d5e25f4088025ee4f837c25c7832ac2fe9c7fad Mon Sep 17 00:00:00 2001 From: Shashank Suresh <52377159+shashank-suresh@users.noreply.github.com> Date: Thu, 12 Mar 2026 14:52:55 +0530 Subject: [PATCH 508/548] editor: Add line range support to editor::CopyFileLocation command (#51328) Closes #51309 Before you mark this PR as ready for review, make sure that you have: - [X] Added a solid test coverage and/or screenshots from doing manual testing - [X] Done a self-review taking into account security and performance aspects - [X] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Improved `editor::CopyFileLocation` command to include the full selected line range (e.g. 'src/main.rs:12-18') when multiple lines are selected, rather than only the first line number. 
--- .../collab/tests/integration/editor_tests.rs | 48 +++++++++++++++++++ crates/editor/src/editor.rs | 24 +++++++--- 2 files changed, 65 insertions(+), 7 deletions(-) diff --git a/crates/collab/tests/integration/editor_tests.rs b/crates/collab/tests/integration/editor_tests.rs index 0d0569182d5a9ff235642d61c39f0b5bc15b6cb0..6b23780156e03d62543cf597e82959083685f0c0 100644 --- a/crates/collab/tests/integration/editor_tests.rs +++ b/crates/collab/tests/integration/editor_tests.rs @@ -4721,6 +4721,54 @@ async fn test_copy_file_location(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo cx_b.read_from_clipboard().and_then(|item| item.text()), Some(format!("{}:2", path!("src/main.rs"))) ); + + editor_a.update_in(cx_a, |editor, window, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(16)..MultiBufferOffset(44)]); + }); + editor.copy_file_location(&CopyFileLocation, window, cx); + }); + + assert_eq!( + cx_a.read_from_clipboard().and_then(|item| item.text()), + Some(format!("{}:2-3", path!("src/main.rs"))) + ); + + editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(16)..MultiBufferOffset(44)]); + }); + editor.copy_file_location(&CopyFileLocation, window, cx); + }); + + assert_eq!( + cx_b.read_from_clipboard().and_then(|item| item.text()), + Some(format!("{}:2-3", path!("src/main.rs"))) + ); + + editor_a.update_in(cx_a, |editor, window, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.select_ranges([MultiBufferOffset(16)..MultiBufferOffset(43)]); + }); + editor.copy_file_location(&CopyFileLocation, window, cx); + }); + + assert_eq!( + cx_a.read_from_clipboard().and_then(|item| item.text()), + Some(format!("{}:2", path!("src/main.rs"))) + ); + + editor_b.update_in(cx_b, |editor, window, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + 
s.select_ranges([MultiBufferOffset(16)..MultiBufferOffset(43)]); + }); + editor.copy_file_location(&CopyFileLocation, window, cx); + }); + + assert_eq!( + cx_b.read_from_clipboard().and_then(|item| item.text()), + Some(format!("{}:2", path!("src/main.rs"))) + ); } #[track_caller] diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index dc2696eb2ca83999934cab6cdee82e364657c70e..18a02e9773b3952d99b71f6d337f3c8950aff78e 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -22846,18 +22846,28 @@ impl Editor { _: &mut Window, cx: &mut Context, ) { - let selection = self - .selections - .newest::(&self.display_snapshot(cx)) - .start - .row - + 1; + let selection = self.selections.newest::(&self.display_snapshot(cx)); + + let start_line = selection.start.row + 1; + let end_line = selection.end.row + 1; + + let end_line = if selection.end.column == 0 && end_line > start_line { + end_line - 1 + } else { + end_line + }; + if let Some(file_location) = self.active_excerpt(cx).and_then(|(_, buffer, _)| { let project = self.project()?.read(cx); let file = buffer.read(cx).file()?; let path = file.path().display(project.path_style(cx)); - Some(format!("{path}:{selection}")) + let location = if start_line == end_line { + format!("{path}:{start_line}") + } else { + format!("{path}:{start_line}-{end_line}") + }; + Some(location) }) { cx.write_to_clipboard(ClipboardItem::new_string(file_location)); } From eeb034c31cea9618d445287c906576c1e0aa0898 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 12 Mar 2026 10:38:54 +0100 Subject: [PATCH 509/548] agent: Fix race condition when loading threads (#51366) This fixes a race condition that could occur when using the sidebar: `Failed to launch: project state not found` We were accessing/creating the project state before an await point, meaning that we could remove the state if session/close was called in the meantime. 
- [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/agent/src/agent.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index 95346d665732b40599b096d480178264601ce6d6..2ac341dc997b016f3e723fad99a4a57007510c52 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -870,7 +870,6 @@ impl NativeAgent { project: Entity, cx: &mut Context, ) -> Task>> { - let project_id = self.get_or_create_project_state(&project, cx); let database_future = ThreadsDatabase::connect(cx); cx.spawn(async move |this, cx| { let database = database_future.await.map_err(|err| anyhow!(err))?; @@ -880,6 +879,7 @@ impl NativeAgent { .with_context(|| format!("no thread found with ID: {id:?}"))?; this.update(cx, |this, cx| { + let project_id = this.get_or_create_project_state(&project, cx); let project_state = this .projects .get(&project_id) @@ -915,11 +915,11 @@ impl NativeAgent { return Task::ready(Ok(session.acp_thread.clone())); } - let project_id = self.get_or_create_project_state(&project, cx); - let task = self.load_thread(id, project, cx); + let task = self.load_thread(id, project.clone(), cx); cx.spawn(async move |this, cx| { let thread = task.await?; let acp_thread = this.update(cx, |this, cx| { + let project_id = this.get_or_create_project_state(&project, cx); this.register_session(thread.clone(), project_id, cx) })?; let events = thread.update(cx, |thread, cx| thread.replay(cx)); From ff89bcfca077180c8430f6d57b5584f4ac619df6 Mon Sep 17 00:00:00 2001 From: Dibash Thapa <47865470+dibashthapa@users.noreply.github.com> Date: Thu, 12 Mar 2026 16:02:22 +0545 Subject: [PATCH 510/548] Fix hidden files in remote Open Folder dialog 
(#50846) Fixes https://github.com/zed-industries/zed/issues/48457 Hidden files (like .config, .ssh, etc.) were not showing in the Open Folder dialog when browsing remote servers via SSH. This was because the `OpenPathDelegate` was not configured to show hidden files. This fix adds .show_hidden() when creating the delegate for remote project picker. Release Notes: - Fixed the hidden files not showing in remote project's open folder action --- crates/recent_projects/src/remote_servers.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/recent_projects/src/remote_servers.rs b/crates/recent_projects/src/remote_servers.rs index b094ff6c5bc5499e7ed1f3e6c9e0b9331b6bb7c2..60ebf85dd23460a8a0ce0c70da2d7b69761690db 100644 --- a/crates/recent_projects/src/remote_servers.rs +++ b/crates/recent_projects/src/remote_servers.rs @@ -390,7 +390,7 @@ impl ProjectPicker { ) -> Entity { let (tx, rx) = oneshot::channel(); let lister = project::DirectoryLister::Project(project.clone()); - let delegate = open_path_prompt::OpenPathDelegate::new(tx, lister, false, cx); + let delegate = open_path_prompt::OpenPathDelegate::new(tx, lister, false, cx).show_hidden(); let picker = cx.new(|cx| { let picker = Picker::uniform_list(delegate, window, cx) From 1fd8ee74e2960a8d7d5e79952449a4a0e9a40870 Mon Sep 17 00:00:00 2001 From: Henrique Ferreiro Date: Thu, 12 Mar 2026 11:48:27 +0100 Subject: [PATCH 511/548] Fix Tree-sitter link in documentation (#51370) --- docs/src/extensions/languages.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/extensions/languages.md b/docs/src/extensions/languages.md index eee29cc57d1ce5e1a5a7608c70ece98bf4a233ee..c8e6958db683a5a3e2c9903c590f564b0ef4cb93 100644 --- a/docs/src/extensions/languages.md +++ b/docs/src/extensions/languages.md @@ -52,7 +52,7 @@ TBD: Document `language_name/config.toml` keys ## Grammar -Zed uses the [Tree-sitter](https://tree-sitter.github.io) parsing library to provide built-in language-specific 
features. There are grammars available for many languages, and you can also [develop your own grammar](https://tree-sitter.github.io/tree-sitter/creating-parsers#writing-the-grammar). A growing list of Zed features are built using pattern matching over syntax trees with Tree-sitter queries. As mentioned above, every language that is defined in an extension must specify the name of a Tree-sitter grammar that is used for parsing. These grammars are then registered separately in extensions' `extension.toml` file, like this: +Zed uses the [Tree-sitter](https://tree-sitter.github.io) parsing library to provide built-in language-specific features. There are grammars available for many languages, and you can also [develop your own grammar](https://tree-sitter.github.io/tree-sitter/creating-parsers/3-writing-the-grammar.html). A growing list of Zed features are built using pattern matching over syntax trees with Tree-sitter queries. As mentioned above, every language that is defined in an extension must specify the name of a Tree-sitter grammar that is used for parsing. 
These grammars are then registered separately in extensions' `extension.toml` file, like this: ```toml [grammars.gleam] From 9e50ee040e965086dd9d7c072f79add6d8772d23 Mon Sep 17 00:00:00 2001 From: Cameron Mcloughlin Date: Thu, 12 Mar 2026 10:54:07 +0000 Subject: [PATCH 512/548] agent: Thread switcher sticky workspace header (#51372) --- crates/agent_ui/src/sidebar.rs | 116 +++++++++++++++++++++++++++++---- 1 file changed, 105 insertions(+), 11 deletions(-) diff --git a/crates/agent_ui/src/sidebar.rs b/crates/agent_ui/src/sidebar.rs index 3804e3f63678bcf771b27b2f05929a958531ab39..e204205819a8eb41a0624fb8a4a8ba9a96174add 100644 --- a/crates/agent_ui/src/sidebar.rs +++ b/crates/agent_ui/src/sidebar.rs @@ -134,6 +134,7 @@ impl From for ListEntry { struct SidebarContents { entries: Vec, notified_threads: HashSet, + project_header_indices: Vec, } impl SidebarContents { @@ -663,10 +664,17 @@ impl Sidebar { // the build pass (no extra scan needed). notified_threads.retain(|id| current_session_ids.contains(id)); + let project_header_indices = entries + .iter() + .enumerate() + .filter_map(|(i, e)| matches!(e, ListEntry::ProjectHeader { .. 
}).then_some(i)) + .collect(); + self.active_entry_index = active_entry_index; self.contents = SidebarContents { entries, notified_threads, + project_header_indices, }; } @@ -724,6 +732,7 @@ impl Sidebar { has_threads, } => self.render_project_header( ix, + false, path_list, label, workspace, @@ -769,6 +778,7 @@ impl Sidebar { fn render_project_header( &self, ix: usize, + is_sticky: bool, path_list: &PathList, label: &SharedString, workspace: &Entity, @@ -778,9 +788,10 @@ impl Sidebar { docked_right: bool, cx: &mut Context, ) -> AnyElement { - let id = SharedString::from(format!("project-header-{}", ix)); - let group_name = SharedString::from(format!("header-group-{}", ix)); - let ib_id = SharedString::from(format!("project-header-new-thread-{}", ix)); + let id_prefix = if is_sticky { "sticky-" } else { "" }; + let id = SharedString::from(format!("{id_prefix}project-header-{ix}")); + let group_name = SharedString::from(format!("{id_prefix}header-group-{ix}")); + let ib_id = SharedString::from(format!("{id_prefix}project-header-new-thread-{ix}")); let is_collapsed = self.collapsed_groups.contains(path_list); let disclosure_icon = if is_collapsed { @@ -842,7 +853,9 @@ impl Sidebar { .when(workspace_count > 1, |this| { this.child( IconButton::new( - SharedString::from(format!("project-header-remove-{}", ix)), + SharedString::from(format!( + "{id_prefix}project-header-remove-{ix}", + )), IconName::Close, ) .icon_size(IconSize::Small) @@ -858,7 +871,9 @@ impl Sidebar { .when(view_more_expanded && !is_collapsed, |this| { this.child( IconButton::new( - SharedString::from(format!("project-header-collapse-{}", ix)), + SharedString::from(format!( + "{id_prefix}project-header-collapse-{ix}", + )), IconName::ListCollapse, ) .icon_size(IconSize::Small) @@ -899,6 +914,84 @@ impl Sidebar { .into_any_element() } + fn render_sticky_header( + &self, + docked_right: bool, + window: &mut Window, + cx: &mut Context, + ) -> Option { + let scroll_top = 
self.list_state.logical_scroll_top(); + + let &header_idx = self + .contents + .project_header_indices + .iter() + .rev() + .find(|&&idx| idx <= scroll_top.item_ix)?; + + let needs_sticky = header_idx < scroll_top.item_ix + || (header_idx == scroll_top.item_ix && scroll_top.offset_in_item > px(0.)); + + if !needs_sticky { + return None; + } + + let ListEntry::ProjectHeader { + path_list, + label, + workspace, + highlight_positions, + has_threads, + } = self.contents.entries.get(header_idx)? + else { + return None; + }; + + let is_focused = self.focus_handle.is_focused(window) + || self.filter_editor.focus_handle(cx).is_focused(window); + let is_selected = is_focused && self.selection == Some(header_idx); + + let header_element = self.render_project_header( + header_idx, + true, + &path_list, + &label, + &workspace, + &highlight_positions, + *has_threads, + is_selected, + docked_right, + cx, + ); + + let top_offset = self + .contents + .project_header_indices + .iter() + .find(|&&idx| idx > header_idx) + .and_then(|&next_idx| { + let bounds = self.list_state.bounds_for_item(next_idx)?; + let viewport = self.list_state.viewport_bounds(); + let y_in_viewport = bounds.origin.y - viewport.origin.y; + let header_height = bounds.size.height; + (y_in_viewport < header_height).then_some(y_in_viewport - header_height) + }) + .unwrap_or(px(0.)); + + let element = v_flex() + .absolute() + .top(top_offset) + .left_0() + .w_full() + .bg(cx.theme().colors().surface_background) + .border_b_1() + .border_color(cx.theme().colors().border_variant) + .child(header_element) + .into_any_element(); + + Some(element) + } + fn activate_workspace( &mut self, workspace: &Entity, @@ -1466,6 +1559,8 @@ impl Render for Sidebar { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let ui_font = theme::setup_ui_font(window, cx); let has_query = self.has_filter_query(cx); + let docked_right = AgentSettings::get_global(cx).dock == settings::DockPosition::Right; + let 
sticky_header = self.render_sticky_header(docked_right, window, cx); v_flex() .id("workspace-sidebar") @@ -1484,10 +1579,7 @@ impl Render for Sidebar { .font(ui_font) .size_full() .bg(cx.theme().colors().surface_background) - .child({ - let docked_right = - AgentSettings::get_global(cx).dock == settings::DockPosition::Right; - + .child( h_flex() .h(Tab::container_height(cx)) .flex_none() @@ -1513,10 +1605,11 @@ impl Render for Sidebar { this.pl_2() .pr_0p5() .child(self.render_sidebar_toggle_button(true, cx)) - }) - }) + }), + ) .child( v_flex() + .relative() .flex_1() .overflow_hidden() .child( @@ -1527,6 +1620,7 @@ impl Render for Sidebar { .flex_1() .size_full(), ) + .when_some(sticky_header, |this, header| this.child(header)) .vertical_scrollbar_for(&self.list_state, window, cx), ) } From 39721045f93714100517ad12b4173fd3580340ca Mon Sep 17 00:00:00 2001 From: Daniel Eichman <61132910+zfz7@users.noreply.github.com> Date: Thu, 12 Mar 2026 04:25:24 -0700 Subject: [PATCH 513/548] Add missing ctrl-shift-g binding for editor::UndoSelection to the JetBrains/IntelliJ keymap on macOS (#51130) ## Description: This PR adds the missing `ctrl-shift-g` binding for editor::UndoSelection to the JetBrains/IntelliJ keymap on macOS. ## Problem In IntelliJ IDEA, when using multiple cursors: - ctrl+g (macOS) adds the next occurrence to the selection - ctrl+shift+g (macOS) removes the last added occurrence from the selection The current Zed JetBrains keymap has `ctrl-g` for SelectNext but is missing the corresponding `ctrl-shift-g` for undoing/removing the last selection. ## Reference - Press Ctrl+G (macOS) to find and select the next occurrence [link](https://www.jetbrains.com/help/idea/multicursor.html#multiple_words) - To remove selection from the last selected occurrence, press Ctrl+Shift+G (macOS) [link](https://www.jetbrains.com/help/idea/multicursor.html#multiple_words) This change improves parity with IntelliJ for users transitioning to Zed. 
### Demo https://github.com/user-attachments/assets/0c7f699f-697d-4b81-a929-53f765d254d8 Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [X] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - JetBrains macOS bindings: added the missing `ctrl-shift-g` binding for `editor::UndoSelection` --- assets/keymaps/macos/jetbrains.json | 1 + 1 file changed, 1 insertion(+) diff --git a/assets/keymaps/macos/jetbrains.json b/assets/keymaps/macos/jetbrains.json index 8612e07c4719dfdbf67762c89505cc2da0cfa000..304ffb86e8c2fd08fb756b015490f8c4ac424f58 100644 --- a/assets/keymaps/macos/jetbrains.json +++ b/assets/keymaps/macos/jetbrains.json @@ -33,6 +33,7 @@ "cmd-+": "editor::UnfoldLines", "alt-shift-g": "editor::SplitSelectionIntoLines", "ctrl-g": ["editor::SelectNext", { "replace_newest": false }], + "ctrl-shift-g": "editor::UndoSelection", "ctrl-cmd-g": ["editor::SelectPrevious", { "replace_newest": false }], "cmd-/": ["editor::ToggleComments", { "advance_downwards": true }], "alt-up": "editor::SelectLargerSyntaxNode", From efc6b0ce70f1a95297be20c2e66804c88feca32e Mon Sep 17 00:00:00 2001 From: Sebastian Kootz <63540046+Skxxtz@users.noreply.github.com> Date: Thu, 12 Mar 2026 12:36:45 +0100 Subject: [PATCH 514/548] gpui: Add `aspect-ratio` builder method to `Styled` (#48751) # Summary This PR simply adds the missing `aspect_ratio` and `aspect_square` helper functions to the `Styled` trait. 
Release Notes: - N/A Co-authored-by: MrSubidubi --- crates/gpui/src/styled.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/crates/gpui/src/styled.rs b/crates/gpui/src/styled.rs index 3d0b86a9523f5ac05e51941c826e32379368c464..f83e9103572b9b708ef4b9a8f99bf73244be71a4 100644 --- a/crates/gpui/src/styled.rs +++ b/crates/gpui/src/styled.rs @@ -384,6 +384,20 @@ pub trait Styled: Sized { self } + /// Sets the aspect ratio of the element. + /// [Docs](https://tailwindcss.com/docs/aspect-ratio) + fn aspect_ratio(mut self, ratio: f32) -> Self { + self.style().aspect_ratio = Some(ratio); + self + } + + /// Sets the aspect ratio of the element to 1/1 – equal width and height. + /// [Docs](https://tailwindcss.com/docs/aspect-ratio) + fn aspect_square(mut self) -> Self { + self.style().aspect_ratio = Some(1.0); + self + } + /// Sets the background color of the element. fn bg(mut self, fill: F) -> Self where From 3bcef8b1f2bddd4507f0823c09fcea761c7c78a9 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 12 Mar 2026 12:38:35 +0100 Subject: [PATCH 515/548] agent_ui: Rename `ExternalAgent` to `Agent` (#51377) Name is confusing, since the `NativeAgent` variant is not an external agent Release Notes: - N/A --- crates/agent_ui/src/agent_connection_store.rs | 12 ++--- crates/agent_ui/src/agent_panel.rs | 44 +++++++++---------- crates/agent_ui/src/agent_ui.rs | 28 ++++++------ crates/agent_ui/src/connection_view.rs | 28 ++++++------ crates/agent_ui/src/inline_assistant.rs | 4 +- 5 files changed, 58 insertions(+), 58 deletions(-) diff --git a/crates/agent_ui/src/agent_connection_store.rs b/crates/agent_ui/src/agent_connection_store.rs index 936b9b7a2de984f20f59c8f050ecb3bff1386595..c9be46aea3ad99dec77724710db9088ae459696e 100644 --- a/crates/agent_ui/src/agent_connection_store.rs +++ b/crates/agent_ui/src/agent_connection_store.rs @@ -9,7 +9,7 @@ use gpui::{AppContext, Context, Entity, EventEmitter, SharedString, Subscription use project::{AgentServerStore, 
AgentServersUpdated, Project}; use watch::Receiver; -use crate::{ExternalAgent, ThreadHistory}; +use crate::{Agent, ThreadHistory}; use project::ExternalAgentServerName; pub enum AgentConnectionEntry { @@ -53,7 +53,7 @@ impl EventEmitter for AgentConnectionEntry {} pub struct AgentConnectionStore { project: Entity, - entries: HashMap>, + entries: HashMap>, _subscriptions: Vec, } @@ -68,13 +68,13 @@ impl AgentConnectionStore { } } - pub fn entry(&self, key: &ExternalAgent) -> Option<&Entity> { + pub fn entry(&self, key: &Agent) -> Option<&Entity> { self.entries.get(key) } pub fn request_connection( &mut self, - key: ExternalAgent, + key: Agent, server: Rc, cx: &mut Context, ) -> Entity { @@ -142,8 +142,8 @@ impl AgentConnectionStore { ) { let store = store.read(cx); self.entries.retain(|key, _| match key { - ExternalAgent::NativeAgent => true, - ExternalAgent::Custom { name } => store + Agent::NativeAgent => true, + Agent::Custom { name } => store .external_agents .contains_key(&ExternalAgentServerName(name.clone())), }); diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 741e995c8f1b2e44677ec7c7de7bef22a3421f3c..09d52b6000392693d435217b4739ddc452b8de6d 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -42,7 +42,7 @@ use crate::{ ui::EndTrialUpsell, }; use crate::{ - AgentInitialContent, ExternalAgent, ExternalSourcePrompt, NewExternalAgentThread, + Agent, AgentInitialContent, ExternalSourcePrompt, NewExternalAgentThread, NewNativeAgentThreadFromSummary, }; use crate::{ @@ -738,11 +738,11 @@ impl AgentType { } } -impl From for AgentType { - fn from(value: ExternalAgent) -> Self { +impl From for AgentType { + fn from(value: Agent) -> Self { match value { - ExternalAgent::Custom { name } => Self::Custom { name }, - ExternalAgent::NativeAgent => Self::NativeAgent, + Agent::Custom { name } => Self::Custom { name }, + Agent::NativeAgent => Self::NativeAgent, } } } @@ -1283,7 +1283,7 @@ impl 
AgentPanel { cx: &mut Context, ) { self.external_thread( - Some(crate::ExternalAgent::NativeAgent), + Some(crate::Agent::NativeAgent), Some(session_id), cwd, title, @@ -1335,7 +1335,7 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { - let agent = ExternalAgent::NativeAgent; + let agent = Agent::NativeAgent; let server = agent.server(self.fs.clone(), self.thread_store.clone()); let session_id = action.from_session_id.clone(); @@ -1417,7 +1417,7 @@ impl AgentPanel { fn external_thread( &mut self, - agent_choice: Option, + agent_choice: Option, resume_session_id: Option, cwd: Option, title: Option, @@ -1435,7 +1435,7 @@ impl AgentPanel { #[derive(Serialize, Deserialize)] struct LastUsedExternalAgent { - agent: crate::ExternalAgent, + agent: crate::Agent, } let thread_store = self.thread_store.clone(); @@ -1473,7 +1473,7 @@ impl AgentPanel { } else { cx.spawn_in(window, async move |this, cx| { let ext_agent = if is_via_collab { - ExternalAgent::NativeAgent + Agent::NativeAgent } else { cx.background_spawn(async move { KEY_VALUE_STORE.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY) @@ -1485,7 +1485,7 @@ impl AgentPanel { serde_json::from_str::(&value).log_err() }) .map(|agent| agent.agent) - .unwrap_or(ExternalAgent::NativeAgent) + .unwrap_or(Agent::NativeAgent) }; let server = ext_agent.server(fs, thread_store); @@ -1554,7 +1554,7 @@ impl AgentPanel { match &self.selected_agent { AgentType::TextThread | AgentType::NativeAgent => true, AgentType::Custom { name } => { - let agent = ExternalAgent::Custom { name: name.clone() }; + let agent = Agent::Custom { name: name.clone() }; self.connection_store .read(cx) .entry(&agent) @@ -1574,7 +1574,7 @@ impl AgentPanel { let history = self .connection_store .read(cx) - .entry(&ExternalAgent::NativeAgent)? + .entry(&Agent::NativeAgent)? .read(cx) .history()? 
.clone(); @@ -1584,7 +1584,7 @@ impl AgentPanel { }) } AgentType::Custom { name } => { - let agent = ExternalAgent::Custom { name: name.clone() }; + let agent = Agent::Custom { name: name.clone() }; let history = self .connection_store .read(cx) @@ -2376,10 +2376,10 @@ impl AgentPanel { cx.notify(); } - fn selected_external_agent(&self) -> Option { + fn selected_external_agent(&self) -> Option { match &self.selected_agent { - AgentType::NativeAgent => Some(ExternalAgent::NativeAgent), - AgentType::Custom { name } => Some(ExternalAgent::Custom { name: name.clone() }), + AgentType::NativeAgent => Some(Agent::NativeAgent), + AgentType::Custom { name } => Some(Agent::Custom { name: name.clone() }), AgentType::TextThread => None, } } @@ -2448,7 +2448,7 @@ impl AgentPanel { window.dispatch_action(NewTextThread.boxed_clone(), cx); } AgentType::NativeAgent => self.external_thread( - Some(crate::ExternalAgent::NativeAgent), + Some(crate::Agent::NativeAgent), None, None, None, @@ -2458,7 +2458,7 @@ impl AgentPanel { cx, ), AgentType::Custom { name } => self.external_thread( - Some(crate::ExternalAgent::Custom { name }), + Some(crate::Agent::Custom { name }), None, None, None, @@ -2544,7 +2544,7 @@ impl AgentPanel { initial_content: Option, workspace: WeakEntity, project: Entity, - ext_agent: ExternalAgent, + ext_agent: Agent, focus: bool, window: &mut Window, cx: &mut Context, @@ -4976,7 +4976,7 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist { .read(cx) .connection_store() .read(cx) - .entry(&crate::ExternalAgent::NativeAgent) + .entry(&crate::Agent::NativeAgent) .and_then(|s| s.read(cx).history()) else { log::error!("No connection entry found for native agent"); @@ -5158,7 +5158,7 @@ impl AgentPanel { let workspace = self.workspace.clone(); let project = self.project.clone(); - let ext_agent = ExternalAgent::Custom { + let ext_agent = Agent::Custom { name: server.name(), }; diff --git a/crates/agent_ui/src/agent_ui.rs 
b/crates/agent_ui/src/agent_ui.rs index 52ce6f0bd7a312966b6602fb43be4074d7f3e620..fbf47615cb23b75eaeff1f785ada8bf8605556d3 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -205,7 +205,7 @@ pub struct NewThread; #[serde(deny_unknown_fields)] pub struct NewExternalAgentThread { /// Which agent to use for the conversation. - agent: Option, + agent: Option, } #[derive(Clone, PartialEq, Deserialize, JsonSchema, Action)] @@ -218,7 +218,7 @@ pub struct NewNativeAgentThreadFromSummary { // TODO unify this with AgentType #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, JsonSchema)] #[serde(rename_all = "snake_case")] -pub enum ExternalAgent { +pub enum Agent { NativeAgent, Custom { name: SharedString }, } @@ -227,7 +227,7 @@ pub enum ExternalAgent { // the registry: "claude_code" -> Custom { name: "claude-acp" }, "codex" -> Custom { name: // "codex-acp" }, "gemini" -> Custom { name: "gemini" }. // Can be removed at some point in the future and go back to #[derive(Deserialize)]. 
-impl<'de> serde::Deserialize<'de> for ExternalAgent { +impl<'de> serde::Deserialize<'de> for Agent { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, @@ -280,7 +280,7 @@ impl<'de> serde::Deserialize<'de> for ExternalAgent { } } -impl ExternalAgent { +impl Agent { pub fn server( &self, fs: Arc, @@ -752,20 +752,20 @@ mod tests { use project::agent_server_store::{CLAUDE_AGENT_NAME, CODEX_NAME, GEMINI_NAME}; assert_eq!( - serde_json::from_str::(r#""claude_code""#).unwrap(), - ExternalAgent::Custom { + serde_json::from_str::(r#""claude_code""#).unwrap(), + Agent::Custom { name: CLAUDE_AGENT_NAME.into(), }, ); assert_eq!( - serde_json::from_str::(r#""codex""#).unwrap(), - ExternalAgent::Custom { + serde_json::from_str::(r#""codex""#).unwrap(), + Agent::Custom { name: CODEX_NAME.into(), }, ); assert_eq!( - serde_json::from_str::(r#""gemini""#).unwrap(), - ExternalAgent::Custom { + serde_json::from_str::(r#""gemini""#).unwrap(), + Agent::Custom { name: GEMINI_NAME.into(), }, ); @@ -774,12 +774,12 @@ mod tests { #[test] fn test_deserialize_current_external_agent_variants() { assert_eq!( - serde_json::from_str::(r#""native_agent""#).unwrap(), - ExternalAgent::NativeAgent, + serde_json::from_str::(r#""native_agent""#).unwrap(), + Agent::NativeAgent, ); assert_eq!( - serde_json::from_str::(r#"{"custom":{"name":"my-agent"}}"#).unwrap(), - ExternalAgent::Custom { + serde_json::from_str::(r#"{"custom":{"name":"my-agent"}}"#).unwrap(), + Agent::Custom { name: "my-agent".into(), }, ); diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs index 8aeacbd61ad404f94c39efbd14a846a3b52150d9..e84e18e645ed4a84bd667564416682298b35ce17 100644 --- a/crates/agent_ui/src/connection_view.rs +++ b/crates/agent_ui/src/connection_view.rs @@ -76,9 +76,9 @@ use crate::message_editor::{MessageEditor, MessageEditorEvent}; use crate::profile_selector::{ProfileProvider, ProfileSelector}; use crate::ui::{AgentNotification, 
AgentNotificationEvent}; use crate::{ - AgentDiffPane, AgentInitialContent, AgentPanel, AllowAlways, AllowOnce, AuthorizeToolCall, - ClearMessageQueue, CycleFavoriteModels, CycleModeSelector, CycleThinkingEffort, - EditFirstQueuedMessage, ExpandMessageEditor, ExternalAgent, Follow, KeepAll, NewThread, + Agent, AgentDiffPane, AgentInitialContent, AgentPanel, AllowAlways, AllowOnce, + AuthorizeToolCall, ClearMessageQueue, CycleFavoriteModels, CycleModeSelector, + CycleThinkingEffort, EditFirstQueuedMessage, ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAddContextMenu, OpenAgentDiff, OpenHistory, RejectAll, RejectOnce, RemoveFirstQueuedMessage, SendImmediately, SendNextQueuedMessage, ToggleFastMode, ToggleProfileSelector, ToggleThinkingEffortMenu, ToggleThinkingMode, UndoLastReject, @@ -309,7 +309,7 @@ impl EventEmitter for ConnectionView {} pub struct ConnectionView { agent: Rc, connection_store: Entity, - connection_key: ExternalAgent, + connection_key: Agent, agent_server_store: Entity, workspace: WeakEntity, project: Entity, @@ -477,7 +477,7 @@ impl ConnectionView { pub fn new( agent: Rc, connection_store: Entity, - connection_key: ExternalAgent, + connection_key: Agent, resume_session_id: Option, cwd: Option, title: Option, @@ -597,7 +597,7 @@ impl ConnectionView { fn initial_state( agent: Rc, connection_store: Entity, - connection_key: ExternalAgent, + connection_key: Agent, resume_session_id: Option, cwd: Option, title: Option, @@ -2918,7 +2918,7 @@ pub(crate) mod tests { ConnectionView::new( Rc::new(StubAgentServer::default_response()), connection_store, - ExternalAgent::Custom { + Agent::Custom { name: "Test".into(), }, None, @@ -3030,7 +3030,7 @@ pub(crate) mod tests { ConnectionView::new( Rc::new(StubAgentServer::new(ResumeOnlyAgentConnection)), connection_store, - ExternalAgent::Custom { + Agent::Custom { name: "Test".into(), }, Some(SessionId::new("resume-session")), @@ -3087,7 +3087,7 @@ pub(crate) mod tests { ConnectionView::new( 
Rc::new(StubAgentServer::new(connection)), connection_store, - ExternalAgent::Custom { + Agent::Custom { name: "Test".into(), }, Some(SessionId::new("session-1")), @@ -3142,7 +3142,7 @@ pub(crate) mod tests { ConnectionView::new( Rc::new(StubAgentServer::new(connection)), connection_store, - ExternalAgent::Custom { + Agent::Custom { name: "Test".into(), }, Some(SessionId::new("session-1")), @@ -3197,7 +3197,7 @@ pub(crate) mod tests { ConnectionView::new( Rc::new(StubAgentServer::new(connection)), connection_store, - ExternalAgent::Custom { + Agent::Custom { name: "Test".into(), }, Some(SessionId::new("session-1")), @@ -3514,7 +3514,7 @@ pub(crate) mod tests { ConnectionView::new( Rc::new(agent), connection_store, - ExternalAgent::Custom { + Agent::Custom { name: "Test".into(), }, None, @@ -3729,7 +3729,7 @@ pub(crate) mod tests { let connection_store = cx.update(|_window, cx| cx.new(|cx| AgentConnectionStore::new(project.clone(), cx))); - let agent_key = ExternalAgent::Custom { + let agent_key = Agent::Custom { name: "Test".into(), }; @@ -4479,7 +4479,7 @@ pub(crate) mod tests { ConnectionView::new( Rc::new(StubAgentServer::new(connection.as_ref().clone())), connection_store, - ExternalAgent::Custom { + Agent::Custom { name: "Test".into(), }, None, diff --git a/crates/agent_ui/src/inline_assistant.rs b/crates/agent_ui/src/inline_assistant.rs index 8fde876183db385c019e6ccb1f2e5a0d4b121892..1fc66f6079fa146440a1f5a594d9f160e4580ab2 100644 --- a/crates/agent_ui/src/inline_assistant.rs +++ b/crates/agent_ui/src/inline_assistant.rs @@ -281,7 +281,7 @@ impl InlineAssistant { let Some(history) = agent_panel .connection_store() .read(cx) - .entry(&crate::ExternalAgent::NativeAgent) + .entry(&crate::Agent::NativeAgent) .and_then(|s| s.read(cx).history().cloned()) else { log::error!("No connection entry found for native agent"); @@ -1981,7 +1981,7 @@ impl CodeActionProvider for AssistantCodeActionProvider { let history = panel .connection_store() .read(cx) - 
.entry(&crate::ExternalAgent::NativeAgent) + .entry(&crate::Agent::NativeAgent) .and_then(|e| e.read(cx).history()) .context("no history found for native agent")? .downgrade(); From d28fc4e241f1574f6c6638165e6867f296903183 Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 12 Mar 2026 12:52:29 +0100 Subject: [PATCH 516/548] agent_ui: Register native agent when creating agent panel (#51379) This ensures that in places like the inline assist we can just rely on it being available. Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 34 ++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 09d52b6000392693d435217b4739ddc452b8de6d..f7c07abe5541187c7daf0dc037c00286c606f5bb 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -84,7 +84,7 @@ use ui::{ KeyBinding, PopoverMenu, PopoverMenuHandle, SpinnerLabel, Tab, TintColor, Tooltip, prelude::*, utils::WithRemSize, }; -use util::ResultExt as _; +use util::{ResultExt as _, debug_panic}; use workspace::{ CollaboratorId, DraggedSelection, DraggedSidebar, DraggedTab, FocusWorkspaceSidebar, MultiWorkspace, SIDEBAR_RESIZE_HANDLE_SIZE, ToggleWorkspaceSidebar, ToggleZoom, @@ -1178,6 +1178,17 @@ impl AgentPanel { None }; + let connection_store = cx.new(|cx| { + let mut store = AgentConnectionStore::new(project.clone(), cx); + // Register the native agent right away, so that it is available for + // the inline assistant etc. 
+ store.request_connection( + Agent::NativeAgent, + Agent::NativeAgent.server(fs.clone(), thread_store.clone()), + cx, + ); + store + }); let mut panel = Self { workspace_id, active_view, @@ -1188,7 +1199,7 @@ impl AgentPanel { language_registry, text_thread_store, prompt_store, - connection_store: cx.new(|cx| AgentConnectionStore::new(project.clone(), cx)), + connection_store, configuration: None, configuration_subscription: None, focus_handle: cx.focus_handle(), @@ -1335,18 +1346,19 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { - let agent = Agent::NativeAgent; - - let server = agent.server(self.fs.clone(), self.thread_store.clone()); let session_id = action.from_session_id.clone(); - let entry = self.connection_store.update(cx, |store, cx| { - store.request_connection(agent.clone(), server, cx) - }); - let connect_task = entry.read(cx).wait_for_connection(); + let Some(history) = self + .connection_store + .read(cx) + .entry(&Agent::NativeAgent) + .and_then(|e| e.read(cx).history().cloned()) + else { + debug_panic!("Native agent is not registered"); + return; + }; cx.spawn_in(window, async move |this, cx| { - let history = connect_task.await?.history; this.update_in(cx, |this, window, cx| { let thread = history .read(cx) @@ -1354,7 +1366,7 @@ impl AgentPanel { .context("Session not found")?; this.external_thread( - Some(agent), + Some(Agent::NativeAgent), None, None, None, From e0881e38f91b87623795208615ca466415d1970e Mon Sep 17 00:00:00 2001 From: Xin Zhao Date: Thu, 12 Mar 2026 20:24:51 +0800 Subject: [PATCH 517/548] python: Add `label_for_symbol` for ty adapter (#51355) Ported `label_for_symbol` logic directly from the basedpyright adapter without adjustments. Given Python's dynamic nature, the current implementation provides sufficient coverage. No further modifications are needed for now. 
Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed missing syntax highlighting in symbol search when using the ty language server. --- crates/languages/src/python.rs | 224 ++++++++++++--------------------- 1 file changed, 82 insertions(+), 142 deletions(-) diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 078db5ba027c4d089b7c2f62cbd7e8468e526171..e109d2685efaac6aaacddb7f467180ae48ba54e4 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -159,6 +159,75 @@ fn process_pyright_completions(items: &mut [lsp::CompletionItem]) { } } +fn label_for_pyright_completion( + item: &lsp::CompletionItem, + language: &Arc, +) -> Option { + let label = &item.label; + let label_len = label.len(); + let grammar = language.grammar()?; + let highlight_id = match item.kind? 
{ + lsp::CompletionItemKind::METHOD => grammar.highlight_id_for_name("function.method"), + lsp::CompletionItemKind::FUNCTION => grammar.highlight_id_for_name("function"), + lsp::CompletionItemKind::CLASS => grammar.highlight_id_for_name("type"), + lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant"), + lsp::CompletionItemKind::VARIABLE => grammar.highlight_id_for_name("variable"), + _ => { + return None; + } + }; + let mut text = label.clone(); + if let Some(completion_details) = item + .label_details + .as_ref() + .and_then(|details| details.description.as_ref()) + { + write!(&mut text, " {}", completion_details).ok(); + } + Some(language::CodeLabel::filtered( + text, + label_len, + item.filter_text.as_deref(), + highlight_id + .map(|id| (0..label_len, id)) + .into_iter() + .collect(), + )) +} + +fn label_for_python_symbol( + symbol: &Symbol, + language: &Arc, +) -> Option { + let name = &symbol.name; + let (text, filter_range, display_range) = match symbol.kind { + lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { + let text = format!("def {}():\n", name); + let filter_range = 4..4 + name.len(); + let display_range = 0..filter_range.end; + (text, filter_range, display_range) + } + lsp::SymbolKind::CLASS => { + let text = format!("class {}:", name); + let filter_range = 6..6 + name.len(); + let display_range = 0..filter_range.end; + (text, filter_range, display_range) + } + lsp::SymbolKind::CONSTANT => { + let text = format!("{} = 0", name); + let filter_range = 0..name.len(); + let display_range = 0..filter_range.end; + (text, filter_range, display_range) + } + _ => return None, + }; + Some(language::CodeLabel::new( + text[display_range.clone()].to_string(), + filter_range, + language.highlight_text(&text.as_str().into(), display_range), + )) +} + pub struct TyLspAdapter { fs: Arc, } @@ -255,6 +324,14 @@ impl LspAdapter for TyLspAdapter { )) } + async fn label_for_symbol( + &self, + symbol: &language::Symbol, + language: &Arc, + ) 
-> Option { + label_for_python_symbol(symbol, language) + } + async fn workspace_configuration( self: Arc, delegate: &Arc, @@ -531,36 +608,7 @@ impl LspAdapter for PyrightLspAdapter { item: &lsp::CompletionItem, language: &Arc, ) -> Option { - let label = &item.label; - let label_len = label.len(); - let grammar = language.grammar()?; - let highlight_id = match item.kind? { - lsp::CompletionItemKind::METHOD => grammar.highlight_id_for_name("function.method"), - lsp::CompletionItemKind::FUNCTION => grammar.highlight_id_for_name("function"), - lsp::CompletionItemKind::CLASS => grammar.highlight_id_for_name("type"), - lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant"), - lsp::CompletionItemKind::VARIABLE => grammar.highlight_id_for_name("variable"), - _ => { - return None; - } - }; - let mut text = label.clone(); - if let Some(completion_details) = item - .label_details - .as_ref() - .and_then(|details| details.description.as_ref()) - { - write!(&mut text, " {}", completion_details).ok(); - } - Some(language::CodeLabel::filtered( - text, - label_len, - item.filter_text.as_deref(), - highlight_id - .map(|id| (0..label_len, id)) - .into_iter() - .collect(), - )) + label_for_pyright_completion(item, language) } async fn label_for_symbol( @@ -568,34 +616,7 @@ impl LspAdapter for PyrightLspAdapter { symbol: &language::Symbol, language: &Arc, ) -> Option { - let name = &symbol.name; - let (text, filter_range, display_range) = match symbol.kind { - lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { - let text = format!("def {}():\n", name); - let filter_range = 4..4 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CLASS => { - let text = format!("class {}:", name); - let filter_range = 6..6 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CONSTANT => { - let text = format!("{} = 0", name); - let 
filter_range = 0..name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - _ => return None, - }; - - Some(language::CodeLabel::new( - text[display_range.clone()].to_string(), - filter_range, - language.highlight_text(&text.as_str().into(), display_range), - )) + label_for_python_symbol(symbol, language) } async fn workspace_configuration( @@ -1738,33 +1759,7 @@ impl LspAdapter for PyLspAdapter { symbol: &language::Symbol, language: &Arc, ) -> Option { - let name = &symbol.name; - let (text, filter_range, display_range) = match symbol.kind { - lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { - let text = format!("def {}():\n", name); - let filter_range = 4..4 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CLASS => { - let text = format!("class {}:", name); - let filter_range = 6..6 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CONSTANT => { - let text = format!("{} = 0", name); - let filter_range = 0..name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - _ => return None, - }; - Some(language::CodeLabel::new( - text[display_range.clone()].to_string(), - filter_range, - language.highlight_text(&text.as_str().into(), display_range), - )) + label_for_python_symbol(symbol, language) } async fn workspace_configuration( @@ -2019,36 +2014,7 @@ impl LspAdapter for BasedPyrightLspAdapter { item: &lsp::CompletionItem, language: &Arc, ) -> Option { - let label = &item.label; - let label_len = label.len(); - let grammar = language.grammar()?; - let highlight_id = match item.kind? 
{ - lsp::CompletionItemKind::METHOD => grammar.highlight_id_for_name("function.method"), - lsp::CompletionItemKind::FUNCTION => grammar.highlight_id_for_name("function"), - lsp::CompletionItemKind::CLASS => grammar.highlight_id_for_name("type"), - lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant"), - lsp::CompletionItemKind::VARIABLE => grammar.highlight_id_for_name("variable"), - _ => { - return None; - } - }; - let mut text = label.clone(); - if let Some(completion_details) = item - .label_details - .as_ref() - .and_then(|details| details.description.as_ref()) - { - write!(&mut text, " {}", completion_details).ok(); - } - Some(language::CodeLabel::filtered( - text, - label_len, - item.filter_text.as_deref(), - highlight_id - .map(|id| (0..label.len(), id)) - .into_iter() - .collect(), - )) + label_for_pyright_completion(item, language) } async fn label_for_symbol( @@ -2056,33 +2022,7 @@ impl LspAdapter for BasedPyrightLspAdapter { symbol: &Symbol, language: &Arc, ) -> Option { - let name = &symbol.name; - let (text, filter_range, display_range) = match symbol.kind { - lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => { - let text = format!("def {}():\n", name); - let filter_range = 4..4 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CLASS => { - let text = format!("class {}:", name); - let filter_range = 6..6 + name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - lsp::SymbolKind::CONSTANT => { - let text = format!("{} = 0", name); - let filter_range = 0..name.len(); - let display_range = 0..filter_range.end; - (text, filter_range, display_range) - } - _ => return None, - }; - Some(language::CodeLabel::new( - text[display_range.clone()].to_string(), - filter_range, - language.highlight_text(&text.as_str().into(), display_range), - )) + label_for_python_symbol(symbol, language) } async fn workspace_configuration( From 
314b7e55fb3fb2d8277a38217a64d335668ef473 Mon Sep 17 00:00:00 2001 From: Nelson Campos <60667230+nelsoncampos-cloudwalk@users.noreply.github.com> Date: Thu, 12 Mar 2026 10:05:52 -0300 Subject: [PATCH 518/548] debugger: Fix restart only working once per session (#51247) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit `Session::restart_task` is set to `Some` when a restart is initiated but never cleared back to `None`. The guard at the top of `restart()` checks `self.restart_task.is_some()` and returns early, so only the first restart attempt succeeds. This primarily affects debug adapters that advertise `supportsRestartRequest` dynamically via a `CapabilitiesEvent` after launch, such as the Flutter debug adapter. Related: https://github.com/zed-extensions/dart/issues/45 Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) (N/A — no UI changes) Release Notes: - debugger: Fixed debug session restart only working once when the adapter supports DAP restart requests. 
--------- Co-authored-by: Claude Opus 4.6 (1M context) Co-authored-by: Anthony Eid --- crates/debugger_ui/src/tests.rs | 8 +- .../debugger_ui/src/tests/debugger_panel.rs | 74 ++++++++++++++++++- crates/project/src/debugger/session.rs | 28 ++++--- 3 files changed, 97 insertions(+), 13 deletions(-) diff --git a/crates/debugger_ui/src/tests.rs b/crates/debugger_ui/src/tests.rs index c183f8941c3f30cb43ffaa638eae4e6b387e226d..cc407dfd810ceedb11c4d8030c46a6f17065b34b 100644 --- a/crates/debugger_ui/src/tests.rs +++ b/crates/debugger_ui/src/tests.rs @@ -132,7 +132,13 @@ pub fn start_debug_session_with) + 'static>( .workspace() .read(cx) .panel::(cx) - .and_then(|panel| panel.read(cx).active_session()) + .and_then(|panel| { + panel + .read(cx) + .sessions_with_children + .keys() + .max_by_key(|session| session.read(cx).session_id(cx)) + }) .map(|session| session.read(cx).running_state().read(cx).session()) .cloned() .context("Failed to get active session") diff --git a/crates/debugger_ui/src/tests/debugger_panel.rs b/crates/debugger_ui/src/tests/debugger_panel.rs index 207e82b4958941e04ea04fc47c9471141e61a64d..e4c258a8d2af0b865f13c28430c44a66117a11cd 100644 --- a/crates/debugger_ui/src/tests/debugger_panel.rs +++ b/crates/debugger_ui/src/tests/debugger_panel.rs @@ -27,7 +27,7 @@ use std::{ path::Path, sync::{ Arc, - atomic::{AtomicBool, Ordering}, + atomic::{AtomicBool, AtomicUsize, Ordering}, }, }; use terminal_view::terminal_panel::TerminalPanel; @@ -2481,3 +2481,75 @@ async fn test_adapter_shutdown_with_child_sessions_on_app_quit( "Child session should have received disconnect request" ); } + +#[gpui::test] +async fn test_restart_request_is_not_sent_more_than_once_until_response( + executor: BackgroundExecutor, + cx: &mut TestAppContext, +) { + init_test(cx); + + let fs = FakeFs::new(executor.clone()); + + fs.insert_tree( + path!("/project"), + json!({ + "main.rs": "First line\nSecond line\nThird line\nFourth line", + }), + ) + .await; + + let project = 
Project::test(fs, [path!("/project").as_ref()], cx).await; + let workspace = init_test_workspace(&project, cx).await; + let cx = &mut VisualTestContext::from_window(*workspace, cx); + + let session = start_debug_session(&workspace, cx, move |client| { + client.on_request::(move |_, _| { + Ok(dap::Capabilities { + supports_restart_request: Some(true), + ..Default::default() + }) + }); + }) + .unwrap(); + + let client = session.update(cx, |session, _| session.adapter_client().unwrap()); + + let restart_count = Arc::new(AtomicUsize::new(0)); + + client.on_request::({ + let restart_count = restart_count.clone(); + move |_, _| { + restart_count.fetch_add(1, Ordering::SeqCst); + Ok(()) + } + }); + + // This works because the restart request sender is on the foreground thread + // so it will start running after the gpui update stack is cleared + session.update(cx, |session, cx| { + session.restart(None, cx); + session.restart(None, cx); + session.restart(None, cx); + }); + + cx.run_until_parked(); + + assert_eq!( + restart_count.load(Ordering::SeqCst), + 1, + "Only one restart request should be sent while a restart is in-flight" + ); + + session.update(cx, |session, cx| { + session.restart(None, cx); + }); + + cx.run_until_parked(); + + assert_eq!( + restart_count.load(Ordering::SeqCst), + 2, + "A second restart should be allowed after the first one completes" + ); +} diff --git a/crates/project/src/debugger/session.rs b/crates/project/src/debugger/session.rs index a6c3f52b17a4a6cf241aa49329f3f14f0b5cefbc..87e11cfd97a2f63bba3cefca671e4413deb6765f 100644 --- a/crates/project/src/debugger/session.rs +++ b/crates/project/src/debugger/session.rs @@ -2187,21 +2187,27 @@ impl Session { self.capabilities.supports_restart_request.unwrap_or(false) && !self.is_terminated(); self.restart_task = Some(cx.spawn(async move |this, cx| { - let _ = this.update(cx, |session, cx| { + this.update(cx, |session, cx| { if supports_dap_restart { - session - .request( - RestartCommand { - raw: 
args.unwrap_or(Value::Null), - }, - Self::fallback_to_manual_restart, - cx, - ) - .detach(); + session.request( + RestartCommand { + raw: args.unwrap_or(Value::Null), + }, + Self::fallback_to_manual_restart, + cx, + ) } else { cx.emit(SessionStateEvent::Restart); + Task::ready(None) } - }); + }) + .unwrap_or_else(|_| Task::ready(None)) + .await; + + this.update(cx, |session, _cx| { + session.restart_task = None; + }) + .ok(); })); } From 47cc0bac418c9b7ab63e4116961301fd51c745fb Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 12 Mar 2026 10:14:10 -0300 Subject: [PATCH 519/548] agent_ui: Add keybinding to cycle through new thread location options & settings (#51384) This PR adds the ability to save in the settings whether new threads should start in the current project or in a new Git worktree. Additionally, it also adds a keybinding that allows cycling through the menu options easily, with the ability to use cmd-click/enter to choose which one is set as the default. No release notes because this feature/settings depends on a feature flag that isn't out yet. 
Release Notes: - N/A --- assets/keymaps/default-linux.json | 2 +- assets/keymaps/default-macos.json | 2 +- assets/keymaps/default-windows.json | 2 +- crates/agent/src/tool_permissions.rs | 1 + crates/agent_settings/src/agent_settings.rs | 4 +- crates/agent_ui/src/agent_panel.rs | 125 ++++++++++++++------ crates/agent_ui/src/agent_ui.rs | 5 +- crates/agent_ui/src/ui/hold_for_default.rs | 19 ++- crates/settings_content/src/agent.rs | 21 ++++ crates/settings_ui/src/page_data.rs | 24 +++- 10 files changed, 159 insertions(+), 46 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index cb5cef24c50f9f9ac637f3ac70adb24d37e56d61..5780eedb4445f613cbbd4e9a09976f2d475b28c7 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -258,7 +258,7 @@ "ctrl-shift-j": "agent::ToggleNavigationMenu", "ctrl-alt-i": "agent::ToggleOptionsMenu", "ctrl-alt-shift-n": "agent::ToggleNewThreadMenu", - "ctrl-alt-shift-t": "agent::ToggleStartThreadInSelector", + "ctrl-shift-t": "agent::CycleStartThreadIn", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl->": "agent::AddSelectionToThread", "ctrl-shift-e": "project_panel::ToggleFocus", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 08fb63868be875f41f6c461354b46f1081a2026f..6fc6905dd5f4502ff7ee90e7f6f9499b2e03fa6a 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -297,7 +297,7 @@ "cmd-shift-j": "agent::ToggleNavigationMenu", "cmd-alt-m": "agent::ToggleOptionsMenu", "cmd-alt-shift-n": "agent::ToggleNewThreadMenu", - "cmd-alt-shift-t": "agent::ToggleStartThreadInSelector", + "cmd-shift-t": "agent::CycleStartThreadIn", "shift-alt-escape": "agent::ExpandMessageEditor", "cmd->": "agent::AddSelectionToThread", "cmd-shift-e": "project_panel::ToggleFocus", diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 
600025e2069978f3020afb5cb978d05a53317682..ac23d45695e11ec46172c566282ea65bf7774ac8 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -259,7 +259,7 @@ "shift-alt-j": "agent::ToggleNavigationMenu", "shift-alt-i": "agent::ToggleOptionsMenu", "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu", - "ctrl-shift-alt-t": "agent::ToggleStartThreadInSelector", + "ctrl-shift-t": "agent::CycleStartThreadIn", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl-shift-.": "agent::AddSelectionToThread", "ctrl-shift-e": "project_panel::ToggleFocus", diff --git a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs index 79564bbddea7063d00e18d97c8eab89533b20da5..4cb4d265b3170429430b815d7490099a50678714 100644 --- a/crates/agent/src/tool_permissions.rs +++ b/crates/agent/src/tool_permissions.rs @@ -560,6 +560,7 @@ mod tests { message_editor_min_lines: 1, tool_permissions, show_turn_stats: false, + new_thread_location: Default::default(), } } diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index 02341af42b9247ba07cb3f8c771a51626cd721ed..d5d4f16eb742a92f6abf8081c43709f161ef4038 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -12,7 +12,7 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection, - NotifyWhenAgentWaiting, RegisterSetting, Settings, ToolPermissionMode, + NewThreadLocation, NotifyWhenAgentWaiting, RegisterSetting, Settings, ToolPermissionMode, }; pub use crate::agent_profile::*; @@ -51,6 +51,7 @@ pub struct AgentSettings { pub message_editor_min_lines: usize, pub show_turn_stats: bool, pub tool_permissions: ToolPermissions, + pub new_thread_location: NewThreadLocation, } impl AgentSettings { @@ -438,6 +439,7 @@ impl Settings for AgentSettings { message_editor_min_lines: 
agent.message_editor_min_lines.unwrap(), show_turn_stats: agent.show_turn_stats.unwrap(), tool_permissions: compile_tool_permissions(agent.tool_permissions), + new_thread_location: agent.new_thread_location.unwrap_or_default(), } } } diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index f7c07abe5541187c7daf0dc037c00286c606f5bb..4fc6e3dd1f257377e3f5213b1ae216115fd01fff 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -29,12 +29,12 @@ use zed_actions::agent::{ ResolveConflictedFilesWithAgent, ResolveConflictsWithAgent, ReviewBranchDiff, }; -use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal}; +use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal, HoldForDefault}; use crate::{ - AddContextServer, AgentDiffPane, ConnectionView, CopyThreadToClipboard, Follow, - InlineAssistant, LoadThreadFromClipboard, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, - OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, StartThreadIn, - ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu, ToggleStartThreadInSelector, + AddContextServer, AgentDiffPane, ConnectionView, CopyThreadToClipboard, CycleStartThreadIn, + Follow, InlineAssistant, LoadThreadFromClipboard, NewTextThread, NewThread, + OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, + StartThreadIn, ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu, agent_configuration::{AgentConfiguration, AssistantConfigurationEvent}, connection_view::{AcpThreadViewEvent, ThreadView}, slash_command::SlashCommandCompletionProvider, @@ -312,18 +312,6 @@ pub fn init(cx: &mut App) { }); } }) - .register_action(|workspace, _: &ToggleStartThreadInSelector, window, cx| { - if let Some(panel) = workspace.panel::(cx) { - workspace.focus_panel::(window, cx); - panel.update(cx, |panel, cx| { - panel.toggle_start_thread_in_selector( - &ToggleStartThreadInSelector, - window, - 
cx, - ); - }); - } - }) .register_action(|workspace, _: &OpenAcpOnboardingModal, window, cx| { AcpOnboardingModal::toggle(workspace, window, cx) }) @@ -477,6 +465,13 @@ pub fn init(cx: &mut App) { }); } }) + .register_action(|workspace, _: &CycleStartThreadIn, _window, cx| { + if let Some(panel) = workspace.panel::(cx) { + panel.update(cx, |panel, cx| { + panel.cycle_start_thread_in(cx); + }); + } + }) .register_action(|workspace, _: &ToggleWorkspaceSidebar, window, cx| { if !multi_workspace_enabled(cx) { return; @@ -1751,15 +1746,6 @@ impl AgentPanel { self.new_thread_menu_handle.toggle(window, cx); } - pub fn toggle_start_thread_in_selector( - &mut self, - _: &ToggleStartThreadInSelector, - window: &mut Window, - cx: &mut Context, - ) { - self.start_thread_in_menu_handle.toggle(window, cx); - } - pub fn increase_font_size( &mut self, action: &IncreaseBufferFontSize, @@ -2388,6 +2374,28 @@ impl AgentPanel { cx.notify(); } + fn cycle_start_thread_in(&mut self, cx: &mut Context) { + let next = match self.start_thread_in { + StartThreadIn::LocalProject => StartThreadIn::NewWorktree, + StartThreadIn::NewWorktree => StartThreadIn::LocalProject, + }; + self.set_start_thread_in(&next, cx); + } + + fn reset_start_thread_in_to_default(&mut self, cx: &mut Context) { + use settings::{NewThreadLocation, Settings}; + let default = AgentSettings::get_global(cx).new_thread_location; + let start_thread_in = match default { + NewThreadLocation::LocalProject => StartThreadIn::LocalProject, + NewThreadLocation::NewWorktree => StartThreadIn::NewWorktree, + }; + if self.start_thread_in != start_thread_in { + self.start_thread_in = start_thread_in; + self.serialize(cx); + cx.notify(); + } + } + fn selected_external_agent(&self) -> Option { match &self.selected_agent { AgentType::NativeAgent => Some(Agent::NativeAgent), @@ -2445,6 +2453,7 @@ impl AgentPanel { window: &mut Window, cx: &mut Context, ) { + self.reset_start_thread_in_to_default(cx); self.new_agent_thread_inner(agent, true, 
window, cx); } @@ -3592,9 +3601,12 @@ impl AgentPanel { } fn render_start_thread_in_selector(&self, cx: &mut Context) -> impl IntoElement { + use settings::{NewThreadLocation, Settings}; + let focus_handle = self.focus_handle(cx); let has_git_repo = self.project_has_git_repository(cx); let is_via_collab = self.project.read(cx).is_via_collab(); + let fs = self.fs.clone(); let is_creating = matches!( self.worktree_creation_status, @@ -3604,6 +3616,10 @@ impl AgentPanel { let current_target = self.start_thread_in; let trigger_label = self.start_thread_in.label(); + let new_thread_location = AgentSettings::get_global(cx).new_thread_location; + let is_local_default = new_thread_location == NewThreadLocation::LocalProject; + let is_new_worktree_default = new_thread_location == NewThreadLocation::NewWorktree; + let icon = if self.start_thread_in_menu_handle.is_deployed() { IconName::ChevronUp } else { @@ -3631,7 +3647,7 @@ impl AgentPanel { move |_window, cx| { Tooltip::for_action_in( "Start Thread In…", - &ToggleStartThreadInSelector, + &CycleStartThreadIn, &focus_handle, cx, ) @@ -3640,6 +3656,7 @@ impl AgentPanel { .menu(move |window, cx| { let is_local_selected = current_target == StartThreadIn::LocalProject; let is_new_worktree_selected = current_target == StartThreadIn::NewWorktree; + let fs = fs.clone(); Some(ContextMenu::build(window, cx, move |menu, _window, _cx| { let new_worktree_disabled = !has_git_repo || is_via_collab; @@ -3648,18 +3665,53 @@ impl AgentPanel { .item( ContextMenuEntry::new("Current Project") .toggleable(IconPosition::End, is_local_selected) - .handler(|window, cx| { - window - .dispatch_action(Box::new(StartThreadIn::LocalProject), cx); + .documentation_aside(documentation_side, move |_| { + HoldForDefault::new(is_local_default) + .more_content(false) + .into_any_element() + }) + .handler({ + let fs = fs.clone(); + move |window, cx| { + if window.modifiers().secondary() { + update_settings_file(fs.clone(), cx, |settings, _| { + settings + 
.agent + .get_or_insert_default() + .set_new_thread_location( + NewThreadLocation::LocalProject, + ); + }); + } + window.dispatch_action( + Box::new(StartThreadIn::LocalProject), + cx, + ); + } }), ) .item({ let entry = ContextMenuEntry::new("New Worktree") .toggleable(IconPosition::End, is_new_worktree_selected) .disabled(new_worktree_disabled) - .handler(|window, cx| { - window - .dispatch_action(Box::new(StartThreadIn::NewWorktree), cx); + .handler({ + let fs = fs.clone(); + move |window, cx| { + if window.modifiers().secondary() { + update_settings_file(fs.clone(), cx, |settings, _| { + settings + .agent + .get_or_insert_default() + .set_new_thread_location( + NewThreadLocation::NewWorktree, + ); + }); + } + window.dispatch_action( + Box::new(StartThreadIn::NewWorktree), + cx, + ); + } }); if new_worktree_disabled { @@ -3675,7 +3727,11 @@ impl AgentPanel { .into_any_element() }) } else { - entry + entry.documentation_aside(documentation_side, move |_| { + HoldForDefault::new(is_new_worktree_default) + .more_content(false) + .into_any_element() + }) } }) })) @@ -4849,7 +4905,6 @@ impl Render for AgentPanel { .on_action(cx.listener(Self::go_back)) .on_action(cx.listener(Self::toggle_navigation_menu)) .on_action(cx.listener(Self::toggle_options_menu)) - .on_action(cx.listener(Self::toggle_start_thread_in_selector)) .on_action(cx.listener(Self::increase_font_size)) .on_action(cx.listener(Self::decrease_font_size)) .on_action(cx.listener(Self::reset_font_size)) diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index fbf47615cb23b75eaeff1f785ada8bf8605556d3..ea70d155b79e190dcfe9138b620aff4415b6d935 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -86,8 +86,8 @@ actions!( NewTextThread, /// Toggles the menu to create new agent threads. ToggleNewThreadMenu, - /// Toggles the selector for choosing where new threads start (current project or new worktree). 
- ToggleStartThreadInSelector, + /// Cycles through the options for where new threads start (current project or new worktree). + CycleStartThreadIn, /// Toggles the navigation menu for switching between threads and views. ToggleNavigationMenu, /// Toggles the options menu for agent settings and preferences. @@ -655,6 +655,7 @@ mod tests { message_editor_min_lines: 1, tool_permissions: Default::default(), show_turn_stats: false, + new_thread_location: Default::default(), }; cx.update(|cx| { diff --git a/crates/agent_ui/src/ui/hold_for_default.rs b/crates/agent_ui/src/ui/hold_for_default.rs index 1972f5de4d38fd5ba47ff91709be6ded302b61ae..436ca65ddd93b977a09c8de8eaeb25dc6c0eb1a0 100644 --- a/crates/agent_ui/src/ui/hold_for_default.rs +++ b/crates/agent_ui/src/ui/hold_for_default.rs @@ -4,20 +4,31 @@ use ui::{prelude::*, render_modifiers}; #[derive(IntoElement)] pub struct HoldForDefault { is_default: bool, + more_content: bool, } impl HoldForDefault { pub fn new(is_default: bool) -> Self { - Self { is_default } + Self { + is_default, + more_content: true, + } + } + + pub fn more_content(mut self, more_content: bool) -> Self { + self.more_content = more_content; + self } } impl RenderOnce for HoldForDefault { fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { h_flex() - .pt_1() - .border_t_1() - .border_color(cx.theme().colors().border_variant) + .when(self.more_content, |this| { + this.pt_1() + .border_t_1() + .border_color(cx.theme().colors().border_variant) + }) .gap_0p5() .text_sm() .text_color(Color::Muted.color(cx)) diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs index 87e117b8b0bbdd9a789bae18c3f9dce98a6f1bc0..8061e591b0a3f81e8b8081a0b363c112fb388ce4 100644 --- a/crates/settings_content/src/agent.rs +++ b/crates/settings_content/src/agent.rs @@ -9,6 +9,19 @@ use crate::ExtendingVec; use crate::DockPosition; +/// Where new threads should start by default. 
+#[derive( + Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom, +)] +#[serde(rename_all = "snake_case")] +pub enum NewThreadLocation { + /// Start threads in the current project. + #[default] + LocalProject, + /// Start threads in a new worktree. + NewWorktree, +} + #[with_fallible_options] #[derive(Clone, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom, Debug, Default)] pub struct AgentSettingsContent { @@ -59,6 +72,10 @@ pub struct AgentSettingsContent { /// /// Default: "thread" pub default_view: Option, + /// Where new threads should start by default. + /// + /// Default: "local_project" + pub new_thread_location: Option, /// The available agent profiles. pub profiles: Option, AgentProfileContent>>, /// Where to show a popup notification when the agent is waiting for user input. @@ -146,6 +163,10 @@ impl AgentSettingsContent { self.default_profile = Some(profile_id); } + pub fn set_new_thread_location(&mut self, value: NewThreadLocation) { + self.new_thread_location = Some(value); + } + pub fn add_favorite_model(&mut self, model: LanguageModelSelection) { if !self.favorite_models.contains(&model) { self.favorite_models.push(model); diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index dbac4d7ba350fcff07016a2ccfa483f3d84472c7..708840668d7502ae0c34e9f1751fd7b76da2ca07 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -6972,7 +6972,7 @@ fn ai_page() -> SettingsPage { ] } - fn agent_configuration_section() -> [SettingsPageItem; 12] { + fn agent_configuration_section() -> [SettingsPageItem; 13] { [ SettingsPageItem::SectionHeader("Agent Configuration"), SettingsPageItem::SubPageLink(SubPageLink { @@ -6984,6 +6984,28 @@ fn ai_page() -> SettingsPage { files: USER, render: render_tool_permissions_setup_page, }), + SettingsPageItem::SettingItem(SettingItem { + title: "New Thread Location", + description: "Whether to start a new thread in 
the current local project or in a new Git worktree.", + field: Box::new(SettingField { + json_path: Some("agent.default_start_thread_in"), + pick: |settings_content| { + settings_content + .agent + .as_ref()? + .new_thread_location + .as_ref() + }, + write: |settings_content, value| { + settings_content + .agent + .get_or_insert_default() + .new_thread_location = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SettingItem(SettingItem { title: "Single File Review", description: "When enabled, agent edits will also be displayed in single-file buffers for review.", From 9ddf672d57a4faa93e7d84a610465838ddf52d6b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 12 Mar 2026 14:26:16 +0100 Subject: [PATCH 520/548] project: Fix semantic tokens coloring deleted diff hunks (#51386) Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/project/src/lsp_store/semantic_tokens.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/crates/project/src/lsp_store/semantic_tokens.rs b/crates/project/src/lsp_store/semantic_tokens.rs index cfcd74ad7de7baaf60833cd9db1085d60307c20e..2927e5c0af77c50420462e95c271e61828b020e5 100644 --- a/crates/project/src/lsp_store/semantic_tokens.rs +++ b/crates/project/src/lsp_store/semantic_tokens.rs @@ -585,8 +585,7 @@ async fn raw_to_buffer_semantic_tokens( } Some(BufferSemanticToken { - range: buffer_snapshot.anchor_before(start) - ..buffer_snapshot.anchor_after(end), + range: buffer_snapshot.anchor_range_around(start..end), token_type: token.token_type, token_modifiers: token.token_modifiers, }) From 8e78b9fa97fe494198a94501ce93f8edb7a72851 Mon Sep 17 00:00:00 2001 From: Smit Barmase Date: Thu, 12 Mar 2026 21:16:48 +0530 Subject: [PATCH 521/548] Fix window drags when dragging button/input on titlebar in macOS (#51400) Closes https://github.com/zed-industries/zed/issues/27500 This PR fixes an issue on macOS where dragging title bar buttons and other UI elements would drag the window instead of 
no-op, like in native Mac apps. That made interactions like selecting text with the mouse impossible in those areas, including the title input in Rules Library. We don't want to handle this at GPUI level, since you might still want this dragging behavior while having no native titlebar for some apps, and without implementing your own handler. So, we just handle this for Zed. On macOS, we now set `is_movable: false` on all windows, which disables that drag-anything behavior and relies on the native window drag handler for window dragging instead. This also meant implementing a platform title bar for the sidebar in Rules Library, since dragging there was previously handled by the `is_movable` behavior. We already had a full-width platform title bar there on other platforms. On macOS, it is sidebar-only to keep existing design. Release Notes: - N/A --- .../src/platform_title_bar.rs | 10 +++ crates/rules_library/src/rules_library.rs | 74 ++++++++++++------- crates/zed/src/zed.rs | 2 +- 3 files changed, 57 insertions(+), 29 deletions(-) diff --git a/crates/platform_title_bar/src/platform_title_bar.rs b/crates/platform_title_bar/src/platform_title_bar.rs index 1db29b0f53d9e7b185e6c3cd3029ed2e6077753e..70d24812974ee00caaad7005a593733e30788060 100644 --- a/crates/platform_title_bar/src/platform_title_bar.rs +++ b/crates/platform_title_bar/src/platform_title_bar.rs @@ -30,6 +30,7 @@ pub struct PlatformTitleBar { platform_style: PlatformStyle, children: SmallVec<[AnyElement; 2]>, should_move: bool, + background_color: Option, system_window_tabs: Entity, } @@ -43,11 +44,16 @@ impl PlatformTitleBar { platform_style, children: SmallVec::new(), should_move: false, + background_color: None, system_window_tabs, } } pub fn title_bar_color(&self, window: &mut Window, cx: &mut Context) -> Hsla { + if let Some(background_color) = self.background_color { + return background_color; + } + if cfg!(any(target_os = "linux", target_os = "freebsd")) { if window.is_window_active() && 
!self.should_move { cx.theme().colors().title_bar_background @@ -66,6 +72,10 @@ impl PlatformTitleBar { self.children = children.into_iter().collect(); } + pub fn set_background_color(&mut self, background_color: Option) { + self.background_color = background_color; + } + pub fn init(cx: &mut App) { SystemWindowTabs::init(cx); } diff --git a/crates/rules_library/src/rules_library.rs b/crates/rules_library/src/rules_library.rs index 73bf5fdd8fcaaf1437013d300102a9e593823c7b..dd4bbcfaeb7a14ea4bda8c546f5cf2539734eb73 100644 --- a/crates/rules_library/src/rules_library.rs +++ b/crates/rules_library/src/rules_library.rs @@ -3,9 +3,9 @@ use collections::{HashMap, HashSet}; use editor::{CompletionProvider, SelectionEffects}; use editor::{CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle, actions::Tab}; use gpui::{ - App, Bounds, DEFAULT_ADDITIONAL_WINDOW_SIZE, Entity, EventEmitter, Focusable, PromptLevel, - Subscription, Task, TextStyle, Tiling, TitlebarOptions, WindowBounds, WindowHandle, - WindowOptions, actions, point, size, transparent_black, + App, Bounds, DEFAULT_ADDITIONAL_WINDOW_SIZE, Entity, EventEmitter, Focusable, MouseButton, + PromptLevel, Subscription, Task, TextStyle, Tiling, TitlebarOptions, WindowBounds, + WindowHandle, WindowOptions, actions, point, size, transparent_black, }; use language::{Buffer, LanguageRegistry, language_settings::SoftWrap}; use language_model::{ @@ -133,6 +133,7 @@ pub fn open_rules_library( window_decorations: Some(window_decorations), window_min_size: Some(DEFAULT_ADDITIONAL_WINDOW_SIZE), kind: gpui::WindowKind::Floating, + is_movable: !cfg!(target_os = "macos"), ..Default::default() }, |window, cx| { @@ -503,11 +504,7 @@ impl RulesLibrary { }); Self { - title_bar: if !cfg!(target_os = "macos") { - Some(cx.new(|cx| PlatformTitleBar::new("rules-library-title-bar", cx))) - } else { - None - }, + title_bar: Some(cx.new(|cx| PlatformTitleBar::new("rules-library-title-bar", cx))), store, language_registry, 
rule_editors: HashMap::default(), @@ -1129,30 +1126,44 @@ impl RulesLibrary { v_flex() .id("rule-list") .capture_action(cx.listener(Self::focus_active_rule)) - .px_1p5() .h_full() .w_64() .overflow_x_hidden() .bg(cx.theme().colors().panel_background) + .when(!cfg!(target_os = "macos"), |this| this.px_1p5()) .map(|this| { if cfg!(target_os = "macos") { - this.child( - h_flex() - .p(DynamicSpacing::Base04.rems(cx)) - .h_9() - .w_full() - .flex_none() - .justify_end() - .child( - IconButton::new("new-rule", IconName::Plus) - .tooltip(move |_window, cx| { - Tooltip::for_action("New Rule", &NewRule, cx) - }) - .on_click(|_, window, cx| { - window.dispatch_action(Box::new(NewRule), cx); - }), - ), - ) + let Some(title_bar) = self.title_bar.as_ref() else { + return this; + }; + let button_padding = DynamicSpacing::Base08.rems(cx); + let panel_background = cx.theme().colors().panel_background; + title_bar.update(cx, |title_bar, _cx| { + title_bar.set_background_color(Some(panel_background)); + title_bar.set_children(Some( + h_flex() + .w_full() + .pr(button_padding) + .justify_end() + .child( + div() + .on_mouse_down(MouseButton::Left, |_, _, cx| { + cx.stop_propagation(); + }) + .child( + IconButton::new("new-rule", IconName::Plus) + .tooltip(move |_window, cx| { + Tooltip::for_action("New Rule", &NewRule, cx) + }) + .on_click(|_, window, cx| { + window.dispatch_action(Box::new(NewRule), cx); + }), + ), + ) + .into_any_element(), + )); + }); + this.child(title_bar.clone()) } else { this.child( h_flex().p_1().w_full().child( @@ -1170,7 +1181,12 @@ impl RulesLibrary { ) } }) - .child(div().flex_grow().child(self.picker.clone())) + .child( + div() + .flex_grow() + .when(cfg!(target_os = "macos"), |this| this.px_1p5()) + .child(self.picker.clone()), + ) } fn render_active_rule_editor( @@ -1417,7 +1433,9 @@ impl Render for RulesLibrary { .overflow_hidden() .font(ui_font) .text_color(theme.colors().text) - .children(self.title_bar.clone()) + .when(!cfg!(target_os = "macos"), 
|this| { + this.children(self.title_bar.clone()) + }) .bg(theme.colors().background) .child( h_flex() diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index b64bcbf3ab9ab5e29fdd473a200c2367e3f6f777..25defa1dde5977bd94935dafd60d97ae84b5a323 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -342,7 +342,7 @@ pub fn build_window_options(display_uuid: Option, cx: &mut App) -> WindowO focus: false, show: false, kind: WindowKind::Normal, - is_movable: true, + is_movable: !cfg!(target_os = "macos"), display_id: display.map(|display| display.id()), window_background: cx.theme().window_background_appearance(), app_id: Some(app_id.to_owned()), From 7b6932485679b7492893542ca7b15630f6ea0ec4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o=20Soares?= <37777652+Dnreikronos@users.noreply.github.com> Date: Thu, 12 Mar 2026 13:25:52 -0300 Subject: [PATCH 522/548] Truncate long diagnostic messages in the status bar (#51031) Closes #50186 Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) ## Screenshots: ### Before: image ### After: image Release Notes: - Fixed long diagnostic messages in the status bar pushing right-side buttons (terminal, agent, etc.) 
off screen --- crates/diagnostics/src/items.rs | 3 ++- crates/workspace/src/status_bar.rs | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/diagnostics/src/items.rs b/crates/diagnostics/src/items.rs index b4ca52ea7239b6e4e76160a475d703ddd2933f44..67a6877bbe95778815d9470c0d9c8360657328f3 100644 --- a/crates/diagnostics/src/items.rs +++ b/crates/diagnostics/src/items.rs @@ -28,7 +28,7 @@ pub struct DiagnosticIndicator { impl Render for DiagnosticIndicator { fn render(&mut self, _: &mut Window, cx: &mut Context) -> impl IntoElement { - let indicator = h_flex().gap_2(); + let indicator = h_flex().gap_2().min_w_0().overflow_x_hidden(); if !ProjectSettings::get_global(cx).diagnostics.button { return indicator.hidden(); } @@ -67,6 +67,7 @@ impl Render for DiagnosticIndicator { Some( Button::new("diagnostic_message", SharedString::new(message)) .label_size(LabelSize::Small) + .truncate(true) .tooltip(|_window, cx| { Tooltip::for_action( "Next Diagnostic", diff --git a/crates/workspace/src/status_bar.rs b/crates/workspace/src/status_bar.rs index 9087cbba42b054c1b247bdf3d9402688de4b7add..6164ff3f7f1ba3ee2b578beb6aa0c3ccced50884 100644 --- a/crates/workspace/src/status_bar.rs +++ b/crates/workspace/src/status_bar.rs @@ -68,12 +68,14 @@ impl StatusBar { fn render_left_tools(&self) -> impl IntoElement { h_flex() .gap_1() + .min_w_0() .overflow_x_hidden() .children(self.left_items.iter().map(|item| item.to_any())) } fn render_right_tools(&self) -> impl IntoElement { h_flex() + .flex_shrink_0() .gap_1() .overflow_x_hidden() .children(self.right_items.iter().rev().map(|item| item.to_any())) From 4842e095d968dfee9b9837d3ef1626532263af33 Mon Sep 17 00:00:00 2001 From: Viraj Bhartiya Date: Thu, 12 Mar 2026 21:58:21 +0530 Subject: [PATCH 523/548] editor: Skip `stop_at_indent` for single-line editors (#50681) In single-line editors like the Find bar, MoveToBeginningOfLine with stop_at_indent should go directly to column 0 instead of stopping at the indentation 
level. Closes #50634 Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zedindustries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed `MoveToBeginningOfLine` stopping at indentation in single-line editors like the Find bar instead of moving to column 0. --- crates/editor/src/editor.rs | 6 ++-- crates/editor/src/editor_tests.rs | 50 +++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 2 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 18a02e9773b3952d99b71f6d337f3c8950aff78e..bec381506060435419e86727051cda53ab220316 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -14703,6 +14703,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + let stop_at_indent = action.stop_at_indent && !self.mode.is_single_line(); self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_cursors_with(&mut |map, head, _| { @@ -14711,7 +14712,7 @@ impl Editor { map, head, action.stop_at_soft_wraps, - action.stop_at_indent, + stop_at_indent, ), SelectionGoal::None, ) @@ -14725,6 +14726,7 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { + let stop_at_indent = action.stop_at_indent && !self.mode.is_single_line(); self.hide_mouse_cursor(HideMouseCursorOrigin::MovementAction, cx); self.change_selections(Default::default(), window, cx, |s| { s.move_heads_with(&mut |map, head, _| { @@ -14733,7 +14735,7 @@ impl Editor { map, head, action.stop_at_soft_wraps, - action.stop_at_indent, + stop_at_indent, ), SelectionGoal::None, ) diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 
fe71cb76f0f16dc7a928ccff725585c0e857c62e..0da80a2a73f22afac7085b579494d708be2444a4 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -1868,6 +1868,56 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) { }); } +#[gpui::test] +fn test_beginning_of_line_single_line_editor(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let editor = cx.add_window(|window, cx| Editor::single_line(window, cx)); + + _ = editor.update(cx, |editor, window, cx| { + editor.set_text(" indented text", window, cx); + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 10)..DisplayPoint::new(DisplayRow(0), 10) + ]); + }); + + editor.move_to_beginning_of_line( + &MoveToBeginningOfLine { + stop_at_soft_wraps: true, + stop_at_indent: true, + }, + window, + cx, + ); + assert_eq!( + display_ranges(editor, cx), + &[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)] + ); + }); + + _ = editor.update(cx, |editor, window, cx| { + editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| { + s.select_display_ranges([ + DisplayPoint::new(DisplayRow(0), 10)..DisplayPoint::new(DisplayRow(0), 10) + ]); + }); + + editor.select_to_beginning_of_line( + &SelectToBeginningOfLine { + stop_at_soft_wraps: true, + stop_at_indent: true, + }, + window, + cx, + ); + assert_eq!( + display_ranges(editor, cx), + &[DisplayPoint::new(DisplayRow(0), 10)..DisplayPoint::new(DisplayRow(0), 0)] + ); + }); +} + #[gpui::test] fn test_beginning_end_of_line_ignore_soft_wrap(cx: &mut TestAppContext) { init_test(cx, |_| {}); From edc8255da6b3f84cf37220d101aff223a07c4cc7 Mon Sep 17 00:00:00 2001 From: Kunall Banerjee Date: Thu, 12 Mar 2026 12:30:21 -0400 Subject: [PATCH 524/548] docs: Add Vue language server configuration (#51356) Follow-up to https://github.com/zed-extensions/vue/pull/87. 
Release Notes: - N/A --- docs/src/languages/vue.md | 54 ++++++++++++++++++++++++++++++++++++++- typos.toml | 4 ++- 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/docs/src/languages/vue.md b/docs/src/languages/vue.md index 607d2b18a5243a5b552db96308faab6aebeb8b6c..3c2336119dfceb4aeea226bb2ccc2484dd438cbc 100644 --- a/docs/src/languages/vue.md +++ b/docs/src/languages/vue.md @@ -8,7 +8,59 @@ description: "Configure Vue language support in Zed, including language servers, Vue support is available through the [Vue extension](https://github.com/zed-extensions/vue). - Tree-sitter: [tree-sitter-grammars/tree-sitter-vue](https://github.com/tree-sitter-grammars/tree-sitter-vue) -- Language Server: [vuejs/language-tools/](https://github.com/vuejs/language-tools/) +- Language Server: [vuejs/language-tools](https://github.com/vuejs/language-tools) + +## Initialization Options + +### Specifying location of TypeScript SDK + +By default, this extension assumes that you are working in a project with a `node_modules` directory, and searches for +the TypeScript SDK inside that directory. + +This may not always be true; for example, when working in a project that uses Yarn PnP, there is no `node_modules`. For +editor support, the [documented](https://yarnpkg.com/getting-started/editor-sdks) approach is to run something like +`yarn dlx @yarnpkg/sdks`. In that case, you can provide the following initialization options in your Zed settings: + +```json +{ + "lsp": { + "vue": { + "initialization_options": { + "typescript": { + "tsdk": ".yarn/sdks/typescript/lib" + } + } + } + } +} +``` + +## Settings Options + +`lsp.vue.settings` is passed through to the Vue language server (Volar / [`vuejs/language-tools`](https://github.com/vuejs/language-tools)). The following settings are enabled by default: + +```json +{ + "lsp": { + "vue": { + "settings": { + // Display inlay hints for the `$event` parameter in inline event handlers. 
+ "vue.inlayHints.inlineHandlerLeading": true, + // Display hints when required component props are missing in templates. + "vue.inlayHints.missingProps": true, + // Display inlay hints for patterns that wrap component options. + "vue.inlayHints.optionsWrapper": true, + // Display inlay hints related to `v-bind` shorthand (`:`). + "vue.inlayHints.vBindShorthand": true + } + } + } +} +``` + +You can find the upstream settings configuration schema [`here`](https://github.com/vuejs/language-tools/blob/ee5041d27940cf6f9a5150635d3b13140a9dff54/extensions/vscode/package.json#L252). + +> Note: Some settings (e.g. `vue.editor.focusMode`) may not take effect. ## Using the Tailwind CSS Language Server with Vue diff --git a/typos.toml b/typos.toml index 863fea3822d62a51f737c3d7fa87a4c198710cfa..8c57caaf0417efdb01013e76f179515d9629a47c 100644 --- a/typos.toml +++ b/typos.toml @@ -92,6 +92,8 @@ extend-ignore-re = [ # AMD GPU Services "ags", # AMD GPU Services - "AGS" + "AGS", + # Yarn Plug'n'Play + "PnP" ] check-filename = true From 2ddb1e6c4d609c21a22baf2bef303f4f13f909dd Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 12 Mar 2026 14:05:30 -0300 Subject: [PATCH 525/548] agent_ui: Add archive view to the sidebar (#51336) This PR adds a button to the bottom of the sidebar that opens the archive view, which at the moment, only shows the same, uncategorized thread list available in the regular agent panel's history view. 
Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner Co-authored-by: Bennet Bo Fenner <53836821+bennetbo@users.noreply.github.com> Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- assets/icons/archive.svg | 5 + crates/agent_ui/src/agent_ui.rs | 1 + crates/agent_ui/src/sidebar.rs | 232 ++++--- crates/agent_ui/src/threads_archive_view.rs | 654 ++++++++++++++++++++ crates/icons/src/icons.rs | 1 + 5 files changed, 823 insertions(+), 70 deletions(-) create mode 100644 assets/icons/archive.svg create mode 100644 crates/agent_ui/src/threads_archive_view.rs diff --git a/assets/icons/archive.svg b/assets/icons/archive.svg new file mode 100644 index 0000000000000000000000000000000000000000..9ffe3f39d27c7fe5cbb532a4f263c8800398e96f --- /dev/null +++ b/assets/icons/archive.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs index ea70d155b79e190dcfe9138b620aff4415b6d935..db0cf873418ea38f8d5771c13b281528218fb94e 100644 --- a/crates/agent_ui/src/agent_ui.rs +++ b/crates/agent_ui/src/agent_ui.rs @@ -34,6 +34,7 @@ mod text_thread_editor; mod text_thread_history; mod thread_history; mod thread_history_view; +mod threads_archive_view; mod ui; use std::rc::Rc; diff --git a/crates/agent_ui/src/sidebar.rs b/crates/agent_ui/src/sidebar.rs index e204205819a8eb41a0624fb8a4a8ba9a96174add..2d4259717d160521ddd4884cbb6a1a1241456b64 100644 --- a/crates/agent_ui/src/sidebar.rs +++ b/crates/agent_ui/src/sidebar.rs @@ -1,3 +1,4 @@ +use crate::threads_archive_view::{ThreadsArchiveView, ThreadsArchiveViewEvent}; use crate::{AgentPanel, AgentPanelEvent, NewThread}; use acp_thread::ThreadStatus; use action_log::DiffStats; @@ -6,19 +7,18 @@ use agent_client_protocol as acp; use agent_settings::AgentSettings; use chrono::Utc; use db::kvp::KEY_VALUE_STORE; -use editor::{Editor, EditorElement, EditorStyle}; +use editor::Editor; use feature_flags::{AgentV2FeatureFlag, FeatureFlagViewExt as _}; use gpui::{ 
- Action as _, AnyElement, App, Context, Entity, FocusHandle, Focusable, FontStyle, ListState, - Pixels, Render, SharedString, TextStyle, WeakEntity, Window, actions, list, prelude::*, px, - relative, rems, + Action as _, AnyElement, App, Context, Entity, FocusHandle, Focusable, ListState, Pixels, + Render, SharedString, WeakEntity, Window, actions, list, prelude::*, px, }; use menu::{Cancel, Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; use project::Event as ProjectEvent; use settings::Settings; use std::collections::{HashMap, HashSet}; use std::mem; -use theme::{ActiveTheme, ThemeSettings}; +use theme::ActiveTheme; use ui::{ AgentThreadStatus, ButtonStyle, HighlightedLabel, IconButtonShape, ListItem, Tab, ThreadItem, Tooltip, WithScrollbar, prelude::*, @@ -46,6 +46,13 @@ const MAX_WIDTH: Pixels = px(800.0); const DEFAULT_THREADS_SHOWN: usize = 5; const SIDEBAR_STATE_KEY: &str = "sidebar_state"; +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] +enum SidebarView { + #[default] + ThreadList, + Archive, +} + fn read_sidebar_open_state(multi_workspace_id: u64) -> bool { KEY_VALUE_STORE .scoped(SIDEBAR_STATE_KEY) @@ -212,6 +219,9 @@ pub struct Sidebar { active_entry_index: Option, collapsed_groups: HashSet, expanded_groups: HashMap, + view: SidebarView, + archive_view: Option>, + _subscriptions: Vec, } impl Sidebar { @@ -311,6 +321,9 @@ impl Sidebar { active_entry_index: None, collapsed_groups: HashSet::new(), expanded_groups: HashMap::new(), + view: SidebarView::default(), + archive_view: None, + _subscriptions: Vec::new(), } } @@ -1323,28 +1336,8 @@ impl Sidebar { .into_any_element() } - fn render_filter_input(&self, cx: &mut Context) -> impl IntoElement { - let settings = ThemeSettings::get_global(cx); - let text_style = TextStyle { - color: cx.theme().colors().text, - font_family: settings.ui_font.family.clone(), - font_features: settings.ui_font.features.clone(), - font_fallbacks: settings.ui_font.fallbacks.clone(), - font_size: 
rems(0.875).into(), - font_weight: settings.ui_font.weight, - font_style: FontStyle::Normal, - line_height: relative(1.3), - ..Default::default() - }; - - EditorElement::new( - &self.filter_editor, - EditorStyle { - local_player: cx.theme().players().local(), - text: text_style, - ..Default::default() - }, - ) + fn render_filter_input(&self) -> impl IntoElement { + self.filter_editor.clone() } fn render_view_more( @@ -1451,6 +1444,61 @@ impl Sidebar { .into_any_element() } + fn render_thread_list_header( + &self, + docked_right: bool, + cx: &mut Context, + ) -> impl IntoElement { + let has_query = self.has_filter_query(cx); + + h_flex() + .h(Tab::container_height(cx)) + .flex_none() + .gap_1p5() + .border_b_1() + .border_color(cx.theme().colors().border) + .when(!docked_right, |this| { + this.child(self.render_sidebar_toggle_button(false, cx)) + }) + .child(self.render_filter_input()) + .when(has_query, |this| { + this.when(!docked_right, |this| this.pr_1p5()).child( + IconButton::new("clear_filter", IconName::Close) + .shape(IconButtonShape::Square) + .tooltip(Tooltip::text("Clear Search")) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_filter_editor_text(window, cx); + this.update_entries(cx); + })), + ) + }) + .when(docked_right, |this| { + this.pl_2() + .pr_0p5() + .child(self.render_sidebar_toggle_button(true, cx)) + }) + } + + fn render_thread_list_footer(&self, cx: &mut Context) -> impl IntoElement { + h_flex() + .p_1p5() + .border_t_1() + .border_color(cx.theme().colors().border) + .child( + Button::new("view-archive", "Archive") + .full_width() + .label_size(LabelSize::Small) + .style(ButtonStyle::Outlined) + .icon(IconName::Archive) + .icon_color(Color::Muted) + .icon_size(IconSize::XSmall) + .icon_position(IconPosition::Start) + .on_click(cx.listener(|this, _, window, cx| { + this.show_archive(window, cx); + })), + ) + } + fn render_sidebar_toggle_button( &self, docked_right: bool, @@ -1491,6 +1539,67 @@ impl Sidebar { self.is_open } + fn 
show_archive(&mut self, window: &mut Window, cx: &mut Context) { + let Some(active_workspace) = self.multi_workspace.upgrade().and_then(|w| { + w.read(cx) + .workspaces() + .get(w.read(cx).active_workspace_index()) + .cloned() + }) else { + return; + }; + + let Some(agent_panel) = active_workspace.read(cx).panel::(cx) else { + return; + }; + + let thread_store = agent_panel.read(cx).thread_store().clone(); + let fs = active_workspace.read(cx).project().read(cx).fs().clone(); + let agent_connection_store = agent_panel.read(cx).connection_store().clone(); + let agent_server_store = active_workspace + .read(cx) + .project() + .read(cx) + .agent_server_store() + .clone(); + + let archive_view = cx.new(|cx| { + ThreadsArchiveView::new( + agent_connection_store, + agent_server_store, + thread_store, + fs, + window, + cx, + ) + }); + let subscription = cx.subscribe_in( + &archive_view, + window, + |this, _, event: &ThreadsArchiveViewEvent, window, cx| match event { + ThreadsArchiveViewEvent::Close => { + this.show_thread_list(window, cx); + } + ThreadsArchiveViewEvent::OpenThread(_session_info) => { + //TODO: Actually open thread once we support it + } + }, + ); + + self._subscriptions.push(subscription); + self.archive_view = Some(archive_view); + self.view = SidebarView::Archive; + cx.notify(); + } + + fn show_thread_list(&mut self, window: &mut Window, cx: &mut Context) { + self.view = SidebarView::ThreadList; + self.archive_view = None; + self._subscriptions.clear(); + window.focus(&self.focus_handle, cx); + cx.notify(); + } + pub fn set_open(&mut self, open: bool, cx: &mut Context) { if self.is_open == open { return; @@ -1558,7 +1667,6 @@ impl Focusable for Sidebar { impl Render for Sidebar { fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { let ui_font = theme::setup_ui_font(window, cx); - let has_query = self.has_filter_query(cx); let docked_right = AgentSettings::get_global(cx).dock == settings::DockPosition::Right; let 
sticky_header = self.render_sticky_header(docked_right, window, cx); @@ -1579,50 +1687,34 @@ impl Render for Sidebar { .font(ui_font) .size_full() .bg(cx.theme().colors().surface_background) - .child( - h_flex() - .h(Tab::container_height(cx)) - .flex_none() - .gap_1p5() - .border_b_1() - .border_color(cx.theme().colors().border) - .when(!docked_right, |this| { - this.child(self.render_sidebar_toggle_button(false, cx)) - }) - .child(self.render_filter_input(cx)) - .when(has_query, |this| { - this.when(!docked_right, |this| this.pr_1p5()).child( - IconButton::new("clear_filter", IconName::Close) - .shape(IconButtonShape::Square) - .tooltip(Tooltip::text("Clear Search")) - .on_click(cx.listener(|this, _, window, cx| { - this.reset_filter_editor_text(window, cx); - this.update_entries(cx); - })), - ) - }) - .when(docked_right, |this| { - this.pl_2() - .pr_0p5() - .child(self.render_sidebar_toggle_button(true, cx)) - }), - ) - .child( - v_flex() - .relative() - .flex_1() - .overflow_hidden() + .map(|this| match self.view { + SidebarView::ThreadList => this + .child(self.render_thread_list_header(docked_right, cx)) .child( - list( - self.list_state.clone(), - cx.processor(Self::render_list_entry), - ) - .flex_1() - .size_full(), + v_flex() + .relative() + .flex_1() + .overflow_hidden() + .child( + list( + self.list_state.clone(), + cx.processor(Self::render_list_entry), + ) + .flex_1() + .size_full(), + ) + .when_some(sticky_header, |this, header| this.child(header)) + .vertical_scrollbar_for(&self.list_state, window, cx), ) - .when_some(sticky_header, |this, header| this.child(header)) - .vertical_scrollbar_for(&self.list_state, window, cx), - ) + .child(self.render_thread_list_footer(cx)), + SidebarView::Archive => { + if let Some(archive_view) = &self.archive_view { + this.child(archive_view.clone()) + } else { + this + } + } + }) } } diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs new file mode 100644 index 
0000000000000000000000000000000000000000..8ee0eedbd8702c7901258087af5d149fcf210648 --- /dev/null +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -0,0 +1,654 @@ +use std::sync::Arc; + +use crate::{Agent, agent_connection_store::AgentConnectionStore, thread_history::ThreadHistory}; +use acp_thread::AgentSessionInfo; +use agent::ThreadStore; +use chrono::{Datelike as _, Local, NaiveDate, TimeDelta, Utc}; +use editor::Editor; +use fs::Fs; +use gpui::{ + AnyElement, App, Context, Entity, EventEmitter, FocusHandle, Focusable, ListState, Render, + SharedString, Subscription, Task, Window, list, prelude::*, px, +}; +use itertools::Itertools as _; +use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; +use project::{AgentServerStore, ExternalAgentServerName}; +use theme::ActiveTheme; +use ui::{ + ButtonLike, ContextMenu, ContextMenuEntry, HighlightedLabel, ListItem, PopoverMenu, + PopoverMenuHandle, Tab, TintColor, Tooltip, WithScrollbar, prelude::*, +}; +use util::ResultExt as _; +use zed_actions::editor::{MoveDown, MoveUp}; + +#[derive(Clone)] +enum ArchiveListItem { + BucketSeparator(TimeBucket), + Entry { + session: AgentSessionInfo, + highlight_positions: Vec, + }, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +enum TimeBucket { + Today, + Yesterday, + ThisWeek, + PastWeek, + Older, +} + +impl TimeBucket { + fn from_dates(reference: NaiveDate, date: NaiveDate) -> Self { + if date == reference { + return TimeBucket::Today; + } + if date == reference - TimeDelta::days(1) { + return TimeBucket::Yesterday; + } + let week = date.iso_week(); + if reference.iso_week() == week { + return TimeBucket::ThisWeek; + } + let last_week = (reference - TimeDelta::days(7)).iso_week(); + if week == last_week { + return TimeBucket::PastWeek; + } + TimeBucket::Older + } + + fn label(&self) -> &'static str { + match self { + TimeBucket::Today => "Today", + TimeBucket::Yesterday => "Yesterday", + TimeBucket::ThisWeek => "This Week", + TimeBucket::PastWeek => 
"Past Week", + TimeBucket::Older => "Older", + } + } +} + +fn fuzzy_match_positions(query: &str, text: &str) -> Option> { + let query = query.to_lowercase(); + let text_lower = text.to_lowercase(); + let mut positions = Vec::new(); + let mut query_chars = query.chars().peekable(); + for (i, c) in text_lower.chars().enumerate() { + if query_chars.peek() == Some(&c) { + positions.push(i); + query_chars.next(); + } + } + if query_chars.peek().is_none() { + Some(positions) + } else { + None + } +} + +pub enum ThreadsArchiveViewEvent { + Close, + OpenThread(AgentSessionInfo), +} + +impl EventEmitter for ThreadsArchiveView {} + +pub struct ThreadsArchiveView { + agent_connection_store: Entity, + agent_server_store: Entity, + thread_store: Entity, + fs: Arc, + history: Option>, + _history_subscription: Subscription, + selected_agent: Agent, + focus_handle: FocusHandle, + list_state: ListState, + items: Vec, + selection: Option, + filter_editor: Entity, + _subscriptions: Vec, + selected_agent_menu: PopoverMenuHandle, + _refresh_history_task: Task<()>, +} + +impl ThreadsArchiveView { + pub fn new( + agent_connection_store: Entity, + agent_server_store: Entity, + thread_store: Entity, + fs: Arc, + window: &mut Window, + cx: &mut Context, + ) -> Self { + let focus_handle = cx.focus_handle(); + + let filter_editor = cx.new(|cx| { + let mut editor = Editor::single_line(window, cx); + editor.set_placeholder_text("Search archive…", window, cx); + editor + }); + + let filter_editor_subscription = + cx.subscribe(&filter_editor, |this: &mut Self, _, event, cx| { + if let editor::EditorEvent::BufferEdited = event { + this.update_items(cx); + } + }); + + let mut this = Self { + agent_connection_store, + agent_server_store, + thread_store, + fs, + history: None, + _history_subscription: Subscription::new(|| {}), + selected_agent: Agent::NativeAgent, + focus_handle, + list_state: ListState::new(0, gpui::ListAlignment::Top, px(1000.)), + items: Vec::new(), + selection: None, + 
filter_editor, + _subscriptions: vec![filter_editor_subscription], + selected_agent_menu: PopoverMenuHandle::default(), + _refresh_history_task: Task::ready(()), + }; + this.set_selected_agent(Agent::NativeAgent, cx); + this + } + + fn set_selected_agent(&mut self, agent: Agent, cx: &mut Context) { + self.selected_agent = agent.clone(); + + let server = agent.server(self.fs.clone(), self.thread_store.clone()); + let connection = self + .agent_connection_store + .update(cx, |store, cx| store.request_connection(agent, server, cx)); + + let task = connection.read(cx).wait_for_connection(); + self._refresh_history_task = cx.spawn(async move |this, cx| { + if let Some(state) = task.await.log_err() { + this.update(cx, |this, cx| this.set_history(state.history, cx)) + .ok(); + } + }); + + cx.notify(); + } + + fn set_history(&mut self, history: Entity, cx: &mut Context) { + self._history_subscription = cx.observe(&history, |this, _, cx| { + this.update_items(cx); + }); + history.update(cx, |history, cx| { + history.refresh_full_history(cx); + }); + self.history = Some(history); + self.update_items(cx); + cx.notify(); + } + + fn update_items(&mut self, cx: &mut Context) { + let Some(history) = self.history.as_ref() else { + return; + }; + + let sessions = history.read(cx).sessions().to_vec(); + let query = self.filter_editor.read(cx).text(cx).to_lowercase(); + let today = Local::now().naive_local().date(); + + let mut items = Vec::with_capacity(sessions.len() + 5); + let mut current_bucket: Option = None; + + for session in sessions { + let highlight_positions = if !query.is_empty() { + let title = session.title.as_ref().map(|t| t.as_ref()).unwrap_or(""); + match fuzzy_match_positions(&query, title) { + Some(positions) => positions, + None => continue, + } + } else { + Vec::new() + }; + + let entry_bucket = session + .updated_at + .map(|timestamp| { + let entry_date = timestamp.with_timezone(&Local).naive_local().date(); + TimeBucket::from_dates(today, entry_date) + }) + 
.unwrap_or(TimeBucket::Older); + + if Some(entry_bucket) != current_bucket { + current_bucket = Some(entry_bucket); + items.push(ArchiveListItem::BucketSeparator(entry_bucket)); + } + + items.push(ArchiveListItem::Entry { + session, + highlight_positions, + }); + } + + self.list_state.reset(items.len()); + self.items = items; + cx.notify(); + } + + fn reset_filter_editor_text(&mut self, window: &mut Window, cx: &mut Context) { + self.filter_editor.update(cx, |editor, cx| { + editor.set_text("", window, cx); + }); + } + + fn go_back(&mut self, window: &mut Window, cx: &mut Context) { + self.reset_filter_editor_text(window, cx); + cx.emit(ThreadsArchiveViewEvent::Close); + } + + fn open_thread( + &mut self, + session_info: AgentSessionInfo, + window: &mut Window, + cx: &mut Context, + ) { + self.selection = None; + self.reset_filter_editor_text(window, cx); + cx.emit(ThreadsArchiveViewEvent::OpenThread(session_info)); + } + + fn is_selectable_item(&self, ix: usize) -> bool { + matches!(self.items.get(ix), Some(ArchiveListItem::Entry { .. 
})) + } + + fn find_next_selectable(&self, start: usize) -> Option { + (start..self.items.len()).find(|&i| self.is_selectable_item(i)) + } + + fn find_previous_selectable(&self, start: usize) -> Option { + (0..=start).rev().find(|&i| self.is_selectable_item(i)) + } + + fn editor_move_down(&mut self, _: &MoveDown, window: &mut Window, cx: &mut Context) { + self.select_next(&SelectNext, window, cx); + } + + fn editor_move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context) { + self.select_previous(&SelectPrevious, window, cx); + } + + fn select_next(&mut self, _: &SelectNext, _window: &mut Window, cx: &mut Context) { + let next = match self.selection { + Some(ix) => self.find_next_selectable(ix + 1), + None => self.find_next_selectable(0), + }; + if let Some(next) = next { + self.selection = Some(next); + self.list_state.scroll_to_reveal_item(next); + cx.notify(); + } + } + + fn select_previous( + &mut self, + _: &SelectPrevious, + _window: &mut Window, + cx: &mut Context, + ) { + let prev = match self.selection { + Some(ix) if ix > 0 => self.find_previous_selectable(ix - 1), + None => { + let last = self.items.len().saturating_sub(1); + self.find_previous_selectable(last) + } + _ => return, + }; + if let Some(prev) = prev { + self.selection = Some(prev); + self.list_state.scroll_to_reveal_item(prev); + cx.notify(); + } + } + + fn select_first(&mut self, _: &SelectFirst, _window: &mut Window, cx: &mut Context) { + if let Some(first) = self.find_next_selectable(0) { + self.selection = Some(first); + self.list_state.scroll_to_reveal_item(first); + cx.notify(); + } + } + + fn select_last(&mut self, _: &SelectLast, _window: &mut Window, cx: &mut Context) { + let last = self.items.len().saturating_sub(1); + if let Some(last) = self.find_previous_selectable(last) { + self.selection = Some(last); + self.list_state.scroll_to_reveal_item(last); + cx.notify(); + } + } + + fn confirm(&mut self, _: &Confirm, window: &mut Window, cx: &mut Context) { + let Some(ix) = 
self.selection else { return }; + let Some(ArchiveListItem::Entry { session, .. }) = self.items.get(ix) else { + return; + }; + self.open_thread(session.clone(), window, cx); + } + + fn render_list_entry( + &mut self, + ix: usize, + _window: &mut Window, + cx: &mut Context, + ) -> AnyElement { + let Some(item) = self.items.get(ix) else { + return div().into_any_element(); + }; + + match item { + ArchiveListItem::BucketSeparator(bucket) => div() + .w_full() + .px_2() + .pt_3() + .pb_1() + .child( + Label::new(bucket.label()) + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any_element(), + ArchiveListItem::Entry { + session, + highlight_positions, + } => { + let is_selected = self.selection == Some(ix); + let title: SharedString = + session.title.clone().unwrap_or_else(|| "Untitled".into()); + let session_info = session.clone(); + let highlight_positions = highlight_positions.clone(); + + let timestamp = session.created_at.or(session.updated_at).map(|entry_time| { + let now = Utc::now(); + let duration = now.signed_duration_since(entry_time); + + let minutes = duration.num_minutes(); + let hours = duration.num_hours(); + let days = duration.num_days(); + let weeks = days / 7; + let months = days / 30; + + if minutes < 60 { + format!("{}m", minutes.max(1)) + } else if hours < 24 { + format!("{}h", hours) + } else if weeks < 4 { + format!("{}w", weeks.max(1)) + } else { + format!("{}mo", months.max(1)) + } + }); + + let id = SharedString::from(format!("archive-entry-{}", ix)); + + let title_label = if highlight_positions.is_empty() { + Label::new(title) + .size(LabelSize::Small) + .truncate() + .into_any_element() + } else { + HighlightedLabel::new(title, highlight_positions) + .size(LabelSize::Small) + .truncate() + .into_any_element() + }; + + ListItem::new(id) + .toggle_state(is_selected) + .disabled(true) + .child( + h_flex() + .min_w_0() + .w_full() + .py_1() + .pl_0p5() + .pr_1p5() + .gap_2() + .justify_between() + .child(title_label) + 
.when_some(timestamp, |this, ts| { + this.child( + Label::new(ts).size(LabelSize::Small).color(Color::Muted), + ) + }), + ) + .on_click(cx.listener(move |this, _, window, cx| { + this.open_thread(session_info.clone(), window, cx); + })) + .into_any_element() + } + } + } + + fn render_agent_picker(&self, cx: &mut Context) -> PopoverMenu { + let agent_server_store = self.agent_server_store.clone(); + + let (chevron_icon, icon_color) = if self.selected_agent_menu.is_deployed() { + (IconName::ChevronUp, Color::Accent) + } else { + (IconName::ChevronDown, Color::Muted) + }; + + let selected_agent_icon = if let Agent::Custom { name } = &self.selected_agent { + let store = agent_server_store.read(cx); + let icon = store.agent_icon(&ExternalAgentServerName(name.clone())); + + if let Some(icon) = icon { + Icon::from_external_svg(icon) + } else { + Icon::new(IconName::Sparkle) + } + .color(Color::Muted) + .size(IconSize::Small) + } else { + Icon::new(IconName::ZedAgent) + .color(Color::Muted) + .size(IconSize::Small) + }; + + let this = cx.weak_entity(); + + PopoverMenu::new("agent_history_menu") + .trigger( + ButtonLike::new("selected_agent") + .selected_style(ButtonStyle::Tinted(TintColor::Accent)) + .child( + h_flex().gap_1().child(selected_agent_icon).child( + Icon::new(chevron_icon) + .color(icon_color) + .size(IconSize::XSmall), + ), + ), + ) + .menu(move |window, cx| { + Some(ContextMenu::build(window, cx, |menu, _window, cx| { + menu.item( + ContextMenuEntry::new("Zed Agent") + .icon(IconName::ZedAgent) + .icon_color(Color::Muted) + .handler({ + let this = this.clone(); + move |_, cx| { + this.update(cx, |this, cx| { + this.set_selected_agent(Agent::NativeAgent, cx) + }) + .ok(); + } + }), + ) + .separator() + .map(|mut menu| { + let agent_server_store = agent_server_store.read(cx); + let registry_store = project::AgentRegistryStore::try_global(cx); + let registry_store_ref = registry_store.as_ref().map(|s| s.read(cx)); + + struct AgentMenuItem { + id: 
ExternalAgentServerName, + display_name: SharedString, + } + + let agent_items = agent_server_store + .external_agents() + .map(|name| { + let display_name = agent_server_store + .agent_display_name(name) + .or_else(|| { + registry_store_ref + .as_ref() + .and_then(|store| store.agent(name.0.as_ref())) + .map(|a| a.name().clone()) + }) + .unwrap_or_else(|| name.0.clone()); + AgentMenuItem { + id: name.clone(), + display_name, + } + }) + .sorted_unstable_by_key(|e| e.display_name.to_lowercase()) + .collect::>(); + + for item in &agent_items { + let mut entry = ContextMenuEntry::new(item.display_name.clone()); + + let icon_path = agent_server_store.agent_icon(&item.id).or_else(|| { + registry_store_ref + .as_ref() + .and_then(|store| store.agent(item.id.0.as_str())) + .and_then(|a| a.icon_path().cloned()) + }); + + if let Some(icon_path) = icon_path { + entry = entry.custom_icon_svg(icon_path); + } else { + entry = entry.icon(IconName::ZedAgent); + } + + entry = entry.icon_color(Color::Muted).handler({ + let this = this.clone(); + let agent = Agent::Custom { + name: item.id.0.clone(), + }; + move |_, cx| { + this.update(cx, |this, cx| { + this.set_selected_agent(agent.clone(), cx) + }) + .ok(); + } + }); + + menu = menu.item(entry); + } + menu + }) + })) + }) + .with_handle(self.selected_agent_menu.clone()) + .anchor(gpui::Corner::TopRight) + .offset(gpui::Point { + x: px(1.0), + y: px(1.0), + }) + } + + fn render_header(&self, cx: &mut Context) -> impl IntoElement { + let has_query = !self.filter_editor.read(cx).text(cx).is_empty(); + + h_flex() + .h(Tab::container_height(cx)) + .px_1() + .gap_1p5() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + h_flex() + .flex_1() + .w_full() + .gap_1p5() + .child( + IconButton::new("back", IconName::ArrowLeft) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Back to Sidebar")) + .on_click(cx.listener(|this, _, window, cx| { + this.go_back(window, cx); + })), + ) + 
.child(self.filter_editor.clone()) + .when(has_query, |this| { + this.border_r_1().child( + IconButton::new("clear_archive_filter", IconName::Close) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Clear Search")) + .on_click(cx.listener(|this, _, window, cx| { + this.reset_filter_editor_text(window, cx); + this.update_items(cx); + })), + ) + }), + ) + .child(self.render_agent_picker(cx)) + } +} + +impl Focusable for ThreadsArchiveView { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } +} + +impl Render for ThreadsArchiveView { + fn render(&mut self, window: &mut Window, cx: &mut Context) -> impl IntoElement { + let is_empty = self.items.is_empty(); + let has_query = !self.filter_editor.read(cx).text(cx).is_empty(); + + let empty_state_container = |label: SharedString| { + v_flex() + .flex_1() + .justify_center() + .items_center() + .child(Label::new(label).size(LabelSize::Small).color(Color::Muted)) + }; + + v_flex() + .key_context("ThreadsArchiveView") + .track_focus(&self.focus_handle) + .on_action(cx.listener(Self::select_next)) + .on_action(cx.listener(Self::select_previous)) + .on_action(cx.listener(Self::editor_move_down)) + .on_action(cx.listener(Self::editor_move_up)) + .on_action(cx.listener(Self::select_first)) + .on_action(cx.listener(Self::select_last)) + .on_action(cx.listener(Self::confirm)) + .size_full() + .bg(cx.theme().colors().surface_background) + .child(self.render_header(cx)) + .child(if is_empty && has_query { + empty_state_container("No threads match your search.".into()).into_any_element() + } else if is_empty { + empty_state_container("No archived threads yet.".into()).into_any_element() + } else { + v_flex() + .flex_1() + .overflow_hidden() + .child( + list( + self.list_state.clone(), + cx.processor(Self::render_list_entry), + ) + .flex_1() + .size_full(), + ) + .vertical_scrollbar_for(&self.list_state, window, cx) + .into_any_element() + }) + } +} diff --git a/crates/icons/src/icons.rs 
b/crates/icons/src/icons.rs index 94fed7f03f46e64ef0ac929e60cf6ae848145e72..17db6371114e1623280c22a23dd44e8efc6fa594 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -27,6 +27,7 @@ pub enum IconName { AiVZero, AiXAi, AiZed, + Archive, ArrowCircle, ArrowDown, ArrowDown10, From 86b5e92108b3eb95f92ef77d4fec392d711c80c9 Mon Sep 17 00:00:00 2001 From: Anikesh kumar Date: Thu, 12 Mar 2026 22:35:56 +0530 Subject: [PATCH 526/548] docs: Fix incorrect binary name for `visual_test_runner` (#51153) Fix binary name in macOS's development guide. Closes #51151 Release Notes: - N/A --- docs/src/development/macos.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/development/macos.md b/docs/src/development/macos.md index 62c2218e52751c1117da90e76ae13554b7e8f792..82d7264e2bb123b52ece8abcc44c3563d49de453 100644 --- a/docs/src/development/macos.md +++ b/docs/src/development/macos.md @@ -89,7 +89,7 @@ Before making any UI changes, generate baseline images from a known-good state: ```sh git checkout origin/main -UPDATE_BASELINE=1 cargo run -p zed --bin visual_test_runner --features visual-tests +UPDATE_BASELINE=1 cargo run -p zed --bin zed_visual_test_runner --features visual-tests git checkout - ``` From 4bd1a090d939534ceec725307d43e7b154832950 Mon Sep 17 00:00:00 2001 From: Lee ByeongJun Date: Fri, 13 Mar 2026 02:09:23 +0900 Subject: [PATCH 527/548] editor: Fix bracket colorization with folds and large functions (#51108) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #47846 `visible_excerpts` computed the visible buffer range by adding display line count directly to the buffer start row: ```rust // Before multi_buffer_visible_start + Point::new(visible_line_count, 0) ``` This ignores folds entirely. When a 700-line function is folded into one display line, content after the fold is visible on screen but falls outside the computed buffer range, so its brackets are never colorized. 
The fix converts through display coordinates so the fold/wrap layers are respected: ```rust // After let display_end = DisplayPoint::new(display_start.row + visible_line_count, 0); let multi_buffer_visible_end = display_end.to_point(&display_snapshot); ``` ### Results **Before Fix** 스크린샷 2026-03-10 오후 8 29 10 **After Fix** 스크린샷 2026-03-10 오후 8 32 27 Before you mark this PR as ready for review, make sure that you have: - [X] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - Fixed bracket colorization not working for content after folded regions and for functions with large bodies. --------- Co-authored-by: Kirill Bulatov --- crates/editor/src/bracket_colorization.rs | 54 +++++++++++++++++++++++ crates/editor/src/editor.rs | 24 +++++----- 2 files changed, 67 insertions(+), 11 deletions(-) diff --git a/crates/editor/src/bracket_colorization.rs b/crates/editor/src/bracket_colorization.rs index 16fe29a7fa4aa066cf045a63c477fbb569d80334..657f1e1b23d91ca421da6a38fbeaa382a65863db 100644 --- a/crates/editor/src/bracket_colorization.rs +++ b/crates/editor/src/bracket_colorization.rs @@ -1455,6 +1455,60 @@ mod foo «1{ ); } + #[gpui::test] + // reproduction of #47846 + async fn test_bracket_colorization_with_folds(cx: &mut gpui::TestAppContext) { + init_test(cx, |language_settings| { + language_settings.defaults.colorize_brackets = Some(true); + }); + let mut cx = EditorLspTestContext::new( + Arc::into_inner(rust_lang()).unwrap(), + lsp::ServerCapabilities::default(), + cx, + ) + .await; + + // Generate a large function body. When folded, this collapses + // to a single display line, making small_function visible on screen. 
+ let mut big_body = String::new(); + for i in 0..700 { + big_body.push_str(&format!(" let var_{i:04} = ({i});\n")); + } + let source = format!( + "ˇfn big_function() {{\n{big_body}}}\n\nfn small_function() {{\n let x = (1, (2, 3));\n}}\n" + ); + + cx.set_state(&source); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + cx.update_editor(|editor, window, cx| { + editor.fold_ranges( + vec![Point::new(0, 0)..Point::new(701, 1)], + false, + window, + cx, + ); + }); + cx.executor().advance_clock(Duration::from_millis(100)); + cx.executor().run_until_parked(); + + assert_eq!( + indoc! {r#" +⋯1» + +fn small_function«1()1» «1{ + let x = «2(1, «3(2, 3)3»)2»; +}1» + +1 hsla(207.80, 16.20%, 69.19%, 1.00) +2 hsla(29.00, 54.00%, 65.88%, 1.00) +3 hsla(286.00, 51.00%, 75.25%, 1.00) +"#,}, + bracket_colors_markup(&mut cx), + ); + } + fn separate_with_comment_lines(head: &str, tail: &str, comment_lines: usize) -> String { let mut result = head.to_string(); result.push_str("\n"); diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index bec381506060435419e86727051cda53ab220316..707fb43cc3b573772ef24b7fe7eea69a2ad3c8ec 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -2621,16 +2621,7 @@ impl Editor { .await; editor .update_in(cx, |editor, window, cx| { - editor.register_visible_buffers(cx); - editor.colorize_brackets(false, cx); - editor.refresh_inlay_hints( - InlayHintRefreshReason::NewLinesShown, - cx, - ); - if !editor.buffer().read(cx).is_singleton() { - editor.update_lsp_data(None, window, cx); - editor.refresh_runnables(window, cx); - } + editor.update_data_on_scroll(window, cx) }) .ok(); }); @@ -20055,7 +20046,7 @@ impl Editor { &mut self, creases: Vec>, auto_scroll: bool, - _window: &mut Window, + window: &mut Window, cx: &mut Context, ) { if creases.is_empty() { @@ -20071,6 +20062,7 @@ impl Editor { cx.notify(); self.scrollbar_marker_state.dirty = true; + 
self.update_data_on_scroll(window, cx); self.folds_did_change(cx); } @@ -25367,6 +25359,16 @@ impl Editor { fn disable_runnables(&mut self) { self.enable_runnables = false; } + + fn update_data_on_scroll(&mut self, window: &mut Window, cx: &mut Context<'_, Self>) { + self.register_visible_buffers(cx); + self.colorize_brackets(false, cx); + self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx); + if !self.buffer().read(cx).is_singleton() { + self.update_lsp_data(None, window, cx); + self.refresh_runnables(window, cx); + } + } } fn edit_for_markdown_paste<'a>( From a8d0cdb5598b0775aefa39e8567698a38deeec20 Mon Sep 17 00:00:00 2001 From: AdamJedl <100023363+AdamJedl@users.noreply.github.com> Date: Thu, 12 Mar 2026 18:15:17 +0100 Subject: [PATCH 528/548] project_panel: Improve wording around file deletion (#43801) Make it clear in the UI that "Delete" of file or folder is permanent action. For example in windows explorer and VS Code "Delete" means move to trash. Or maybe also remove permanent delete from the context menu completely and allow it only through keyboard shortcut, like it's in Windows Explorer, VS Code and KDE Dolphin file manager. Release Notes: - Improved wording within file deletion prompts in the projetct panel. 
--------- Co-authored-by: MrSubidubi --- crates/project_panel/src/project_panel.rs | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/crates/project_panel/src/project_panel.rs b/crates/project_panel/src/project_panel.rs index 068fb8d71fa883e9d2b518c7d19adacea74fadcb..2984bb49c6a961c77adc1b82c806f7ec57d54a3e 100644 --- a/crates/project_panel/src/project_panel.rs +++ b/crates/project_panel/src/project_panel.rs @@ -2371,6 +2371,11 @@ impl ProjectPanel { } let answer = if !skip_prompt { let operation = if trash { "Trash" } else { "Delete" }; + let message_start = if trash { + "Do you want to trash" + } else { + "Are you sure you want to permanently delete" + }; let prompt = match file_paths.first() { Some((_, path)) if file_paths.len() == 1 => { let unsaved_warning = if dirty_buffers > 0 { @@ -2379,7 +2384,7 @@ impl ProjectPanel { "" }; - format!("{operation} {path}?{unsaved_warning}") + format!("{message_start} {path}?{unsaved_warning}") } _ => { const CUTOFF_POINT: usize = 10; @@ -2411,14 +2416,20 @@ impl ProjectPanel { }; format!( - "Do you want to {} the following {} files?\n{}{unsaved_warning}", - operation.to_lowercase(), + "{message_start} the following {} files?\n{}{unsaved_warning}", file_paths.len(), names.join("\n") ) } }; - Some(window.prompt(PromptLevel::Info, &prompt, None, &[operation, "Cancel"], cx)) + let detail = (!trash).then_some("This cannot be undone."); + Some(window.prompt( + PromptLevel::Info, + &prompt, + detail, + &[operation, "Cancel"], + cx, + )) } else { None }; From dcab4646086d952207feebefb11d85f8af1ae32e Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Thu, 12 Mar 2026 18:49:36 +0100 Subject: [PATCH 529/548] editor: Fix gutter hitbox hover check (#51405) The gutter hitbox would previously check the hover using the position, ignoring any occluding hitboxes rendered above it. This would then trigger the crease toggles to show which should not happen in that case, since the gutter was not really hovered. 
Release Notes: - Fixed an issue where the crease toggles in the gutter would sometimes show when interacting with a popover present over the editor gutter. --- crates/editor/src/element.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 5de14d80681ca1ad07534e8764217ef75cc90305..dcbd00ef8c89de8c4a3e3334ae1804ebe9e7b042 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -1243,7 +1243,7 @@ impl EditorElement { let gutter_hitbox = &position_map.gutter_hitbox; let modifiers = event.modifiers; let text_hovered = text_hitbox.is_hovered(window); - let gutter_hovered = gutter_hitbox.bounds.contains(&event.position); + let gutter_hovered = gutter_hitbox.is_hovered(window); editor.set_gutter_hovered(gutter_hovered, cx); editor.show_mouse_cursor(cx); From 17adc40d61387c7db26fe278ab8cf6e67c9bce4c Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 12 Mar 2026 10:53:38 -0700 Subject: [PATCH 530/548] Implement sidebar rendering of the configured worktrees (#51342) Implements worktree support for the agent panel sidebar Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 6 + crates/agent_ui/src/sidebar.rs | 484 +++++++++++++++++- .../20221109000000_test_schema.sql | 1 + .../migrations/20251208000000_test_schema.sql | 3 +- crates/collab/src/db/queries/projects.rs | 9 + crates/collab/src/db/queries/rooms.rs | 5 + .../src/db/tables/project_repository.rs | 2 + crates/collab/tests/integration/git_tests.rs | 233 ++++++++- crates/fs/src/fake_git_repo.rs | 2 +- crates/project/src/git_store.rs | 39 +- 
crates/proto/proto/git.proto | 1 + 11 files changed, 777 insertions(+), 8 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index 4fc6e3dd1f257377e3f5213b1ae216115fd01fff..f9a136c10fe26ce1763fbde52c532f065e097463 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -2642,6 +2642,12 @@ impl AgentPanel { } } + // TODO: The mapping from workspace root paths to git repositories needs a + // unified approach across the codebase: this method, `sidebar::is_root_repo`, + // thread persistence (which PathList is saved to the database), and thread + // querying (which PathList is used to read threads back). All of these need + // to agree on how repos are resolved for a given workspace, especially in + // multi-root and nested-repo configurations. /// Partitions the project's visible worktrees into git-backed repositories /// and plain (non-git) paths. Git repos will have worktrees created for /// them; non-git paths are carried over to the new workspace as-is. 
diff --git a/crates/agent_ui/src/sidebar.rs b/crates/agent_ui/src/sidebar.rs index 2d4259717d160521ddd4884cbb6a1a1241456b64..24c5d5f5e5295a7e25af9f486323a16a2405c8e0 100644 --- a/crates/agent_ui/src/sidebar.rs +++ b/crates/agent_ui/src/sidebar.rs @@ -18,6 +18,8 @@ use project::Event as ProjectEvent; use settings::Settings; use std::collections::{HashMap, HashSet}; use std::mem; +use std::path::Path; +use std::sync::Arc; use theme::ActiveTheme; use ui::{ AgentThreadStatus, ButtonStyle, HighlightedLabel, IconButtonShape, ListItem, Tab, ThreadItem, @@ -107,6 +109,8 @@ struct ThreadEntry { is_live: bool, is_background: bool, highlight_positions: Vec, + worktree_name: Option, + worktree_highlight_positions: Vec, diff_stats: DiffStats, } @@ -172,6 +176,32 @@ fn fuzzy_match_positions(query: &str, candidate: &str) -> Option> { } } +// TODO: The mapping from workspace root paths to git repositories needs a +// unified approach across the codebase: this function, `AgentPanel::classify_worktrees`, +// thread persistence (which PathList is saved to the database), and thread +// querying (which PathList is used to read threads back). All of these need +// to agree on how repos are resolved for a given workspace, especially in +// multi-root and nested-repo configurations. 
+fn root_repository_snapshots( + workspace: &Entity, + cx: &App, +) -> Vec { + let (path_list, _) = workspace_path_list_and_label(workspace, cx); + let project = workspace.read(cx).project().read(cx); + project + .repositories(cx) + .values() + .filter_map(|repo| { + let snapshot = repo.read(cx).snapshot(); + let is_root = path_list + .paths() + .iter() + .any(|p| p.as_path() == snapshot.work_directory_abs_path.as_ref()); + is_root.then_some(snapshot) + }) + .collect() +} + fn workspace_path_list_and_label( workspace: &Entity, cx: &App, @@ -348,6 +378,26 @@ impl Sidebar { ) .detach(); + let git_store = workspace.read(cx).project().read(cx).git_store().clone(); + cx.subscribe_in( + &git_store, + window, + |this, _, event: &project::git_store::GitStoreEvent, window, cx| { + if matches!( + event, + project::git_store::GitStoreEvent::RepositoryUpdated( + _, + project::git_store::RepositoryEvent::GitWorktreeListChanged, + _, + ) + ) { + this.prune_stale_worktree_workspaces(window, cx); + this.update_entries(cx); + } + }, + ) + .detach(); + cx.subscribe_in( workspace, window, @@ -472,7 +522,52 @@ impl Sidebar { // Compute active_entry_index inline during the build pass. let mut active_entry_index: Option = None; - for workspace in workspaces.iter() { + // Identify absorbed workspaces in a single pass. A workspace is + // "absorbed" when it points at a git worktree checkout whose main + // repo is open as another workspace — its threads appear under the + // main repo's header instead of getting their own. 
+ let mut main_repo_workspace: HashMap, usize> = HashMap::new(); + let mut absorbed: HashMap = HashMap::new(); + let mut pending: HashMap, Vec<(usize, SharedString)>> = HashMap::new(); + + for (i, workspace) in workspaces.iter().enumerate() { + for snapshot in root_repository_snapshots(workspace, cx) { + if snapshot.work_directory_abs_path == snapshot.original_repo_abs_path { + main_repo_workspace + .entry(snapshot.work_directory_abs_path.clone()) + .or_insert(i); + if let Some(waiting) = pending.remove(&snapshot.work_directory_abs_path) { + for (ws_idx, name) in waiting { + absorbed.insert(ws_idx, (i, name)); + } + } + } else { + let name: SharedString = snapshot + .work_directory_abs_path + .file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string() + .into(); + if let Some(&main_idx) = + main_repo_workspace.get(&snapshot.original_repo_abs_path) + { + absorbed.insert(i, (main_idx, name)); + } else { + pending + .entry(snapshot.original_repo_abs_path.clone()) + .or_default() + .push((i, name)); + } + } + } + } + + for (ws_index, workspace) in workspaces.iter().enumerate() { + if absorbed.contains_key(&ws_index) { + continue; + } + let (path_list, label) = workspace_path_list_and_label(workspace, cx); let is_collapsed = self.collapsed_groups.contains(&path_list); @@ -481,8 +576,11 @@ impl Sidebar { let mut threads: Vec = Vec::new(); if should_load_threads { + let mut seen_session_ids: HashSet = HashSet::new(); + if let Some(ref thread_store) = thread_store { for meta in thread_store.read(cx).threads_for_paths(&path_list) { + seen_session_ids.insert(meta.id.clone()); threads.push(ThreadEntry { session_info: meta.into(), icon: IconName::ZedAgent, @@ -492,11 +590,56 @@ impl Sidebar { is_live: false, is_background: false, highlight_positions: Vec::new(), + worktree_name: None, + worktree_highlight_positions: Vec::new(), diff_stats: DiffStats::default(), }); } } + // Load threads from linked git worktrees of this workspace's repos. 
+ if let Some(ref thread_store) = thread_store { + let mut linked_worktree_queries: Vec<(PathList, SharedString)> = Vec::new(); + for snapshot in root_repository_snapshots(workspace, cx) { + if snapshot.work_directory_abs_path != snapshot.original_repo_abs_path { + continue; + } + for git_worktree in snapshot.linked_worktrees() { + let name = git_worktree + .path + .file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string(); + linked_worktree_queries.push(( + PathList::new(std::slice::from_ref(&git_worktree.path)), + name.into(), + )); + } + } + + for (worktree_path_list, worktree_name) in &linked_worktree_queries { + for meta in thread_store.read(cx).threads_for_paths(worktree_path_list) { + if !seen_session_ids.insert(meta.id.clone()) { + continue; + } + threads.push(ThreadEntry { + session_info: meta.into(), + icon: IconName::ZedAgent, + icon_from_external_svg: None, + status: AgentThreadStatus::default(), + workspace: workspace.clone(), + is_live: false, + is_background: false, + highlight_positions: Vec::new(), + worktree_name: Some(worktree_name.clone()), + worktree_highlight_positions: Vec::new(), + diff_stats: DiffStats::default(), + }); + } + } + } + let live_infos = Self::all_thread_infos_for_workspace(workspace, cx); if !live_infos.is_empty() { @@ -570,7 +713,16 @@ impl Sidebar { if let Some(positions) = fuzzy_match_positions(&query, title) { thread.highlight_positions = positions; } - if workspace_matched || !thread.highlight_positions.is_empty() { + if let Some(worktree_name) = &thread.worktree_name { + if let Some(positions) = fuzzy_match_positions(&query, worktree_name) { + thread.worktree_highlight_positions = positions; + } + } + let worktree_matched = !thread.worktree_highlight_positions.is_empty(); + if workspace_matched + || !thread.highlight_positions.is_empty() + || worktree_matched + { matched_threads.push(thread); } } @@ -1024,6 +1176,52 @@ impl Sidebar { }); } + fn prune_stale_worktree_workspaces(&mut self, window: &mut Window, 
cx: &mut Context) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + let workspaces = multi_workspace.read(cx).workspaces().to_vec(); + + // Collect all worktree paths that are currently listed by any main + // repo open in any workspace. + let mut known_worktree_paths: HashSet = HashSet::new(); + for workspace in &workspaces { + for snapshot in root_repository_snapshots(workspace, cx) { + if snapshot.work_directory_abs_path != snapshot.original_repo_abs_path { + continue; + } + for git_worktree in snapshot.linked_worktrees() { + known_worktree_paths.insert(git_worktree.path.to_path_buf()); + } + } + } + + // Find workspaces that consist of exactly one root folder which is a + // stale worktree checkout. Multi-root workspaces are never pruned — + // losing one worktree shouldn't destroy a workspace that also + // contains other folders. + let mut to_remove: Vec> = Vec::new(); + for workspace in &workspaces { + let (path_list, _) = workspace_path_list_and_label(workspace, cx); + if path_list.paths().len() != 1 { + continue; + } + let should_prune = root_repository_snapshots(workspace, cx) + .iter() + .any(|snapshot| { + snapshot.work_directory_abs_path != snapshot.original_repo_abs_path + && !known_worktree_paths.contains(snapshot.work_directory_abs_path.as_ref()) + }); + if should_prune { + to_remove.push(workspace.clone()); + } + } + + for workspace in &to_remove { + self.remove_workspace(workspace, window, cx); + } + } + fn remove_workspace( &mut self, workspace: &Entity, @@ -1316,6 +1514,10 @@ impl Sidebar { .when_some(thread.icon_from_external_svg.clone(), |this, svg| { this.custom_icon_from_external_svg(svg) }) + .when_some(thread.worktree_name.clone(), |this, name| { + this.worktree(name) + }) + .worktree_highlight_positions(thread.worktree_highlight_positions.clone()) .when_some(timestamp, |this, ts| this.timestamp(ts)) .highlight_positions(thread.highlight_positions.to_vec()) .status(thread.status) @@ -1913,9 +2115,14 @@ 
mod tests { } else { "" }; + let worktree = thread + .worktree_name + .as_ref() + .map(|name| format!(" {{{}}}", name)) + .unwrap_or_default(); format!( - " {}{}{}{}{}", - title, active, status_str, notified, selected + " {}{}{}{}{}{}", + title, worktree, active, status_str, notified, selected ) } ListEntry::ViewMore { @@ -2244,6 +2451,8 @@ mod tests { is_live: false, is_background: false, highlight_positions: Vec::new(), + worktree_name: None, + worktree_highlight_positions: Vec::new(), diff_stats: DiffStats::default(), }), // Active thread with Running status @@ -2263,6 +2472,8 @@ mod tests { is_live: true, is_background: false, highlight_positions: Vec::new(), + worktree_name: None, + worktree_highlight_positions: Vec::new(), diff_stats: DiffStats::default(), }), // Active thread with Error status @@ -2282,6 +2493,8 @@ mod tests { is_live: true, is_background: false, highlight_positions: Vec::new(), + worktree_name: None, + worktree_highlight_positions: Vec::new(), diff_stats: DiffStats::default(), }), // Thread with WaitingForConfirmation status, not active @@ -2301,6 +2514,8 @@ mod tests { is_live: false, is_background: false, highlight_positions: Vec::new(), + worktree_name: None, + worktree_highlight_positions: Vec::new(), diff_stats: DiffStats::default(), }), // Background thread that completed (should show notification) @@ -2320,6 +2535,8 @@ mod tests { is_live: true, is_background: true, highlight_positions: Vec::new(), + worktree_name: None, + worktree_highlight_positions: Vec::new(), diff_stats: DiffStats::default(), }), // View More entry @@ -3829,4 +4046,263 @@ mod tests { ); }); } + + async fn save_named_thread( + session_id: &str, + title: &str, + path_list: &PathList, + cx: &mut gpui::VisualTestContext, + ) { + let thread_store = cx.update(|_window, cx| ThreadStore::global(cx)); + let save_task = thread_store.update(cx, |store, cx| { + store.save_thread( + acp::SessionId::new(Arc::from(session_id)), + make_test_thread( + title, + 
chrono::TimeZone::with_ymd_and_hms(&Utc, 2024, 1, 1, 0, 0, 0).unwrap(), + ), + path_list.clone(), + cx, + ) + }); + save_task.await.unwrap(); + cx.run_until_parked(); + } + + async fn init_test_project_with_git( + worktree_path: &str, + cx: &mut TestAppContext, + ) -> (Entity, Arc) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + worktree_path, + serde_json::json!({ + ".git": {}, + "src": {}, + }), + ) + .await; + cx.update(|cx| ::set_global(fs.clone(), cx)); + let project = project::Project::test(fs.clone(), [worktree_path.as_ref()], cx).await; + (project, fs) + } + + #[gpui::test] + async fn test_search_matches_worktree_name(cx: &mut TestAppContext) { + let (project, fs) = init_test_project_with_git("/project", cx).await; + + fs.as_fake() + .with_git_state(std::path::Path::new("/project/.git"), false, |state| { + state.worktrees.push(git::repository::Worktree { + path: std::path::PathBuf::from("/wt/rosewood"), + ref_name: "refs/heads/rosewood".into(), + sha: "abc".into(), + }); + }) + .unwrap(); + + project + .update(cx, |project, cx| project.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let main_paths = PathList::new(&[std::path::PathBuf::from("/project")]); + let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt/rosewood")]); + save_named_thread("main-t", "Unrelated Thread", &main_paths, cx).await; + save_named_thread("wt-t", "Fix Bug", &wt_paths, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Search for "rosewood" — should match the worktree name, not the title. 
+ type_in_search(&sidebar, "rosewood", cx); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project]", " Fix Bug {rosewood} <== selected"], + ); + } + + #[gpui::test] + async fn test_git_worktree_added_live_updates_sidebar(cx: &mut TestAppContext) { + let (project, fs) = init_test_project_with_git("/project", cx).await; + + project + .update(cx, |project, cx| project.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = + cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx)); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a thread against a worktree path that doesn't exist yet. + let wt_paths = PathList::new(&[std::path::PathBuf::from("/wt/rosewood")]); + save_named_thread("wt-thread", "Worktree Thread", &wt_paths, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Thread is not visible yet — no worktree knows about this path. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project]", " [+ New Thread]"] + ); + + // Now add the worktree to the git state and trigger a rescan. + fs.as_fake() + .with_git_state(std::path::Path::new("/project/.git"), true, |state| { + state.worktrees.push(git::repository::Worktree { + path: std::path::PathBuf::from("/wt/rosewood"), + ref_name: "refs/heads/rosewood".into(), + sha: "abc".into(), + }); + }) + .unwrap(); + + cx.run_until_parked(); + + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project]", " Worktree Thread {rosewood}",] + ); + } + + #[gpui::test] + async fn test_two_worktree_workspaces_absorbed_when_main_added(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + // Create the main repo directory (not opened as a workspace yet). 
+ fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a", + }, + "feature-b": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-b", + }, + }, + }, + "src": {}, + }), + ) + .await; + + // Two worktree checkouts whose .git files point back to the main repo. + fs.insert_tree( + "/wt-feature-a", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature-a", + "src": {}, + }), + ) + .await; + fs.insert_tree( + "/wt-feature-b", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature-b", + "src": {}, + }), + ) + .await; + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + let project_a = project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + let project_b = project::Project::test(fs.clone(), ["/wt-feature-b".as_ref()], cx).await; + + project_a.update(cx, |p, cx| p.git_scans_complete(cx)).await; + project_b.update(cx, |p, cx| p.git_scans_complete(cx)).await; + + // Open both worktrees as workspaces — no main repo yet. + let (multi_workspace, cx) = cx + .add_window_view(|window, cx| MultiWorkspace::test_new(project_a.clone(), window, cx)); + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(project_b.clone(), window, cx); + }); + let sidebar = setup_sidebar(&multi_workspace, cx); + + let paths_a = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); + let paths_b = PathList::new(&[std::path::PathBuf::from("/wt-feature-b")]); + save_named_thread("thread-a", "Thread A", &paths_a, cx).await; + save_named_thread("thread-b", "Thread B", &paths_b, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Without the main repo, each worktree has its own header. 
+ assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [wt-feature-a]", + " Thread A", + "v [wt-feature-b]", + " Thread B", + ] + ); + + // Configure the main repo to list both worktrees before opening + // it so the initial git scan picks them up. + fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { + state.worktrees.push(git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: "refs/heads/feature-a".into(), + sha: "aaa".into(), + }); + state.worktrees.push(git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-b"), + ref_name: "refs/heads/feature-b".into(), + sha: "bbb".into(), + }); + }) + .unwrap(); + + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(main_project.clone(), window, cx); + }); + cx.run_until_parked(); + + // Both worktree workspaces should now be absorbed under the main + // repo header, with worktree chips. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec![ + "v [project]", + " Thread A {wt-feature-a}", + " Thread B {wt-feature-b}", + ] + ); + + // Remove feature-b from the main repo's linked worktrees. + // The feature-b workspace should be pruned automatically. + fs.with_git_state(std::path::Path::new("/project/.git"), true, |state| { + state + .worktrees + .retain(|wt| wt.path != std::path::Path::new("/wt-feature-b")); + }) + .unwrap(); + + cx.run_until_parked(); + + // feature-b's workspace is pruned; feature-a remains absorbed + // under the main repo. 
+ assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project]", " Thread A {wt-feature-a}",] + ); + } } diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index 3e4b5c2ce211f68ef7e12895b542db5e6e3ea47c..75d7dbf194068f78b3d566e54bb0fa18f66a9878 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -109,6 +109,7 @@ CREATE TABLE "project_repositories" ( "head_commit_details" VARCHAR, "remote_upstream_url" VARCHAR, "remote_origin_url" VARCHAR, + "linked_worktrees" VARCHAR, PRIMARY KEY (project_id, id) ); diff --git a/crates/collab/migrations/20251208000000_test_schema.sql b/crates/collab/migrations/20251208000000_test_schema.sql index 53543a23f710e49084a7b1127e7b743df6ef97c8..394deaf2c0d6a80a2ab6ab1b95a333081c816e23 100644 --- a/crates/collab/migrations/20251208000000_test_schema.sql +++ b/crates/collab/migrations/20251208000000_test_schema.sql @@ -307,7 +307,8 @@ CREATE TABLE public.project_repositories ( head_commit_details character varying, merge_message character varying, remote_upstream_url character varying, - remote_origin_url character varying + remote_origin_url character varying, + linked_worktrees text ); CREATE TABLE public.project_repository_statuses ( diff --git a/crates/collab/src/db/queries/projects.rs b/crates/collab/src/db/queries/projects.rs index 24cf639a715aa9b88da80375b389debaea0c4295..71365fb3846c1dccbf527d76779ed8816bde243b 100644 --- a/crates/collab/src/db/queries/projects.rs +++ b/crates/collab/src/db/queries/projects.rs @@ -374,6 +374,9 @@ impl Database { merge_message: ActiveValue::set(update.merge_message.clone()), remote_upstream_url: ActiveValue::set(update.remote_upstream_url.clone()), remote_origin_url: ActiveValue::set(update.remote_origin_url.clone()), + linked_worktrees: ActiveValue::Set(Some( + 
serde_json::to_string(&update.linked_worktrees).unwrap(), + )), }) .on_conflict( OnConflict::columns([ @@ -388,6 +391,7 @@ impl Database { project_repository::Column::CurrentMergeConflicts, project_repository::Column::HeadCommitDetails, project_repository::Column::MergeMessage, + project_repository::Column::LinkedWorktrees, ]) .to_owned(), ) @@ -883,6 +887,11 @@ impl Database { remote_upstream_url: db_repository_entry.remote_upstream_url.clone(), remote_origin_url: db_repository_entry.remote_origin_url.clone(), original_repo_abs_path: Some(db_repository_entry.abs_path), + linked_worktrees: db_repository_entry + .linked_worktrees + .as_deref() + .and_then(|s| serde_json::from_str(s).ok()) + .unwrap_or_default(), }); } } diff --git a/crates/collab/src/db/queries/rooms.rs b/crates/collab/src/db/queries/rooms.rs index b4cbd83167b227542d8de1022b7e2cf49f5a7645..3197d142cba7a1969e6fdb9423dc94497f6ca53c 100644 --- a/crates/collab/src/db/queries/rooms.rs +++ b/crates/collab/src/db/queries/rooms.rs @@ -799,6 +799,11 @@ impl Database { remote_upstream_url: db_repository.remote_upstream_url.clone(), remote_origin_url: db_repository.remote_origin_url.clone(), original_repo_abs_path: Some(db_repository.abs_path), + linked_worktrees: db_repository + .linked_worktrees + .as_deref() + .and_then(|s| serde_json::from_str(s).ok()) + .unwrap_or_default(), }); } } diff --git a/crates/collab/src/db/tables/project_repository.rs b/crates/collab/src/db/tables/project_repository.rs index 190ae8d79c54bb78daef4a1568ec75683eb0b0f2..33b20817e61a137285e27525eb5b2a221d3cfd9e 100644 --- a/crates/collab/src/db/tables/project_repository.rs +++ b/crates/collab/src/db/tables/project_repository.rs @@ -24,6 +24,8 @@ pub struct Model { pub head_commit_details: Option, pub remote_upstream_url: Option, pub remote_origin_url: Option, + // JSON array of linked worktree objects + pub linked_worktrees: Option, } #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] diff --git 
a/crates/collab/tests/integration/git_tests.rs b/crates/collab/tests/integration/git_tests.rs index f8c461b91fc41cc5a0e20271a85e685af2801d24..fc20150d662b96be9b6ad4f99ae1f33032b6fb7b 100644 --- a/crates/collab/tests/integration/git_tests.rs +++ b/crates/collab/tests/integration/git_tests.rs @@ -1,9 +1,10 @@ use std::path::{Path, PathBuf}; use call::ActiveCall; +use client::RECEIVE_TIMEOUT; use collections::HashMap; use git::{ - repository::RepoPath, + repository::{RepoPath, Worktree as GitWorktree}, status::{DiffStat, FileStatus, StatusCode, TrackedStatus}, }; use git_ui::{git_panel::GitPanel, project_diff::ProjectDiff}; @@ -365,6 +366,236 @@ async fn test_remote_git_worktrees( ); } +#[gpui::test] +async fn test_linked_worktrees_sync( + executor: BackgroundExecutor, + cx_a: &mut TestAppContext, + cx_b: &mut TestAppContext, + cx_c: &mut TestAppContext, +) { + let mut server = TestServer::start(executor.clone()).await; + let client_a = server.create_client(cx_a, "user_a").await; + let client_b = server.create_client(cx_b, "user_b").await; + let client_c = server.create_client(cx_c, "user_c").await; + server + .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)]) + .await; + let active_call_a = cx_a.read(ActiveCall::global); + + // Set up a git repo with two linked worktrees already present. 
+ client_a + .fs() + .insert_tree( + path!("/project"), + json!({ ".git": {}, "file.txt": "content" }), + ) + .await; + + client_a + .fs() + .with_git_state(Path::new(path!("/project/.git")), true, |state| { + state.worktrees.push(GitWorktree { + path: PathBuf::from(path!("/project")), + ref_name: "refs/heads/main".into(), + sha: "aaa111".into(), + }); + state.worktrees.push(GitWorktree { + path: PathBuf::from(path!("/project/feature-branch")), + ref_name: "refs/heads/feature-branch".into(), + sha: "bbb222".into(), + }); + state.worktrees.push(GitWorktree { + path: PathBuf::from(path!("/project/bugfix-branch")), + ref_name: "refs/heads/bugfix-branch".into(), + sha: "ccc333".into(), + }); + }) + .unwrap(); + + let (project_a, _) = client_a.build_local_project(path!("/project"), cx_a).await; + + // Wait for git scanning to complete on the host. + executor.run_until_parked(); + + // Verify the host sees 2 linked worktrees (main worktree is filtered out). + let host_linked = project_a.read_with(cx_a, |project, cx| { + let repos = project.repositories(cx); + assert_eq!(repos.len(), 1, "host should have exactly 1 repository"); + let repo = repos.values().next().unwrap(); + repo.read(cx).linked_worktrees().to_vec() + }); + assert_eq!( + host_linked.len(), + 2, + "host should have 2 linked worktrees (main filtered out)" + ); + assert_eq!( + host_linked[0].path, + PathBuf::from(path!("/project/feature-branch")) + ); + assert_eq!( + host_linked[0].ref_name.as_ref(), + "refs/heads/feature-branch" + ); + assert_eq!(host_linked[0].sha.as_ref(), "bbb222"); + assert_eq!( + host_linked[1].path, + PathBuf::from(path!("/project/bugfix-branch")) + ); + assert_eq!(host_linked[1].ref_name.as_ref(), "refs/heads/bugfix-branch"); + assert_eq!(host_linked[1].sha.as_ref(), "ccc333"); + + // Share the project and have client B join. 
+ let project_id = active_call_a + .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx)) + .await + .unwrap(); + let project_b = client_b.join_remote_project(project_id, cx_b).await; + + executor.run_until_parked(); + + // Verify the guest sees the same linked worktrees as the host. + let guest_linked = project_b.read_with(cx_b, |project, cx| { + let repos = project.repositories(cx); + assert_eq!(repos.len(), 1, "guest should have exactly 1 repository"); + let repo = repos.values().next().unwrap(); + repo.read(cx).linked_worktrees().to_vec() + }); + assert_eq!( + guest_linked, host_linked, + "guest's linked_worktrees should match host's after initial sync" + ); + + // Now mutate: add a third linked worktree on the host side. + client_a + .fs() + .with_git_state(Path::new(path!("/project/.git")), true, |state| { + state.worktrees.push(GitWorktree { + path: PathBuf::from(path!("/project/hotfix-branch")), + ref_name: "refs/heads/hotfix-branch".into(), + sha: "ddd444".into(), + }); + }) + .unwrap(); + + // Wait for the host to re-scan and propagate the update. + executor.run_until_parked(); + + // Verify host now sees 3 linked worktrees. + let host_linked_updated = project_a.read_with(cx_a, |project, cx| { + let repos = project.repositories(cx); + let repo = repos.values().next().unwrap(); + repo.read(cx).linked_worktrees().to_vec() + }); + assert_eq!( + host_linked_updated.len(), + 3, + "host should now have 3 linked worktrees" + ); + assert_eq!( + host_linked_updated[2].path, + PathBuf::from(path!("/project/hotfix-branch")) + ); + + // Verify the guest also received the update. 
+ let guest_linked_updated = project_b.read_with(cx_b, |project, cx| { + let repos = project.repositories(cx); + let repo = repos.values().next().unwrap(); + repo.read(cx).linked_worktrees().to_vec() + }); + assert_eq!( + guest_linked_updated, host_linked_updated, + "guest's linked_worktrees should match host's after update" + ); + + // Now mutate: remove one linked worktree from the host side. + client_a + .fs() + .with_git_state(Path::new(path!("/project/.git")), true, |state| { + state + .worktrees + .retain(|wt| wt.ref_name.as_ref() != "refs/heads/bugfix-branch"); + }) + .unwrap(); + + executor.run_until_parked(); + + // Verify host now sees 2 linked worktrees (feature-branch and hotfix-branch). + let host_linked_after_removal = project_a.read_with(cx_a, |project, cx| { + let repos = project.repositories(cx); + let repo = repos.values().next().unwrap(); + repo.read(cx).linked_worktrees().to_vec() + }); + assert_eq!( + host_linked_after_removal.len(), + 2, + "host should have 2 linked worktrees after removal" + ); + assert!( + host_linked_after_removal + .iter() + .all(|wt| wt.ref_name.as_ref() != "refs/heads/bugfix-branch"), + "bugfix-branch should have been removed" + ); + + // Verify the guest also reflects the removal. + let guest_linked_after_removal = project_b.read_with(cx_b, |project, cx| { + let repos = project.repositories(cx); + let repo = repos.values().next().unwrap(); + repo.read(cx).linked_worktrees().to_vec() + }); + assert_eq!( + guest_linked_after_removal, host_linked_after_removal, + "guest's linked_worktrees should match host's after removal" + ); + + // Test DB roundtrip: client C joins late, getting state from the database. + // This verifies that linked_worktrees are persisted and restored correctly. 
+ let project_c = client_c.join_remote_project(project_id, cx_c).await; + executor.run_until_parked(); + + let late_joiner_linked = project_c.read_with(cx_c, |project, cx| { + let repos = project.repositories(cx); + assert_eq!( + repos.len(), + 1, + "late joiner should have exactly 1 repository" + ); + let repo = repos.values().next().unwrap(); + repo.read(cx).linked_worktrees().to_vec() + }); + assert_eq!( + late_joiner_linked, host_linked_after_removal, + "late-joining client's linked_worktrees should match host's (DB roundtrip)" + ); + + // Test reconnection: disconnect client B (guest) and reconnect. + // After rejoining, client B should get linked_worktrees back from the DB. + server.disconnect_client(client_b.peer_id().unwrap()); + executor.advance_clock(RECEIVE_TIMEOUT); + executor.run_until_parked(); + + // Client B reconnects automatically. + executor.advance_clock(RECEIVE_TIMEOUT); + executor.run_until_parked(); + + // Verify client B still has the correct linked worktrees after reconnection. 
+ let guest_linked_after_reconnect = project_b.read_with(cx_b, |project, cx| { + let repos = project.repositories(cx); + assert_eq!( + repos.len(), + 1, + "guest should still have exactly 1 repository after reconnect" + ); + let repo = repos.values().next().unwrap(); + repo.read(cx).linked_worktrees().to_vec() + }); + assert_eq!( + guest_linked_after_reconnect, host_linked_after_removal, + "guest's linked_worktrees should survive guest disconnect/reconnect" + ); +} + #[gpui::test] async fn test_diff_stat_sync_between_host_and_downstream_client( cx_a: &mut TestAppContext, diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 85489b6057cd8214ee512fb477428c93cdb32219..0cb610f7dd2d4ccf809d907347bf3b3be2c82444 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -790,7 +790,7 @@ impl GitRepository for FakeGitRepository { } fn diff(&self, _diff: git::repository::DiffType) -> BoxFuture<'_, Result> { - unimplemented!() + future::ready(Ok(String::new())).boxed() } fn diff_stat( diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 0572fd1f4f19beebd3674e1b24c828daffb9973c..e9330014c3f066705ac3ea1e54f5e498c5d22348 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -293,6 +293,7 @@ pub struct RepositorySnapshot { pub remote_origin_url: Option, pub remote_upstream_url: Option, pub stash_entries: GitStash, + pub linked_worktrees: Arc<[GitWorktree]>, } type JobId = u64; @@ -429,6 +430,7 @@ pub enum RepositoryEvent { StatusesChanged, BranchChanged, StashEntriesChanged, + GitWorktreeListChanged, PendingOpsChanged { pending_ops: SumTree }, GraphEvent((LogSource, LogOrder), GitGraphEvent), } @@ -3575,6 +3577,7 @@ impl RepositorySnapshot { remote_origin_url: None, remote_upstream_url: None, stash_entries: Default::default(), + linked_worktrees: Arc::from([]), path_style, } } @@ -3613,6 +3616,11 @@ impl RepositorySnapshot { original_repo_abs_path: Some( 
self.original_repo_abs_path.to_string_lossy().into_owned(), ), + linked_worktrees: self + .linked_worktrees + .iter() + .map(worktree_to_proto) + .collect(), } } @@ -3689,9 +3697,18 @@ impl RepositorySnapshot { original_repo_abs_path: Some( self.original_repo_abs_path.to_string_lossy().into_owned(), ), + linked_worktrees: self + .linked_worktrees + .iter() + .map(worktree_to_proto) + .collect(), } } + pub fn linked_worktrees(&self) -> &[GitWorktree] { + &self.linked_worktrees + } + pub fn status(&self) -> impl Iterator + '_ { self.statuses_by_path.iter().cloned() } @@ -6145,6 +6162,15 @@ impl Repository { cx.emit(RepositoryEvent::StashEntriesChanged) } self.snapshot.stash_entries = new_stash_entries; + let new_linked_worktrees: Arc<[GitWorktree]> = update + .linked_worktrees + .iter() + .map(proto_to_worktree) + .collect(); + if *self.snapshot.linked_worktrees != *new_linked_worktrees { + cx.emit(RepositoryEvent::GitWorktreeListChanged); + } + self.snapshot.linked_worktrees = new_linked_worktrees; self.snapshot.remote_upstream_url = update.remote_upstream_url; self.snapshot.remote_origin_url = update.remote_origin_url; @@ -6901,14 +6927,20 @@ async fn compute_snapshot( })) .boxed() }; - let (statuses, diff_stats) = futures::future::try_join( + let (statuses, diff_stats, all_worktrees) = futures::future::try_join3( backend.status(&[RepoPath::from_rel_path( &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(), )]), diff_stat_future, + backend.worktrees(), ) .await?; + let linked_worktrees: Arc<[GitWorktree]> = all_worktrees + .into_iter() + .filter(|wt| wt.path != *work_directory_abs_path) + .collect(); + let diff_stat_map: HashMap<&RepoPath, DiffStat> = diff_stats.entries.iter().map(|(p, s)| (p, *s)).collect(); let stash_entries = backend.stash_entries().await?; @@ -6938,6 +6970,10 @@ async fn compute_snapshot( events.push(RepositoryEvent::BranchChanged); } + if *linked_worktrees != *prev_snapshot.linked_worktrees { + 
events.push(RepositoryEvent::GitWorktreeListChanged); + } + let remote_origin_url = backend.remote_url("origin").await; let remote_upstream_url = backend.remote_url("upstream").await; @@ -6954,6 +6990,7 @@ async fn compute_snapshot( remote_origin_url, remote_upstream_url, stash_entries, + linked_worktrees, }; Ok((snapshot, events)) diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index 87fdc058f95c045de5f1e8f7ef03c8e32c2fa518..bb6b73ce3b89d51e9bf594c9e01254f5f0d579a4 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -126,6 +126,7 @@ message UpdateRepository { optional string remote_upstream_url = 14; optional string remote_origin_url = 15; optional string original_repo_abs_path = 16; + repeated Worktree linked_worktrees = 17; } message RemoveRepository { From 329df2cecdfb2257bbca03e989732332e324026c Mon Sep 17 00:00:00 2001 From: Katie Geer Date: Thu, 12 Mar 2026 11:34:40 -0700 Subject: [PATCH 531/548] docs: Add voice and tone guidance to agent rules (#51408) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adding more tone guidance to docs' agents.md file Release Notes: - N/A *or* Added/Fixed/Improved ... --------- Co-authored-by: María Craig Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com> --- docs/AGENTS.md | 68 ++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 61 insertions(+), 7 deletions(-) diff --git a/docs/AGENTS.md b/docs/AGENTS.md index fdd61ff6aeaf8cd09ae0b017c5199e7033fba964..54f477472b1b4d22f06623220d5fb4a3eb181db4 100644 --- a/docs/AGENTS.md +++ b/docs/AGENTS.md @@ -126,6 +126,59 @@ Images are hosted externally. Reference format: - With anchors: `[Custom Models](./llm-providers.md#anthropic-custom-models)` - Parent directory: `[Telemetry](../telemetry.md)` +## Voice and Tone + +### Core Principles + +- **Practical over promotional**: Focus on what users can do, not on selling Zed. 
Avoid marketing language like "powerful," "revolutionary," or "best-in-class." +- **Honest about limitations**: When Zed lacks a feature or doesn't match another tool's depth, say so directly. Pair limitations with workarounds or alternative workflows. +- **Direct and concise**: Use short sentences. Get to the point. Developers are scanning, not reading novels. +- **Second person**: Address the reader as "you." Avoid "the user" or "one." +- **Present tense**: "Zed opens the file" not "Zed will open the file." + +### What to Avoid + +- Superlatives without substance ("incredibly fast," "seamlessly integrated") +- Hedging language ("simply," "just," "easily")—if something is simple, the instructions will show it +- Apologetic tone for missing features—state the limitation and move on +- Comparisons that disparage other tools—be factual, not competitive +- Lots of use of em or en dashes. + +## Examples of Good Copy + +### Good: Direct and actionable + +``` +To format on save, open the Settings Editor (`Cmd+,`) and search for `format_on_save`. Set it to `on`. + +Or add this to your settings.json: +{ + "format_on_save": "on" +} +``` + +### Bad: Wordy and promotional + +``` +Zed provides a powerful and seamless formatting experience. Simply navigate to the settings and you'll find the format_on_save option which enables Zed's incredible auto-formatting capabilities. +``` + +### Good: Honest about limitations + +``` +Zed doesn't index your project like IntelliJ does. You open a folder and start working immediately—no waiting. The trade-off: cross-project analysis relies on language servers, which may not go as deep. + +**How to adapt:** +- Use `Cmd+Shift+F` for project-wide text search +- Use `Cmd+O` for symbol search (powered by your language server) +``` + +### Bad: Defensive or dismissive + +``` +While some users might miss indexing, Zed's approach is actually better because it's faster. 
+``` + ## Scope ### In-Scope Documentation @@ -204,13 +257,14 @@ Inherit all conventions from `docs/.rules`. Key points: ### Terminology -| Use | Instead of | -| --------------- | -------------------------------------- | -| folder | directory | -| project | workspace | -| Settings Editor | settings UI | -| command palette | command bar | -| panel | sidebar (be specific: "Project Panel") | +| Use | Instead of | +| --------------- | --------------------------------------------------------------------- | +| folder | directory | +| project | workspace | +| Settings Editor | settings UI | +| command palette | command bar | +| panel | tool window, sidebar (be specific: "Project Panel," "Terminal Panel") | +| language server | LSP (spell out first use, then LSP is fine) | ## Zed-Specific Conventions From ac16a7891f7278cb7c6734a767d46709bf923bc8 Mon Sep 17 00:00:00 2001 From: Skanda Bhat Date: Thu, 12 Mar 2026 20:05:28 +0100 Subject: [PATCH 532/548] vim: Fix visual mode entry at line end near trailing newline (#50709) In Helix, selecting a line with `x` creates a selection from column 0 of the current row to column 0 of the next row. The default `InsertEndOfLine` uses the selection head (which is on the next row) to find the line end, placing the cursor on the wrong line. This commit introduces a new `HelixInsertEndOfLine`, mapped by default to `shift-a` when Helix mode is enabled, that moves left from the head first to land on the correct line. 
Release Notes: - Fixed `shift-a` in Helix select mode placing the cursor on the wrong line after selecting with `x` --------- Co-authored-by: SkandaBhat <9384046+SkandaBhat@users.noreply.github.com> Co-authored-by: dino --- assets/keymaps/vim.json | 1 + crates/vim/src/helix.rs | 119 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 120 insertions(+) diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 1f2742f982bc2165181a797e577b350f5630def9..66693ab0a153a73af1dccb101e0ed36259b774fa 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -427,6 +427,7 @@ "escape": "vim::SwitchToHelixNormalMode", "i": "vim::HelixInsert", "a": "vim::HelixAppend", + "shift-a": "vim::HelixInsertEndOfLine", "ctrl-[": "editor::Cancel", }, }, diff --git a/crates/vim/src/helix.rs b/crates/vim/src/helix.rs index 126683f0b419ae9a44d17d90d760f06b106fad8a..06630d18edfe0d1f3e643f02a1f50e5a1f4a0682 100644 --- a/crates/vim/src/helix.rs +++ b/crates/vim/src/helix.rs @@ -36,6 +36,8 @@ actions!( HelixInsert, /// Appends at the end of the selection. HelixAppend, + /// Inserts at the end of the current Helix cursor line. + HelixInsertEndOfLine, /// Goes to the location of the last modification. HelixGotoLastModification, /// Select entire line or multiple lines, extending downwards. 
@@ -64,6 +66,7 @@ pub fn register(editor: &mut Editor, cx: &mut Context) { Vim::action(editor, cx, Vim::helix_select_lines); Vim::action(editor, cx, Vim::helix_insert); Vim::action(editor, cx, Vim::helix_append); + Vim::action(editor, cx, Vim::helix_insert_end_of_line); Vim::action(editor, cx, Vim::helix_yank); Vim::action(editor, cx, Vim::helix_goto_last_modification); Vim::action(editor, cx, Vim::helix_paste); @@ -600,6 +603,34 @@ impl Vim { }); } + /// Helix-specific implementation of `shift-a` that accounts for Helix's + /// selection model, where selecting a line with `x` creates a selection + /// from column 0 of the current row to column 0 of the next row, so the + /// default [`vim::normal::InsertEndOfLine`] would move the cursor to the + /// end of the wrong line. + fn helix_insert_end_of_line( + &mut self, + _: &HelixInsertEndOfLine, + window: &mut Window, + cx: &mut Context, + ) { + self.start_recording(cx); + self.switch_mode(Mode::Insert, false, window, cx); + self.update_editor(cx, |_, editor, cx| { + editor.change_selections(Default::default(), window, cx, |s| { + s.move_with(&mut |map, selection| { + let cursor = if !selection.is_empty() && !selection.reversed { + movement::left(map, selection.head()) + } else { + selection.head() + }; + selection + .collapse_to(motion::next_line_end(map, cursor, 1), SelectionGoal::None); + }); + }); + }); + } + pub fn helix_replace(&mut self, text: &str, window: &mut Window, cx: &mut Context) { self.update_editor(cx, |_, editor, cx| { editor.transact(window, cx, |editor, window, cx| { @@ -1447,6 +1478,47 @@ mod test { ˇ»line five"}, Mode::HelixNormal, ); + + // Test selecting with an empty line below the current line + cx.set_state( + indoc! {" + line one + line twoˇ + + line four + line five"}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("x"); + cx.assert_state( + indoc! 
{" + line one + «line two + ˇ» + line four + line five"}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("x"); + cx.assert_state( + indoc! {" + line one + «line two + + ˇ»line four + line five"}, + Mode::HelixNormal, + ); + cx.simulate_keystrokes("x"); + cx.assert_state( + indoc! {" + line one + «line two + + line four + ˇ»line five"}, + Mode::HelixNormal, + ); } #[gpui::test] @@ -1848,4 +1920,51 @@ mod test { Mode::HelixSelect, ); } + + #[gpui::test] + async fn test_helix_insert_end_of_line(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + cx.enable_helix(); + + // Ensure that, when lines are selected using `x`, pressing `shift-a` + // actually puts the cursor at the end of the selected lines and not at + // the end of the line below. + cx.set_state( + indoc! {" + line oˇne + line two"}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("x"); + cx.assert_state( + indoc! {" + «line one + ˇ»line two"}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("shift-a"); + cx.assert_state( + indoc! {" + line oneˇ + line two"}, + Mode::Insert, + ); + + cx.set_state( + indoc! {" + line «one + lineˇ» two"}, + Mode::HelixNormal, + ); + + cx.simulate_keystrokes("shift-a"); + cx.assert_state( + indoc! {" + line one + line twoˇ"}, + Mode::Insert, + ); + } } From bc9a3e53af44040fa4c44255527be53aa693645e Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Mar 2026 13:07:39 -0600 Subject: [PATCH 533/548] Tidy up DiffStat (#51411) Release Notes: - Tweaked the git diff status to render + and - using the font instead of icons. 
--- crates/ui/src/components/diff_stat.rs | 30 ++++++--------------------- 1 file changed, 6 insertions(+), 24 deletions(-) diff --git a/crates/ui/src/components/diff_stat.rs b/crates/ui/src/components/diff_stat.rs index ec6d515f1b4f847631fc65fae4ed3ccd3185d271..45539c62869b8c23cb76671d2a7a862c9592a181 100644 --- a/crates/ui/src/components/diff_stat.rs +++ b/crates/ui/src/components/diff_stat.rs @@ -30,32 +30,14 @@ impl RenderOnce for DiffStat { .id(self.id) .gap_1() .child( - h_flex() - .gap_0p5() - .child( - Icon::new(IconName::Plus) - .size(IconSize::XSmall) - .color(Color::Success), - ) - .child( - Label::new(self.added.to_string()) - .color(Color::Success) - .size(self.label_size), - ), + Label::new(format!("+\u{2009}{}", self.added)) + .color(Color::Success) + .size(self.label_size), ) .child( - h_flex() - .gap_0p5() - .child( - Icon::new(IconName::Dash) - .size(IconSize::XSmall) - .color(Color::Error), - ) - .child( - Label::new(self.removed.to_string()) - .color(Color::Error) - .size(self.label_size), - ), + Label::new(format!("\u{2012}\u{2009}{}", self.removed)) + .color(Color::Error) + .size(self.label_size), ) } } From 7a615628457d8ce5dc9a4cd682682726fc9589cd Mon Sep 17 00:00:00 2001 From: Om Chillure Date: Fri, 13 Mar 2026 01:03:48 +0530 Subject: [PATCH 534/548] Fix title/camelCase commands stripping leading indentation Fixes (#50523) Fixes: #48945 Description: The convert:to-title-case, convert:to-upper-camel-case, and convert:to-lower-camel-case editor commands were stripping leading whitespace from each line of a multi-line selection. Root cause: The conversion functions split on whitespace using .split_whitespace() and then joined the resulting words, discarding any leading spaces/tabs before the first word on each line. Fix: Each line now preserves its leading whitespace by capturing and re-prepending it before applying the case conversion. Tests: Added test cases covering multi-line selections with indentation for all three commands. 
Video : [bug1fix.webm](https://github.com/user-attachments/assets/f4d25c55-bc6d-44e6-a989-7d9b4bc59ac9) Release Notes: - Fixed trailing whitespace handling on text case changes --- crates/editor/src/editor.rs | 36 +++++++++++----- crates/editor/src/editor_tests.rs | 71 +++++++++++++++++++++++++++++++ 2 files changed, 97 insertions(+), 10 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 707fb43cc3b573772ef24b7fe7eea69a2ad3c8ec..20d976ad6c0e0a9c82fbaa681efea80f2873d375 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -12438,9 +12438,7 @@ impl Editor { cx: &mut Context, ) { self.manipulate_text(window, cx, |text| { - text.split('\n') - .map(|line| line.to_case(Case::Title)) - .join("\n") + Self::convert_text_case(text, Case::Title) }) } @@ -12450,7 +12448,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_text(window, cx, |text| text.to_case(Case::Snake)) + self.manipulate_text(window, cx, |text| { + Self::convert_text_case(text, Case::Snake) + }) } pub fn convert_to_kebab_case( @@ -12459,7 +12459,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_text(window, cx, |text| text.to_case(Case::Kebab)) + self.manipulate_text(window, cx, |text| { + Self::convert_text_case(text, Case::Kebab) + }) } pub fn convert_to_upper_camel_case( @@ -12469,9 +12471,7 @@ impl Editor { cx: &mut Context, ) { self.manipulate_text(window, cx, |text| { - text.split('\n') - .map(|line| line.to_case(Case::UpperCamel)) - .join("\n") + Self::convert_text_case(text, Case::UpperCamel) }) } @@ -12481,7 +12481,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_text(window, cx, |text| text.to_case(Case::Camel)) + self.manipulate_text(window, cx, |text| { + Self::convert_text_case(text, Case::Camel) + }) } pub fn convert_to_opposite_case( @@ -12509,7 +12511,9 @@ impl Editor { window: &mut Window, cx: &mut Context, ) { - self.manipulate_text(window, 
cx, |text| text.to_case(Case::Sentence)) + self.manipulate_text(window, cx, |text| { + Self::convert_text_case(text, Case::Sentence) + }) } pub fn toggle_case(&mut self, _: &ToggleCase, window: &mut Window, cx: &mut Context) { @@ -12540,6 +12544,18 @@ impl Editor { }) } + fn convert_text_case(text: &str, case: Case) -> String { + text.lines() + .map(|line| { + let trimmed_start = line.trim_start(); + let leading = &line[..line.len() - trimmed_start.len()]; + let trimmed = trimmed_start.trim_end(); + let trailing = &trimmed_start[trimmed.len()..]; + format!("{}{}{}", leading, trimmed.to_case(case), trailing) + }) + .join("\n") + } + pub fn convert_to_rot47( &mut self, _: &ConvertToRot47, diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 0da80a2a73f22afac7085b579494d708be2444a4..f497881531bf4ba39cb22aca4cf90923f7d10b81 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -6268,6 +6268,77 @@ async fn test_manipulate_text(cx: &mut TestAppContext) { «HeLlO, wOrLD!ˇ» "}); + // Test that case conversions backed by `to_case` preserve leading/trailing whitespace. + cx.set_state(indoc! {" + « hello worldˇ» + "}); + cx.update_editor(|e, window, cx| e.convert_to_title_case(&ConvertToTitleCase, window, cx)); + cx.assert_editor_state(indoc! {" + « Hello Worldˇ» + "}); + + cx.set_state(indoc! {" + « hello worldˇ» + "}); + cx.update_editor(|e, window, cx| { + e.convert_to_upper_camel_case(&ConvertToUpperCamelCase, window, cx) + }); + cx.assert_editor_state(indoc! {" + « HelloWorldˇ» + "}); + + cx.set_state(indoc! {" + « hello worldˇ» + "}); + cx.update_editor(|e, window, cx| { + e.convert_to_lower_camel_case(&ConvertToLowerCamelCase, window, cx) + }); + cx.assert_editor_state(indoc! {" + « helloWorldˇ» + "}); + + cx.set_state(indoc! {" + « hello worldˇ» + "}); + cx.update_editor(|e, window, cx| e.convert_to_snake_case(&ConvertToSnakeCase, window, cx)); + cx.assert_editor_state(indoc! 
{" + « hello_worldˇ» + "}); + + cx.set_state(indoc! {" + « hello worldˇ» + "}); + cx.update_editor(|e, window, cx| e.convert_to_kebab_case(&ConvertToKebabCase, window, cx)); + cx.assert_editor_state(indoc! {" + « hello-worldˇ» + "}); + + cx.set_state(indoc! {" + « hello worldˇ» + "}); + cx.update_editor(|e, window, cx| { + e.convert_to_sentence_case(&ConvertToSentenceCase, window, cx) + }); + cx.assert_editor_state(indoc! {" + « Hello worldˇ» + "}); + + cx.set_state(indoc! {" + « hello world\t\tˇ» + "}); + cx.update_editor(|e, window, cx| e.convert_to_title_case(&ConvertToTitleCase, window, cx)); + cx.assert_editor_state(indoc! {" + « Hello World\t\tˇ» + "}); + + cx.set_state(indoc! {" + « hello world\t\tˇ» + "}); + cx.update_editor(|e, window, cx| e.convert_to_snake_case(&ConvertToSnakeCase, window, cx)); + cx.assert_editor_state(indoc! {" + « hello_world\t\tˇ» + "}); + // Test selections with `line_mode() = true`. cx.update_editor(|editor, _window, _cx| editor.selections.set_line_mode(true)); cx.set_state(indoc! {" From ec2659a095c1e073f8918469e2528c277a76567f Mon Sep 17 00:00:00 2001 From: Tommy Han Date: Fri, 13 Mar 2026 03:35:42 +0800 Subject: [PATCH 535/548] Add hotkeys and actions for toggle light and dark theme (#49027) Mentioned in #47258 Release Notes: - Added hotkey options and actions for toggling light and dark theme. 
- Add default keymap as `cmd/ctrl+k cmd/ctrl+shift+t` --- assets/keymaps/default-linux.json | 1 + assets/keymaps/default-macos.json | 1 + assets/keymaps/default-windows.json | 1 + crates/theme/src/settings.rs | 12 ++-- crates/workspace/src/workspace.rs | 91 ++++++++++++++++++++++++++++- crates/zed/src/zed.rs | 1 + crates/zed_actions/src/lib.rs | 6 ++ docs/src/appearance.md | 8 ++- docs/src/themes.md | 29 +++++++++ 9 files changed, 139 insertions(+), 11 deletions(-) diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index 5780eedb4445f613cbbd4e9a09976f2d475b28c7..0516221b6e0849ab631c021d020050be99aaf728 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -624,6 +624,7 @@ "ctrl-shift-t": "pane::ReopenClosedItem", "ctrl-k ctrl-s": "zed::OpenKeymap", "ctrl-k ctrl-t": "theme_selector::Toggle", + "ctrl-k ctrl-shift-t": "theme::ToggleMode", "ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-t": "project_symbols::Toggle", "ctrl-p": "file_finder::Toggle", diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 6fc6905dd5f4502ff7ee90e7f6f9499b2e03fa6a..a4aec7cfe8053f3f23b43652f7e58f319c9691f6 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -691,6 +691,7 @@ "cmd-shift-t": "pane::ReopenClosedItem", "cmd-k cmd-s": "zed::OpenKeymap", "cmd-k cmd-t": "theme_selector::Toggle", + "cmd-k cmd-shift-t": "theme::ToggleMode", "ctrl-alt-cmd-p": "settings_profile_selector::Toggle", "cmd-t": "project_symbols::Toggle", "cmd-p": "file_finder::Toggle", diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index ac23d45695e11ec46172c566282ea65bf7774ac8..c10054d5813c6deae33b7a790b3639e7f2c802aa 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -616,6 +616,7 @@ "ctrl-shift-t": "pane::ReopenClosedItem", "ctrl-k ctrl-s": "zed::OpenKeymap", "ctrl-k ctrl-t": 
"theme_selector::Toggle", + "ctrl-k ctrl-shift-t": "theme::ToggleMode", "ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-t": "project_symbols::Toggle", "ctrl-p": "file_finder::Toggle", diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index a092e2698722a980f0b2a4b5ea64b9bfa0f33d01..c09d3daf6074f24248de12e56ebc2122e2c123e7 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -378,14 +378,14 @@ pub fn set_mode(content: &mut SettingsContent, mode: ThemeAppearanceMode) { if let Some(selection) = theme.theme.as_mut() { match selection { - settings::ThemeSelection::Static(theme) => { + settings::ThemeSelection::Static(_) => { // If the theme was previously set to a single static theme, - // we don't know whether it was a light or dark theme, so we - // just use it for both. + // reset to the default dynamic light/dark pair and let users + // customize light/dark themes explicitly afterward. *selection = settings::ThemeSelection::Dynamic { - mode, - light: theme.clone(), - dark: theme.clone(), + mode: ThemeAppearanceMode::System, + light: ThemeName(settings::DEFAULT_LIGHT_THEME.into()), + dark: ThemeName(settings::DEFAULT_DARK_THEME.into()), }; } settings::ThemeSelection::Dynamic { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index b57b5028a4e5558b1f90c715463165ba68d914e3..949dc127a7465c4cf3941ee4c4982fad37d06281 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -146,7 +146,7 @@ pub use workspace_settings::{ AutosaveSetting, BottomDockLayout, RestoreOnStartupBehavior, StatusBarSettings, TabBarSettings, WorkspaceSettings, }; -use zed_actions::{Spawn, feedback::FileBugReport}; +use zed_actions::{Spawn, feedback::FileBugReport, theme::ToggleMode}; use crate::{item::ItemBufferKind, notifications::NotificationId}; use crate::{ @@ -6499,6 +6499,7 @@ impl Workspace { .on_action(cx.listener(Self::move_item_to_pane_at_index)) 
.on_action(cx.listener(Self::move_focused_panel_to_next_position)) .on_action(cx.listener(Self::toggle_edit_predictions_all_files)) + .on_action(cx.listener(Self::toggle_theme_mode)) .on_action(cx.listener(|workspace, _: &Unfollow, window, cx| { let pane = workspace.active_pane().clone(); workspace.unfollow_in_pane(&pane, window, cx); @@ -7153,6 +7154,23 @@ impl Workspace { }); } + fn toggle_theme_mode(&mut self, _: &ToggleMode, _window: &mut Window, cx: &mut Context) { + let current_mode = ThemeSettings::get_global(cx).theme.mode(); + let next_mode = match current_mode { + Some(theme::ThemeAppearanceMode::Light) => theme::ThemeAppearanceMode::Dark, + Some(theme::ThemeAppearanceMode::Dark) => theme::ThemeAppearanceMode::Light, + Some(theme::ThemeAppearanceMode::System) | None => match cx.theme().appearance() { + theme::Appearance::Light => theme::ThemeAppearanceMode::Dark, + theme::Appearance::Dark => theme::ThemeAppearanceMode::Light, + }, + }; + + let fs = self.project().read(cx).fs().clone(); + settings::update_settings_file(fs, cx, move |settings, _cx| { + theme::set_mode(settings, next_mode); + }); + } + pub fn show_worktree_trust_security_modal( &mut self, toggle: bool, @@ -9964,7 +9982,7 @@ pub fn with_active_or_new_workspace( #[cfg(test)] mod tests { - use std::{cell::RefCell, rc::Rc}; + use std::{cell::RefCell, rc::Rc, sync::Arc, time::Duration}; use super::*; use crate::{ @@ -9982,6 +10000,7 @@ mod tests { use project::{Project, ProjectEntryId}; use serde_json::json; use settings::SettingsStore; + use util::path; use util::rel_path::rel_path; #[gpui::test] @@ -13540,6 +13559,74 @@ mod tests { }); } + #[gpui::test] + async fn test_toggle_theme_mode_persists_and_updates_active_theme(cx: &mut TestAppContext) { + use settings::{ThemeName, ThemeSelection}; + use theme::SystemAppearance; + use zed_actions::theme::ToggleMode; + + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let settings_fs: Arc = fs.clone(); + + fs.insert_tree(path!("/root"), 
json!({ "file.rs": "fn main() {}\n" })) + .await; + + // Build a test project and workspace view so the test can invoke + // the workspace action handler the same way the UI would. + let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await; + let (workspace, cx) = + cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx)); + + // Seed the settings file with a plain static light theme so the + // first toggle always starts from a known persisted state. + workspace.update_in(cx, |_workspace, _window, cx| { + *SystemAppearance::global_mut(cx) = SystemAppearance(theme::Appearance::Light); + settings::update_settings_file(settings_fs.clone(), cx, |settings, _cx| { + settings.theme.theme = Some(ThemeSelection::Static(ThemeName("One Light".into()))); + }); + }); + cx.executor().advance_clock(Duration::from_millis(200)); + cx.run_until_parked(); + + // Confirm the initial persisted settings contain the static theme + // we just wrote before any toggling happens. + let settings_text = SettingsStore::load_settings(&settings_fs).await.unwrap(); + assert!(settings_text.contains(r#""theme": "One Light""#)); + + // Toggle once. This should migrate the persisted theme settings + // into light/dark slots and enable system mode. + workspace.update_in(cx, |workspace, window, cx| { + workspace.toggle_theme_mode(&ToggleMode, window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(200)); + cx.run_until_parked(); + + // 1. Static -> Dynamic + // this assertion checks theme changed from static to dynamic. + let settings_text = SettingsStore::load_settings(&settings_fs).await.unwrap(); + let parsed: serde_json::Value = settings::parse_json_with_comments(&settings_text).unwrap(); + assert_eq!( + parsed["theme"], + serde_json::json!({ + "mode": "system", + "light": "One Light", + "dark": "One Dark" + }) + ); + + // 2. 
Toggle again, suppose it will change the mode to light + workspace.update_in(cx, |workspace, window, cx| { + workspace.toggle_theme_mode(&ToggleMode, window, cx); + }); + cx.executor().advance_clock(Duration::from_millis(200)); + cx.run_until_parked(); + + let settings_text = SettingsStore::load_settings(&settings_fs).await.unwrap(); + assert!(settings_text.contains(r#""mode": "light""#)); + } + fn dirty_project_item(id: u64, path: &str, cx: &mut App) -> Entity { let item = TestProjectItem::new(id, path, cx); item.update(cx, |item, _| { diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 25defa1dde5977bd94935dafd60d97ae84b5a323..511b0edc6ac168fa47b52e66c9632487de86acf4 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -4878,6 +4878,7 @@ mod tests { "task", "terminal", "terminal_panel", + "theme", "theme_selector", "toast", "toolchain", diff --git a/crates/zed_actions/src/lib.rs b/crates/zed_actions/src/lib.rs index 854f71175e79c84f03261a3d58f89638b7259e54..8edc80b4ec7816cd9e2ae2d7b995dd74b8128a9a 100644 --- a/crates/zed_actions/src/lib.rs +++ b/crates/zed_actions/src/lib.rs @@ -325,6 +325,12 @@ pub mod feedback { ); } +pub mod theme { + use gpui::actions; + + actions!(theme, [ToggleMode]); +} + pub mod theme_selector { use gpui::Action; use schemars::JsonSchema; diff --git a/docs/src/appearance.md b/docs/src/appearance.md index fdf5e239ccf581988e439845d0c2f94e4bb1b95c..1c26d67100379462298c4026dbf578b936b61fb1 100644 --- a/docs/src/appearance.md +++ b/docs/src/appearance.md @@ -15,11 +15,13 @@ Here's how to make Zed feel like home: 1. **Pick a theme**: Press {#kb theme_selector::Toggle} to open the Theme Selector. Arrow through the list to preview themes in real time, and press Enter to apply. -2. **Choose an icon theme**: Run `icon theme selector: toggle` from the command palette to browse icon themes. +2. **Toggle light/dark mode quickly**: Press {#kb theme::ToggleMode}. 
If you currently use a static `"theme": "..."` value, the first toggle converts it to dynamic mode settings with default themes.
 
-3. **Set your font**: Open the Settings Editor with {#kb zed::OpenSettings} and search for `buffer_font_family`. Set it to your preferred coding font.
+3. **Choose an icon theme**: Run `icon theme selector: toggle` from the command palette to browse icon themes.
 
-4. **Adjust font size**: In the same Settings Editor, search for `buffer_font_size` and `ui_font_size` to tweak the editor and interface text sizes.
+4. **Set your font**: Open the Settings Editor with {#kb zed::OpenSettings} and search for `buffer_font_family`. Set it to your preferred coding font.
+
+5. **Adjust font size**: In the same Settings Editor, search for `buffer_font_size` and `ui_font_size` to tweak the editor and interface text sizes.
 
 That's it. You now have a personalized Zed setup.
 
diff --git a/docs/src/themes.md b/docs/src/themes.md
index 0d3103eaab46fefff22095d14cab02f799ef851d..1dd2c144e2a2a53a50e21f6fc51f3b0c121eca25 100644
--- a/docs/src/themes.md
+++ b/docs/src/themes.md
@@ -44,6 +44,35 @@ You can set the mode to `"dark"` or `"light"` to ignore the current system mode.
 }
 ```
 
+### Toggle Theme Mode from the Keyboard
+
+Use {#kb theme::ToggleMode} to switch the current theme mode between light and dark.
+
+If your settings currently use a static theme value, like:
+
+```json [settings]
+{
+  "theme": "Any Theme"
+}
+```
+
+the first toggle converts it to dynamic theme selection with default themes:
+
+```json [settings]
+{
+  "theme": {
+    "mode": "system",
+    "light": "One Light",
+    "dark": "One Dark"
+  }
+}
+```
+
+Your previous static theme is not carried over, so set both `light` and `dark` to your preferred themes after the first toggle.
+
+After that, toggling updates only `theme.mode`.
+If `light` and `dark` are the same theme, the first toggle may not produce a visible UI change until you set different values for `light` and `dark`.
+ ## Theme Overrides To override specific attributes of a theme, use the `theme_overrides` setting. From 5586fbf288b909ade034488e0953a7e95857a16c Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 12 Mar 2026 17:41:56 -0300 Subject: [PATCH 536/548] agent_ui: Add UI refinements to the sidebar and archive view (#51419) Adds a loading state to the archive view and a couple of other tiny UI tweaks to the thread item and such. Release Notes: - N/A --- crates/agent_ui/src/sidebar.rs | 4 +- crates/agent_ui/src/threads_archive_view.rs | 92 ++++++++++++++------- crates/ui/src/components/ai/thread_item.rs | 5 +- 3 files changed, 67 insertions(+), 34 deletions(-) diff --git a/crates/agent_ui/src/sidebar.rs b/crates/agent_ui/src/sidebar.rs index 24c5d5f5e5295a7e25af9f486323a16a2405c8e0..7d7779e75504a93c7923ba26ec87e4fce4bbceb9 100644 --- a/crates/agent_ui/src/sidebar.rs +++ b/crates/agent_ui/src/sidebar.rs @@ -1555,7 +1555,7 @@ impl Sidebar { let id = SharedString::from(format!("view-more-{}", ix)); let (icon, label) = if is_fully_expanded { - (IconName::ListCollapse, "Collapse List") + (IconName::ListCollapse, "Collapse") } else { (IconName::Plus, "View More") }; @@ -1685,7 +1685,7 @@ impl Sidebar { h_flex() .p_1p5() .border_t_1() - .border_color(cx.theme().colors().border) + .border_color(cx.theme().colors().border_variant) .child( Button::new("view-archive", "Archive") .full_width() diff --git a/crates/agent_ui/src/threads_archive_view.rs b/crates/agent_ui/src/threads_archive_view.rs index 8ee0eedbd8702c7901258087af5d149fcf210648..3d7dba591dfa60f7408f9710561863791bcd802b 100644 --- a/crates/agent_ui/src/threads_archive_view.rs +++ b/crates/agent_ui/src/threads_archive_view.rs @@ -15,8 +15,8 @@ use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrevious}; use project::{AgentServerStore, ExternalAgentServerName}; use theme::ActiveTheme; use ui::{ - ButtonLike, ContextMenu, ContextMenuEntry, HighlightedLabel, 
ListItem, PopoverMenu, - PopoverMenuHandle, Tab, TintColor, Tooltip, WithScrollbar, prelude::*, + ButtonLike, CommonAnimationExt, ContextMenu, ContextMenuEntry, HighlightedLabel, ListItem, + PopoverMenu, PopoverMenuHandle, Tab, TintColor, Tooltip, WithScrollbar, prelude::*, }; use util::ResultExt as _; use zed_actions::editor::{MoveDown, MoveUp}; @@ -110,6 +110,7 @@ pub struct ThreadsArchiveView { _subscriptions: Vec, selected_agent_menu: PopoverMenuHandle, _refresh_history_task: Task<()>, + is_loading: bool, } impl ThreadsArchiveView { @@ -152,13 +153,20 @@ impl ThreadsArchiveView { _subscriptions: vec![filter_editor_subscription], selected_agent_menu: PopoverMenuHandle::default(), _refresh_history_task: Task::ready(()), + is_loading: true, }; - this.set_selected_agent(Agent::NativeAgent, cx); + this.set_selected_agent(Agent::NativeAgent, window, cx); this } - fn set_selected_agent(&mut self, agent: Agent, cx: &mut Context) { + fn set_selected_agent(&mut self, agent: Agent, window: &mut Window, cx: &mut Context) { self.selected_agent = agent.clone(); + self.is_loading = true; + self.history = None; + self.items.clear(); + self.selection = None; + self.list_state.reset(0); + self.reset_filter_editor_text(window, cx); let server = agent.server(self.fs.clone(), self.thread_store.clone()); let connection = self @@ -184,6 +192,7 @@ impl ThreadsArchiveView { history.refresh_full_history(cx); }); self.history = Some(history); + self.is_loading = false; self.update_items(cx); cx.notify(); } @@ -477,9 +486,9 @@ impl ThreadsArchiveView { .icon_color(Color::Muted) .handler({ let this = this.clone(); - move |_, cx| { + move |window, cx| { this.update(cx, |this, cx| { - this.set_selected_agent(Agent::NativeAgent, cx) + this.set_selected_agent(Agent::NativeAgent, window, cx) }) .ok(); } @@ -537,9 +546,9 @@ impl ThreadsArchiveView { let agent = Agent::Custom { name: item.id.0.clone(), }; - move |_, cx| { + move |window, cx| { this.update(cx, |this, cx| { - 
this.set_selected_agent(agent.clone(), cx) + this.set_selected_agent(agent.clone(), window, cx) }) .ok(); } @@ -565,7 +574,6 @@ impl ThreadsArchiveView { h_flex() .h(Tab::container_height(cx)) .px_1() - .gap_1p5() .justify_between() .border_b_1() .border_color(cx.theme().colors().border) @@ -610,12 +618,54 @@ impl Render for ThreadsArchiveView { let is_empty = self.items.is_empty(); let has_query = !self.filter_editor.read(cx).text(cx).is_empty(); - let empty_state_container = |label: SharedString| { + let content = if self.is_loading { v_flex() .flex_1() .justify_center() .items_center() - .child(Label::new(label).size(LabelSize::Small).color(Color::Muted)) + .child( + Icon::new(IconName::LoadCircle) + .size(IconSize::Small) + .color(Color::Muted) + .with_rotate_animation(2), + ) + .into_any_element() + } else if is_empty && has_query { + v_flex() + .flex_1() + .justify_center() + .items_center() + .child( + Label::new("No threads match your search.") + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any_element() + } else if is_empty { + v_flex() + .flex_1() + .justify_center() + .items_center() + .child( + Label::new("No archived threads yet.") + .size(LabelSize::Small) + .color(Color::Muted), + ) + .into_any_element() + } else { + v_flex() + .flex_1() + .overflow_hidden() + .child( + list( + self.list_state.clone(), + cx.processor(Self::render_list_entry), + ) + .flex_1() + .size_full(), + ) + .vertical_scrollbar_for(&self.list_state, window, cx) + .into_any_element() }; v_flex() @@ -631,24 +681,6 @@ impl Render for ThreadsArchiveView { .size_full() .bg(cx.theme().colors().surface_background) .child(self.render_header(cx)) - .child(if is_empty && has_query { - empty_state_container("No threads match your search.".into()).into_any_element() - } else if is_empty { - empty_state_container("No archived threads yet.".into()).into_any_element() - } else { - v_flex() - .flex_1() - .overflow_hidden() - .child( - list( - self.list_state.clone(), - 
cx.processor(Self::render_list_entry), - ) - .flex_1() - .size_full(), - ) - .vertical_scrollbar_for(&self.list_state, window, cx) - .into_any_element() - }) + .child(content) } } diff --git a/crates/ui/src/components/ai/thread_item.rs b/crates/ui/src/components/ai/thread_item.rs index 5be91e9d98a1219dcfbbba70a5541ba7b827cfc5..13e1db8f483ea251a6f65b61054c205d040a0d53 100644 --- a/crates/ui/src/components/ai/thread_item.rs +++ b/crates/ui/src/components/ai/thread_item.rs @@ -235,9 +235,9 @@ impl RenderOnce for ThreadItem { let gradient_overlay = GradientFade::new(base_bg, color.element_hover, color.element_active) - .width(px(32.0)) + .width(px(64.0)) .right(px(-10.0)) - .gradient_stop(0.8) + .gradient_stop(0.75) .group_name("thread-item"); let has_diff_stats = self.added.is_some() || self.removed.is_some(); @@ -264,6 +264,7 @@ impl RenderOnce for ThreadItem { .border_color(color.border_focused) }) .hover(|s| s.bg(color.element_hover)) + .active(|s| s.bg(color.element_active)) .on_hover(self.on_hover) .child( h_flex() From df8bafdccf88ea4ade0c25707db7fb8d8150ad1e Mon Sep 17 00:00:00 2001 From: Ben Kunkle Date: Thu, 12 Mar 2026 15:56:37 -0500 Subject: [PATCH 537/548] ep: Avoid including collaborator edits in edit history sent to model (#51343) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A *or* Added/Fixed/Improved ... 
--- crates/edit_prediction/src/edit_prediction.rs | 308 +++++++++++++---- .../src/edit_prediction_tests.rs | 326 ++++++++++++++---- crates/zeta_prompt/src/zeta_prompt.rs | 110 +++++- 3 files changed, 603 insertions(+), 141 deletions(-) diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs index 63240ddd53108f0b2450386150958e23f975d7ed..2347a731cb5b5f3590dafcf0a57dc0bab88c380c 100644 --- a/crates/edit_prediction/src/edit_prediction.rs +++ b/crates/edit_prediction/src/edit_prediction.rs @@ -75,6 +75,7 @@ pub mod zeta; #[cfg(test)] mod edit_prediction_tests; +use crate::cursor_excerpt::expand_context_syntactically_then_linewise; use crate::example_spec::ExampleSpec; use crate::license_detection::LicenseDetectionWatcher; use crate::mercury::Mercury; @@ -99,8 +100,9 @@ actions!( ); /// Maximum number of events to track. -const EVENT_COUNT_MAX: usize = 6; +const EVENT_COUNT_MAX: usize = 10; const CHANGE_GROUPING_LINE_SPAN: u32 = 8; +const COLLABORATOR_EDIT_LOCALITY_CONTEXT_TOKENS: usize = 512; const LAST_CHANGE_GROUPING_TIME: Duration = Duration::from_secs(1); const ZED_PREDICT_DATA_COLLECTION_CHOICE: &str = "zed_predict_data_collection_choice"; const REJECT_REQUEST_DEBOUNCE: Duration = Duration::from_secs(15); @@ -242,21 +244,31 @@ pub enum UserActionType { pub struct StoredEvent { pub event: Arc, pub old_snapshot: TextBufferSnapshot, - pub edit_range: Range, + pub new_snapshot_version: clock::Global, + pub total_edit_range: Range, } impl StoredEvent { fn can_merge( &self, - next_old_event: &&&StoredEvent, - new_snapshot: &TextBufferSnapshot, - last_edit_range: &Range, + next_old_event: &StoredEvent, + latest_snapshot: &TextBufferSnapshot, + latest_edit_range: &Range, ) -> bool { - // Events must be for the same buffer + // Events must be for the same buffer and be contiguous across included snapshots to be mergeable. 
if self.old_snapshot.remote_id() != next_old_event.old_snapshot.remote_id() { return false; } - if self.old_snapshot.remote_id() != new_snapshot.remote_id() { + if self.old_snapshot.remote_id() != latest_snapshot.remote_id() { + return false; + } + if self.new_snapshot_version != next_old_event.old_snapshot.version { + return false; + } + if !latest_snapshot + .version + .observed_all(&next_old_event.new_snapshot_version) + { return false; } @@ -281,9 +293,9 @@ impl StoredEvent { return false; } - let left_range = self.edit_range.to_point(new_snapshot); - let right_range = next_old_event.edit_range.to_point(new_snapshot); - let latest_range = last_edit_range.to_point(&new_snapshot); + let left_range = self.total_edit_range.to_point(latest_snapshot); + let right_range = next_old_event.total_edit_range.to_point(latest_snapshot); + let latest_range = latest_edit_range.to_point(latest_snapshot); // Events near to the latest edit are not merged if their sources differ. if lines_between_ranges(&left_range, &latest_range) @@ -516,7 +528,9 @@ struct LastEvent { new_snapshot: TextBufferSnapshot, old_file: Option>, new_file: Option>, - edit_range: Option>, + latest_edit_range: Range, + total_edit_range: Range, + total_edit_range_at_last_pause_boundary: Option>, predicted: bool, snapshot_after_last_editing_pause: Option, last_edit_time: Option, @@ -542,8 +556,11 @@ impl LastEvent { }) }); - let (diff, edit_range) = - compute_diff_between_snapshots(&self.old_snapshot, &self.new_snapshot)?; + let (diff, edit_range) = compute_diff_between_snapshots_in_range( + &self.old_snapshot, + &self.new_snapshot, + &self.total_edit_range, + )?; if path == old_path && diff.is_empty() { None @@ -556,9 +573,10 @@ impl LastEvent { in_open_source_repo, predicted: self.predicted, }), - edit_range: self.new_snapshot.anchor_before(edit_range.start) - ..self.new_snapshot.anchor_before(edit_range.end), old_snapshot: self.old_snapshot.clone(), + new_snapshot_version: self.new_snapshot.version.clone(), 
+ total_edit_range: self.new_snapshot.anchor_before(edit_range.start) + ..self.new_snapshot.anchor_before(edit_range.end), }) } } @@ -568,12 +586,28 @@ impl LastEvent { return (self.clone(), None); }; + let total_edit_range_before_pause = self + .total_edit_range_at_last_pause_boundary + .clone() + .unwrap_or_else(|| self.total_edit_range.clone()); + + let Some(total_edit_range_after_pause) = + compute_total_edit_range_between_snapshots(boundary_snapshot, &self.new_snapshot) + else { + return (self.clone(), None); + }; + + let latest_edit_range_before_pause = total_edit_range_before_pause.clone(); + let latest_edit_range_after_pause = total_edit_range_after_pause.clone(); + let before = LastEvent { old_snapshot: self.old_snapshot.clone(), new_snapshot: boundary_snapshot.clone(), old_file: self.old_file.clone(), new_file: self.new_file.clone(), - edit_range: None, + latest_edit_range: latest_edit_range_before_pause, + total_edit_range: total_edit_range_before_pause, + total_edit_range_at_last_pause_boundary: None, predicted: self.predicted, snapshot_after_last_editing_pause: None, last_edit_time: self.last_edit_time, @@ -584,7 +618,9 @@ impl LastEvent { new_snapshot: self.new_snapshot.clone(), old_file: self.old_file.clone(), new_file: self.new_file.clone(), - edit_range: None, + latest_edit_range: latest_edit_range_after_pause, + total_edit_range: total_edit_range_after_pause, + total_edit_range_at_last_pause_boundary: None, predicted: self.predicted, snapshot_after_last_editing_pause: None, last_edit_time: self.last_edit_time, @@ -594,21 +630,78 @@ impl LastEvent { } } -pub(crate) fn compute_diff_between_snapshots( +fn compute_total_edit_range_between_snapshots( old_snapshot: &TextBufferSnapshot, new_snapshot: &TextBufferSnapshot, -) -> Option<(String, Range)> { +) -> Option> { let edits: Vec> = new_snapshot .edits_since::(&old_snapshot.version) .collect(); let (first_edit, last_edit) = edits.first().zip(edits.last())?; - - let old_start_point = 
old_snapshot.offset_to_point(first_edit.old.start); - let old_end_point = old_snapshot.offset_to_point(last_edit.old.end); let new_start_point = new_snapshot.offset_to_point(first_edit.new.start); let new_end_point = new_snapshot.offset_to_point(last_edit.new.end); + Some(new_snapshot.anchor_before(new_start_point)..new_snapshot.anchor_before(new_end_point)) +} + +fn compute_old_range_for_new_range( + old_snapshot: &TextBufferSnapshot, + new_snapshot: &TextBufferSnapshot, + total_edit_range: &Range, +) -> Option> { + let new_start_offset = total_edit_range.start.to_offset(new_snapshot); + let new_end_offset = total_edit_range.end.to_offset(new_snapshot); + + let edits: Vec> = new_snapshot + .edits_since::(&old_snapshot.version) + .collect(); + let mut old_start_offset = None; + let mut old_end_offset = None; + let mut delta: isize = 0; + + for edit in &edits { + if old_start_offset.is_none() && new_start_offset <= edit.new.end { + old_start_offset = Some(if new_start_offset < edit.new.start { + new_start_offset.checked_add_signed(-delta)? + } else { + edit.old.start + }); + } + + if old_end_offset.is_none() && new_end_offset <= edit.new.end { + old_end_offset = Some(if new_end_offset < edit.new.start { + new_end_offset.checked_add_signed(-delta)? 
+ } else { + edit.old.end + }); + } + + delta += edit.new.len() as isize - edit.old.len() as isize; + } + + let old_start_offset = + old_start_offset.unwrap_or_else(|| new_start_offset.saturating_add_signed(-delta)); + let old_end_offset = + old_end_offset.unwrap_or_else(|| new_end_offset.saturating_add_signed(-delta)); + + Some( + old_snapshot.offset_to_point(old_start_offset) + ..old_snapshot.offset_to_point(old_end_offset), + ) +} + +fn compute_diff_between_snapshots_in_range( + old_snapshot: &TextBufferSnapshot, + new_snapshot: &TextBufferSnapshot, + total_edit_range: &Range, +) -> Option<(String, Range)> { + let new_start_point = total_edit_range.start.to_point(new_snapshot); + let new_end_point = total_edit_range.end.to_point(new_snapshot); + let old_range = compute_old_range_for_new_range(old_snapshot, new_snapshot, total_edit_range)?; + let old_start_point = old_range.start; + let old_end_point = old_range.end; + const CONTEXT_LINES: u32 = 3; let old_context_start_row = old_start_point.row.saturating_sub(CONTEXT_LINES); @@ -1198,10 +1291,12 @@ impl EditPredictionStore { cx.subscribe(buffer, { let project = project.downgrade(); move |this, buffer, event, cx| { - if let language::BufferEvent::Edited { .. 
} = event + if let language::BufferEvent::Edited { is_local } = event && let Some(project) = project.upgrade() { - this.report_changes_for_buffer(&buffer, &project, false, cx); + this.report_changes_for_buffer( + &buffer, &project, false, *is_local, cx, + ); } } }), @@ -1223,6 +1318,7 @@ impl EditPredictionStore { buffer: &Entity, project: &Entity, is_predicted: bool, + is_local: bool, cx: &mut Context, ) { let project_state = self.get_or_init_project(project, cx); @@ -1234,7 +1330,6 @@ impl EditPredictionStore { if new_snapshot.version == registered_buffer.snapshot.version { return; } - let old_file = mem::replace(&mut registered_buffer.file, new_file.clone()); let old_snapshot = mem::replace(&mut registered_buffer.snapshot, new_snapshot.clone()); let mut num_edits = 0usize; @@ -1267,28 +1362,44 @@ impl EditPredictionStore { } } - let action_type = match (total_deleted, total_inserted, num_edits) { - (0, ins, n) if ins == n => UserActionType::InsertChar, - (0, _, _) => UserActionType::InsertSelection, - (del, 0, n) if del == n => UserActionType::DeleteChar, - (_, 0, _) => UserActionType::DeleteSelection, - (_, ins, n) if ins == n => UserActionType::InsertChar, - (_, _, _) => UserActionType::InsertSelection, - }; + let include_in_history = is_local + || collaborator_edit_overlaps_locality_region( + project_state, + project, + buffer, + &buf.snapshot(), + &edit_range, + cx, + ); - if let Some(offset) = last_offset { - let point = new_snapshot.offset_to_point(offset); - let timestamp_epoch_ms = SystemTime::now() - .duration_since(UNIX_EPOCH) - .map(|d| d.as_millis() as u64) - .unwrap_or(0); - project_state.record_user_action(UserActionRecord { - action_type, - buffer_id: buffer.entity_id(), - line_number: point.row, - offset, - timestamp_epoch_ms, - }); + if is_local { + let action_type = match (total_deleted, total_inserted, num_edits) { + (0, ins, n) if ins == n => UserActionType::InsertChar, + (0, _, _) => UserActionType::InsertSelection, + (del, 0, n) if del == n 
=> UserActionType::DeleteChar, + (_, 0, _) => UserActionType::DeleteSelection, + (_, ins, n) if ins == n => UserActionType::InsertChar, + (_, _, _) => UserActionType::InsertSelection, + }; + + if let Some(offset) = last_offset { + let point = new_snapshot.offset_to_point(offset); + let timestamp_epoch_ms = SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|d| d.as_millis() as u64) + .unwrap_or(0); + project_state.record_user_action(UserActionRecord { + action_type, + buffer_id: buffer.entity_id(), + line_number: point.row, + offset, + timestamp_epoch_ms, + }); + } + } + + if !include_in_history { + return; } let events = &mut project_state.events; @@ -1302,15 +1413,10 @@ impl EditPredictionStore { let should_coalesce = is_next_snapshot_of_same_buffer && !prediction_source_changed - && last_event - .edit_range - .as_ref() - .is_some_and(|last_edit_range| { - lines_between_ranges( - &edit_range.to_point(&new_snapshot), - &last_edit_range.to_point(&new_snapshot), - ) <= CHANGE_GROUPING_LINE_SPAN - }); + && lines_between_ranges( + &edit_range.to_point(&new_snapshot), + &last_event.latest_edit_range.to_point(&new_snapshot), + ) <= CHANGE_GROUPING_LINE_SPAN; if should_coalesce { let pause_elapsed = last_event @@ -1320,9 +1426,13 @@ impl EditPredictionStore { if pause_elapsed { last_event.snapshot_after_last_editing_pause = Some(last_event.new_snapshot.clone()); + last_event.total_edit_range_at_last_pause_boundary = + Some(last_event.total_edit_range.clone()); } - last_event.edit_range = Some(edit_range); + last_event.latest_edit_range = edit_range.clone(); + last_event.total_edit_range = + merge_anchor_ranges(&last_event.total_edit_range, &edit_range, &new_snapshot); last_event.new_snapshot = new_snapshot; last_event.last_edit_time = Some(now); return; @@ -1345,7 +1455,9 @@ impl EditPredictionStore { new_file, old_snapshot, new_snapshot, - edit_range: Some(edit_range), + latest_edit_range: edit_range.clone(), + total_edit_range: edit_range, + 
total_edit_range_at_last_pause_boundary: None, predicted: is_predicted, snapshot_after_last_editing_pause: None, last_edit_time: Some(now), @@ -1401,7 +1513,13 @@ impl EditPredictionStore { return; }; - self.report_changes_for_buffer(¤t_prediction.prediction.buffer, project, true, cx); + self.report_changes_for_buffer( + ¤t_prediction.prediction.buffer, + project, + true, + true, + cx, + ); // can't hold &mut project_state ref across report_changes_for_buffer_call let Some(project_state) = self.projects.get_mut(&project.entity_id()) else { @@ -2670,6 +2788,32 @@ impl EditPredictionStore { } } +fn collaborator_edit_overlaps_locality_region( + project_state: &ProjectState, + project: &Entity, + buffer: &Entity, + snapshot: &BufferSnapshot, + edit_range: &Range, + cx: &App, +) -> bool { + let Some((active_buffer, Some(position))) = project_state.active_buffer(project, cx) else { + return false; + }; + + if active_buffer.entity_id() != buffer.entity_id() { + return false; + } + + let locality_point_range = expand_context_syntactically_then_linewise( + snapshot, + (position..position).to_point(snapshot), + COLLABORATOR_EDIT_LOCALITY_CONTEXT_TOKENS, + ); + let locality_anchor_range = snapshot.anchor_range_around(locality_point_range); + + edit_range.overlaps(&locality_anchor_range, snapshot) +} + fn merge_trailing_events_if_needed( events: &mut VecDeque, end_snapshot: &TextBufferSnapshot, @@ -2680,13 +2824,19 @@ fn merge_trailing_events_if_needed( if last_event.old_snapshot.remote_id() != latest_snapshot.remote_id() { return; } + if !latest_snapshot + .version + .observed_all(&last_event.new_snapshot_version) + { + return; + } } let mut next_old_event = None; let mut mergeable_count = 0; for old_event in events.iter().rev() { - if let Some(next_old_event) = &next_old_event - && !old_event.can_merge(&next_old_event, latest_snapshot, latest_edit_range) + if let Some(next_old_event) = next_old_event + && !old_event.can_merge(next_old_event, latest_snapshot, 
latest_edit_range) { break; } @@ -2701,10 +2851,19 @@ fn merge_trailing_events_if_needed( let mut events_to_merge = events.range(events.len() - mergeable_count..).peekable(); let oldest_event = events_to_merge.peek().unwrap(); let oldest_snapshot = oldest_event.old_snapshot.clone(); + let newest_snapshot = end_snapshot; + let mut merged_edit_range = oldest_event.total_edit_range.clone(); - if let Some((diff, edited_range)) = - compute_diff_between_snapshots(&oldest_snapshot, end_snapshot) - { + for event in events.range(events.len() - mergeable_count + 1..) { + merged_edit_range = + merge_anchor_ranges(&merged_edit_range, &event.total_edit_range, latest_snapshot); + } + + if let Some((diff, edit_range)) = compute_diff_between_snapshots_in_range( + &oldest_snapshot, + newest_snapshot, + &merged_edit_range, + ) { let merged_event = match oldest_event.event.as_ref() { zeta_prompt::Event::BufferChange { old_path, @@ -2728,8 +2887,9 @@ fn merge_trailing_events_if_needed( }), }), old_snapshot: oldest_snapshot.clone(), - edit_range: end_snapshot.anchor_before(edited_range.start) - ..end_snapshot.anchor_before(edited_range.end), + new_snapshot_version: newest_snapshot.version.clone(), + total_edit_range: newest_snapshot.anchor_before(edit_range.start) + ..newest_snapshot.anchor_before(edit_range.end), }, }; events.truncate(events.len() - mergeable_count); @@ -2737,6 +2897,24 @@ fn merge_trailing_events_if_needed( } } +fn merge_anchor_ranges( + left: &Range, + right: &Range, + snapshot: &TextBufferSnapshot, +) -> Range { + let start = if left.start.cmp(&right.start, snapshot).is_le() { + left.start + } else { + right.start + }; + let end = if left.end.cmp(&right.end, snapshot).is_ge() { + left.end + } else { + right.end + }; + start..end +} + #[derive(Error, Debug)] #[error( "You must update to Zed version {minimum_version} or higher to continue using edit predictions." 
diff --git a/crates/edit_prediction/src/edit_prediction_tests.rs b/crates/edit_prediction/src/edit_prediction_tests.rs index 8f97df2c308980e1c2c89838609b30e1aedb1917..f377f3f705f8d3e04fd4718bbfd650ae4189ba37 100644 --- a/crates/edit_prediction/src/edit_prediction_tests.rs +++ b/crates/edit_prediction/src/edit_prediction_tests.rs @@ -1,7 +1,8 @@ use super::*; -use crate::{compute_diff_between_snapshots, udiff::apply_diff_to_string}; +use crate::udiff::apply_diff_to_string; use client::{UserStore, test::FakeServer}; use clock::FakeSystemClock; +use clock::ReplicaId; use cloud_api_types::{CreateLlmTokenResponse, LlmToken}; use cloud_llm_client::{ EditPredictionRejectReason, EditPredictionRejection, RejectEditPredictionsBody, @@ -18,8 +19,8 @@ use gpui::{ }; use indoc::indoc; use language::{ - Anchor, Buffer, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSeverity, - Operation, Point, Selection, SelectionGoal, + Anchor, Buffer, Capability, CursorShape, Diagnostic, DiagnosticEntry, DiagnosticSet, + DiagnosticSeverity, Operation, Point, Selection, SelectionGoal, }; use language_model::RefreshLlmTokenListener; use lsp::LanguageServerId; @@ -28,7 +29,7 @@ use pretty_assertions::{assert_eq, assert_matches}; use project::{FakeFs, Project}; use serde_json::json; use settings::SettingsStore; -use std::{path::Path, sync::Arc, time::Duration}; +use std::{ops::Range, path::Path, sync::Arc, time::Duration}; use util::{ path, test::{TextRangeMarker, marked_text_ranges_by}, @@ -370,6 +371,12 @@ async fn test_edit_history_getter_pause_splits_last_event(cx: &mut TestAppContex ep_store.edit_history_for_project(&project, cx) }); assert_eq!(events.len(), 2); + + let first_total_edit_range = buffer.read_with(cx, |buffer, _| { + events[0].total_edit_range.to_point(&buffer.snapshot()) + }); + assert_eq!(first_total_edit_range, Point::new(1, 0)..Point::new(1, 3)); + let zeta_prompt::Event::BufferChange { diff, .. 
} = events[0].event.as_ref(); assert_eq!( diff.as_str(), @@ -382,6 +389,11 @@ async fn test_edit_history_getter_pause_splits_last_event(cx: &mut TestAppContex "} ); + let second_total_edit_range = buffer.read_with(cx, |buffer, _| { + events[1].total_edit_range.to_point(&buffer.snapshot()) + }); + assert_eq!(second_total_edit_range, Point::new(1, 3)..Point::new(1, 13)); + let zeta_prompt::Event::BufferChange { diff, .. } = events[1].event.as_ref(); assert_eq!( diff.as_str(), @@ -598,6 +610,240 @@ fn render_events_with_predicted(events: &[StoredEvent]) -> Vec { .collect() } +fn make_collaborator_replica( + buffer: &Entity, + cx: &mut TestAppContext, +) -> (Entity, clock::Global) { + let (state, version) = + buffer.read_with(cx, |buffer, _cx| (buffer.to_proto(_cx), buffer.version())); + let collaborator = cx.new(|_cx| { + Buffer::from_proto(ReplicaId::new(1), Capability::ReadWrite, state, None).unwrap() + }); + (collaborator, version) +} + +async fn apply_collaborator_edit( + collaborator: &Entity, + buffer: &Entity, + since_version: &mut clock::Global, + edit_range: Range, + new_text: &str, + cx: &mut TestAppContext, +) { + collaborator.update(cx, |collaborator, cx| { + collaborator.edit([(edit_range, new_text)], None, cx); + }); + + let serialize_task = collaborator.read_with(cx, |collaborator, cx| { + collaborator.serialize_ops(Some(since_version.clone()), cx) + }); + let ops = serialize_task.await; + *since_version = collaborator.read_with(cx, |collaborator, _cx| collaborator.version()); + + buffer.update(cx, |buffer, cx| { + buffer.apply_ops( + ops.into_iter() + .map(|op| language::proto::deserialize_operation(op).unwrap()), + cx, + ); + }); +} + +#[gpui::test] +async fn test_nearby_collaborator_edits_are_kept_in_history(cx: &mut TestAppContext) { + let (ep_store, _requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.rs": "line 0\nline 1\nline 2\nline 3\nline 4\nline 5\nline 6\nline 
7\nline 8\nline 9\nline 10\nline 11\nline 12\nline 13\nline 14\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap(); + project.set_active_path(Some(path.clone()), cx); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + let cursor = buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0))); + + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(&buffer, &project, cx); + let _ = ep_store.prediction_at(&buffer, Some(cursor), &project, cx); + }); + + buffer.update(cx, |buffer, cx| { + buffer.edit(vec![(0..6, "LOCAL ZERO")], None, cx); + }); + + let (collaborator, mut collaborator_version) = make_collaborator_replica(&buffer, cx); + + let (line_one_start, line_one_len) = collaborator.read_with(cx, |buffer, _cx| { + (Point::new(1, 0).to_offset(buffer), buffer.line_len(1)) + }); + + apply_collaborator_edit( + &collaborator, + &buffer, + &mut collaborator_version, + line_one_start..line_one_start + line_one_len as usize, + "REMOTE ONE", + cx, + ) + .await; + + let events = ep_store.update(cx, |ep_store, cx| { + ep_store.edit_history_for_project(&project, cx) + }); + + assert_eq!( + render_events_with_predicted(&events), + vec![indoc! 
{" + manual + @@ -1,5 +1,5 @@ + -line 0 + -line 1 + +LOCAL ZERO + +REMOTE ONE + line 2 + line 3 + line 4 + "}] + ); +} + +#[gpui::test] +async fn test_distant_collaborator_edits_are_omitted_from_history(cx: &mut TestAppContext) { + let (ep_store, _requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.rs": (0..1000) + .map(|i| format!("line {i}\n")) + .collect::() + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap(); + project.set_active_path(Some(path.clone()), cx); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + let cursor = buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0))); + + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(&buffer, &project, cx); + let _ = ep_store.prediction_at(&buffer, Some(cursor), &project, cx); + }); + + buffer.update(cx, |buffer, cx| { + buffer.edit(vec![(0..6, "LOCAL ZERO")], None, cx); + }); + + let (collaborator, mut collaborator_version) = make_collaborator_replica(&buffer, cx); + + let far_line_start = buffer.read_with(cx, |buffer, _cx| Point::new(900, 0).to_offset(buffer)); + + apply_collaborator_edit( + &collaborator, + &buffer, + &mut collaborator_version, + far_line_start..far_line_start + 7, + "REMOTE FAR", + cx, + ) + .await; + + let events = ep_store.update(cx, |ep_store, cx| { + ep_store.edit_history_for_project(&project, cx) + }); + + assert_eq!( + render_events_with_predicted(&events), + vec![indoc! 
{" + manual + @@ -1,4 +1,4 @@ + -line 0 + +LOCAL ZERO + line 1 + line 2 + line 3 + "}] + ); +} + +#[gpui::test] +async fn test_irrelevant_collaborator_edits_in_different_files_are_omitted_from_history( + cx: &mut TestAppContext, +) { + let (ep_store, _requests) = init_test_with_fake_client(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/root", + json!({ + "foo.rs": "line 0\nline 1\nline 2\nline 3\n", + "bar.rs": "line 0\nline 1\nline 2\nline 3\n" + }), + ) + .await; + let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await; + + let foo_buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/foo.rs"), cx).unwrap(); + project.set_active_path(Some(path.clone()), cx); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + let bar_buffer = project + .update(cx, |project, cx| { + let path = project.find_project_path(path!("root/bar.rs"), cx).unwrap(); + project.open_buffer(path, cx) + }) + .await + .unwrap(); + + let foo_cursor = foo_buffer.read_with(cx, |buffer, _cx| buffer.anchor_before(Point::new(1, 0))); + + ep_store.update(cx, |ep_store, cx| { + ep_store.register_buffer(&foo_buffer, &project, cx); + ep_store.register_buffer(&bar_buffer, &project, cx); + let _ = ep_store.prediction_at(&foo_buffer, Some(foo_cursor), &project, cx); + }); + + let (bar_collaborator, mut bar_version) = make_collaborator_replica(&bar_buffer, cx); + + apply_collaborator_edit( + &bar_collaborator, + &bar_buffer, + &mut bar_version, + 0..6, + "REMOTE BAR", + cx, + ) + .await; + + let events = ep_store.update(cx, |ep_store, cx| { + ep_store.edit_history_for_project(&project, cx) + }); + + assert!(events.is_empty()); +} + #[gpui::test] async fn test_predicted_flag_coalescing(cx: &mut TestAppContext) { let (ep_store, _requests) = init_test_with_fake_client(cx); @@ -680,7 +926,7 @@ async fn test_predicted_flag_coalescing(cx: &mut TestAppContext) { let end = Point::new(2, 6).to_offset(buffer); 
buffer.edit(vec![(offset..end, "LINE TWO")], None, cx); }); - ep_store.report_changes_for_buffer(&buffer, &project, true, cx); + ep_store.report_changes_for_buffer(&buffer, &project, true, true, cx); }); let events = ep_store.update(cx, |ep_store, cx| { @@ -722,7 +968,7 @@ async fn test_predicted_flag_coalescing(cx: &mut TestAppContext) { let end = Point::new(3, 6).to_offset(buffer); buffer.edit(vec![(offset..end, "LINE THREE")], None, cx); }); - ep_store.report_changes_for_buffer(&buffer, &project, true, cx); + ep_store.report_changes_for_buffer(&buffer, &project, true, true, cx); }); let events = ep_store.update(cx, |ep_store, cx| { @@ -2420,74 +2666,6 @@ async fn test_unauthenticated_without_custom_url_blocks_prediction_impl(cx: &mut ); } -#[gpui::test] -fn test_compute_diff_between_snapshots(cx: &mut TestAppContext) { - let buffer = cx.new(|cx| { - Buffer::local( - indoc! {" - zero - one - two - three - four - five - six - seven - eight - nine - ten - eleven - twelve - thirteen - fourteen - fifteen - sixteen - seventeen - eighteen - nineteen - twenty - twenty-one - twenty-two - twenty-three - twenty-four - "}, - cx, - ) - }); - - let old_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot()); - - buffer.update(cx, |buffer, cx| { - let point = Point::new(12, 0); - buffer.edit([(point..point, "SECOND INSERTION\n")], None, cx); - let point = Point::new(8, 0); - buffer.edit([(point..point, "FIRST INSERTION\n")], None, cx); - }); - - let new_snapshot = buffer.read_with(cx, |buffer, _| buffer.text_snapshot()); - - let (diff, _) = compute_diff_between_snapshots(&old_snapshot, &new_snapshot).unwrap(); - - assert_eq!( - diff, - indoc! 
{" - @@ -6,10 +6,12 @@ - five - six - seven - +FIRST INSERTION - eight - nine - ten - eleven - +SECOND INSERTION - twelve - thirteen - fourteen - "} - ); -} - #[gpui::test] async fn test_diagnostic_jump_excludes_collaborator_regions(cx: &mut TestAppContext) { fn set_collaborator_cursor(buffer: &Entity, row: u32, cx: &mut TestAppContext) { diff --git a/crates/zeta_prompt/src/zeta_prompt.rs b/crates/zeta_prompt/src/zeta_prompt.rs index 41d02478c33ce807bf1771cf25799c9a427e63ed..8dd4d88e2a89cadc39e1335b4bcdc18a0a144571 100644 --- a/crates/zeta_prompt/src/zeta_prompt.rs +++ b/crates/zeta_prompt/src/zeta_prompt.rs @@ -479,6 +479,7 @@ pub fn format_prompt_with_budget_for_format( "<|file_sep|>", "edit history", budget_after_cursor, + max_edit_event_count_for_format(&format), ); let edit_history_tokens = estimate_tokens(edit_history_section.len()); let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens); @@ -516,6 +517,22 @@ pub fn filter_redundant_excerpts( related_files } +pub fn max_edit_event_count_for_format(format: &ZetaFormat) -> usize { + match format { + ZetaFormat::V0112MiddleAtEnd + | ZetaFormat::V0113Ordered + | ZetaFormat::V0114180EditableRegion + | ZetaFormat::V0120GitMergeMarkers + | ZetaFormat::V0131GitMergeMarkersPrefix + | ZetaFormat::V0211Prefill + | ZetaFormat::V0211SeedCoder + | ZetaFormat::v0226Hashline + | ZetaFormat::V0304SeedNoEdits + | ZetaFormat::V0304VariableEdit + | ZetaFormat::V0306SeedMultiRegions => 6, + } +} + pub fn get_prefill_for_format( format: ZetaFormat, context: &str, @@ -682,6 +699,7 @@ fn format_edit_history_within_budget( file_marker: &str, edit_history_name: &str, max_tokens: usize, + max_edit_event_count: usize, ) -> String { let header = format!("{}{}\n", file_marker, edit_history_name); let header_tokens = estimate_tokens(header.len()); @@ -692,7 +710,7 @@ fn format_edit_history_within_budget( let mut event_strings: Vec = Vec::new(); let mut total_tokens = header_tokens; - for event in 
events.iter().rev() { + for event in events.iter().rev().take(max_edit_event_count) { let mut event_str = String::new(); write_event(&mut event_str, event); let event_tokens = estimate_tokens(event_str.len()); @@ -2698,6 +2716,7 @@ pub mod seed_coder { FILE_MARKER, "edit_history", budget_after_cursor, + max_edit_event_count_for_format(&ZetaFormat::V0211SeedCoder), ); let edit_history_tokens = estimate_tokens(edit_history_section.len()); let budget_after_edit_history = budget_after_cursor.saturating_sub(edit_history_tokens); @@ -3824,7 +3843,13 @@ pub mod zeta1 { /// Formats events in zeta1 style (oldest first). fn format_zeta1_events(events: &[Arc]) -> String { let mut result = String::new(); - for event in events { + for event in + events + .iter() + .skip(events.len().saturating_sub(max_edit_event_count_for_format( + &ZetaFormat::V0114180EditableRegion, + ))) + { let event_string = format_zeta1_event(event); if event_string.is_empty() { continue; @@ -4781,6 +4806,87 @@ mod tests { ); } + #[test] + fn test_max_event_count() { + fn make_numbered_event(index: usize) -> Event { + return make_event( + &format!("event-{index}.rs"), + &format!("-old-{index}\n+new-{index}\n"), + ); + } + let input = make_input( + "x", + 0..1, + 0, + (0..3).map(make_numbered_event).collect(), + vec![], + ); + + let edit_history_section = format_edit_history_within_budget( + &input.events, + "<|file_sep|>", + "edit history", + usize::MAX, + 5, + ); + + assert_eq!( + &edit_history_section, + indoc!( + " + <|file_sep|>edit history + --- a/event-0.rs + +++ b/event-0.rs + -old-0 + +new-0 + --- a/event-1.rs + +++ b/event-1.rs + -old-1 + +new-1 + --- a/event-2.rs + +++ b/event-2.rs + -old-2 + +new-2 + " + ) + ); + + let edit_history_section = format_edit_history_within_budget( + &input.events, + "<|file_sep|>", + "edit history", + usize::MAX, + 2, + ); + + assert_eq!( + &edit_history_section, + indoc!( + " + <|file_sep|>edit history + --- a/event-1.rs + +++ b/event-1.rs + -old-1 + +new-1 + --- 
a/event-2.rs + +++ b/event-2.rs + -old-2 + +new-2 + " + ) + ); + + let edit_history_section = format_edit_history_within_budget( + &input.events, + "<|file_sep|>", + "edit history", + usize::MAX, + 0, + ); + + assert_eq!(&edit_history_section, ""); + } + #[test] fn test_clean_zeta1_model_output_basic() { let output = indoc! {" From ad1e82e9e2cbfd45bed487aaac4f34114aa62ebe Mon Sep 17 00:00:00 2001 From: franciskafyi Date: Fri, 13 Mar 2026 00:36:21 +0300 Subject: [PATCH 538/548] docs: Improve feature process (#51425) Small tweaks to our feature doc and a link out to more about how the Feature Request process works. Release Notes: - N/A --- .../DISCUSSION_TEMPLATE/feature-requests.yml | 2 +- docs/src/development/feature-process.md | 26 +++++++++++-------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/.github/DISCUSSION_TEMPLATE/feature-requests.yml b/.github/DISCUSSION_TEMPLATE/feature-requests.yml index 183a3de934eccc8baa8428e822176e31d1d11782..e8a695063c34771ac6120b1e477b7494a17aa3c9 100644 --- a/.github/DISCUSSION_TEMPLATE/feature-requests.yml +++ b/.github/DISCUSSION_TEMPLATE/feature-requests.yml @@ -40,4 +40,4 @@ body: attributes: value: | Learn more about how feature requests work in our - [Feature Request Guidelines](https://github.com/zed-industries/zed/discussions/47963). + [Feature Request Guidelines](https://github.com/zed-industries/zed/discussions/51422). diff --git a/docs/src/development/feature-process.md b/docs/src/development/feature-process.md index 811e1a4fd6130fdf0abc687f6943f58b24e81b08..ec39c6c4b59ef5916d5f5dcfada9abf326f77a3a 100644 --- a/docs/src/development/feature-process.md +++ b/docs/src/development/feature-process.md @@ -2,7 +2,7 @@ This is for moderate-to-large features — new UI, behavior changes, or work that cuts across multiple parts of Zed. Small keybindings or settings tweaks don't need all of this. 
-> **Before you start:** If you're an external contributor, make sure the feature is something the team wants before investing significant effort. That said, coming prepared with background research makes it much easier for the team to understand and approve the proposal. Read the [Contributing guide](../../../CONTRIBUTING.md#sending-changes) — if there isn't already a GitHub issue with staff confirmation, start with a GitHub Discussion or a Discord message rather than a PR. +> **Before you start:** If you're an external contributor, make sure the feature is something the team wants before investing significant effort. Please read the [Contributing Guide](../../../CONTRIBUTING.md) and our [Feature Request Guidelines](https://github.com/zed-industries/zed/discussions/51422) — if there isn't already a GitHub issue with clear staff confirmation, start with a GitHub Discussion. Feature request PRs that skip this process have a _very_ low merge rate. Taking the time to follow our process significantly increases the chances your idea gets picked up and built. ## 1. Why does this matter? @@ -18,16 +18,20 @@ Write a short, concrete feature statement, then back it up with the context gath Here's an example format, though adapt it to whatever your feature needs: -> **Feature:** Inline Git Blame -> **Purpose:** Show the last commit author and message for each line directly after the editor text, so developers can understand code history without opening the git blame. -> **Background:** -> This is standard across all major code editors -> \[screenshot of VSCode] -> \[screenshot of Intellij] -> \[screenshot of Neovim] -> and has 146 thumbs up on the [github issue](https://github.com). -> **Decisions:** -> We have to decide whether to use the git CLI or a git library. Zed uses a git library but its blame implementation is too slow for a code editor, so we should use the CLI's porcelain interface. 
+**Feature:** Inline Git Blame + +**Purpose:** Show the last commit author and message for each line directly after the editor text, so developers can understand code history without opening the git blame. + +**Background:** +This is standard across all major code editors: + +- \[screenshot of VSCode] +- \[screenshot of Intellij] +- \[screenshot of Neovim] +- and has 146 thumbs up on this [github issue](https://github.com). + +**Decisions:** +We have to decide whether to use the git CLI or a git library. Zed uses a git library but its blame implementation is too slow for a code editor, so we should use the CLI's porcelain interface. ## 3. What else does this affect? From b32067d24868600b3b64f9bdc4656053fd5be0ba Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Mar 2026 16:15:12 -0600 Subject: [PATCH 539/548] GPUI updates (#51415) - **Fix race condition in test_collaborating_with_completion** - **WIP: Integrate scheduler crate into GPUI TestDispatcher** - **WIP: scheduler integration debugging** - **Fix formatting** - **Unify RunnableMeta and add execution tracking to TestScheduler** - **Remove unused execution tracking from TestScheduler and TestDispatcher** - **Add is_ready() to GPUI Task for API parity with scheduler** - **Eliminate RunnableVariant::Compat - all runnables now have source location metadata** - **Update integration plans to reflect completed phases** - **Simplify RunnableVariant to type alias** - **Delegate TestDispatcher task queues to TestScheduler (Phase 2b)** - **Remove waiting_hint/waiting_backtrace and debug logging from TestDispatcher** - **Remove wrapper methods from TestDispatcher - access scheduler() directly** - **Update integration plan with complete state and instructions for full scheduler migration** - **Use scheduler's native timer() and simplify TestDispatcher** - **Fix rng() usage to lock mutex, update plan with SharedRng wrapper** - **Add SharedRng wrapper for ergonomic random number generation** - **Update plan: mark 
Phase 1 (SharedRng) as complete** - **Update scheduler integration plan with Phase 2 investigation notes** - **Phase 3: Delegate simulate_random_delay to scheduler.yield_random()** - **Phase 4: Remove TaskLabel** - **Phase 5 (WIP): Simplify block_internal and remove unparkers** - **Phase 5 Complete: Scheduler integration finished** - **Update integration plan with code review findings** - **Phase 6 & 7: Restore realtime priority support and delete dead code** - **Add TestApp and TestAppWindow for cleaner GPUI testing** - **Fix formatting across the branch** - **Fix Linux build: add explicit type annotation and rename probability() to weight()** - **Add TestApp and TestAppWindow for cleaner GPUI testing** - **Rename TestAppWindow to TestWindow, internal TestWindow to TestPlatformWindow** - **Remove unused RunnableVariant imports on Linux** - **Add STATUS.md for next agent** - **Run cargo fmt** - **Use per-app element arena only and scope test draws** - **Fix collab tests for scheduler timing and ordering** - **Store element arena on App and route element allocations through draw scope** - **Fix TestScheduler lock ordering between rng and state** - **Fix inlay hints test by explicitly triggering refresh after viewport setup** - **Add scheduler integration regression risk analysis doc** - **Fix tests: avoid caching Entity in global OnceLock for Codestral API key** - **Document learned weak point: global cached Entity handles break across App contexts** - **Add scheduler regression test for block_with_timeout continuation and explicit time advancement** - **Document TestScheduler timeout tick budget behavior and explicit time advancement guidance** - **Add test asserting realtime priority spawns panic under TestDispatcher** - **Document realtime priority determinism contract in tests** - **Remove realtime priority until we have a concrete use case (cc @localcc)** - **Update STATUS for scheduler integration decisions and realtime priority removal** - **Fix prettier docs 
and clippy in scheduler tests** - **Remove unused imports from Windows dispatcher** - **WIP: scheduler integration debugging + agent terminal diagnostics** - **Update scheduler integration status** - **Remove temporary planning docs, consolidate into scheduler integration doc** - **Remove unrelated changes from scheduler integration** - **Fix clippy errors** - **Add STATUS.md with debugging instructions for Linux/Windows hang** - **WIP: local changes needed by ex** - **Add pointer capture API for stable drag handling** - **Add pointer capture API for stable drag handling** - **chore: update generated cargo manifests** - **gpui: Expose ShapedLine::width() for pen advancement** - **Remove git2 usage from util test.rs** - **Store DiagnosticQuad bounds in logical Pixels** - **WIP: executor and test_app changes for scheduler integration** - **Expose font APIs publicly** - **gpui: add typed diagnostics and record_diagnostic API** - **WIP: gpui test window diagnostics changes** - **Add LineCacheKey trait and shape_line_cached API for content-addressable shaping** - **Fix RenderGlyphParams field additions for Ex compatibility** - **Add doc comment for recommended_rendering_mode, fix formatting** - **Add scheduler_executor() method for Ex compatibility** - **Fix TestWindow -> TestPlatformWindow in test_context.rs** - **Add headless metal renderer and window focus improvements** - **Fix double borrow in TestWindow::simulate_resize** - **Fix cbindgen panic: remove default type parameter from Diagnostic** - **Implement AppContext for HeadlessMetalAppContext** - **Missing trait impls** - **Add ShapedLine::split_at and eliminate re-shaping in soft wraps** - **Add handoff doc for platform-neutral-tests merge** - **Remove ex-only test infrastructure before merging main** - **Add cross-platform HeadlessAppContext with pluggable text system** - **Export platform_text_system() from gpui_windows for cross-platform tests** - **Restore TestApp/TestAppWindow with pluggable text system 
support** - **Add TestApp::open_window_sized for tests that need specific window dimensions** - **Fix some warnings** - **Fixes** - **Add a platform-neutral headless renderer interface** - **Synchronize Managed texture before CPU readback on discrete GPUs** - **Allow creating TestDispatcher with custom scheduler** Release Notes: - N/A --------- Co-authored-by: Nathan Sobo Co-authored-by: John Tur Co-authored-by: Agus Zubiaga Co-authored-by: Antonio Scandurra --- crates/gpui/src/app.rs | 8 + crates/gpui/src/app/headless_app_context.rs | 267 +++++++++ crates/gpui/src/app/test_app.rs | 607 ++++++++++++++++++++ crates/gpui/src/app/test_context.rs | 27 + crates/gpui/src/color.rs | 9 + crates/gpui/src/executor.rs | 7 + crates/gpui/src/platform.rs | 25 + crates/gpui/src/platform/test/dispatcher.rs | 15 +- crates/gpui/src/platform/test/platform.rs | 36 +- crates/gpui/src/platform/test/window.rs | 35 +- crates/gpui/src/scene.rs | 4 +- crates/gpui/src/text_system.rs | 203 ++++++- crates/gpui/src/text_system/line.rs | 405 ++++++++++++- crates/gpui/src/text_system/line_layout.rs | 271 +++++++++ crates/gpui/src/window.rs | 139 ++++- crates/gpui_macos/src/metal_renderer.rs | 303 ++++++++-- crates/gpui_macos/src/text_system.rs | 6 +- crates/gpui_macos/src/window.rs | 12 +- crates/gpui_platform/src/gpui_platform.rs | 16 + 19 files changed, 2315 insertions(+), 80 deletions(-) create mode 100644 crates/gpui/src/app/headless_app_context.rs create mode 100644 crates/gpui/src/app/test_app.rs diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 8af0a8923b38a6f711d701730996afca012fb48b..3d22d48a3a808a6f437a5875bfd4e337b7672d80 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -27,9 +27,13 @@ use collections::{FxHashMap, FxHashSet, HashMap, VecDeque}; pub use context::*; pub use entity_map::*; use gpui_util::{ResultExt, debug_panic}; +#[cfg(any(test, feature = "test-support"))] +pub use headless_app_context::*; use http_client::{HttpClient, Url}; use 
smallvec::SmallVec; #[cfg(any(test, feature = "test-support"))] +pub use test_app::*; +#[cfg(any(test, feature = "test-support"))] pub use test_context::*; #[cfg(all(target_os = "macos", any(test, feature = "test-support")))] pub use visual_test_context::*; @@ -54,6 +58,10 @@ mod async_context; mod context; mod entity_map; #[cfg(any(test, feature = "test-support"))] +mod headless_app_context; +#[cfg(any(test, feature = "test-support"))] +mod test_app; +#[cfg(any(test, feature = "test-support"))] mod test_context; #[cfg(all(target_os = "macos", any(test, feature = "test-support")))] mod visual_test_context; diff --git a/crates/gpui/src/app/headless_app_context.rs b/crates/gpui/src/app/headless_app_context.rs new file mode 100644 index 0000000000000000000000000000000000000000..bebade89d9a8417769147e5f64923953e4bc3694 --- /dev/null +++ b/crates/gpui/src/app/headless_app_context.rs @@ -0,0 +1,267 @@ +//! Cross-platform headless app context for tests that need real text shaping. +//! +//! This replaces the macOS-only `HeadlessMetalAppContext` with a platform-neutral +//! implementation backed by `TestPlatform`. Tests supply a real `PlatformTextSystem` +//! (e.g. `DirectWriteTextSystem` on Windows, `MacTextSystem` on macOS) to get +//! accurate glyph measurements while keeping everything else deterministic. +//! +//! Optionally, a renderer factory can be provided to enable real GPU rendering +//! and screenshot capture via [`HeadlessAppContext::capture_screenshot`]. 
+ +use crate::{ + AnyView, AnyWindowHandle, App, AppCell, AppContext, AssetSource, BackgroundExecutor, Bounds, + Context, Entity, ForegroundExecutor, Global, Pixels, PlatformHeadlessRenderer, + PlatformTextSystem, Render, Reservation, Size, Task, TestDispatcher, TestPlatform, TextSystem, + Window, WindowBounds, WindowHandle, WindowOptions, + app::{GpuiBorrow, GpuiMode}, +}; +use anyhow::Result; +use image::RgbaImage; +use std::{future::Future, rc::Rc, sync::Arc, time::Duration}; + +/// A cross-platform headless app context for tests that need real text shaping. +/// +/// Unlike the old `HeadlessMetalAppContext`, this works on any platform. It uses +/// `TestPlatform` for deterministic scheduling and accepts a pluggable +/// `PlatformTextSystem` so tests get real glyph measurements. +/// +/// # Usage +/// +/// ```ignore +/// let text_system = Arc::new(gpui_wgpu::CosmicTextSystem::new("fallback")); +/// let mut cx = HeadlessAppContext::with_platform( +/// text_system, +/// Arc::new(Assets), +/// || gpui_platform::current_headless_renderer(), +/// ); +/// ``` +pub struct HeadlessAppContext { + /// The underlying app cell. + pub app: Rc, + /// The background executor for running async tasks. + pub background_executor: BackgroundExecutor, + /// The foreground executor for running tasks on the main thread. + pub foreground_executor: ForegroundExecutor, + dispatcher: TestDispatcher, + text_system: Arc, +} + +impl HeadlessAppContext { + /// Creates a new headless app context with the given text system. + pub fn new(platform_text_system: Arc) -> Self { + Self::with_platform(platform_text_system, Arc::new(()), || None) + } + + /// Creates a new headless app context with a custom text system and asset source. 
+ pub fn with_asset_source( + platform_text_system: Arc, + asset_source: Arc, + ) -> Self { + Self::with_platform(platform_text_system, asset_source, || None) + } + + /// Creates a new headless app context with the given text system, asset source, + /// and an optional renderer factory for screenshot support. + pub fn with_platform( + platform_text_system: Arc, + asset_source: Arc, + renderer_factory: impl Fn() -> Option> + 'static, + ) -> Self { + let seed = std::env::var("SEED") + .ok() + .and_then(|s| s.parse().ok()) + .unwrap_or(0); + + let dispatcher = TestDispatcher::new(seed); + let arc_dispatcher = Arc::new(dispatcher.clone()); + let background_executor = BackgroundExecutor::new(arc_dispatcher.clone()); + let foreground_executor = ForegroundExecutor::new(arc_dispatcher); + + let renderer_factory: Box Option>> = + Box::new(renderer_factory); + let platform = TestPlatform::with_platform( + background_executor.clone(), + foreground_executor.clone(), + platform_text_system.clone(), + Some(renderer_factory), + ); + + let text_system = Arc::new(TextSystem::new(platform_text_system)); + let http_client = http_client::FakeHttpClient::with_404_response(); + let app = App::new_app(platform, asset_source, http_client); + app.borrow_mut().mode = GpuiMode::test(); + + Self { + app, + background_executor, + foreground_executor, + dispatcher, + text_system, + } + } + + /// Opens a window for headless rendering. + pub fn open_window( + &mut self, + size: Size, + build_root: impl FnOnce(&mut Window, &mut App) -> Entity, + ) -> Result> { + use crate::{point, px}; + + let bounds = Bounds { + origin: point(px(0.0), px(0.0)), + size, + }; + + let mut cx = self.app.borrow_mut(); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + focus: false, + show: false, + ..Default::default() + }, + build_root, + ) + } + + /// Runs all pending tasks until parked. 
+ pub fn run_until_parked(&self) { + self.dispatcher.run_until_parked(); + } + + /// Advances the simulated clock. + pub fn advance_clock(&self, duration: Duration) { + self.dispatcher.advance_clock(duration); + } + + /// Enables parking mode, allowing blocking on real I/O (e.g., async asset loading). + pub fn allow_parking(&self) { + self.dispatcher.allow_parking(); + } + + /// Disables parking mode, returning to deterministic test execution. + pub fn forbid_parking(&self) { + self.dispatcher.forbid_parking(); + } + + /// Updates app state. + pub fn update(&mut self, f: impl FnOnce(&mut App) -> R) -> R { + let mut app = self.app.borrow_mut(); + f(&mut app) + } + + /// Updates a window and calls draw to render. + pub fn update_window( + &mut self, + window: AnyWindowHandle, + f: impl FnOnce(AnyView, &mut Window, &mut App) -> R, + ) -> Result { + let mut app = self.app.borrow_mut(); + app.update_window(window, f) + } + + /// Captures a screenshot from a window. + /// + /// Requires that the context was created with a renderer factory that + /// returns `Some` via [`HeadlessAppContext::with_platform`]. + pub fn capture_screenshot(&mut self, window: AnyWindowHandle) -> Result { + let mut app = self.app.borrow_mut(); + app.update_window(window, |_, window, _| window.render_to_image())? + } + + /// Returns the text system. + pub fn text_system(&self) -> &Arc { + &self.text_system + } + + /// Returns the background executor. + pub fn background_executor(&self) -> &BackgroundExecutor { + &self.background_executor + } + + /// Returns the foreground executor. 
+ pub fn foreground_executor(&self) -> &ForegroundExecutor { + &self.foreground_executor + } +} + +impl AppContext for HeadlessAppContext { + fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { + let mut app = self.app.borrow_mut(); + app.new(build_entity) + } + + fn reserve_entity(&mut self) -> Reservation { + let mut app = self.app.borrow_mut(); + app.reserve_entity() + } + + fn insert_entity( + &mut self, + reservation: Reservation, + build_entity: impl FnOnce(&mut Context) -> T, + ) -> Entity { + let mut app = self.app.borrow_mut(); + app.insert_entity(reservation, build_entity) + } + + fn update_entity( + &mut self, + handle: &Entity, + update: impl FnOnce(&mut T, &mut Context) -> R, + ) -> R { + let mut app = self.app.borrow_mut(); + app.update_entity(handle, update) + } + + fn as_mut<'a, T>(&'a mut self, _: &Entity) -> GpuiBorrow<'a, T> + where + T: 'static, + { + panic!("Cannot use as_mut with HeadlessAppContext. Call update() instead.") + } + + fn read_entity(&self, handle: &Entity, read: impl FnOnce(&T, &App) -> R) -> R + where + T: 'static, + { + let app = self.app.borrow(); + app.read_entity(handle, read) + } + + fn update_window(&mut self, window: AnyWindowHandle, f: F) -> Result + where + F: FnOnce(AnyView, &mut Window, &mut App) -> T, + { + let mut lock = self.app.borrow_mut(); + lock.update_window(window, f) + } + + fn read_window( + &self, + window: &WindowHandle, + read: impl FnOnce(Entity, &App) -> R, + ) -> Result + where + T: 'static, + { + let app = self.app.borrow(); + app.read_window(window, read) + } + + fn background_spawn(&self, future: impl Future + Send + 'static) -> Task + where + R: Send + 'static, + { + self.background_executor.spawn(future) + } + + fn read_global(&self, callback: impl FnOnce(&G, &App) -> R) -> R + where + G: Global, + { + let app = self.app.borrow(); + app.read_global(callback) + } +} diff --git a/crates/gpui/src/app/test_app.rs b/crates/gpui/src/app/test_app.rs new file mode 100644 index 
0000000000000000000000000000000000000000..268fa891b563289b85195097d27e06d0b3e15680 --- /dev/null +++ b/crates/gpui/src/app/test_app.rs @@ -0,0 +1,607 @@ +//! A clean testing API for GPUI applications. +//! +//! `TestApp` provides a simpler alternative to `TestAppContext` with: +//! - Automatic effect flushing after updates +//! - Clean window creation and inspection +//! - Input simulation helpers +//! +//! # Example +//! ```ignore +//! #[test] +//! fn test_my_view() { +//! let mut app = TestApp::new(); +//! +//! let mut window = app.open_window(|window, cx| { +//! MyView::new(window, cx) +//! }); +//! +//! window.update(|view, window, cx| { +//! view.do_something(cx); +//! }); +//! +//! // Check rendered state +//! assert_eq!(window.title(), Some("Expected Title")); +//! } +//! ``` + +use crate::{ + AnyWindowHandle, App, AppCell, AppContext, AsyncApp, BackgroundExecutor, BorrowAppContext, + Bounds, ClipboardItem, Context, Entity, ForegroundExecutor, Global, InputEvent, Keystroke, + MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Platform, + PlatformTextSystem, Point, Render, Size, Task, TestDispatcher, TestPlatform, TextSystem, + Window, WindowBounds, WindowHandle, WindowOptions, app::GpuiMode, +}; +use std::{future::Future, rc::Rc, sync::Arc, time::Duration}; + +/// A test application context with a clean API. +/// +/// Unlike `TestAppContext`, `TestApp` automatically flushes effects after +/// each update and provides simpler window management. +pub struct TestApp { + app: Rc, + platform: Rc, + background_executor: BackgroundExecutor, + foreground_executor: ForegroundExecutor, + #[allow(dead_code)] + dispatcher: TestDispatcher, + text_system: Arc, +} + +impl TestApp { + /// Create a new test application. + pub fn new() -> Self { + Self::with_seed(0) + } + + /// Create a new test application with a specific random seed. 
+ pub fn with_seed(seed: u64) -> Self { + Self::build(seed, None, Arc::new(())) + } + + /// Create a new test application with a custom text system for real font shaping. + pub fn with_text_system(text_system: Arc) -> Self { + Self::build(0, Some(text_system), Arc::new(())) + } + + /// Create a new test application with a custom text system and asset source. + pub fn with_text_system_and_assets( + text_system: Arc, + asset_source: Arc, + ) -> Self { + Self::build(0, Some(text_system), asset_source) + } + + fn build( + seed: u64, + platform_text_system: Option>, + asset_source: Arc, + ) -> Self { + let dispatcher = TestDispatcher::new(seed); + let arc_dispatcher = Arc::new(dispatcher.clone()); + let background_executor = BackgroundExecutor::new(arc_dispatcher.clone()); + let foreground_executor = ForegroundExecutor::new(arc_dispatcher); + let platform = match platform_text_system.clone() { + Some(ts) => TestPlatform::with_text_system( + background_executor.clone(), + foreground_executor.clone(), + ts, + ), + None => TestPlatform::new(background_executor.clone(), foreground_executor.clone()), + }; + let http_client = http_client::FakeHttpClient::with_404_response(); + let text_system = Arc::new(TextSystem::new( + platform_text_system.unwrap_or_else(|| platform.text_system.clone()), + )); + + let app = App::new_app(platform.clone(), asset_source, http_client); + app.borrow_mut().mode = GpuiMode::test(); + + Self { + app, + platform, + background_executor, + foreground_executor, + dispatcher, + text_system, + } + } + + /// Run a closure with mutable access to the App context. + /// Automatically runs until parked after the closure completes. + pub fn update(&mut self, f: impl FnOnce(&mut App) -> R) -> R { + let result = { + let mut app = self.app.borrow_mut(); + app.update(f) + }; + self.run_until_parked(); + result + } + + /// Run a closure with read-only access to the App context. 
+ pub fn read(&self, f: impl FnOnce(&App) -> R) -> R { + let app = self.app.borrow(); + f(&app) + } + + /// Create a new entity in the app. + pub fn new_entity( + &mut self, + build: impl FnOnce(&mut Context) -> T, + ) -> Entity { + self.update(|cx| cx.new(build)) + } + + /// Update an entity. + pub fn update_entity( + &mut self, + entity: &Entity, + f: impl FnOnce(&mut T, &mut Context) -> R, + ) -> R { + self.update(|cx| entity.update(cx, f)) + } + + /// Read an entity. + pub fn read_entity( + &self, + entity: &Entity, + f: impl FnOnce(&T, &App) -> R, + ) -> R { + self.read(|cx| f(entity.read(cx), cx)) + } + + /// Open a test window with the given root view, using maximized bounds. + pub fn open_window( + &mut self, + build_view: impl FnOnce(&mut Window, &mut Context) -> V, + ) -> TestAppWindow { + let bounds = self.read(|cx| Bounds::maximized(None, cx)); + let handle = self.update(|cx| { + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(bounds)), + ..Default::default() + }, + |window, cx| cx.new(|cx| build_view(window, cx)), + ) + .unwrap() + }); + + TestAppWindow { + handle, + app: self.app.clone(), + platform: self.platform.clone(), + background_executor: self.background_executor.clone(), + } + } + + /// Open a test window with specific options. + pub fn open_window_with_options( + &mut self, + options: WindowOptions, + build_view: impl FnOnce(&mut Window, &mut Context) -> V, + ) -> TestAppWindow { + let handle = self.update(|cx| { + cx.open_window(options, |window, cx| cx.new(|cx| build_view(window, cx))) + .unwrap() + }); + + TestAppWindow { + handle, + app: self.app.clone(), + platform: self.platform.clone(), + background_executor: self.background_executor.clone(), + } + } + + /// Run pending tasks until there's nothing left to do. + pub fn run_until_parked(&self) { + self.background_executor.run_until_parked(); + } + + /// Advance the simulated clock by the given duration. 
+ pub fn advance_clock(&self, duration: Duration) { + self.background_executor.advance_clock(duration); + } + + /// Spawn a future on the foreground executor. + pub fn spawn(&self, f: impl FnOnce(AsyncApp) -> Fut) -> Task + where + Fut: Future + 'static, + R: 'static, + { + self.foreground_executor.spawn(f(self.to_async())) + } + + /// Spawn a future on the background executor. + pub fn background_spawn(&self, future: impl Future + Send + 'static) -> Task + where + R: Send + 'static, + { + self.background_executor.spawn(future) + } + + /// Get an async handle to the app. + pub fn to_async(&self) -> AsyncApp { + AsyncApp { + app: Rc::downgrade(&self.app), + background_executor: self.background_executor.clone(), + foreground_executor: self.foreground_executor.clone(), + } + } + + /// Get the background executor. + pub fn background_executor(&self) -> &BackgroundExecutor { + &self.background_executor + } + + /// Get the foreground executor. + pub fn foreground_executor(&self) -> &ForegroundExecutor { + &self.foreground_executor + } + + /// Get the text system. + pub fn text_system(&self) -> &Arc { + &self.text_system + } + + /// Check if a global of the given type exists. + pub fn has_global(&self) -> bool { + self.read(|cx| cx.has_global::()) + } + + /// Set a global value. + pub fn set_global(&mut self, global: G) { + self.update(|cx| cx.set_global(global)); + } + + /// Read a global value. + pub fn read_global(&self, f: impl FnOnce(&G, &App) -> R) -> R { + self.read(|cx| f(cx.global(), cx)) + } + + /// Update a global value. + pub fn update_global(&mut self, f: impl FnOnce(&mut G, &mut App) -> R) -> R { + self.update(|cx| cx.update_global(f)) + } + + // Platform simulation methods + + /// Write text to the simulated clipboard. + pub fn write_to_clipboard(&self, item: ClipboardItem) { + self.platform.write_to_clipboard(item); + } + + /// Read from the simulated clipboard. 
+ pub fn read_from_clipboard(&self) -> Option { + self.platform.read_from_clipboard() + } + + /// Get URLs that have been opened via `cx.open_url()`. + pub fn opened_url(&self) -> Option { + self.platform.opened_url.borrow().clone() + } + + /// Check if a file path prompt is pending. + pub fn did_prompt_for_new_path(&self) -> bool { + self.platform.did_prompt_for_new_path() + } + + /// Simulate answering a path selection dialog. + pub fn simulate_new_path_selection( + &self, + select: impl FnOnce(&std::path::Path) -> Option, + ) { + self.platform.simulate_new_path_selection(select); + } + + /// Check if a prompt dialog is pending. + pub fn has_pending_prompt(&self) -> bool { + self.platform.has_pending_prompt() + } + + /// Simulate answering a prompt dialog. + pub fn simulate_prompt_answer(&self, button: &str) { + self.platform.simulate_prompt_answer(button); + } + + /// Get all open windows. + pub fn windows(&self) -> Vec { + self.read(|cx| cx.windows()) + } +} + +impl Default for TestApp { + fn default() -> Self { + Self::new() + } +} + +/// A test window with inspection and simulation capabilities. +pub struct TestAppWindow { + handle: WindowHandle, + app: Rc, + platform: Rc, + background_executor: BackgroundExecutor, +} + +impl TestAppWindow { + /// Get the window handle. + pub fn handle(&self) -> WindowHandle { + self.handle + } + + /// Get the root view entity. + pub fn root(&self) -> Entity { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |root_view, _, _| { + root_view.downcast::().expect("root view type mismatch") + }) + .expect("window not found") + } + + /// Update the root view. 
+ pub fn update(&mut self, f: impl FnOnce(&mut V, &mut Window, &mut Context) -> R) -> R { + let result = { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |root_view, window, cx| { + let view = root_view.downcast::().expect("root view type mismatch"); + view.update(cx, |view, cx| f(view, window, cx)) + }) + .expect("window not found") + }; + self.background_executor.run_until_parked(); + result + } + + /// Read the root view. + pub fn read(&self, f: impl FnOnce(&V, &App) -> R) -> R { + let app = self.app.borrow(); + let view = self + .app + .borrow() + .windows + .get(self.handle.window_id()) + .and_then(|w| w.as_ref()) + .and_then(|w| w.root.clone()) + .and_then(|r| r.downcast::().ok()) + .expect("window or root view not found"); + f(view.read(&app), &app) + } + + /// Get the window title. + pub fn title(&self) -> Option { + let app = self.app.borrow(); + app.read_window(&self.handle, |_, _cx| { + // TODO: expose title through Window API + None + }) + .unwrap() + } + + /// Simulate a keystroke. + pub fn simulate_keystroke(&mut self, keystroke: &str) { + let keystroke = Keystroke::parse(keystroke).unwrap(); + { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |_, window, cx| { + window.dispatch_keystroke(keystroke, cx); + }) + .unwrap(); + } + self.background_executor.run_until_parked(); + } + + /// Simulate multiple keystrokes (space-separated). + pub fn simulate_keystrokes(&mut self, keystrokes: &str) { + for keystroke in keystrokes.split(' ') { + self.simulate_keystroke(keystroke); + } + } + + /// Simulate typing text. + pub fn simulate_input(&mut self, input: &str) { + for char in input.chars() { + self.simulate_keystroke(&char.to_string()); + } + } + + /// Simulate a mouse move. 
+ pub fn simulate_mouse_move(&mut self, position: Point) { + self.simulate_event(MouseMoveEvent { + position, + modifiers: Default::default(), + pressed_button: None, + }); + } + + /// Simulate a mouse down event. + pub fn simulate_mouse_down(&mut self, position: Point, button: MouseButton) { + self.simulate_event(MouseDownEvent { + position, + button, + modifiers: Default::default(), + click_count: 1, + first_mouse: false, + }); + } + + /// Simulate a mouse up event. + pub fn simulate_mouse_up(&mut self, position: Point, button: MouseButton) { + self.simulate_event(MouseUpEvent { + position, + button, + modifiers: Default::default(), + click_count: 1, + }); + } + + /// Simulate a click at the given position. + pub fn simulate_click(&mut self, position: Point, button: MouseButton) { + self.simulate_mouse_down(position, button); + self.simulate_mouse_up(position, button); + } + + /// Simulate a scroll event. + pub fn simulate_scroll(&mut self, position: Point, delta: Point) { + self.simulate_event(crate::ScrollWheelEvent { + position, + delta: crate::ScrollDelta::Pixels(delta), + modifiers: Default::default(), + touch_phase: crate::TouchPhase::Moved, + }); + } + + /// Simulate an input event. + pub fn simulate_event(&mut self, event: E) { + let platform_input = event.to_platform_input(); + { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |_, window, cx| { + window.dispatch_event(platform_input, cx); + }) + .unwrap(); + } + self.background_executor.run_until_parked(); + } + + /// Simulate resizing the window. 
+ pub fn simulate_resize(&mut self, size: Size) { + let window_id = self.handle.window_id(); + let mut app = self.app.borrow_mut(); + if let Some(Some(window)) = app.windows.get_mut(window_id) { + if let Some(test_window) = window.platform_window.as_test() { + test_window.simulate_resize(size); + } + } + drop(app); + self.background_executor.run_until_parked(); + } + + /// Force a redraw of the window. + pub fn draw(&mut self) { + let mut app = self.app.borrow_mut(); + let any_handle: AnyWindowHandle = self.handle.into(); + app.update_window(any_handle, |_, window, cx| { + window.draw(cx).clear(); + }) + .unwrap(); + } +} + +impl Clone for TestAppWindow { + fn clone(&self) -> Self { + Self { + handle: self.handle, + app: self.app.clone(), + platform: self.platform.clone(), + background_executor: self.background_executor.clone(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{FocusHandle, Focusable, div, prelude::*}; + + struct Counter { + count: usize, + focus_handle: FocusHandle, + } + + impl Counter { + fn new(_window: &mut Window, cx: &mut Context) -> Self { + let focus_handle = cx.focus_handle(); + Self { + count: 0, + focus_handle, + } + } + + fn increment(&mut self, _cx: &mut Context) { + self.count += 1; + } + } + + impl Focusable for Counter { + fn focus_handle(&self, _cx: &App) -> FocusHandle { + self.focus_handle.clone() + } + } + + impl Render for Counter { + fn render(&mut self, _window: &mut Window, _cx: &mut Context) -> impl IntoElement { + div().child(format!("Count: {}", self.count)) + } + } + + #[test] + fn test_basic_usage() { + let mut app = TestApp::new(); + + let mut window = app.open_window(Counter::new); + + window.update(|counter, _window, cx| { + counter.increment(cx); + }); + + window.read(|counter, _| { + assert_eq!(counter.count, 1); + }); + + drop(window); + app.update(|cx| cx.shutdown()); + } + + #[test] + fn test_entity_creation() { + let mut app = TestApp::new(); + + let entity = app.new_entity(|cx| Counter { + 
count: 42, + focus_handle: cx.focus_handle(), + }); + + app.read_entity(&entity, |counter, _| { + assert_eq!(counter.count, 42); + }); + + app.update_entity(&entity, |counter, _cx| { + counter.count += 1; + }); + + app.read_entity(&entity, |counter, _| { + assert_eq!(counter.count, 43); + }); + } + + #[test] + fn test_globals() { + let mut app = TestApp::new(); + + struct MyGlobal(String); + impl Global for MyGlobal {} + + assert!(!app.has_global::()); + + app.set_global(MyGlobal("hello".into())); + + assert!(app.has_global::()); + + app.read_global::(|global, _| { + assert_eq!(global.0, "hello"); + }); + + app.update_global::(|global, _| { + global.0 = "world".into(); + }); + + app.read_global::(|global, _| { + assert_eq!(global.0, "world"); + }); + } +} diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 0f0f0e14fbd8565d8f948579ed1ab23381c80108..7fa47191404fd28baf11f27d055e5ac7b85a747d 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -231,6 +231,33 @@ impl TestAppContext { .unwrap() } + /// Opens a new window with a specific size. + /// + /// Unlike `add_window` which uses maximized bounds, this allows controlling + /// the window dimensions, which is important for layout-sensitive tests. + pub fn open_window( + &mut self, + window_size: Size, + build_window: F, + ) -> WindowHandle + where + F: FnOnce(&mut Window, &mut Context) -> V, + V: 'static + Render, + { + let mut cx = self.app.borrow_mut(); + cx.open_window( + WindowOptions { + window_bounds: Some(WindowBounds::Windowed(Bounds { + origin: Point::default(), + size: window_size, + })), + ..Default::default() + }, + |window, cx| cx.new(|cx| build_window(window, cx)), + ) + .unwrap() + } + /// Adds a new window with no content. 
pub fn add_empty_window(&mut self) -> &mut VisualTestContext { let mut cx = self.app.borrow_mut(); diff --git a/crates/gpui/src/color.rs b/crates/gpui/src/color.rs index bb41a2f996e250b8c73377922f81170bb432321f..75585bcd90881513d835d28d260319d08acf9c4d 100644 --- a/crates/gpui/src/color.rs +++ b/crates/gpui/src/color.rs @@ -820,6 +820,15 @@ impl LinearColorStop { } impl Background { + /// Returns the solid color if this is a solid background, None otherwise. + pub fn as_solid(&self) -> Option { + if self.tag == BackgroundTag::Solid { + Some(self.solid) + } else { + None + } + } + /// Use specified color space for color interpolation. /// /// diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index cb65f758d5a521f15f77e7be266b1b4ed0480d03..f66f58447879afb86b721a9d6d7d2c59c65a8953 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -129,6 +129,13 @@ impl BackgroundExecutor { } } + /// Returns the underlying scheduler::BackgroundExecutor. + /// + /// This is used by Ex to pass the executor to thread/worktree code. + pub fn scheduler_executor(&self) -> scheduler::BackgroundExecutor { + self.inner.clone() + } + /// Enqueues the given future to be run to completion on a background thread. #[track_caller] pub fn spawn(&self, future: impl Future + Send + 'static) -> Task diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs index 061a055e7ef23bc4a76b44eaadb90bc1660fdb42..885dad0d96dc50993a7098b5d48509e4749894ec 100644 --- a/crates/gpui/src/platform.rs +++ b/crates/gpui/src/platform.rs @@ -555,6 +555,20 @@ pub trait PlatformWindow: HasWindowHandle + HasDisplayHandle { } } +/// A renderer for headless windows that can produce real rendered output. +#[cfg(any(test, feature = "test-support"))] +pub trait PlatformHeadlessRenderer { + /// Render a scene and return the result as an RGBA image. 
+ fn render_scene_to_image( + &mut self, + scene: &Scene, + size: Size, + ) -> Result; + + /// Returns the sprite atlas used by this renderer. + fn sprite_atlas(&self) -> Arc; +} + /// Type alias for runnables with metadata. /// Previously an enum with a single variant, now simplified to a direct type alias. #[doc(hidden)] @@ -573,6 +587,7 @@ pub trait PlatformDispatcher: Send + Sync { fn dispatch(&self, runnable: RunnableVariant, priority: Priority); fn dispatch_on_main_thread(&self, runnable: RunnableVariant, priority: Priority); fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant); + fn spawn_realtime(&self, f: Box); fn now(&self) -> Instant { @@ -592,19 +607,29 @@ pub trait PlatformDispatcher: Send + Sync { #[expect(missing_docs)] pub trait PlatformTextSystem: Send + Sync { fn add_fonts(&self, fonts: Vec>) -> Result<()>; + /// Get all available font names. fn all_font_names(&self) -> Vec; + /// Get the font ID for a font descriptor. fn font_id(&self, descriptor: &Font) -> Result; + /// Get metrics for a font. fn font_metrics(&self, font_id: FontId) -> FontMetrics; + /// Get typographic bounds for a glyph. fn typographic_bounds(&self, font_id: FontId, glyph_id: GlyphId) -> Result>; + /// Get the advance width for a glyph. fn advance(&self, font_id: FontId, glyph_id: GlyphId) -> Result>; + /// Get the glyph ID for a character. fn glyph_for_char(&self, font_id: FontId, ch: char) -> Option; + /// Get raster bounds for a glyph. fn glyph_raster_bounds(&self, params: &RenderGlyphParams) -> Result>; + /// Rasterize a glyph. fn rasterize_glyph( &self, params: &RenderGlyphParams, raster_bounds: Bounds, ) -> Result<(Size, Vec)>; + /// Layout a line of text with the given font runs. fn layout_line(&self, text: &str, font_size: Pixels, runs: &[FontRun]) -> LineLayout; + /// Returns the recommended text rendering mode for the given font and size. 
fn recommended_rendering_mode(&self, _font_id: FontId, _font_size: Pixels) -> TextRenderingMode; } diff --git a/crates/gpui/src/platform/test/dispatcher.rs b/crates/gpui/src/platform/test/dispatcher.rs index c40ec8f669d1e2e58f8af3bcf0fbd64fbddbe4d8..29aff84ff9d07f3a558ab68f2ac3117835688cc8 100644 --- a/crates/gpui/src/platform/test/dispatcher.rs +++ b/crates/gpui/src/platform/test/dispatcher.rs @@ -30,11 +30,12 @@ impl TestDispatcher { .map_or(false, |var| var == "1" || var == "true"), timeout_ticks: 0..=1000, })); + Self::from_scheduler(scheduler) + } - let session_id = scheduler.allocate_session_id(); - + pub fn from_scheduler(scheduler: Arc) -> Self { TestDispatcher { - session_id, + session_id: scheduler.allocate_session_id(), scheduler, num_cpus_override: Arc::new(AtomicUsize::new(0)), } @@ -76,6 +77,14 @@ impl TestDispatcher { while self.tick(false) {} } + pub fn allow_parking(&self) { + self.scheduler.allow_parking(); + } + + pub fn forbid_parking(&self) { + self.scheduler.forbid_parking(); + } + /// Override the value returned by `BackgroundExecutor::num_cpus()` in tests. /// A value of 0 means no override (the default of 4 is used). 
pub fn set_num_cpus(&self, count: usize) { diff --git a/crates/gpui/src/platform/test/platform.rs b/crates/gpui/src/platform/test/platform.rs index 1da42f5742215f9001dcbd09cc42977ea28623ea..a59b21f038a01b48686ee211919afd7c647b7331 100644 --- a/crates/gpui/src/platform/test/platform.rs +++ b/crates/gpui/src/platform/test/platform.rs @@ -1,9 +1,9 @@ use crate::{ AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DevicePixels, DummyKeyboardMapper, ForegroundExecutor, Keymap, NoopTextSystem, Platform, PlatformDisplay, - PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, PromptButton, - ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, SourceMetadata, Task, - TestDisplay, TestWindow, ThermalState, WindowAppearance, WindowParams, size, + PlatformHeadlessRenderer, PlatformKeyboardLayout, PlatformKeyboardMapper, PlatformTextSystem, + PromptButton, ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, SourceMetadata, + Task, TestDisplay, TestWindow, ThermalState, WindowAppearance, WindowParams, size, }; use anyhow::Result; use collections::VecDeque; @@ -34,6 +34,7 @@ pub(crate) struct TestPlatform { pub opened_url: RefCell>, pub text_system: Arc, pub expect_restart: RefCell>>>, + headless_renderer_factory: Option Option>>>, weak: Weak, } @@ -88,8 +89,30 @@ pub(crate) struct TestPrompts { impl TestPlatform { pub fn new(executor: BackgroundExecutor, foreground_executor: ForegroundExecutor) -> Rc { - let text_system = Arc::new(NoopTextSystem); - + Self::with_platform( + executor, + foreground_executor, + Arc::new(NoopTextSystem), + None, + ) + } + + pub fn with_text_system( + executor: BackgroundExecutor, + foreground_executor: ForegroundExecutor, + text_system: Arc, + ) -> Rc { + Self::with_platform(executor, foreground_executor, text_system, None) + } + + pub fn with_platform( + executor: BackgroundExecutor, + foreground_executor: ForegroundExecutor, + text_system: Arc, + headless_renderer_factory: Option< + Box Option>>, + 
>, + ) -> Rc { Rc::new_cyclic(|weak| TestPlatform { background_executor: executor, foreground_executor, @@ -107,6 +130,7 @@ impl TestPlatform { weak: weak.clone(), opened_url: Default::default(), text_system, + headless_renderer_factory, }) } @@ -299,11 +323,13 @@ impl Platform for TestPlatform { handle: AnyWindowHandle, params: WindowParams, ) -> anyhow::Result> { + let renderer = self.headless_renderer_factory.as_ref().and_then(|f| f()); let window = TestWindow::new( handle, params, self.weak.clone(), self.active_display.clone(), + renderer, ); Ok(Box::new(window)) } diff --git a/crates/gpui/src/platform/test/window.rs b/crates/gpui/src/platform/test/window.rs index feb3b162abe09d8cdef008aa9f794b046da22cc6..583450c9e93e6bfdf8f45a4dcd1a83feb9b08111 100644 --- a/crates/gpui/src/platform/test/window.rs +++ b/crates/gpui/src/platform/test/window.rs @@ -1,10 +1,12 @@ use crate::{ - AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DispatchEventResult, GpuSpecs, - Pixels, PlatformAtlas, PlatformDisplay, PlatformInput, PlatformInputHandler, PlatformWindow, - Point, PromptButton, RequestFrameOptions, Size, TestPlatform, TileId, WindowAppearance, + AnyWindowHandle, AtlasKey, AtlasTextureId, AtlasTile, Bounds, DevicePixels, + DispatchEventResult, GpuSpecs, Pixels, PlatformAtlas, PlatformDisplay, + PlatformHeadlessRenderer, PlatformInput, PlatformInputHandler, PlatformWindow, Point, + PromptButton, RequestFrameOptions, Scene, Size, TestPlatform, TileId, WindowAppearance, WindowBackgroundAppearance, WindowBounds, WindowControlArea, WindowParams, }; use collections::HashMap; +use image::RgbaImage; use parking_lot::Mutex; use raw_window_handle::{HasDisplayHandle, HasWindowHandle}; use std::{ @@ -21,6 +23,7 @@ pub(crate) struct TestWindowState { platform: Weak, // TODO: Replace with `Rc` sprite_atlas: Arc, + renderer: Option>, pub(crate) should_close_handler: Option bool>>, hit_test_window_control_callback: Option Option>>, input_callback: Option 
DispatchEventResult>>, @@ -57,13 +60,19 @@ impl TestWindow { params: WindowParams, platform: Weak, display: Rc, + renderer: Option>, ) -> Self { + let sprite_atlas: Arc = match &renderer { + Some(r) => r.sprite_atlas(), + None => Arc::new(TestAtlas::new()), + }; Self(Rc::new(Mutex::new(TestWindowState { bounds: params.bounds, display, platform, handle, - sprite_atlas: Arc::new(TestAtlas::new()), + sprite_atlas, + renderer, title: Default::default(), edited: false, should_close_handler: None, @@ -81,10 +90,11 @@ impl TestWindow { pub fn simulate_resize(&mut self, size: Size) { let scale_factor = self.scale_factor(); let mut lock = self.0.lock(); + // Always update bounds, even if no callback is registered + lock.bounds.size = size; let Some(mut callback) = lock.resize_callback.take() else { return; }; - lock.bounds.size = size; drop(lock); callback(size, scale_factor); self.0.lock().resize_callback = Some(callback); @@ -275,12 +285,25 @@ impl PlatformWindow for TestWindow { fn on_appearance_changed(&self, _callback: Box) {} - fn draw(&self, _scene: &crate::Scene) {} + fn draw(&self, _scene: &Scene) {} fn sprite_atlas(&self) -> sync::Arc { self.0.lock().sprite_atlas.clone() } + #[cfg(any(test, feature = "test-support"))] + fn render_to_image(&self, scene: &Scene) -> anyhow::Result { + let mut state = self.0.lock(); + let size = state.bounds.size; + if let Some(renderer) = &mut state.renderer { + let scale_factor = 2.0; + let device_size: Size = size.to_device_pixels(scale_factor); + renderer.render_scene_to_image(scene, device_size) + } else { + anyhow::bail!("render_to_image not available: no HeadlessRenderer configured") + } + } + fn as_test(&mut self) -> Option<&mut TestWindow> { Some(self) } diff --git a/crates/gpui/src/scene.rs b/crates/gpui/src/scene.rs index 7e0ffe017024cc7914885df9ea713a3ec3db820e..22b1bb468d84b2897b312c6fc8af00ee5c8523db 100644 --- a/crates/gpui/src/scene.rs +++ b/crates/gpui/src/scene.rs @@ -657,7 +657,7 @@ impl Default for 
TransformationMatrix { #[expect(missing_docs)] pub struct MonochromeSprite { pub order: DrawOrder, - pub pad: u32, // align to 8 bytes + pub pad: u32, pub bounds: Bounds, pub content_mask: ContentMask, pub color: Hsla, @@ -695,7 +695,7 @@ impl From for Primitive { #[expect(missing_docs)] pub struct PolychromeSprite { pub order: DrawOrder, - pub pad: u32, // align to 8 bytes + pub pad: u32, pub grayscale: bool, pub opacity: f32, pub bounds: Bounds, diff --git a/crates/gpui/src/text_system.rs b/crates/gpui/src/text_system.rs index 43982b2666bde8210f770419623cc0b9afd6e2af..b62a0ad6fd4f885b127144bd66e8e3e41747d889 100644 --- a/crates/gpui/src/text_system.rs +++ b/crates/gpui/src/text_system.rs @@ -63,7 +63,8 @@ pub struct TextSystem { } impl TextSystem { - pub(crate) fn new(platform_text_system: Arc) -> Self { + /// Create a new TextSystem with the given platform text system. + pub fn new(platform_text_system: Arc) -> Self { TextSystem { platform_text_system, font_metrics: RwLock::default(), @@ -372,7 +373,8 @@ pub struct WindowTextSystem { } impl WindowTextSystem { - pub(crate) fn new(text_system: Arc) -> Self { + /// Create a new WindowTextSystem with the given TextSystem. + pub fn new(text_system: Arc) -> Self { Self { line_layout_cache: LineLayoutCache::new(text_system.platform_text_system.clone()), text_system, @@ -438,6 +440,74 @@ impl WindowTextSystem { } } + /// Shape the given line using a caller-provided content hash as the cache key. + /// + /// This enables cache hits without materializing a contiguous `SharedString` for the text. + /// If the cache misses, `materialize_text` is invoked to produce the `SharedString` for shaping. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). + /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). + /// + /// Like [`Self::shape_line`], this must be used only for single-line text (no `\n`). 
+ pub fn shape_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[TextRun], + force_width: Option, + materialize_text: impl FnOnce() -> SharedString, + ) -> ShapedLine { + let mut decoration_runs = SmallVec::<[DecorationRun; 32]>::new(); + for run in runs { + if let Some(last_run) = decoration_runs.last_mut() + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + && last_run.background_color == run.background_color + { + last_run.len += run.len as u32; + continue; + } + decoration_runs.push(DecorationRun { + len: run.len as u32, + color: run.color, + background_color: run.background_color, + underline: run.underline, + strikethrough: run.strikethrough, + }); + } + + let mut used_force_width = force_width; + let layout = self.layout_line_by_hash( + text_hash, + text_len, + font_size, + runs, + used_force_width, + || { + let text = materialize_text(); + debug_assert!( + text.find('\n').is_none(), + "text argument should not contain newlines" + ); + text + }, + ); + + // We only materialize actual text on cache miss; on hit we avoid allocations. + // Since `ShapedLine` carries a `SharedString`, use an empty placeholder for hits. + // NOTE: Callers must not rely on `ShapedLine.text` for content when using this API. + let text: SharedString = SharedString::new_static(""); + + ShapedLine { + layout, + text, + decoration_runs, + } + } + /// Shape a multi line string of text, at the given font_size, for painting to the screen. /// Subsets of the text can be styled independently with the `runs` parameter. /// If `wrap_width` is provided, the line breaks will be adjusted to fit within the given width. @@ -627,6 +697,130 @@ impl WindowTextSystem { layout } + + /// Probe the line layout cache using a caller-provided content hash, without allocating. 
+ /// + /// Returns `Some(layout)` if the layout is already cached in either the current frame + /// or the previous frame. Returns `None` if it is not cached. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). + /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). + pub fn try_layout_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[TextRun], + force_width: Option, + ) -> Option> { + let mut last_run = None::<&TextRun>; + let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default(); + font_runs.clear(); + + for run in runs.iter() { + let decoration_changed = if let Some(last_run) = last_run + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + // we do not consider differing background color relevant, as it does not affect glyphs + // && last_run.background_color == run.background_color + { + false + } else { + last_run = Some(run); + true + }; + + let font_id = self.resolve_font(&run.font); + if let Some(font_run) = font_runs.last_mut() + && font_id == font_run.font_id + && !decoration_changed + { + font_run.len += run.len; + } else { + font_runs.push(FontRun { + len: run.len, + font_id, + }); + } + } + + let layout = self.line_layout_cache.try_layout_line_by_hash( + text_hash, + text_len, + font_size, + &font_runs, + force_width, + ); + + self.font_runs_pool.lock().push(font_runs); + + layout + } + + /// Layout the given line of text using a caller-provided content hash as the cache key. + /// + /// This enables cache hits without materializing a contiguous `SharedString` for the text. + /// If the cache misses, `materialize_text` is invoked to produce the `SharedString` for shaping. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). 
+ /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). + pub fn layout_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[TextRun], + force_width: Option, + materialize_text: impl FnOnce() -> SharedString, + ) -> Arc { + let mut last_run = None::<&TextRun>; + let mut font_runs = self.font_runs_pool.lock().pop().unwrap_or_default(); + font_runs.clear(); + + for run in runs.iter() { + let decoration_changed = if let Some(last_run) = last_run + && last_run.color == run.color + && last_run.underline == run.underline + && last_run.strikethrough == run.strikethrough + // we do not consider differing background color relevant, as it does not affect glyphs + // && last_run.background_color == run.background_color + { + false + } else { + last_run = Some(run); + true + }; + + let font_id = self.resolve_font(&run.font); + if let Some(font_run) = font_runs.last_mut() + && font_id == font_run.font_id + && !decoration_changed + { + font_run.len += run.len; + } else { + font_runs.push(FontRun { + len: run.len, + font_id, + }); + } + } + + let layout = self.line_layout_cache.layout_line_by_hash( + text_hash, + text_len, + font_size, + &font_runs, + force_width, + materialize_text, + ); + + self.font_runs_pool.lock().push(font_runs); + + layout + } } #[derive(Hash, Eq, PartialEq)] @@ -802,6 +996,11 @@ impl TextRun { #[repr(C)] pub struct GlyphId(pub u32); +/// Parameters for rendering a glyph, used as cache keys for raster bounds. +/// +/// This struct identifies a specific glyph rendering configuration including +/// font, size, subpixel positioning, and scale factor. It's used to look up +/// cached raster bounds and sprite atlas entries. 
#[derive(Clone, Debug, PartialEq)] #[expect(missing_docs)] pub struct RenderGlyphParams { diff --git a/crates/gpui/src/text_system/line.rs b/crates/gpui/src/text_system/line.rs index c87e051ad3b4e5fc86d17ad0e6168553108175fa..7b5714188ff97d0169806ac5da9f039f9be2c16a 100644 --- a/crates/gpui/src/text_system/line.rs +++ b/crates/gpui/src/text_system/line.rs @@ -1,12 +1,24 @@ use crate::{ - App, Bounds, Half, Hsla, LineLayout, Pixels, Point, Result, SharedString, StrikethroughStyle, - TextAlign, UnderlineStyle, Window, WrapBoundary, WrappedLineLayout, black, fill, point, px, - size, + App, Bounds, DevicePixels, Half, Hsla, LineLayout, Pixels, Point, RenderGlyphParams, Result, + ShapedGlyph, ShapedRun, SharedString, StrikethroughStyle, TextAlign, UnderlineStyle, Window, + WrapBoundary, WrappedLineLayout, black, fill, point, px, size, }; use derive_more::{Deref, DerefMut}; use smallvec::SmallVec; use std::sync::Arc; +/// Pre-computed glyph data for efficient painting without per-glyph cache lookups. +/// +/// This is produced by `ShapedLine::compute_glyph_raster_data` during prepaint +/// and consumed by `ShapedLine::paint_with_raster_data` during paint. +#[derive(Clone, Debug)] +pub struct GlyphRasterData { + /// The raster bounds for each glyph, in paint order. + pub bounds: Vec>, + /// The render params for each glyph (needed for sprite atlas lookup). + pub params: Vec, +} + /// Set the text decoration for a run of text. #[derive(Debug, Clone)] pub struct DecorationRun { @@ -44,6 +56,14 @@ impl ShapedLine { self.layout.len } + /// The width of the shaped line in pixels. + /// + /// This is the glyph advance width computed by the text shaping system and is useful for + /// incrementally advancing a "pen" when painting multiple fragments on the same row. + pub fn width(&self) -> Pixels { + self.layout.width + } + /// Override the len, useful if you're rendering text a /// as text b (e.g. rendering invisibles). 
pub fn with_len(mut self, len: usize) -> Self { @@ -108,6 +128,120 @@ impl ShapedLine { Ok(()) } + + /// Split this shaped line at a byte index, returning `(prefix, suffix)`. + /// + /// - `prefix` contains glyphs for bytes `[0, byte_index)` with original positions. + /// Its width equals the x-advance up to the split point. + /// - `suffix` contains glyphs for bytes `[byte_index, len)` with positions + /// shifted left so the first glyph starts at x=0, and byte indices rebased to 0. + /// - Decoration runs are partitioned at the boundary; a run that straddles it is + /// split into two with adjusted lengths. + /// - `font_size`, `ascent`, and `descent` are copied to both halves. + pub fn split_at(&self, byte_index: usize) -> (ShapedLine, ShapedLine) { + let x_offset = self.layout.x_for_index(byte_index); + + // Partition glyph runs. A single run may contribute glyphs to both halves. + let mut left_runs = Vec::new(); + let mut right_runs = Vec::new(); + + for run in &self.layout.runs { + let split_pos = run.glyphs.partition_point(|g| g.index < byte_index); + + if split_pos > 0 { + left_runs.push(ShapedRun { + font_id: run.font_id, + glyphs: run.glyphs[..split_pos].to_vec(), + }); + } + + if split_pos < run.glyphs.len() { + let right_glyphs = run.glyphs[split_pos..] + .iter() + .map(|g| ShapedGlyph { + id: g.id, + position: point(g.position.x - x_offset, g.position.y), + index: g.index - byte_index, + is_emoji: g.is_emoji, + }) + .collect(); + right_runs.push(ShapedRun { + font_id: run.font_id, + glyphs: right_glyphs, + }); + } + } + + // Partition decoration runs. A run straddling the boundary is split into two. 
+ let mut left_decorations = SmallVec::new(); + let mut right_decorations = SmallVec::new(); + let mut decoration_offset = 0u32; + let split_point = byte_index as u32; + + for decoration in &self.decoration_runs { + let run_end = decoration_offset + decoration.len; + + if run_end <= split_point { + left_decorations.push(decoration.clone()); + } else if decoration_offset >= split_point { + right_decorations.push(decoration.clone()); + } else { + let left_len = split_point - decoration_offset; + let right_len = run_end - split_point; + left_decorations.push(DecorationRun { + len: left_len, + color: decoration.color, + background_color: decoration.background_color, + underline: decoration.underline, + strikethrough: decoration.strikethrough, + }); + right_decorations.push(DecorationRun { + len: right_len, + color: decoration.color, + background_color: decoration.background_color, + underline: decoration.underline, + strikethrough: decoration.strikethrough, + }); + } + + decoration_offset = run_end; + } + + // Split text + let left_text = SharedString::new(self.text[..byte_index].to_string()); + let right_text = SharedString::new(self.text[byte_index..].to_string()); + + let left_width = x_offset; + let right_width = self.layout.width - left_width; + + let left = ShapedLine { + layout: Arc::new(LineLayout { + font_size: self.layout.font_size, + width: left_width, + ascent: self.layout.ascent, + descent: self.layout.descent, + runs: left_runs, + len: byte_index, + }), + text: left_text, + decoration_runs: left_decorations, + }; + + let right = ShapedLine { + layout: Arc::new(LineLayout { + font_size: self.layout.font_size, + width: right_width, + ascent: self.layout.ascent, + descent: self.layout.descent, + runs: right_runs, + len: self.layout.len - byte_index, + }), + text: right_text, + decoration_runs: right_decorations, + }; + + (left, right) + } } /// A line of text that has been shaped, decorated, and wrapped by the text layout system. 
@@ -594,3 +728,268 @@ fn aligned_origin_x( TextAlign::Right => origin.x + align_width - line_width, } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::{FontId, GlyphId}; + + /// Helper: build a ShapedLine from glyph descriptors without the platform text system. + /// Each glyph is described as (byte_index, x_position). + fn make_shaped_line( + text: &str, + glyphs: &[(usize, f32)], + width: f32, + decorations: &[DecorationRun], + ) -> ShapedLine { + let shaped_glyphs: Vec = glyphs + .iter() + .map(|&(index, x)| ShapedGlyph { + id: GlyphId(0), + position: point(px(x), px(0.0)), + index, + is_emoji: false, + }) + .collect(); + + ShapedLine { + layout: Arc::new(LineLayout { + font_size: px(16.0), + width: px(width), + ascent: px(12.0), + descent: px(4.0), + runs: vec![ShapedRun { + font_id: FontId(0), + glyphs: shaped_glyphs, + }], + len: text.len(), + }), + text: SharedString::new(text.to_string()), + decoration_runs: SmallVec::from(decorations.to_vec()), + } + } + + #[test] + fn test_split_at_invariants() { + // Split "abcdef" at every possible byte index and verify structural invariants. 
+ let line = make_shaped_line( + "abcdef", + &[ + (0, 0.0), + (1, 10.0), + (2, 20.0), + (3, 30.0), + (4, 40.0), + (5, 50.0), + ], + 60.0, + &[], + ); + + for i in 0..=6 { + let (left, right) = line.split_at(i); + + assert_eq!( + left.width() + right.width(), + line.width(), + "widths must sum at split={i}" + ); + assert_eq!( + left.len() + right.len(), + line.len(), + "lengths must sum at split={i}" + ); + assert_eq!( + format!("{}{}", left.text.as_ref(), right.text.as_ref()), + "abcdef", + "text must concatenate at split={i}" + ); + assert_eq!(left.font_size, line.font_size, "font_size at split={i}"); + assert_eq!(right.ascent, line.ascent, "ascent at split={i}"); + assert_eq!(right.descent, line.descent, "descent at split={i}"); + } + + // Edge: split at 0 produces no left runs, full content on right + let (left, right) = line.split_at(0); + assert_eq!(left.runs.len(), 0); + assert_eq!(right.runs[0].glyphs.len(), 6); + + // Edge: split at end produces full content on left, no right runs + let (left, right) = line.split_at(6); + assert_eq!(left.runs[0].glyphs.len(), 6); + assert_eq!(right.runs.len(), 0); + } + + #[test] + fn test_split_at_glyph_rebasing() { + // Two font runs (simulating a font fallback boundary at byte 3): + // run A (FontId 0): glyphs at bytes 0,1,2 positions 0,10,20 + // run B (FontId 1): glyphs at bytes 3,4,5 positions 30,40,50 + // Successive splits simulate the incremental splitting done during wrap. 
+ let line = ShapedLine { + layout: Arc::new(LineLayout { + font_size: px(16.0), + width: px(60.0), + ascent: px(12.0), + descent: px(4.0), + runs: vec![ + ShapedRun { + font_id: FontId(0), + glyphs: vec![ + ShapedGlyph { + id: GlyphId(0), + position: point(px(0.0), px(0.0)), + index: 0, + is_emoji: false, + }, + ShapedGlyph { + id: GlyphId(0), + position: point(px(10.0), px(0.0)), + index: 1, + is_emoji: false, + }, + ShapedGlyph { + id: GlyphId(0), + position: point(px(20.0), px(0.0)), + index: 2, + is_emoji: false, + }, + ], + }, + ShapedRun { + font_id: FontId(1), + glyphs: vec![ + ShapedGlyph { + id: GlyphId(0), + position: point(px(30.0), px(0.0)), + index: 3, + is_emoji: false, + }, + ShapedGlyph { + id: GlyphId(0), + position: point(px(40.0), px(0.0)), + index: 4, + is_emoji: false, + }, + ShapedGlyph { + id: GlyphId(0), + position: point(px(50.0), px(0.0)), + index: 5, + is_emoji: false, + }, + ], + }, + ], + len: 6, + }), + text: SharedString::new("abcdef".to_string()), + decoration_runs: SmallVec::new(), + }; + + // First split at byte 2 — mid-run in run A + let (first, remainder) = line.split_at(2); + assert_eq!(first.text.as_ref(), "ab"); + assert_eq!(first.runs.len(), 1); + assert_eq!(first.runs[0].font_id, FontId(0)); + + // Remainder "cdef" should have two runs: tail of A (1 glyph) + all of B (3 glyphs) + assert_eq!(remainder.text.as_ref(), "cdef"); + assert_eq!(remainder.runs.len(), 2); + assert_eq!(remainder.runs[0].font_id, FontId(0)); + assert_eq!(remainder.runs[0].glyphs.len(), 1); + assert_eq!(remainder.runs[0].glyphs[0].index, 0); + assert_eq!(remainder.runs[0].glyphs[0].position.x, px(0.0)); + assert_eq!(remainder.runs[1].font_id, FontId(1)); + assert_eq!(remainder.runs[1].glyphs[0].index, 1); + assert_eq!(remainder.runs[1].glyphs[0].position.x, px(10.0)); + + // Second split at byte 2 within remainder — crosses the run boundary + let (second, final_part) = remainder.split_at(2); + assert_eq!(second.text.as_ref(), "cd"); + 
assert_eq!(final_part.text.as_ref(), "ef"); + assert_eq!(final_part.runs[0].glyphs[0].index, 0); + assert_eq!(final_part.runs[0].glyphs[0].position.x, px(0.0)); + + // Widths must sum across all three pieces + assert_eq!( + first.width() + second.width() + final_part.width(), + line.width() + ); + } + + #[test] + fn test_split_at_decorations() { + // Three decoration runs: red [0..2), green [2..5), blue [5..6). + // Split at byte 3 — red goes entirely left, green straddles, blue goes entirely right. + let red = Hsla { + h: 0.0, + s: 1.0, + l: 0.5, + a: 1.0, + }; + let green = Hsla { + h: 0.3, + s: 1.0, + l: 0.5, + a: 1.0, + }; + let blue = Hsla { + h: 0.6, + s: 1.0, + l: 0.5, + a: 1.0, + }; + + let line = make_shaped_line( + "abcdef", + &[ + (0, 0.0), + (1, 10.0), + (2, 20.0), + (3, 30.0), + (4, 40.0), + (5, 50.0), + ], + 60.0, + &[ + DecorationRun { + len: 2, + color: red, + background_color: None, + underline: None, + strikethrough: None, + }, + DecorationRun { + len: 3, + color: green, + background_color: None, + underline: None, + strikethrough: None, + }, + DecorationRun { + len: 1, + color: blue, + background_color: None, + underline: None, + strikethrough: None, + }, + ], + ); + + let (left, right) = line.split_at(3); + + // Left: red(2) + green(1) — green straddled, left portion has len 1 + assert_eq!(left.decoration_runs.len(), 2); + assert_eq!(left.decoration_runs[0].len, 2); + assert_eq!(left.decoration_runs[0].color, red); + assert_eq!(left.decoration_runs[1].len, 1); + assert_eq!(left.decoration_runs[1].color, green); + + // Right: green(2) + blue(1) — green straddled, right portion has len 2 + assert_eq!(right.decoration_runs.len(), 2); + assert_eq!(right.decoration_runs[0].len, 2); + assert_eq!(right.decoration_runs[0].color, green); + assert_eq!(right.decoration_runs[1].len, 1); + assert_eq!(right.decoration_runs[1].color, blue); + } +} diff --git a/crates/gpui/src/text_system/line_layout.rs b/crates/gpui/src/text_system/line_layout.rs index 
78ab21b3d324674b0f34d9ab418893430df70f2a..8f3d7563d068979defa8b3f93367a2c9b7102cc1 100644 --- a/crates/gpui/src/text_system/line_layout.rs +++ b/crates/gpui/src/text_system/line_layout.rs @@ -401,12 +401,25 @@ struct FrameCache { wrapped_lines: FxHashMap, Arc>, used_lines: Vec>, used_wrapped_lines: Vec>, + + // Content-addressable caches keyed by caller-provided text hash + layout params. + // These allow cache hits without materializing a contiguous `SharedString`. + // + // IMPORTANT: To support allocation-free lookups, we store these maps using a key type + // (`HashedCacheKeyRef`) that can be computed without building a contiguous `&str`/`SharedString`. + // On miss, we allocate once and store under an owned `HashedCacheKey`. + lines_by_hash: FxHashMap, Arc>, + wrapped_lines_by_hash: FxHashMap, Arc>, + used_lines_by_hash: Vec>, + used_wrapped_lines_by_hash: Vec>, } #[derive(Clone, Default)] pub(crate) struct LineLayoutIndex { lines_index: usize, wrapped_lines_index: usize, + lines_by_hash_index: usize, + wrapped_lines_by_hash_index: usize, } impl LineLayoutCache { @@ -423,6 +436,8 @@ impl LineLayoutCache { LineLayoutIndex { lines_index: frame.used_lines.len(), wrapped_lines_index: frame.used_wrapped_lines.len(), + lines_by_hash_index: frame.used_lines_by_hash.len(), + wrapped_lines_by_hash_index: frame.used_wrapped_lines_by_hash.len(), } } @@ -445,6 +460,24 @@ impl LineLayoutCache { } current_frame.used_wrapped_lines.push(key.clone()); } + + for key in &previous_frame.used_lines_by_hash + [range.start.lines_by_hash_index..range.end.lines_by_hash_index] + { + if let Some((key, line)) = previous_frame.lines_by_hash.remove_entry(key) { + current_frame.lines_by_hash.insert(key, line); + } + current_frame.used_lines_by_hash.push(key.clone()); + } + + for key in &previous_frame.used_wrapped_lines_by_hash + [range.start.wrapped_lines_by_hash_index..range.end.wrapped_lines_by_hash_index] + { + if let Some((key, line)) = 
previous_frame.wrapped_lines_by_hash.remove_entry(key) { + current_frame.wrapped_lines_by_hash.insert(key, line); + } + current_frame.used_wrapped_lines_by_hash.push(key.clone()); + } } pub fn truncate_layouts(&self, index: LineLayoutIndex) { @@ -453,6 +486,12 @@ impl LineLayoutCache { current_frame .used_wrapped_lines .truncate(index.wrapped_lines_index); + current_frame + .used_lines_by_hash + .truncate(index.lines_by_hash_index); + current_frame + .used_wrapped_lines_by_hash + .truncate(index.wrapped_lines_by_hash_index); } pub fn finish_frame(&self) { @@ -463,6 +502,11 @@ impl LineLayoutCache { curr_frame.wrapped_lines.clear(); curr_frame.used_lines.clear(); curr_frame.used_wrapped_lines.clear(); + + curr_frame.lines_by_hash.clear(); + curr_frame.wrapped_lines_by_hash.clear(); + curr_frame.used_lines_by_hash.clear(); + curr_frame.used_wrapped_lines_by_hash.clear(); } pub fn layout_wrapped_line( @@ -590,6 +634,165 @@ impl LineLayoutCache { layout } } + + /// Try to retrieve a previously-shaped line layout using a caller-provided content hash. + /// + /// This is a *non-allocating* cache probe: it does not materialize any text. If the layout + /// is not already cached in either the current frame or previous frame, returns `None`. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). + /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). 
+ pub fn try_layout_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[FontRun], + force_width: Option, + ) -> Option> { + let key_ref = HashedCacheKeyRef { + text_hash, + text_len, + font_size, + runs, + wrap_width: None, + force_width, + }; + + let current_frame = self.current_frame.read(); + if let Some((_, layout)) = current_frame.lines_by_hash.iter().find(|(key, _)| { + HashedCacheKeyRef { + text_hash: key.text_hash, + text_len: key.text_len, + font_size: key.font_size, + runs: key.runs.as_slice(), + wrap_width: key.wrap_width, + force_width: key.force_width, + } == key_ref + }) { + return Some(layout.clone()); + } + + let previous_frame = self.previous_frame.lock(); + if let Some((_, layout)) = previous_frame.lines_by_hash.iter().find(|(key, _)| { + HashedCacheKeyRef { + text_hash: key.text_hash, + text_len: key.text_len, + font_size: key.font_size, + runs: key.runs.as_slice(), + wrap_width: key.wrap_width, + force_width: key.force_width, + } == key_ref + }) { + return Some(layout.clone()); + } + + None + } + + /// Layout a line of text using a caller-provided content hash as the cache key. + /// + /// This enables cache hits without materializing a contiguous `SharedString` for `text`. + /// If the cache misses, `materialize_text` is invoked to produce the `SharedString` for shaping. + /// + /// Contract (caller enforced): + /// - Same `text_hash` implies identical text content (collision risk accepted by caller). + /// - `text_len` should be the UTF-8 byte length of the text (helps reduce accidental collisions). + pub fn layout_line_by_hash( + &self, + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &[FontRun], + force_width: Option, + materialize_text: impl FnOnce() -> SharedString, + ) -> Arc { + let key_ref = HashedCacheKeyRef { + text_hash, + text_len, + font_size, + runs, + wrap_width: None, + force_width, + }; + + // Fast path: already cached (no allocation). 
+ let current_frame = self.current_frame.upgradable_read(); + if let Some((_, layout)) = current_frame.lines_by_hash.iter().find(|(key, _)| { + HashedCacheKeyRef { + text_hash: key.text_hash, + text_len: key.text_len, + font_size: key.font_size, + runs: key.runs.as_slice(), + wrap_width: key.wrap_width, + force_width: key.force_width, + } == key_ref + }) { + return layout.clone(); + } + + let mut current_frame = RwLockUpgradableReadGuard::upgrade(current_frame); + + // Try to reuse from previous frame without allocating; do a linear scan to find a matching key. + // (We avoid `drain()` here because it would eagerly move all entries.) + let mut previous_frame = self.previous_frame.lock(); + if let Some(existing_key) = previous_frame + .used_lines_by_hash + .iter() + .find(|key| { + HashedCacheKeyRef { + text_hash: key.text_hash, + text_len: key.text_len, + font_size: key.font_size, + runs: key.runs.as_slice(), + wrap_width: key.wrap_width, + force_width: key.force_width, + } == key_ref + }) + .cloned() + { + if let Some((key, layout)) = previous_frame.lines_by_hash.remove_entry(&existing_key) { + current_frame + .lines_by_hash + .insert(key.clone(), layout.clone()); + current_frame.used_lines_by_hash.push(key); + return layout; + } + } + + let text = materialize_text(); + let mut layout = self + .platform_text_system + .layout_line(&text, font_size, runs); + + if let Some(force_width) = force_width { + let mut glyph_pos = 0; + for run in layout.runs.iter_mut() { + for glyph in run.glyphs.iter_mut() { + if (glyph.position.x - glyph_pos * force_width).abs() > px(1.) 
{ + glyph.position.x = glyph_pos * force_width; + } + glyph_pos += 1; + } + } + } + + let key = Arc::new(HashedCacheKey { + text_hash, + text_len, + font_size, + runs: SmallVec::from(runs), + wrap_width: None, + force_width, + }); + let layout = Arc::new(layout); + current_frame + .lines_by_hash + .insert(key.clone(), layout.clone()); + current_frame.used_lines_by_hash.push(key); + layout + } } /// A run of text with a single font. @@ -622,12 +825,80 @@ struct CacheKeyRef<'a> { force_width: Option, } +#[derive(Clone, Debug)] +struct HashedCacheKey { + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: SmallVec<[FontRun; 1]>, + wrap_width: Option, + force_width: Option, +} + +#[derive(Copy, Clone)] +struct HashedCacheKeyRef<'a> { + text_hash: u64, + text_len: usize, + font_size: Pixels, + runs: &'a [FontRun], + wrap_width: Option, + force_width: Option, +} + impl PartialEq for dyn AsCacheKeyRef + '_ { fn eq(&self, other: &dyn AsCacheKeyRef) -> bool { self.as_cache_key_ref() == other.as_cache_key_ref() } } +impl PartialEq for HashedCacheKey { + fn eq(&self, other: &Self) -> bool { + self.text_hash == other.text_hash + && self.text_len == other.text_len + && self.font_size == other.font_size + && self.runs.as_slice() == other.runs.as_slice() + && self.wrap_width == other.wrap_width + && self.force_width == other.force_width + } +} + +impl Eq for HashedCacheKey {} + +impl Hash for HashedCacheKey { + fn hash(&self, state: &mut H) { + self.text_hash.hash(state); + self.text_len.hash(state); + self.font_size.hash(state); + self.runs.as_slice().hash(state); + self.wrap_width.hash(state); + self.force_width.hash(state); + } +} + +impl PartialEq for HashedCacheKeyRef<'_> { + fn eq(&self, other: &Self) -> bool { + self.text_hash == other.text_hash + && self.text_len == other.text_len + && self.font_size == other.font_size + && self.runs == other.runs + && self.wrap_width == other.wrap_width + && self.force_width == other.force_width + } +} + +impl Eq for 
HashedCacheKeyRef<'_> {} + +impl Hash for HashedCacheKeyRef<'_> { + fn hash(&self, state: &mut H) { + self.text_hash.hash(state); + self.text_len.hash(state); + self.font_size.hash(state); + self.runs.hash(state); + self.wrap_width.hash(state); + self.force_width.hash(state); + } +} + impl Eq for dyn AsCacheKeyRef + '_ {} impl Hash for dyn AsCacheKeyRef + '_ { diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs index e3c61a4fd31f35df591f20075221907270e352c8..2a80f553eb9ff5a36cf1637a1106fd4c13712f15 100644 --- a/crates/gpui/src/window.rs +++ b/crates/gpui/src/window.rs @@ -566,6 +566,10 @@ impl HitboxId { /// /// See [`Hitbox::is_hovered`] for details. pub fn is_hovered(self, window: &Window) -> bool { + // If this hitbox has captured the pointer, it's always considered hovered + if window.captured_hitbox == Some(self) { + return true; + } let hit_test = &window.mouse_hit_test; for id in hit_test.ids.iter().take(hit_test.hover_hitbox_count) { if self == *id { @@ -822,6 +826,11 @@ impl Frame { self.tab_stops.clear(); self.focus = None; + #[cfg(any(test, feature = "test-support"))] + { + self.debug_bounds.clear(); + } + #[cfg(any(feature = "inspector", debug_assertions))] { self.next_inspector_instance_ids.clear(); @@ -952,6 +961,9 @@ pub struct Window { pub(crate) pending_input_observers: SubscriberSet<(), AnyObserver>, prompt: Option, pub(crate) client_inset: Option, + /// The hitbox that has captured the pointer, if any. + /// While captured, mouse events route to this hitbox regardless of hit testing. + captured_hitbox: Option, #[cfg(any(feature = "inspector", debug_assertions))] inspector: Option>, } @@ -1439,6 +1451,7 @@ impl Window { prompt: None, client_inset: None, image_cache_stack: Vec::new(), + captured_hitbox: None, #[cfg(any(feature = "inspector", debug_assertions))] inspector: None, }) @@ -1888,7 +1901,12 @@ impl Window { }) } - fn bounds_changed(&mut self, cx: &mut App) { + /// Notify the window that its bounds have changed. 
+ /// + /// This updates internal state like `viewport_size` and `scale_factor` from + /// the platform window, then notifies observers. Normally called automatically + /// by the platform's resize callback, but exposed publicly for test infrastructure. + pub fn bounds_changed(&mut self, cx: &mut App) { self.scale_factor = self.platform_window.scale_factor(); self.viewport_size = self.platform_window.content_size(); self.display_id = self.platform_window.display().map(|display| display.id()); @@ -2144,6 +2162,26 @@ impl Window { self.mouse_position } + /// Captures the pointer for the given hitbox. While captured, all mouse move and mouse up + /// events will be routed to listeners that check this hitbox's `is_hovered` status, + /// regardless of actual hit testing. This enables drag operations that continue + /// even when the pointer moves outside the element's bounds. + /// + /// The capture is automatically released on mouse up. + pub fn capture_pointer(&mut self, hitbox_id: HitboxId) { + self.captured_hitbox = Some(hitbox_id); + } + + /// Releases any active pointer capture. + pub fn release_pointer(&mut self) { + self.captured_hitbox = None; + } + + /// Returns the hitbox that has captured the pointer, if any. + pub fn captured_hitbox(&self) -> Option { + self.captured_hitbox + } + /// The current state of the keyboard's modifiers pub fn modifiers(&self) -> Modifiers { self.modifiers @@ -3295,6 +3333,100 @@ impl Window { Ok(()) } + /// Paints a monochrome glyph with pre-computed raster bounds. + /// + /// This is faster than `paint_glyph` because it skips the per-glyph cache lookup. + /// Use `ShapedLine::compute_glyph_raster_data` to batch-compute raster bounds during prepaint. 
+ pub fn paint_glyph_with_raster_bounds( + &mut self, + origin: Point, + _font_id: FontId, + _glyph_id: GlyphId, + _font_size: Pixels, + color: Hsla, + raster_bounds: Bounds, + params: &RenderGlyphParams, + ) -> Result<()> { + self.invalidator.debug_assert_paint(); + + let element_opacity = self.element_opacity(); + let scale_factor = self.scale_factor(); + let glyph_origin = origin.scale(scale_factor); + + if !raster_bounds.is_zero() { + let tile = self + .sprite_atlas + .get_or_insert_with(¶ms.clone().into(), &mut || { + let (size, bytes) = self.text_system().rasterize_glyph(params)?; + Ok(Some((size, Cow::Owned(bytes)))) + })? + .expect("Callback above only errors or returns Some"); + let bounds = Bounds { + origin: glyph_origin.map(|px| px.floor()) + raster_bounds.origin.map(Into::into), + size: tile.bounds.size.map(Into::into), + }; + let content_mask = self.content_mask().scale(scale_factor); + self.next_frame.scene.insert_primitive(MonochromeSprite { + order: 0, + pad: 0, + bounds, + content_mask, + color: color.opacity(element_opacity), + tile, + transformation: TransformationMatrix::unit(), + }); + } + Ok(()) + } + + /// Paints an emoji glyph with pre-computed raster bounds. + /// + /// This is faster than `paint_emoji` because it skips the per-glyph cache lookup. + /// Use `ShapedLine::compute_glyph_raster_data` to batch-compute raster bounds during prepaint. + pub fn paint_emoji_with_raster_bounds( + &mut self, + origin: Point, + _font_id: FontId, + _glyph_id: GlyphId, + _font_size: Pixels, + raster_bounds: Bounds, + params: &RenderGlyphParams, + ) -> Result<()> { + self.invalidator.debug_assert_paint(); + + let scale_factor = self.scale_factor(); + let glyph_origin = origin.scale(scale_factor); + + if !raster_bounds.is_zero() { + let tile = self + .sprite_atlas + .get_or_insert_with(¶ms.clone().into(), &mut || { + let (size, bytes) = self.text_system().rasterize_glyph(params)?; + Ok(Some((size, Cow::Owned(bytes)))) + })? 
+ .expect("Callback above only errors or returns Some"); + + let bounds = Bounds { + origin: glyph_origin.map(|px| px.floor()) + raster_bounds.origin.map(Into::into), + size: tile.bounds.size.map(Into::into), + }; + let content_mask = self.content_mask().scale(scale_factor); + let opacity = self.element_opacity(); + + self.next_frame.scene.insert_primitive(PolychromeSprite { + order: 0, + pad: 0, + grayscale: false, + bounds, + corner_radii: Default::default(), + content_mask, + tile, + opacity, + }); + } + Ok(()) + } + fn should_use_subpixel_rendering(&self, font_id: FontId, font_size: Pixels) -> bool { if self.platform_window.background_appearance() != WindowBackgroundAppearance::Opaque { return false; @@ -4063,6 +4195,11 @@ impl Window { self.refresh(); } } + + // Auto-release pointer capture on mouse up + if event.is::() && self.captured_hitbox.is_some() { + self.captured_hitbox = None; + } } fn dispatch_key_event(&mut self, event: &dyn Any, cx: &mut App) { diff --git a/crates/gpui_macos/src/metal_renderer.rs b/crates/gpui_macos/src/metal_renderer.rs index 93e039019b1ca639118b5453ff8f9de0d30e4f99..e96d14b15691bec1da54aa9d46e3e765218292b2 100644 --- a/crates/gpui_macos/src/metal_renderer.rs +++ b/crates/gpui_macos/src/metal_renderer.rs @@ -110,10 +110,12 @@ impl InstanceBufferPool { pub(crate) struct MetalRenderer { device: metal::Device, - layer: metal::MetalLayer, + layer: Option, is_apple_gpu: bool, is_unified_memory: bool, presents_with_transaction: bool, + /// For headless rendering, tracks whether output should be opaque + opaque: bool, command_queue: CommandQueue, paths_rasterization_pipeline_state: metal::RenderPipelineState, path_sprites_pipeline_state: metal::RenderPipelineState, @@ -142,26 +144,9 @@ pub struct PathRasterizationVertex { } impl MetalRenderer { + /// Creates a new MetalRenderer with a CAMetalLayer for window-based rendering. 
pub fn new(instance_buffer_pool: Arc>, transparent: bool) -> Self { - // Prefer low‐power integrated GPUs on Intel Mac. On Apple - // Silicon, there is only ever one GPU, so this is equivalent to - // `metal::Device::system_default()`. - let device = if let Some(d) = metal::Device::all() - .into_iter() - .min_by_key(|d| (d.is_removable(), !d.is_low_power())) - { - d - } else { - // For some reason `all()` can return an empty list, see https://github.com/zed-industries/zed/issues/37689 - // In that case, we fall back to the system default device. - log::error!( - "Unable to enumerate Metal devices; attempting to use system default device" - ); - metal::Device::system_default().unwrap_or_else(|| { - log::error!("unable to access a compatible graphics device"); - std::process::exit(1); - }) - }; + let device = Self::create_device(); let layer = metal::MetalLayer::new(); layer.set_device(&device); @@ -182,6 +167,48 @@ impl MetalRenderer { | AutoresizingMask::HEIGHT_SIZABLE ]; } + + Self::new_internal(device, Some(layer), !transparent, instance_buffer_pool) + } + + /// Creates a new headless MetalRenderer for offscreen rendering without a window. + /// + /// This renderer can render scenes to images without requiring a CAMetalLayer, + /// window, or AppKit. Use `render_scene_to_image()` to render scenes. + #[cfg(any(test, feature = "test-support"))] + pub fn new_headless(instance_buffer_pool: Arc>) -> Self { + let device = Self::create_device(); + Self::new_internal(device, None, true, instance_buffer_pool) + } + + fn create_device() -> metal::Device { + // Prefer low‐power integrated GPUs on Intel Mac. On Apple + // Silicon, there is only ever one GPU, so this is equivalent to + // `metal::Device::system_default()`. 
+ if let Some(d) = metal::Device::all() + .into_iter() + .min_by_key(|d| (d.is_removable(), !d.is_low_power())) + { + d + } else { + // For some reason `all()` can return an empty list, see https://github.com/zed-industries/zed/issues/37689 + // In that case, we fall back to the system default device. + log::error!( + "Unable to enumerate Metal devices; attempting to use system default device" + ); + metal::Device::system_default().unwrap_or_else(|| { + log::error!("unable to access a compatible graphics device"); + std::process::exit(1); + }) + } + } + + fn new_internal( + device: metal::Device, + layer: Option, + opaque: bool, + instance_buffer_pool: Arc>, + ) -> Self { #[cfg(feature = "runtime_shaders")] let library = device .new_library_with_source(&SHADERS_SOURCE_FILE, &metal::CompileOptions::new()) @@ -303,6 +330,7 @@ impl MetalRenderer { presents_with_transaction: false, is_apple_gpu, is_unified_memory, + opaque, command_queue, paths_rasterization_pipeline_state, path_sprites_pipeline_state, @@ -322,12 +350,15 @@ impl MetalRenderer { } } - pub fn layer(&self) -> &metal::MetalLayerRef { - &self.layer + pub fn layer(&self) -> Option<&metal::MetalLayerRef> { + self.layer.as_ref().map(|l| l.as_ref()) } pub fn layer_ptr(&self) -> *mut CAMetalLayer { - self.layer.as_ptr() + self.layer + .as_ref() + .map(|l| l.as_ptr()) + .unwrap_or(ptr::null_mut()) } pub fn sprite_atlas(&self) -> &Arc { @@ -336,26 +367,25 @@ impl MetalRenderer { pub fn set_presents_with_transaction(&mut self, presents_with_transaction: bool) { self.presents_with_transaction = presents_with_transaction; - self.layer - .set_presents_with_transaction(presents_with_transaction); + if let Some(layer) = &self.layer { + layer.set_presents_with_transaction(presents_with_transaction); + } } pub fn update_drawable_size(&mut self, size: Size) { - let size = NSSize { - width: size.width.0 as f64, - height: size.height.0 as f64, - }; - unsafe { - let _: () = msg_send![ - self.layer(), - setDrawableSize: size - 
]; + if let Some(layer) = &self.layer { + let ns_size = NSSize { + width: size.width.0 as f64, + height: size.height.0 as f64, + }; + unsafe { + let _: () = msg_send![ + layer.as_ref(), + setDrawableSize: ns_size + ]; + } } - let device_pixels_size = Size { - width: DevicePixels(size.width as i32), - height: DevicePixels(size.height as i32), - }; - self.update_path_intermediate_textures(device_pixels_size); + self.update_path_intermediate_textures(size); } fn update_path_intermediate_textures(&mut self, size: Size) { @@ -396,8 +426,11 @@ impl MetalRenderer { } } - pub fn update_transparency(&self, transparent: bool) { - self.layer.set_opaque(!transparent); + pub fn update_transparency(&mut self, transparent: bool) { + self.opaque = !transparent; + if let Some(layer) = &self.layer { + layer.set_opaque(!transparent); + } } pub fn destroy(&self) { @@ -405,7 +438,15 @@ impl MetalRenderer { } pub fn draw(&mut self, scene: &Scene) { - let layer = self.layer.clone(); + let layer = match &self.layer { + Some(l) => l.clone(), + None => { + log::error!( + "draw() called on headless renderer - use render_scene_to_image() instead" + ); + return; + } + }; let viewport_size = layer.drawable_size(); let viewport_size: Size = size( (viewport_size.width.ceil() as i32).into(), @@ -476,9 +517,15 @@ impl MetalRenderer { /// Renders the scene to a texture and returns the pixel data as an RGBA image. /// This does not present the frame to screen - useful for visual testing /// where we want to capture what would be rendered without displaying it. + /// + /// Note: This requires a layer-backed renderer. For headless rendering, + /// use `render_scene_to_image()` instead. 
#[cfg(any(test, feature = "test-support"))] pub fn render_to_image(&mut self, scene: &Scene) -> Result { - let layer = self.layer.clone(); + let layer = self + .layer + .clone() + .ok_or_else(|| anyhow::anyhow!("render_to_image requires a layer-backed renderer"))?; let viewport_size = layer.drawable_size(); let viewport_size: Size = size( (viewport_size.width.ceil() as i32).into(), @@ -567,21 +614,146 @@ impl MetalRenderer { } } + /// Renders a scene to an image without requiring a window or CAMetalLayer. + /// + /// This is the primary method for headless rendering. It creates an offscreen + /// texture, renders the scene to it, and returns the pixel data as an RGBA image. + #[cfg(any(test, feature = "test-support"))] + pub fn render_scene_to_image( + &mut self, + scene: &Scene, + size: Size, + ) -> Result { + if size.width.0 <= 0 || size.height.0 <= 0 { + anyhow::bail!("Invalid size for render_scene_to_image: {:?}", size); + } + + // Update path intermediate textures for this size + self.update_path_intermediate_textures(size); + + // Create an offscreen texture as render target + let texture_descriptor = metal::TextureDescriptor::new(); + texture_descriptor.set_width(size.width.0 as u64); + texture_descriptor.set_height(size.height.0 as u64); + texture_descriptor.set_pixel_format(MTLPixelFormat::BGRA8Unorm); + texture_descriptor + .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead); + texture_descriptor.set_storage_mode(metal::MTLStorageMode::Managed); + let target_texture = self.device.new_texture(&texture_descriptor); + + loop { + let mut instance_buffer = self + .instance_buffer_pool + .lock() + .acquire(&self.device, self.is_unified_memory); + + let command_buffer = + self.draw_primitives_to_texture(scene, &mut instance_buffer, &target_texture, size); + + match command_buffer { + Ok(command_buffer) => { + let instance_buffer_pool = self.instance_buffer_pool.clone(); + let instance_buffer = Cell::new(Some(instance_buffer)); + 
let block = ConcreteBlock::new(move |_| { + if let Some(instance_buffer) = instance_buffer.take() { + instance_buffer_pool.lock().release(instance_buffer); + } + }); + let block = block.copy(); + command_buffer.add_completed_handler(&block); + + // On discrete GPUs (non-unified memory), Managed textures + // require an explicit blit synchronize before the CPU can + // read back the rendered data. Without this, get_bytes + // returns stale zeros. + if !self.is_unified_memory { + let blit = command_buffer.new_blit_command_encoder(); + blit.synchronize_resource(&target_texture); + blit.end_encoding(); + } + + // Commit and wait for completion + command_buffer.commit(); + command_buffer.wait_until_completed(); + + // Read pixels from the texture + let width = size.width.0 as u32; + let height = size.height.0 as u32; + let bytes_per_row = width as usize * 4; + let buffer_size = height as usize * bytes_per_row; + + let mut pixels = vec![0u8; buffer_size]; + + let region = metal::MTLRegion { + origin: metal::MTLOrigin { x: 0, y: 0, z: 0 }, + size: metal::MTLSize { + width: width as u64, + height: height as u64, + depth: 1, + }, + }; + + target_texture.get_bytes( + pixels.as_mut_ptr() as *mut std::ffi::c_void, + bytes_per_row as u64, + region, + 0, + ); + + // Convert BGRA to RGBA (swap B and R channels) + for chunk in pixels.chunks_exact_mut(4) { + chunk.swap(0, 2); + } + + return RgbaImage::from_raw(width, height, pixels).ok_or_else(|| { + anyhow::anyhow!("Failed to create RgbaImage from pixel data") + }); + } + Err(err) => { + log::error!( + "failed to render: {}. 
retrying with larger instance buffer size", + err + ); + let mut instance_buffer_pool = self.instance_buffer_pool.lock(); + let buffer_size = instance_buffer_pool.buffer_size; + if buffer_size >= 256 * 1024 * 1024 { + anyhow::bail!("instance buffer size grew too large: {}", buffer_size); + } + instance_buffer_pool.reset(buffer_size * 2); + log::info!( + "increased instance buffer size to {}", + instance_buffer_pool.buffer_size + ); + } + } + } + } + fn draw_primitives( &mut self, scene: &Scene, instance_buffer: &mut InstanceBuffer, drawable: &metal::MetalDrawableRef, viewport_size: Size, + ) -> Result { + self.draw_primitives_to_texture(scene, instance_buffer, drawable.texture(), viewport_size) + } + + fn draw_primitives_to_texture( + &mut self, + scene: &Scene, + instance_buffer: &mut InstanceBuffer, + texture: &metal::TextureRef, + viewport_size: Size, ) -> Result { let command_queue = self.command_queue.clone(); let command_buffer = command_queue.new_command_buffer(); - let alpha = if self.layer.is_opaque() { 1. } else { 0. }; + let alpha = if self.opaque { 1. } else { 0. 
}; let mut instance_offset = 0; - let mut command_encoder = new_command_encoder( + let mut command_encoder = new_command_encoder_for_texture( command_buffer, - drawable, + texture, viewport_size, |color_attachment| { color_attachment.set_load_action(metal::MTLLoadAction::Clear); @@ -617,9 +789,9 @@ impl MetalRenderer { command_buffer, ); - command_encoder = new_command_encoder( + command_encoder = new_command_encoder_for_texture( command_buffer, - drawable, + texture, viewport_size, |color_attachment| { color_attachment.set_load_action(metal::MTLLoadAction::Load); @@ -1309,9 +1481,9 @@ impl MetalRenderer { } } -fn new_command_encoder<'a>( +fn new_command_encoder_for_texture<'a>( command_buffer: &'a metal::CommandBufferRef, - drawable: &'a metal::MetalDrawableRef, + texture: &'a metal::TextureRef, viewport_size: Size, configure_color_attachment: impl Fn(&RenderPassColorAttachmentDescriptorRef), ) -> &'a metal::RenderCommandEncoderRef { @@ -1320,7 +1492,7 @@ fn new_command_encoder<'a>( .color_attachments() .object_at(0) .unwrap(); - color_attachment.set_texture(Some(drawable.texture())); + color_attachment.set_texture(Some(texture)); color_attachment.set_store_action(metal::MTLStoreAction::Store); configure_color_attachment(color_attachment); @@ -1506,3 +1678,32 @@ pub struct SurfaceBounds { pub bounds: Bounds, pub content_mask: ContentMask, } + +#[cfg(any(test, feature = "test-support"))] +pub struct MetalHeadlessRenderer { + renderer: MetalRenderer, +} + +#[cfg(any(test, feature = "test-support"))] +impl MetalHeadlessRenderer { + pub fn new() -> Self { + let instance_buffer_pool = Arc::new(Mutex::new(InstanceBufferPool::default())); + let renderer = MetalRenderer::new_headless(instance_buffer_pool); + Self { renderer } + } +} + +#[cfg(any(test, feature = "test-support"))] +impl gpui::PlatformHeadlessRenderer for MetalHeadlessRenderer { + fn render_scene_to_image( + &mut self, + scene: &Scene, + size: Size, + ) -> anyhow::Result { + 
self.renderer.render_scene_to_image(scene, size) + } + + fn sprite_atlas(&self) -> Arc { + self.renderer.sprite_atlas().clone() + } +} diff --git a/crates/gpui_macos/src/text_system.rs b/crates/gpui_macos/src/text_system.rs index 2511bcf12dc240bf11d2c050579a6c06ebb155ed..e0f8a010eadf422ce588d8a7d30b3db6f9a4dcee 100644 --- a/crates/gpui_macos/src/text_system.rs +++ b/crates/gpui_macos/src/text_system.rs @@ -53,7 +53,8 @@ use crate::open_type::apply_features_and_fallbacks; #[allow(non_upper_case_globals)] const kCGImageAlphaOnly: u32 = 7; -pub(crate) struct MacTextSystem(RwLock); +/// macOS text system using CoreText for font shaping. +pub struct MacTextSystem(RwLock); #[derive(Clone, PartialEq, Eq, Hash)] struct FontKey { @@ -73,7 +74,8 @@ struct MacTextSystemState { } impl MacTextSystem { - pub(crate) fn new() -> Self { + /// Create a new MacTextSystem. + pub fn new() -> Self { Self(RwLock::new(MacTextSystemState { memory_source: MemSource::empty(), system_source: SystemSource::new(), diff --git a/crates/gpui_macos/src/window.rs b/crates/gpui_macos/src/window.rs index c20c86026a102464343fc7c8cfb03b69b19b7641..290b2b704672028c79d99ef7eddad7ce37ed230e 100644 --- a/crates/gpui_macos/src/window.rs +++ b/crates/gpui_macos/src/window.rs @@ -2067,11 +2067,13 @@ fn update_window_scale_factor(window_state: &Arc>) { let scale_factor = lock.scale_factor(); let size = lock.content_size(); let drawable_size = size.to_device_pixels(scale_factor); - unsafe { - let _: () = msg_send![ - lock.renderer.layer(), - setContentsScale: scale_factor as f64 - ]; + if let Some(layer) = lock.renderer.layer() { + unsafe { + let _: () = msg_send![ + layer, + setContentsScale: scale_factor as f64 + ]; + } } lock.renderer.update_drawable_size(drawable_size); diff --git a/crates/gpui_platform/src/gpui_platform.rs b/crates/gpui_platform/src/gpui_platform.rs index 7dac5498a652f7a7fe68b9f6d7ea23dffabdfb22..1d2fea90b477542031dfbf591f458b2427ec6e01 100644 --- a/crates/gpui_platform/src/gpui_platform.rs 
+++ b/crates/gpui_platform/src/gpui_platform.rs @@ -59,6 +59,22 @@ pub fn current_platform(headless: bool) -> Rc { } } +/// Returns a new [`HeadlessRenderer`] for the current platform, if available. +#[cfg(feature = "test-support")] +pub fn current_headless_renderer() -> Option> { + #[cfg(target_os = "macos")] + { + Some(Box::new( + gpui_macos::metal_renderer::MetalHeadlessRenderer::new(), + )) + } + + #[cfg(not(target_os = "macos"))] + { + None + } +} + #[cfg(all(test, target_os = "macos"))] mod tests { use super::*; From dd0e51ecb8706d918b0dfd7eac509c9c637a55fd Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Thu, 12 Mar 2026 23:24:38 +0100 Subject: [PATCH 540/548] agent_ui: Disable pickers while thread is generating (#50519) It does not make sense to enable them during the running turn and it can lead to more confusing states if subagents are used. Release Notes: - N/A --------- Co-authored-by: Danilo Leal --- .../src/connection_view/thread_view.rs | 48 +++++++++++++++++-- crates/agent_ui/src/model_selector_popover.rs | 48 +++++++++++++++---- crates/agent_ui/src/profile_selector.rs | 37 +++++++++++--- 3 files changed, 114 insertions(+), 19 deletions(-) diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs index 44f9e78a2bb47af6cb171194fbd5a34de7383f1b..030f6c5431eb79258be60f9d0139b8757611aa71 100644 --- a/crates/agent_ui/src/connection_view/thread_view.rs +++ b/crates/agent_ui/src/connection_view/thread_view.rs @@ -2674,6 +2674,14 @@ impl ThreadView { return div().into_any_element(); } + let is_generating = self.thread.read(cx).status() != ThreadStatus::Idle; + if let Some(model_selector) = &self.model_selector { + model_selector.update(cx, |selector, _| selector.set_disabled(is_generating)); + } + if let Some(profile_selector) = &self.profile_selector { + profile_selector.update(cx, |selector, _| selector.set_disabled(is_generating)); + } + let focus_handle = 
self.message_editor.focus_handle(cx); let editor_bg_color = cx.theme().colors().editor_background; let editor_expanded = self.editor_expanded; @@ -3223,6 +3231,7 @@ impl ThreadView { return None; } + let is_generating = self.thread.read(cx).status() != ThreadStatus::Idle; let thinking = thread.thinking_enabled(); let (tooltip_label, icon, color) = if thinking { @@ -3244,8 +3253,13 @@ impl ThreadView { let thinking_toggle = IconButton::new("thinking-mode", icon) .icon_size(IconSize::Small) .icon_color(color) - .tooltip(move |_, cx| { - Tooltip::for_action_in(tooltip_label, &ToggleThinkingMode, &focus_handle, cx) + .disabled(is_generating) + .tooltip(move |window, cx| { + if is_generating { + Tooltip::text("Disabled until generation is done")(window, cx) + } else { + Tooltip::for_action_in(tooltip_label, &ToggleThinkingMode, &focus_handle, cx) + } }) .on_click(cx.listener(move |this, _, _window, cx| { if let Some(thread) = this.as_native_thread(cx) { @@ -3277,6 +3291,7 @@ impl ThreadView { let right_btn = self.render_effort_selector( model.supported_effort_levels(), thread.thinking_effort().cloned(), + is_generating, cx, ); @@ -3291,6 +3306,7 @@ impl ThreadView { &self, supported_effort_levels: Vec, selected_effort: Option, + disabled: bool, cx: &Context, ) -> impl IntoElement { let weak_self = cx.weak_entity(); @@ -3359,6 +3375,7 @@ impl ThreadView { PopoverMenu::new("effort-selector") .trigger_with_tooltip( ButtonLike::new_rounded_right("effort-selector-trigger") + .disabled(disabled) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .child(Label::new(label).size(LabelSize::Small).color(label_color)) .child(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted)), @@ -7722,6 +7739,9 @@ impl Render for ThreadView { this.toggle_fast_mode(cx); })) .on_action(cx.listener(|this, _: &ToggleThinkingMode, _window, cx| { + if this.thread.read(cx).status() != ThreadStatus::Idle { + return; + } if let Some(thread) = this.as_native_thread(cx) { thread.update(cx, 
|thread, cx| { thread.set_thinking_enabled(!thread.thinking_enabled(), cx); @@ -7729,9 +7749,19 @@ impl Render for ThreadView { } })) .on_action(cx.listener(|this, _: &CycleThinkingEffort, _window, cx| { + if this.thread.read(cx).status() != ThreadStatus::Idle { + return; + } this.cycle_thinking_effort(cx); })) - .on_action(cx.listener(Self::toggle_thinking_effort_menu)) + .on_action( + cx.listener(|this, action: &ToggleThinkingEffortMenu, window, cx| { + if this.thread.read(cx).status() != ThreadStatus::Idle { + return; + } + this.toggle_thinking_effort_menu(action, window, cx); + }), + ) .on_action(cx.listener(|this, _: &SendNextQueuedMessage, window, cx| { this.send_queued_message_at_index(0, true, window, cx); })) @@ -7749,6 +7779,9 @@ impl Render for ThreadView { cx.notify(); })) .on_action(cx.listener(|this, _: &ToggleProfileSelector, window, cx| { + if this.thread.read(cx).status() != ThreadStatus::Idle { + return; + } if let Some(config_options_view) = this.config_options_view.clone() { let handled = config_options_view.update(cx, |view, cx| { view.toggle_category_picker( @@ -7769,6 +7802,9 @@ impl Render for ThreadView { } })) .on_action(cx.listener(|this, _: &CycleModeSelector, window, cx| { + if this.thread.read(cx).status() != ThreadStatus::Idle { + return; + } if let Some(config_options_view) = this.config_options_view.clone() { let handled = config_options_view.update(cx, |view, cx| { view.cycle_category_option( @@ -7793,6 +7829,9 @@ impl Render for ThreadView { } })) .on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| { + if this.thread.read(cx).status() != ThreadStatus::Idle { + return; + } if let Some(config_options_view) = this.config_options_view.clone() { let handled = config_options_view.update(cx, |view, cx| { view.toggle_category_picker( @@ -7812,6 +7851,9 @@ impl Render for ThreadView { } })) .on_action(cx.listener(|this, _: &CycleFavoriteModels, window, cx| { + if this.thread.read(cx).status() != ThreadStatus::Idle { + 
return; + } if let Some(config_options_view) = this.config_options_view.clone() { let handled = config_options_view.update(cx, |view, cx| { view.cycle_category_option( diff --git a/crates/agent_ui/src/model_selector_popover.rs b/crates/agent_ui/src/model_selector_popover.rs index 257337b6b0b8a39645bc38b4d814b250d7b5e1f9..7a4e9dbf8633680fe9c6ee3bda4acdb0ff5b1478 100644 --- a/crates/agent_ui/src/model_selector_popover.rs +++ b/crates/agent_ui/src/model_selector_popover.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use acp_thread::{AgentModelIcon, AgentModelInfo, AgentModelSelector}; use fs::Fs; -use gpui::{Entity, FocusHandle}; +use gpui::{AnyView, Entity, FocusHandle}; use picker::popover_menu::PickerPopoverMenu; use ui::{ButtonLike, PopoverMenuHandle, TintColor, Tooltip, prelude::*}; @@ -13,6 +13,7 @@ use crate::{ModelSelector, model_selector::acp_model_selector}; pub struct ModelSelectorPopover { selector: Entity, menu_handle: PopoverMenuHandle, + disabled: bool, } impl ModelSelectorPopover { @@ -30,10 +31,18 @@ impl ModelSelectorPopover { acp_model_selector(selector, agent_server, fs, focus_handle.clone(), window, cx) }), menu_handle, + disabled: false, } } + pub fn set_disabled(&mut self, disabled: bool) { + self.disabled = disabled; + } + pub fn toggle(&self, window: &mut Window, cx: &mut Context) { + if self.disabled { + return; + } self.menu_handle.toggle(window, cx); } @@ -42,6 +51,9 @@ impl ModelSelectorPopover { } pub fn cycle_favorite_models(&self, window: &mut Window, cx: &mut Context) { + if self.disabled { + return; + } self.selector.update(cx, |selector, cx| { selector.delegate.cycle_favorite_models(window, cx); }); @@ -61,23 +73,31 @@ impl Render for ModelSelectorPopover { let (color, icon) = if self.menu_handle.is_deployed() { (Color::Accent, IconName::ChevronUp) + } else if self.disabled { + (Color::Disabled, IconName::ChevronDown) } else { (Color::Muted, IconName::ChevronDown) }; let show_cycle_row = selector.delegate.favorites_count() > 1; + let 
disabled = self.disabled; - let tooltip = Tooltip::element({ - move |_, _cx| { - ModelSelectorTooltip::new() - .show_cycle_row(show_cycle_row) - .into_any_element() - } - }); + let tooltip: Box AnyView> = if disabled { + Box::new(Tooltip::text("Disabled until generation is done")) + } else { + Box::new(Tooltip::element({ + move |_, _cx| { + ModelSelectorTooltip::new() + .show_cycle_row(show_cycle_row) + .into_any_element() + } + })) + }; PickerPopoverMenu::new( self.selector.clone(), ButtonLike::new("active-model") + .disabled(self.disabled) .selected_style(ButtonStyle::Tinted(TintColor::Accent)) .when_some(model_icon, |this, icon| { this.child( @@ -95,7 +115,17 @@ impl Render for ModelSelectorPopover { .size(LabelSize::Small) .ml_0p5(), ) - .child(Icon::new(icon).color(Color::Muted).size(IconSize::XSmall)), + .child( + Icon::new(icon) + .map(|this| { + if self.disabled { + this.color(Color::Disabled) + } else { + this.color(Color::Muted) + } + }) + .size(IconSize::XSmall), + ), tooltip, gpui::Corner::BottomRight, cx, diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index 926549c22f88bcb0937dddf7c3ff1b32060ed297..f785c936a643f4280121d083831eba4c909bc0f5 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -5,8 +5,8 @@ use agent_settings::{ use fs::Fs; use fuzzy::{StringMatch, StringMatchCandidate, match_strings}; use gpui::{ - Action, AnyElement, App, BackgroundExecutor, Context, DismissEvent, Entity, FocusHandle, - Focusable, ForegroundExecutor, SharedString, Subscription, Task, Window, + Action, AnyElement, AnyView, App, BackgroundExecutor, Context, DismissEvent, Entity, + FocusHandle, Focusable, ForegroundExecutor, SharedString, Subscription, Task, Window, }; use picker::{Picker, PickerDelegate, popover_menu::PickerPopoverMenu}; use settings::{Settings as _, SettingsStore, update_settings_file}; @@ -34,6 +34,7 @@ pub trait ProfileProvider { pub struct ProfileSelector { 
profiles: AvailableProfiles, pending_refresh: bool, + disabled: bool, fs: Arc, provider: Arc, picker: Option>>, @@ -57,6 +58,7 @@ impl ProfileSelector { Self { profiles: AgentProfile::available_profiles(cx), pending_refresh: false, + disabled: false, fs, provider, picker: None, @@ -70,7 +72,19 @@ impl ProfileSelector { self.picker_handle.clone() } + pub fn set_disabled(&mut self, disabled: bool) { + self.disabled = disabled; + } + + pub fn is_disabled(&self) -> bool { + self.disabled + } + pub fn cycle_profile(&mut self, cx: &mut Context) { + if self.disabled { + return; + } + if !self.provider.profiles_supported(cx) { return; } @@ -175,6 +189,7 @@ impl Render for ProfileSelector { }; let trigger_button = Button::new("profile-selector", selected_profile) + .disabled(self.disabled) .label_size(LabelSize::Small) .color(Color::Muted) .icon(icon) @@ -183,10 +198,12 @@ impl Render for ProfileSelector { .icon_color(Color::Muted) .selected_style(ButtonStyle::Tinted(TintColor::Accent)); - PickerPopoverMenu::new( - picker, - trigger_button, - Tooltip::element({ + let disabled = self.disabled; + + let tooltip: Box AnyView> = if disabled { + Box::new(Tooltip::text("Disabled until generation is done")) + } else { + Box::new(Tooltip::element({ move |_window, cx| { let container = || h_flex().gap_1().justify_between(); v_flex() @@ -206,7 +223,13 @@ impl Render for ProfileSelector { ) .into_any() } - }), + })) + }; + + PickerPopoverMenu::new( + picker, + trigger_button, + tooltip, gpui::Corner::BottomRight, cx, ) From 5e60aa9872607a6caa2e4e6a6ce69ce12128f380 Mon Sep 17 00:00:00 2001 From: Mikayla Maki Date: Thu, 12 Mar 2026 15:34:09 -0700 Subject: [PATCH 541/548] Implement worktree interactions for the sidebar (#51421) Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] Aligned any UI changes with the 
[UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/agent_ui/src/agent_panel.rs | 17 +- crates/agent_ui/src/sidebar.rs | 322 +++++++++++++++++- crates/journal/src/journal.rs | 7 +- crates/recent_projects/src/recent_projects.rs | 21 +- crates/workspace/src/multi_workspace.rs | 4 +- crates/workspace/src/workspace.rs | 88 ++--- crates/zed/src/zed.rs | 14 +- crates/zed/src/zed/open_listener.rs | 8 +- 8 files changed, 399 insertions(+), 82 deletions(-) diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs index f9a136c10fe26ce1763fbde52c532f065e097463..23dc1dfcbc086f4b145bb5372929d9aa32f30fc5 100644 --- a/crates/agent_ui/src/agent_panel.rs +++ b/crates/agent_ui/src/agent_panel.rs @@ -87,7 +87,7 @@ use ui::{ use util::{ResultExt as _, debug_panic}; use workspace::{ CollaboratorId, DraggedSelection, DraggedSidebar, DraggedTab, FocusWorkspaceSidebar, - MultiWorkspace, SIDEBAR_RESIZE_HANDLE_SIZE, ToggleWorkspaceSidebar, ToggleZoom, + MultiWorkspace, OpenResult, SIDEBAR_RESIZE_HANDLE_SIZE, ToggleWorkspaceSidebar, ToggleZoom, ToolbarItemView, Workspace, WorkspaceId, dock::{DockPosition, Panel, PanelEvent}, multi_workspace_enabled, @@ -3025,21 +3025,16 @@ impl AgentPanel { workspace.set_dock_structure(dock_structure, window, cx); })); - let (new_window_handle, _) = cx + let OpenResult { + window: new_window_handle, + workspace: new_workspace, + .. + } = cx .update(|_window, cx| { Workspace::new_local(all_paths, app_state, window_handle, None, init, false, cx) })? 
.await?; - let new_workspace = new_window_handle.update(cx, |multi_workspace, _window, _cx| { - let workspaces = multi_workspace.workspaces(); - workspaces.last().cloned() - })?; - - let Some(new_workspace) = new_workspace else { - anyhow::bail!("New workspace was not added to MultiWorkspace"); - }; - let panels_task = new_window_handle.update(cx, |_, _, cx| { new_workspace.update(cx, |workspace, _cx| workspace.take_panels_task()) })?; diff --git a/crates/agent_ui/src/sidebar.rs b/crates/agent_ui/src/sidebar.rs index 7d7779e75504a93c7923ba26ec87e4fce4bbceb9..6dc684b3d30737dbce1b7d1c9c706341cf4ef11f 100644 --- a/crates/agent_ui/src/sidebar.rs +++ b/crates/agent_ui/src/sidebar.rs @@ -99,13 +99,19 @@ impl From<&ActiveThreadInfo> for acp_thread::AgentSessionInfo { } } +#[derive(Clone)] +enum ThreadEntryWorkspace { + Open(Entity), + Closed(PathList), +} + #[derive(Clone)] struct ThreadEntry { session_info: acp_thread::AgentSessionInfo, icon: IconName, icon_from_external_svg: Option, status: AgentThreadStatus, - workspace: Entity, + workspace: ThreadEntryWorkspace, is_live: bool, is_background: bool, highlight_positions: Vec, @@ -528,7 +534,8 @@ impl Sidebar { // main repo's header instead of getting their own. 
let mut main_repo_workspace: HashMap, usize> = HashMap::new(); let mut absorbed: HashMap = HashMap::new(); - let mut pending: HashMap, Vec<(usize, SharedString)>> = HashMap::new(); + let mut pending: HashMap, Vec<(usize, SharedString, Arc)>> = HashMap::new(); + let mut absorbed_workspace_by_path: HashMap, usize> = HashMap::new(); for (i, workspace) in workspaces.iter().enumerate() { for snapshot in root_repository_snapshots(workspace, cx) { @@ -537,8 +544,9 @@ impl Sidebar { .entry(snapshot.work_directory_abs_path.clone()) .or_insert(i); if let Some(waiting) = pending.remove(&snapshot.work_directory_abs_path) { - for (ws_idx, name) in waiting { + for (ws_idx, name, ws_path) in waiting { absorbed.insert(ws_idx, (i, name)); + absorbed_workspace_by_path.insert(ws_path, ws_idx); } } } else { @@ -553,11 +561,13 @@ impl Sidebar { main_repo_workspace.get(&snapshot.original_repo_abs_path) { absorbed.insert(i, (main_idx, name)); + absorbed_workspace_by_path + .insert(snapshot.work_directory_abs_path.clone(), i); } else { pending .entry(snapshot.original_repo_abs_path.clone()) .or_default() - .push((i, name)); + .push((i, name, snapshot.work_directory_abs_path.clone())); } } } @@ -586,7 +596,7 @@ impl Sidebar { icon: IconName::ZedAgent, icon_from_external_svg: None, status: AgentThreadStatus::default(), - workspace: workspace.clone(), + workspace: ThreadEntryWorkspace::Open(workspace.clone()), is_live: false, is_background: false, highlight_positions: Vec::new(), @@ -599,7 +609,8 @@ impl Sidebar { // Load threads from linked git worktrees of this workspace's repos. 
if let Some(ref thread_store) = thread_store { - let mut linked_worktree_queries: Vec<(PathList, SharedString)> = Vec::new(); + let mut linked_worktree_queries: Vec<(PathList, SharedString, Arc)> = + Vec::new(); for snapshot in root_repository_snapshots(workspace, cx) { if snapshot.work_directory_abs_path != snapshot.original_repo_abs_path { continue; @@ -614,11 +625,20 @@ impl Sidebar { linked_worktree_queries.push(( PathList::new(std::slice::from_ref(&git_worktree.path)), name.into(), + Arc::from(git_worktree.path.as_path()), )); } } - for (worktree_path_list, worktree_name) in &linked_worktree_queries { + for (worktree_path_list, worktree_name, worktree_path) in + &linked_worktree_queries + { + let target_workspace = + match absorbed_workspace_by_path.get(worktree_path.as_ref()) { + Some(&idx) => ThreadEntryWorkspace::Open(workspaces[idx].clone()), + None => ThreadEntryWorkspace::Closed(worktree_path_list.clone()), + }; + for meta in thread_store.read(cx).threads_for_paths(worktree_path_list) { if !seen_session_ids.insert(meta.id.clone()) { continue; @@ -628,7 +648,7 @@ impl Sidebar { icon: IconName::ZedAgent, icon_from_external_svg: None, status: AgentThreadStatus::default(), - workspace: workspace.clone(), + workspace: target_workspace.clone(), is_live: false, is_background: false, highlight_positions: Vec::new(), @@ -1347,8 +1367,20 @@ impl Sidebar { } ListEntry::Thread(thread) => { let session_info = thread.session_info.clone(); - let workspace = thread.workspace.clone(); - self.activate_thread(session_info, &workspace, window, cx); + match &thread.workspace { + ThreadEntryWorkspace::Open(workspace) => { + let workspace = workspace.clone(); + self.activate_thread(session_info, &workspace, window, cx); + } + ThreadEntryWorkspace::Closed(path_list) => { + self.open_workspace_and_activate_thread( + session_info, + path_list.clone(), + window, + cx, + ); + } + } } ListEntry::ViewMore { path_list, @@ -1403,6 +1435,32 @@ impl Sidebar { } } + fn 
open_workspace_and_activate_thread( + &mut self, + session_info: acp_thread::AgentSessionInfo, + path_list: PathList, + window: &mut Window, + cx: &mut Context, + ) { + let Some(multi_workspace) = self.multi_workspace.upgrade() else { + return; + }; + + let paths: Vec = + path_list.paths().iter().map(|p| p.to_path_buf()).collect(); + + let open_task = multi_workspace.update(cx, |mw, cx| mw.open_project(paths, window, cx)); + + cx.spawn_in(window, async move |this, cx| { + let workspace = open_task.await?; + this.update_in(cx, |this, window, cx| { + this.activate_thread(session_info, &workspace, window, cx); + })?; + anyhow::Ok(()) + }) + .detach_and_log_err(cx); + } + fn expand_selected_entry( &mut self, _: &ExpandSelectedEntry, @@ -1480,7 +1538,7 @@ impl Sidebar { .clone() .unwrap_or_else(|| "Untitled".into()); let session_info = thread.session_info.clone(); - let workspace = thread.workspace.clone(); + let thread_workspace = thread.workspace.clone(); let id = SharedString::from(format!("thread-entry-{}", ix)); @@ -1533,7 +1591,19 @@ impl Sidebar { .docked_right(docked_right) .on_click(cx.listener(move |this, _, window, cx| { this.selection = None; - this.activate_thread(session_info.clone(), &workspace, window, cx); + match &thread_workspace { + ThreadEntryWorkspace::Open(workspace) => { + this.activate_thread(session_info.clone(), workspace, window, cx); + } + ThreadEntryWorkspace::Closed(path_list) => { + this.open_workspace_and_activate_thread( + session_info.clone(), + path_list.clone(), + window, + cx, + ); + } + } })) .into_any_element() } @@ -2447,7 +2517,7 @@ mod tests { icon: IconName::ZedAgent, icon_from_external_svg: None, status: AgentThreadStatus::Completed, - workspace: workspace.clone(), + workspace: ThreadEntryWorkspace::Open(workspace.clone()), is_live: false, is_background: false, highlight_positions: Vec::new(), @@ -2468,7 +2538,7 @@ mod tests { icon: IconName::ZedAgent, icon_from_external_svg: None, status: AgentThreadStatus::Running, - 
workspace: workspace.clone(), + workspace: ThreadEntryWorkspace::Open(workspace.clone()), is_live: true, is_background: false, highlight_positions: Vec::new(), @@ -2489,7 +2559,7 @@ mod tests { icon: IconName::ZedAgent, icon_from_external_svg: None, status: AgentThreadStatus::Error, - workspace: workspace.clone(), + workspace: ThreadEntryWorkspace::Open(workspace.clone()), is_live: true, is_background: false, highlight_positions: Vec::new(), @@ -2510,7 +2580,7 @@ mod tests { icon: IconName::ZedAgent, icon_from_external_svg: None, status: AgentThreadStatus::WaitingForConfirmation, - workspace: workspace.clone(), + workspace: ThreadEntryWorkspace::Open(workspace.clone()), is_live: false, is_background: false, highlight_positions: Vec::new(), @@ -2531,7 +2601,7 @@ mod tests { icon: IconName::ZedAgent, icon_from_external_svg: None, status: AgentThreadStatus::Completed, - workspace: workspace.clone(), + workspace: ThreadEntryWorkspace::Open(workspace.clone()), is_live: true, is_background: true, highlight_positions: Vec::new(), @@ -4305,4 +4375,222 @@ mod tests { vec!["v [project]", " Thread A {wt-feature-a}",] ); } + + #[gpui::test] + async fn test_clicking_worktree_thread_opens_workspace_when_none_exists( + cx: &mut TestAppContext, + ) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a", + }, + }, + }, + "src": {}, + }), + ) + .await; + + fs.insert_tree( + "/wt-feature-a", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature-a", + "src": {}, + }), + ) + .await; + + fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { + state.worktrees.push(git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: "refs/heads/feature-a".into(), + sha: "aaa".into(), + }); + }) + .unwrap(); + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + // 
Only open the main repo — no workspace for the worktree. + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + MultiWorkspace::test_new(main_project.clone(), window, cx) + }); + let sidebar = setup_sidebar(&multi_workspace, cx); + + // Save a thread for the worktree path (no workspace for it). + let paths_wt = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); + save_named_thread("thread-wt", "WT Thread", &paths_wt, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // Thread should appear under the main repo with a worktree chip. + assert_eq!( + visible_entries_as_strings(&sidebar, cx), + vec!["v [project]", " WT Thread {wt-feature-a}"], + ); + + // Only 1 workspace should exist. + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.workspaces().len()), + 1, + ); + + // Focus the sidebar and select the worktree thread. + open_and_focus_sidebar(&sidebar, cx); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(1); // index 0 is header, 1 is the thread + }); + + // Confirm to open the worktree thread. + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + // A new workspace should have been created for the worktree path. 
+ let new_workspace = multi_workspace.read_with(cx, |mw, _| { + assert_eq!( + mw.workspaces().len(), + 2, + "confirming a worktree thread without a workspace should open one", + ); + mw.workspaces()[1].clone() + }); + + let (new_path_list, _) = new_workspace.read_with(cx, |_, cx| { + workspace_path_list_and_label(&new_workspace, cx) + }); + assert_eq!( + new_path_list, + PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]), + "the new workspace should have been opened for the worktree path", + ); + } + + #[gpui::test] + async fn test_clicking_absorbed_worktree_thread_activates_worktree_workspace( + cx: &mut TestAppContext, + ) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + + fs.insert_tree( + "/project", + serde_json::json!({ + ".git": { + "worktrees": { + "feature-a": { + "commondir": "../../", + "HEAD": "ref: refs/heads/feature-a", + }, + }, + }, + "src": {}, + }), + ) + .await; + + fs.insert_tree( + "/wt-feature-a", + serde_json::json!({ + ".git": "gitdir: /project/.git/worktrees/feature-a", + "src": {}, + }), + ) + .await; + + fs.with_git_state(std::path::Path::new("/project/.git"), false, |state| { + state.worktrees.push(git::repository::Worktree { + path: std::path::PathBuf::from("/wt-feature-a"), + ref_name: "refs/heads/feature-a".into(), + sha: "aaa".into(), + }); + }) + .unwrap(); + + cx.update(|cx| ::set_global(fs.clone(), cx)); + + let main_project = project::Project::test(fs.clone(), ["/project".as_ref()], cx).await; + let worktree_project = + project::Project::test(fs.clone(), ["/wt-feature-a".as_ref()], cx).await; + + main_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + worktree_project + .update(cx, |p, cx| p.git_scans_complete(cx)) + .await; + + let (multi_workspace, cx) = cx.add_window_view(|window, cx| { + MultiWorkspace::test_new(main_project.clone(), window, cx) + }); + + let worktree_workspace = multi_workspace.update_in(cx, |mw, window, cx| { + mw.test_add_workspace(worktree_project.clone(), window, cx) 
+ }); + + // Activate the main workspace before setting up the sidebar. + multi_workspace.update_in(cx, |mw, window, cx| { + mw.activate_index(0, window, cx); + }); + + let sidebar = setup_sidebar(&multi_workspace, cx); + + let paths_main = PathList::new(&[std::path::PathBuf::from("/project")]); + let paths_wt = PathList::new(&[std::path::PathBuf::from("/wt-feature-a")]); + save_named_thread("thread-main", "Main Thread", &paths_main, cx).await; + save_named_thread("thread-wt", "WT Thread", &paths_wt, cx).await; + + multi_workspace.update_in(cx, |_, _window, cx| cx.notify()); + cx.run_until_parked(); + + // The worktree workspace should be absorbed under the main repo. + let entries = visible_entries_as_strings(&sidebar, cx); + assert_eq!(entries.len(), 3); + assert_eq!(entries[0], "v [project]"); + assert!(entries.contains(&" Main Thread".to_string())); + assert!(entries.contains(&" WT Thread {wt-feature-a}".to_string())); + + let wt_thread_index = entries + .iter() + .position(|e| e.contains("WT Thread")) + .expect("should find the worktree thread entry"); + + assert_eq!( + multi_workspace.read_with(cx, |mw, _| mw.active_workspace_index()), + 0, + "main workspace should be active initially" + ); + + // Focus the sidebar and select the absorbed worktree thread. + open_and_focus_sidebar(&sidebar, cx); + sidebar.update_in(cx, |sidebar, _window, _cx| { + sidebar.selection = Some(wt_thread_index); + }); + + // Confirm to activate the worktree thread. + cx.dispatch_action(Confirm); + cx.run_until_parked(); + + // The worktree workspace should now be active, not the main one. 
+ let active_workspace = multi_workspace.read_with(cx, |mw, _| { + mw.workspaces()[mw.active_workspace_index()].clone() + }); + assert_eq!( + active_workspace, worktree_workspace, + "clicking an absorbed worktree thread should activate the worktree workspace" + ); + } } diff --git a/crates/journal/src/journal.rs b/crates/journal/src/journal.rs index ba97bcf66a77659fb3196ba45ebb3f831452e008..b8028c79b3d5da415a52d946d7601d8cbb40f738 100644 --- a/crates/journal/src/journal.rs +++ b/crates/journal/src/journal.rs @@ -9,7 +9,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use workspace::{AppState, OpenVisible, Workspace}; +use workspace::{AppState, OpenResult, OpenVisible, Workspace}; actions!( journal, @@ -107,7 +107,10 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap .spawn(cx, async move |cx| { let (journal_dir, entry_path) = create_entry.await?; let opened = if open_new_workspace { - let (new_workspace, _) = cx + let OpenResult { + window: new_workspace, + .. 
+ } = cx .update(|_window, cx| { workspace::open_paths( &[journal_dir], diff --git a/crates/recent_projects/src/recent_projects.rs b/crates/recent_projects/src/recent_projects.rs index b5ae7b048276f671da48beaa52b0db5fbcdda61a..c9720af2aba7f4a27adf8e40745bb05012c4dafd 100644 --- a/crates/recent_projects/src/recent_projects.rs +++ b/crates/recent_projects/src/recent_projects.rs @@ -935,7 +935,14 @@ impl PickerDelegate for RecentProjectsDelegate { } return; } else { - workspace.open_workspace_for_paths(false, paths, window, cx) + workspace + .open_workspace_for_paths(false, paths, window, cx) + .detach_and_prompt_err( + "Failed to open project", + window, + cx, + |_, _, _| None, + ); } } SerializedWorkspaceLocation::Remote(mut connection) => { @@ -964,14 +971,14 @@ impl PickerDelegate for RecentProjectsDelegate { ) .await }) + .detach_and_prompt_err( + "Failed to open project", + window, + cx, + |_, _, _| None, + ); } } - .detach_and_prompt_err( - "Failed to open project", - window, - cx, - |_, _, _| None, - ); }); cx.emit(DismissEvent); } diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs index adfc62a2bd210b4da24202d734ba9f9eedd17aef..cb60978d85220baa8519a7a1816434b4c06eb0c3 100644 --- a/crates/workspace/src/multi_workspace.rs +++ b/crates/workspace/src/multi_workspace.rs @@ -498,7 +498,7 @@ impl MultiWorkspace { paths: Vec, window: &mut Window, cx: &mut Context, - ) -> Task> { + ) -> Task>> { let workspace = self.workspace().clone(); if multi_workspace_enabled(cx) { @@ -519,7 +519,7 @@ impl MultiWorkspace { })? 
.await } else { - Ok(()) + Ok(workspace) } }) } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 949dc127a7465c4cf3941ee4c4982fad37d06281..19d02e9a8a6742ba04bc52a68568cb2bf994608a 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -659,7 +659,7 @@ fn prompt_and_open_paths(app_state: Arc, options: PathPromptOptions, c } else { let task = Workspace::new_local(Vec::new(), app_state.clone(), None, None, None, true, cx); cx.spawn(async move |cx| { - let (window, _) = task.await?; + let OpenResult { window, .. } = task.await?; window.update(cx, |multi_workspace, window, cx| { window.activate_window(); let workspace = multi_workspace.workspace().clone(); @@ -1752,12 +1752,7 @@ impl Workspace { init: Option) + Send>>, activate: bool, cx: &mut App, - ) -> Task< - anyhow::Result<( - WindowHandle, - Vec>>>, - )>, - > { + ) -> Task> { let project_handle = Project::local( app_state.client.clone(), app_state.node_runtime.clone(), @@ -1997,7 +1992,11 @@ impl Workspace { }); }) .log_err(); - Ok((window, opened_items)) + Ok(OpenResult { + window, + workspace, + opened_items, + }) }) } @@ -2685,7 +2684,10 @@ impl Workspace { cx, ); cx.spawn_in(window, async move |_vh, cx| { - let (multi_workspace_window, _) = task.await?; + let OpenResult { + window: multi_workspace_window, + .. + } = task.await?; multi_workspace_window.update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| callback(workspace, window, cx)) @@ -2723,7 +2725,10 @@ impl Workspace { cx, ); cx.spawn_in(window, async move |_vh, cx| { - let (multi_workspace_window, _) = task.await?; + let OpenResult { + window: multi_workspace_window, + .. 
+ } = task.await?; multi_workspace_window.update(cx, |multi_workspace, window, cx| { let workspace = multi_workspace.workspace().clone(); workspace.update(cx, |workspace, cx| callback(workspace, window, cx)) @@ -3102,7 +3107,7 @@ impl Workspace { paths: Vec, window: &mut Window, cx: &mut Context, - ) -> Task> { + ) -> Task>> { let window_handle = window.window_handle().downcast::(); let is_remote = self.project.read(cx).is_via_collab(); let has_worktree = self.project.read(cx).worktrees(cx).next().is_some(); @@ -3118,19 +3123,20 @@ impl Workspace { let app_state = self.app_state.clone(); cx.spawn(async move |_, cx| { - cx.update(|cx| { - open_paths( - &paths, - app_state, - OpenOptions { - replace_window: window_to_replace, - ..Default::default() - }, - cx, - ) - }) - .await?; - Ok(()) + let OpenResult { workspace, .. } = cx + .update(|cx| { + open_paths( + &paths, + app_state, + OpenOptions { + replace_window: window_to_replace, + ..Default::default() + }, + cx, + ) + }) + .await?; + Ok(workspace) }) } @@ -8210,7 +8216,7 @@ pub async fn restore_multiworkspace( cx.update(|cx| open_workspace_by_id(first.workspace_id, app_state.clone(), None, cx)) .await? } else { - let (window, _items) = cx + let OpenResult { window, .. } = cx .update(|cx| { Workspace::new_local( first.paths.paths().to_vec(), @@ -8503,7 +8509,10 @@ pub fn join_channel( let mut active_window = requesting_window.or_else(|| activate_any_workspace_window(cx)); if active_window.is_none() { // no open workspaces, make one to show the error in (blergh) - let (window_handle, _) = cx + let OpenResult { + window: window_handle, + .. + } = cx .update(|cx| { Workspace::new_local( vec![], @@ -8759,6 +8768,14 @@ pub struct OpenOptions { pub env: Option>, } +/// The result of opening a workspace via [`open_paths`], [`Workspace::new_local`], +/// or [`Workspace::open_workspace_for_paths`]. 
+pub struct OpenResult { + pub window: WindowHandle, + pub workspace: Entity, + pub opened_items: Vec>>>, +} + /// Opens a workspace by its database ID, used for restoring empty workspaces with unsaved content. pub fn open_workspace_by_id( workspace_id: WorkspaceId, @@ -8878,12 +8895,7 @@ pub fn open_paths( app_state: Arc, open_options: OpenOptions, cx: &mut App, -) -> Task< - anyhow::Result<( - WindowHandle, - Vec>>>, - )>, -> { +) -> Task> { let abs_paths = abs_paths.to_vec(); #[cfg(target_os = "windows")] let wsl_path = abs_paths @@ -8962,7 +8974,7 @@ pub fn open_paths( }); }); - Ok((existing, open_task)) + Ok(OpenResult { window: existing, workspace: target_workspace, opened_items: open_task }) } else { let result = cx .update(move |cx| { @@ -8978,8 +8990,8 @@ pub fn open_paths( }) .await; - if let Ok((ref window_handle, _)) = result { - window_handle + if let Ok(ref result) = result { + result.window .update(cx, |_, window, _cx| { window.activate_window(); }) @@ -8991,9 +9003,9 @@ pub fn open_paths( #[cfg(target_os = "windows")] if let Some(util::paths::WslPath{distro, path}) = wsl_path - && let Ok((multi_workspace_window, _)) = &result + && let Ok(ref result) = result { - multi_workspace_window + result.window .update(cx, move |multi_workspace, _window, cx| { struct OpenInWsl; let workspace = multi_workspace.workspace().clone(); @@ -9040,7 +9052,7 @@ pub fn open_new( cx, ); cx.spawn(async move |cx| { - let (window, _opened_paths) = task.await?; + let OpenResult { window, .. 
} = task.await?; window .update(cx, |_, window, _cx| { window.activate_window(); diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 511b0edc6ac168fa47b52e66c9632487de86acf4..76930f627ecd6b8bf37729d9b48c0bacb300ecfb 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -3442,7 +3442,11 @@ mod tests { PathBuf::from(path!("/root/.git/HEAD")), PathBuf::from(path!("/root/excluded_dir/ignored_subdir")), ]; - let (opened_workspace, new_items) = cx + let workspace::OpenResult { + window: opened_workspace, + opened_items: new_items, + .. + } = cx .update(|cx| { workspace::open_paths( &paths_to_open, @@ -5866,7 +5870,9 @@ mod tests { // // Window A: workspace for dir1, workspace for dir2 // Window B: workspace for dir3 - let (window_a, _) = cx + let workspace::OpenResult { + window: window_a, .. + } = cx .update(|cx| { Workspace::new_local( vec![dir1.into()], @@ -5890,7 +5896,9 @@ mod tests { .expect("failed to open second workspace into window A"); cx.run_until_parked(); - let (window_b, _) = cx + let workspace::OpenResult { + window: window_b, .. 
+ } = cx .update(|cx| { Workspace::new_local( vec![dir3.into()], diff --git a/crates/zed/src/zed/open_listener.rs b/crates/zed/src/zed/open_listener.rs index e8f8554482680c4a51fc182c58369de19184bcb0..ca376f300d97de83d0b4a9af7620ee98ba5b4215 100644 --- a/crates/zed/src/zed/open_listener.rs +++ b/crates/zed/src/zed/open_listener.rs @@ -29,7 +29,7 @@ use util::ResultExt; use util::paths::PathWithPosition; use workspace::PathList; use workspace::item::ItemHandle; -use workspace::{AppState, MultiWorkspace, OpenOptions, SerializedWorkspaceLocation}; +use workspace::{AppState, MultiWorkspace, OpenOptions, OpenResult, SerializedWorkspaceLocation}; #[derive(Default, Debug)] pub struct OpenRequest { @@ -345,7 +345,11 @@ pub async fn open_paths_with_positions( .map(|path_with_position| path_with_position.path.clone()) .collect::>(); - let (multi_workspace, mut items) = cx + let OpenResult { + window: multi_workspace, + opened_items: mut items, + .. + } = cx .update(|cx| workspace::open_paths(&paths, app_state, open_options, cx)) .await?; From cc09611d0b058f2c814b8d9e290bdd902e8a6ebf Mon Sep 17 00:00:00 2001 From: Amaan <121273095+AmaanBilwar@users.noreply.github.com> Date: Fri, 13 Mar 2026 04:13:34 +0530 Subject: [PATCH 542/548] workspace: Fix opening closed projects randomly when Zed restarts (#50961) Closes #49854 Before you mark this PR as ready for review, make sure that you have: - [x] Added a solid test coverage and/or screenshots from doing manual testing - [x] Done a self-review taking into account security and performance aspects - [x] No UI changes Video for the fix: in the video i open a project -> close the project -> quit out of zed -> reopen zed -> zed opens to an empty workspace which was not the case before https://github.com/user-attachments/assets/1afb44a1-932b-4dab-8228-9d9d65750b6e Release Notes: - Fixed closed projects re-opening erroneously --- crates/zed/src/zed.rs | 71 +++++++++++++++++++++++++++---------------- 1 file changed, 44 insertions(+), 27 
deletions(-) diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 76930f627ecd6b8bf37729d9b48c0bacb300ecfb..2b515786d5dc503564607ffc1bc881a3077819a8 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -1066,37 +1066,54 @@ fn register_actions( }) .register_action({ let app_state = Arc::downgrade(&app_state); - move |_, _: &CloseProject, window, cx| { + move |_workspace, _: &CloseProject, window, cx| { let Some(window_handle) = window.window_handle().downcast::() else { return; }; if let Some(app_state) = app_state.upgrade() { - open_new( - workspace::OpenOptions { - replace_window: Some(window_handle), - ..Default::default() - }, - app_state, - cx, - |workspace, window, cx| { - cx.activate(true); - // Create buffer synchronously to avoid flicker - let project = workspace.project().clone(); - let buffer = project.update(cx, |project, cx| { - project.create_local_buffer("", None, true, cx) - }); - let editor = cx.new(|cx| { - Editor::for_buffer(buffer, Some(project), window, cx) - }); - workspace.add_item_to_active_pane( - Box::new(editor), - None, - true, - window, - cx, - ); - }, - ) + cx.spawn_in(window, async move |this, cx| { + let should_continue = this + .update_in(cx, |workspace, window, cx| { + workspace.prepare_to_close( + CloseIntent::ReplaceWindow, + window, + cx, + ) + })? 
+ .await?; + if should_continue { + let task = cx.update(|_window, cx| { + open_new( + workspace::OpenOptions { + replace_window: Some(window_handle), + ..Default::default() + }, + app_state, + cx, + |workspace, window, cx| { + cx.activate(true); + let project = workspace.project().clone(); + let buffer = project.update(cx, |project, cx| { + project.create_local_buffer("", None, true, cx) + }); + let editor = cx.new(|cx| { + Editor::for_buffer(buffer, Some(project), window, cx) + }); + workspace.add_item_to_active_pane( + Box::new(editor), + None, + true, + window, + cx, + ); + }, + ) + })?; + task.await + } else { + Ok(()) + } + }) .detach_and_log_err(cx); } } From 94248361be53c511a19ffefc14a3c55c040a639c Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Mar 2026 16:45:41 -0600 Subject: [PATCH 543/548] Make dispatcher on TestApp public again (#51431) Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/gpui/src/app/test_context.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index 7fa47191404fd28baf11f27d055e5ac7b85a747d..d8f459df3c54200f07b4584eeb8e1ffa8415554b 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -22,7 +22,8 @@ pub struct TestAppContext { pub background_executor: BackgroundExecutor, #[doc(hidden)] pub foreground_executor: ForegroundExecutor, - dispatcher: TestDispatcher, + #[doc(hidden)] + pub dispatcher: TestDispatcher, test_platform: Rc, text_system: Arc, fn_name: Option<&'static str>, From a07d0f4d2140b2bdf1da2d1721413077bc5c64e1 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Thu, 12 Mar 2026 18:49:17 -0400 Subject: [PATCH 544/548] Assign meaningful names to some single-letter bindings (#51432) This PR assigns meaningful names to some single-letter bindings we were using to refer to the organization. 
Release Notes: - N/A --- crates/language_model/src/model/cloud_model.rs | 2 +- crates/language_models/src/provider/cloud.rs | 6 +++--- crates/web_search_providers/src/cloud.rs | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/crates/language_model/src/model/cloud_model.rs b/crates/language_model/src/model/cloud_model.rs index e384ce05fa390677529235442c4cb91186520a02..527d24ec18c0f9ef08576a71fe92562dd94d4afd 100644 --- a/crates/language_model/src/model/cloud_model.rs +++ b/crates/language_model/src/model/cloud_model.rs @@ -159,7 +159,7 @@ impl RefreshLlmTokenListener { .user_store .read(cx) .current_organization() - .map(|o| o.id.clone()); + .map(|organization| organization.id.clone()); cx.spawn(async move |this, cx| { llm_api_token.refresh(&client, organization_id).await?; this.update(cx, |_this, cx| cx.emit(LlmTokenRefreshedEvent)) diff --git a/crates/language_models/src/provider/cloud.rs b/crates/language_models/src/provider/cloud.rs index 610b0167b86f8bf4426b671cedad45a28c3fdc6d..4fdf06cc959ccc853f92f4e150978cd15c8e70d3 100644 --- a/crates/language_models/src/provider/cloud.rs +++ b/crates/language_models/src/provider/cloud.rs @@ -157,7 +157,7 @@ impl State { .user_store .read(cx) .current_organization() - .map(|o| o.id.clone()); + .map(|organization| organization.id.clone()); cx.spawn(async move |this, cx| { let response = Self::fetch_models(client, llm_api_token, organization_id).await?; @@ -705,7 +705,7 @@ impl LanguageModel for CloudLanguageModel { .user_store .read(cx) .current_organization() - .map(|o| o.id.clone()); + .map(|organization| organization.id.clone()); let model_id = self.model.id.to_string(); let generate_content_request = into_google(request, model_id.clone(), GoogleModelMode::Default); @@ -777,7 +777,7 @@ impl LanguageModel for CloudLanguageModel { user_store .read(cx) .current_organization() - .map(|o| o.id.clone()) + .map(|organization| organization.id.clone()) }); let thinking_allowed = request.thinking_allowed; let 
enable_thinking = thinking_allowed && self.model.supports_thinking; diff --git a/crates/web_search_providers/src/cloud.rs b/crates/web_search_providers/src/cloud.rs index 51be6c9ddff01a956eebabe3e44166ae15de4515..17addd24d445a666138a1b37fef872beedd07aed 100644 --- a/crates/web_search_providers/src/cloud.rs +++ b/crates/web_search_providers/src/cloud.rs @@ -55,7 +55,7 @@ impl WebSearchProvider for CloudWebSearchProvider { .user_store .read(cx) .current_organization() - .map(|o| o.id.clone()); + .map(|organization| organization.id.clone()); let body = WebSearchBody { query }; cx.background_spawn(async move { perform_web_search(client, llm_api_token, organization_id, body).await From b15a8c1e5e214f7288259d98eab87892a6997ed7 Mon Sep 17 00:00:00 2001 From: Justin Su Date: Thu, 12 Mar 2026 20:05:27 -0400 Subject: [PATCH 545/548] docs: Clarify that `"..."` enables all other registered language servers (#51427) Closes #51416 Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- docs/src/languages/python.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/languages/python.md b/docs/src/languages/python.md index fdeabec5069ed20a9b168ab19129dde0cc6280ba..0f34fdb752143b30eb1f42a836482bd4ea1d1188 100644 --- a/docs/src/languages/python.md +++ b/docs/src/languages/python.md @@ -89,7 +89,7 @@ Configure language servers in Settings ({#kb zed::OpenSettings}) under Languages "languages": { "Python": { "language_servers": [ - // Disable basedpyright and enable ty, and include all + // Enable ty, disable basedpyright, and enable all // other registered language servers (ruff, pylsp, pyright). 
"ty", "!basedpyright", From d4bb640555e8035b45d6a56db206048ac1b35a0f Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Thu, 12 Mar 2026 21:09:53 -0300 Subject: [PATCH 546/548] git_ui: Remove unused ProjectDiffEmptyState component (#51436) Just cleaning up our component set a bit. This one wasn't used at all. Release Notes: - N/A --- crates/git_ui/src/project_diff.rs | 252 +----------------------------- 1 file changed, 1 insertion(+), 251 deletions(-) diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index ad7d6b86befd0b0f4a1ecf6386c030d4294cdf5e..3af77b8fb680abbca2688410b783007af573578d 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -2,7 +2,6 @@ use crate::{ conflict_view::ConflictAddon, git_panel::{GitPanel, GitPanelAddon, GitStatusEntry}, git_panel_settings::GitPanelSettings, - remote_button::{render_publish_button, render_push_button}, resolve_active_repository, }; use agent_settings::AgentSettings; @@ -18,8 +17,7 @@ use editor::{ use git::repository::DiffType; use git::{ - Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext, - repository::{Branch, RepoPath, Upstream, UpstreamTracking, UpstreamTrackingStatus}, + Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext, repository::RepoPath, status::FileStatus, }; use gpui::{ @@ -1719,254 +1717,6 @@ impl Render for BranchDiffToolbar { } } -#[derive(IntoElement, RegisterComponent)] -pub struct ProjectDiffEmptyState { - pub no_repo: bool, - pub can_push_and_pull: bool, - pub focus_handle: Option, - pub current_branch: Option, - // has_pending_commits: bool, - // ahead_of_remote: bool, - // no_git_repository: bool, -} - -impl RenderOnce for ProjectDiffEmptyState { - fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement { - let status_against_remote = |ahead_by: usize, behind_by: usize| -> bool { - matches!(self.current_branch, Some(Branch { 
- upstream: - Some(Upstream { - tracking: - UpstreamTracking::Tracked(UpstreamTrackingStatus { - ahead, behind, .. - }), - .. - }), - .. - }) if (ahead > 0) == (ahead_by > 0) && (behind > 0) == (behind_by > 0)) - }; - - let change_count = |current_branch: &Branch| -> (usize, usize) { - match current_branch { - Branch { - upstream: - Some(Upstream { - tracking: - UpstreamTracking::Tracked(UpstreamTrackingStatus { - ahead, behind, .. - }), - .. - }), - .. - } => (*ahead as usize, *behind as usize), - _ => (0, 0), - } - }; - - let not_ahead_or_behind = status_against_remote(0, 0); - let ahead_of_remote = status_against_remote(1, 0); - let branch_not_on_remote = if let Some(branch) = self.current_branch.as_ref() { - branch.upstream.is_none() - } else { - false - }; - - let has_branch_container = |branch: &Branch| { - h_flex() - .max_w(px(420.)) - .bg(cx.theme().colors().text.opacity(0.05)) - .border_1() - .border_color(cx.theme().colors().border) - .rounded_sm() - .gap_8() - .px_6() - .py_4() - .map(|this| { - if ahead_of_remote { - let ahead_count = change_count(branch).0; - let ahead_string = format!("{} Commits Ahead", ahead_count); - this.child( - v_flex() - .child(Headline::new(ahead_string).size(HeadlineSize::Small)) - .child( - Label::new(format!("Push your changes to {}", branch.name())) - .color(Color::Muted), - ), - ) - .child(div().child(render_push_button( - self.focus_handle, - "push".into(), - ahead_count as u32, - ))) - } else if branch_not_on_remote { - this.child( - v_flex() - .child(Headline::new("Publish Branch").size(HeadlineSize::Small)) - .child( - Label::new(format!("Create {} on remote", branch.name())) - .color(Color::Muted), - ), - ) - .child( - div().child(render_publish_button(self.focus_handle, "publish".into())), - ) - } else { - this.child(Label::new("Remote status unknown").color(Color::Muted)) - } - }) - }; - - v_flex().size_full().items_center().justify_center().child( - v_flex() - .gap_1() - .when(self.no_repo, |this| { - 
this.text_center() - .child(Label::new("No Repository").color(Color::Muted)) - .child( - Button::new("initialize-repo", "Initialize Repository") - .on_click(move |_, _, cx| cx.dispatch_action(&git::Init)), - ) - }) - .map(|this| { - if not_ahead_or_behind && self.current_branch.is_some() { - this.text_center() - .child(Label::new("No Changes").color(Color::Muted)) - } else { - this.when_some(self.current_branch.as_ref(), |this, branch| { - this.child(has_branch_container(branch)) - }) - } - }), - ) - } -} - -mod preview { - use git::repository::{ - Branch, CommitSummary, Upstream, UpstreamTracking, UpstreamTrackingStatus, - }; - use ui::prelude::*; - - use super::ProjectDiffEmptyState; - - // View this component preview using `workspace: open component-preview` - impl Component for ProjectDiffEmptyState { - fn scope() -> ComponentScope { - ComponentScope::VersionControl - } - - fn preview(_window: &mut Window, _cx: &mut App) -> Option { - let unknown_upstream: Option = None; - let ahead_of_upstream: Option = Some( - UpstreamTrackingStatus { - ahead: 2, - behind: 0, - } - .into(), - ); - - let not_ahead_or_behind_upstream: Option = Some( - UpstreamTrackingStatus { - ahead: 0, - behind: 0, - } - .into(), - ); - - fn branch(upstream: Option) -> Branch { - Branch { - is_head: true, - ref_name: "some-branch".into(), - upstream: upstream.map(|tracking| Upstream { - ref_name: "origin/some-branch".into(), - tracking, - }), - most_recent_commit: Some(CommitSummary { - sha: "abc123".into(), - subject: "Modify stuff".into(), - commit_timestamp: 1710932954, - author_name: "John Doe".into(), - has_parent: true, - }), - } - } - - let no_repo_state = ProjectDiffEmptyState { - no_repo: true, - can_push_and_pull: false, - focus_handle: None, - current_branch: None, - }; - - let no_changes_state = ProjectDiffEmptyState { - no_repo: false, - can_push_and_pull: true, - focus_handle: None, - current_branch: Some(branch(not_ahead_or_behind_upstream)), - }; - - let 
ahead_of_upstream_state = ProjectDiffEmptyState { - no_repo: false, - can_push_and_pull: true, - focus_handle: None, - current_branch: Some(branch(ahead_of_upstream)), - }; - - let unknown_upstream_state = ProjectDiffEmptyState { - no_repo: false, - can_push_and_pull: true, - focus_handle: None, - current_branch: Some(branch(unknown_upstream)), - }; - - let (width, height) = (px(480.), px(320.)); - - Some( - v_flex() - .gap_6() - .children(vec![ - example_group(vec![ - single_example( - "No Repo", - div() - .w(width) - .h(height) - .child(no_repo_state) - .into_any_element(), - ), - single_example( - "No Changes", - div() - .w(width) - .h(height) - .child(no_changes_state) - .into_any_element(), - ), - single_example( - "Unknown Upstream", - div() - .w(width) - .h(height) - .child(unknown_upstream_state) - .into_any_element(), - ), - single_example( - "Ahead of Remote", - div() - .w(width) - .h(height) - .child(ahead_of_upstream_state) - .into_any_element(), - ), - ]) - .vertical(), - ]) - .into_any_element(), - ) - } - } -} - struct BranchDiffAddon { branch_diff: Entity, } From 7b9afc8c454607222eaf751bbc38159ececc1f7a Mon Sep 17 00:00:00 2001 From: Finn Eitreim <48069764+feitreim@users.noreply.github.com> Date: Thu, 12 Mar 2026 20:12:31 -0400 Subject: [PATCH 547/548] gpui: Recalculate list layout after the window has been resized (#51414) Closes #51417 I noticed this bug in the settings menu where when I opened the settings menu, I could not scroll down through all the available options, eg. on the initial page I wasn't able to scroll down to privacy. When I saw that no one else had reported this issue, I figured it may be due to my setup, and it turns out that using Aerospace, the window manager I use, was what made this bug visible to me. Because aerospace resizes the window right after it launches, the originally computed heights for the list are incorrect, meaning the scroll bar is the wrong size as well. 
in the relevant code there was a comment that says "If the width of the list has changed, invalidate all cached item heights" which wasn't incorrect per-se, but it just invalidated them without triggering any re-computation, causing incorrect scroll bars. My intuition is that window resizes/events that change the width of the list bounds are fairly rare, so there shouldn't be a large performance hit from the change. Also implemented a test that directly showcases the behavior, if you run the test without the change it fails, as the max_offset_for_scrollbar will be wrong. Videos: Before https://github.com/user-attachments/assets/2b680222-7071-4098-863f-519361f0756a After: https://github.com/user-attachments/assets/1222a299-23d7-4007-8e88-55d2daccce64 [x] Tests [x] Video of behavior Release Notes: - gpui: fixed list height re-computation when the list width changes. --- crates/gpui/src/elements/list.rs | 36 ++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index 92b5389fecf219c0c113f682463498902df4c07d..b84241e9e0f79fe5cf8a24514cbf57982247a76b 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -1103,6 +1103,7 @@ impl Element for List { ); state.items = new_items; + state.measuring_behavior.reset(); } let padding = style @@ -1348,6 +1349,41 @@ mod test { assert_eq!(offset.offset_in_item, px(0.)); } + #[gpui::test] + fn test_measure_all_after_width_change(cx: &mut TestAppContext) { + let cx = cx.add_empty_window(); + + let state = ListState::new(10, crate::ListAlignment::Top, px(0.)).measure_all(); + + struct TestView(ListState); + impl Render for TestView { + fn render(&mut self, _: &mut Window, _: &mut Context) -> impl IntoElement { + list(self.0.clone(), |_, _, _| { + div().h(px(50.)).w_full().into_any() + }) + .w_full() + .h_full() + } + } + + let view = cx.update(|_, cx| cx.new(|_| TestView(state.clone()))); + + // First draw at 
width 100: all 10 items measured (total 500px). + // Viewport is 200px, so max scroll offset should be 300px. + cx.draw(point(px(0.), px(0.)), size(px(100.), px(200.)), |_, _| { + view.clone().into_any_element() + }); + assert_eq!(state.max_offset_for_scrollbar().y, px(300.)); + + // Second draw at a different width: items get invalidated. + // Without the fix, max_offset would drop because unmeasured items + // contribute 0 height. + cx.draw(point(px(0.), px(0.)), size(px(200.), px(200.)), |_, _| { + view.into_any_element() + }); + assert_eq!(state.max_offset_for_scrollbar().y, px(300.)); + } + #[gpui::test] fn test_remeasure(cx: &mut TestAppContext) { let cx = cx.add_empty_window(); From 7aba1f9691c6b0d08916a2d385d179ba876553a8 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Thu, 12 Mar 2026 19:58:57 -0600 Subject: [PATCH 548/548] Fix leak detector on HeadlessAppContext (#51442) Closes #ISSUE Before you mark this PR as ready for review, make sure that you have: - [ ] Added a solid test coverage and/or screenshots from doing manual testing - [ ] Done a self-review taking into account security and performance aspects - [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) Release Notes: - N/A --- crates/gpui/src/app/headless_app_context.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/crates/gpui/src/app/headless_app_context.rs b/crates/gpui/src/app/headless_app_context.rs index bebade89d9a8417769147e5f64923953e4bc3694..90dc8c8f0c0994e3f118916b2d004f7d90566ea7 100644 --- a/crates/gpui/src/app/headless_app_context.rs +++ b/crates/gpui/src/app/headless_app_context.rs @@ -186,6 +186,14 @@ impl HeadlessAppContext { } } +impl Drop for HeadlessAppContext { + fn drop(&mut self) { + // Shut down the app so windows are closed and entity handles are + // released before the LeakDetector runs. 
+ self.app.borrow_mut().shutdown(); + } +} + impl AppContext for HeadlessAppContext { fn new(&mut self, build_entity: impl FnOnce(&mut Context) -> T) -> Entity { let mut app = self.app.borrow_mut();