From 24a6008e5c0bc691a8ee68f4cbfafd919367efde Mon Sep 17 00:00:00 2001
From: MostlyK <135974627+MostlyKIGuess@users.noreply.github.com>
Date: Tue, 7 Apr 2026 11:12:50 +0530
Subject: [PATCH 01/22] repl: Improve iopub connection error messages (#53014)
Following up on #51834: these error messages are more helpful than
merely reporting that the connection failed!
Self-Review Checklist:
- [x] I've reviewed my own diff for quality, security, and reliability
- [x] Unsafe blocks (if any) have justifying comments
- [x] The content is consistent with the [UI/UX
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
- [x] Tests cover the new/changed behavior
- [x] Performance impact has been considered and is acceptable
Release Notes:
- N/A
---
crates/repl/src/kernels/ssh_kernel.rs | 2 +-
crates/repl/src/kernels/wsl_kernel.rs | 3 ++-
2 files changed, 3 insertions(+), 2 deletions(-)
diff --git a/crates/repl/src/kernels/ssh_kernel.rs b/crates/repl/src/kernels/ssh_kernel.rs
index 53be6622379cfcbf3ceeb6db425eeede9b226860..797b111a14345267e01c60c6803787c8f1d0f6a2 100644
--- a/crates/repl/src/kernels/ssh_kernel.rs
+++ b/crates/repl/src/kernels/ssh_kernel.rs
@@ -215,7 +215,7 @@ impl SshRunningKernel {
&session_id,
)
.await
- .context("failed to create iopub connection")?;
+ .context("Failed to create iopub connection. Is `ipykernel` installed in the remote environment? Try running `pip install ipykernel` on the remote host.")?;
let peer_identity = runtimelib::peer_identity_for_session(&session_id)?;
let shell_socket = runtimelib::create_client_shell_connection_with_identity(
diff --git a/crates/repl/src/kernels/wsl_kernel.rs b/crates/repl/src/kernels/wsl_kernel.rs
index d9ac05c5fc8c2cb756898ff449d6714b78cb7997..be76d7ddccb7f199a368b76a1f21bf65fe6f2902 100644
--- a/crates/repl/src/kernels/wsl_kernel.rs
+++ b/crates/repl/src/kernels/wsl_kernel.rs
@@ -354,7 +354,8 @@ impl WslRunningKernel {
"",
&session_id,
)
- .await?;
+ .await
+ .context("Failed to create iopub connection. Is `ipykernel` installed in the WSL environment? Try running `pip install ipykernel` inside your WSL distribution.")?;
let peer_identity = runtimelib::peer_identity_for_session(&session_id)?;
let shell_socket = runtimelib::create_client_shell_connection_with_identity(
From 6f7fab1d68f1fa4945c7717a595b9e9776a14521 Mon Sep 17 00:00:00 2001
From: Smit Barmase
Date: Tue, 7 Apr 2026 11:19:21 +0530
Subject: [PATCH 02/22] http_client: Fix GitHub download unpack failures on
some filesystems (#53286)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Disable mtime preservation when unpacking tar archives, as some
filesystems error when asked to set it. Follows how
[cargo](https://github.com/rust-lang/cargo/blob/1ad92f77a819953bcef75a24019b66681ff28b1c/src/cargo/ops/cargo_package/verify.rs#L59
) and
[uv](https://github.com/astral-sh/uv/blob/0da0cd8b4310d3ac4be96223bd1e24ada109af9e/crates/uv-extract/src/stream.rs#L658)
handle it.
> Caused by:
0: extracting
https://github.com/microsoft/vscode-eslint/archive/refs/tags/release%2F3.0.24.tar.gz
to "/Users/user-name-here/Library/Application
Support/Zed/languages/eslint/.tmp-github-download-pYkrYP"
1: failed to unpack `/Users/user-name-here/Library/Application
Support/Zed/languages/eslint/.tmp-github-download-pYkrYP/vscode-eslint-release-3.0.24/package-lock.json`
2: failed to set mtime for
`/Users/user-name-here/Library/Application
Support/Zed/languages/eslint/.tmp-github-download-pYkrYP/vscode-eslint-release-3.0.24/package-lock.json`
3: No such file or directory (os error 2)
Self-Review Checklist:
- [x] I've reviewed my own diff for quality, security, and reliability
- [x] Unsafe blocks (if any) have justifying comments
- [x] The content is consistent with the [UI/UX
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
- [x] Tests cover the new/changed behavior
- [x] Performance impact has been considered and is acceptable
Release Notes:
- N/A
---
crates/http_client/src/github_download.rs | 22 ++++++++++++++++------
1 file changed, 16 insertions(+), 6 deletions(-)
diff --git a/crates/http_client/src/github_download.rs b/crates/http_client/src/github_download.rs
index 47ae2c2b36b1ab37b56ab70735c2ce018bc5e275..5d11f3e11b7ea951c6bc9c143c266d8802f88cc3 100644
--- a/crates/http_client/src/github_download.rs
+++ b/crates/http_client/src/github_download.rs
@@ -207,11 +207,7 @@ async fn extract_tar_gz(
from: impl AsyncRead + Unpin,
) -> Result<(), anyhow::Error> {
let decompressed_bytes = GzipDecoder::new(BufReader::new(from));
- let archive = async_tar::Archive::new(decompressed_bytes);
- archive
- .unpack(&destination_path)
- .await
- .with_context(|| format!("extracting {url} to {destination_path:?}"))?;
+ unpack_tar_archive(destination_path, url, decompressed_bytes).await?;
Ok(())
}
@@ -221,7 +217,21 @@ async fn extract_tar_bz2(
from: impl AsyncRead + Unpin,
) -> Result<(), anyhow::Error> {
let decompressed_bytes = BzDecoder::new(BufReader::new(from));
- let archive = async_tar::Archive::new(decompressed_bytes);
+ unpack_tar_archive(destination_path, url, decompressed_bytes).await?;
+ Ok(())
+}
+
+async fn unpack_tar_archive(
+ destination_path: &Path,
+ url: &str,
+ archive_bytes: impl AsyncRead + Unpin,
+) -> Result<(), anyhow::Error> {
+ // We don't need to set the modified time. It's irrelevant to downloaded
+ // archive verification, and some filesystems return errors when asked to
+ // apply it after extraction.
+ let archive = async_tar::ArchiveBuilder::new(archive_bytes)
+ .set_preserve_mtime(false)
+ .build();
archive
.unpack(&destination_path)
.await
From 818991db7781db11bd8b1dea9eb27179713156f1 Mon Sep 17 00:00:00 2001
From: Saketh <126517689+SAKETH11111@users.noreply.github.com>
Date: Tue, 7 Apr 2026 01:14:00 -0500
Subject: [PATCH 03/22] tasks_ui: Fix previously used task tooltip (#53104)
Closes #52941
## Summary
- update the task picker delete button tooltip to describe the recently
used task entry it removes
- keep the change scoped to the inaccurate user-facing copy in the tasks
modal
## Testing
- cargo test -p tasks_ui
Release Notes:
- N/A
---
crates/tasks_ui/src/modal.rs | 4 +---
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/crates/tasks_ui/src/modal.rs b/crates/tasks_ui/src/modal.rs
index 285a07c9562849b26b4cbba3de3979614384d875..3b7edef415f10f8723ab041e5a81ac672d603371 100644
--- a/crates/tasks_ui/src/modal.rs
+++ b/crates/tasks_ui/src/modal.rs
@@ -566,9 +566,7 @@ impl PickerDelegate for TasksModalDelegate {
.checked_sub(1);
picker.refresh(window, cx);
}))
- .tooltip(|_, cx| {
- Tooltip::simple("Delete Previously Scheduled Task", cx)
- }),
+ .tooltip(|_, cx| Tooltip::simple("Delete from Recent Tasks", cx)),
);
item.end_slot_on_hover(delete_button)
} else {
From ee6495dce4012019ccc235486afa800da443d680 Mon Sep 17 00:00:00 2001
From: Cameron Mcloughlin
Date: Tue, 7 Apr 2026 09:02:40 +0100
Subject: [PATCH 04/22] collab: Fix UI font size scaling (#53290)
---
crates/collab_ui/src/collab_panel.rs | 7 -------
1 file changed, 7 deletions(-)
diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs
index 8d0cdf351163dadf0ac8cbf6a8dc04886f30f583..1e1aab3b9d4aa0e48ad4a84ec77bdc6dff51c7f5 100644
--- a/crates/collab_ui/src/collab_panel.rs
+++ b/crates/collab_ui/src/collab_panel.rs
@@ -1181,7 +1181,6 @@ impl CollabPanel {
.into();
ListItem::new(project_id as usize)
- .height(px(24.))
.toggle_state(is_selected)
.on_click(cx.listener(move |this, _, window, cx| {
this.workspace
@@ -1222,7 +1221,6 @@ impl CollabPanel {
let id = peer_id.map_or(usize::MAX, |id| id.as_u64() as usize);
ListItem::new(("screen", id))
- .height(px(24.))
.toggle_state(is_selected)
.start_slot(
h_flex()
@@ -1269,7 +1267,6 @@ impl CollabPanel {
let has_channel_buffer_changed = channel_store.has_channel_buffer_changed(channel_id);
ListItem::new("channel-notes")
- .height(px(24.))
.toggle_state(is_selected)
.on_click(cx.listener(move |this, _, window, cx| {
this.open_channel_notes(channel_id, window, cx);
@@ -3210,12 +3207,9 @@ impl CollabPanel {
(IconName::Star, Color::Default, "Add to Favorites")
};
- let height = px(24.);
-
h_flex()
.id(ix)
.group("")
- .h(height)
.w_full()
.overflow_hidden()
.when(!channel.is_root_channel(), |el| {
@@ -3245,7 +3239,6 @@ impl CollabPanel {
)
.child(
ListItem::new(ix)
- .height(height)
// Add one level of depth for the disclosure arrow.
.indent_level(depth + 1)
.indent_step_size(px(20.))
From 614f67ed2aa7378e5f11359ea01ba873b6a2a103 Mon Sep 17 00:00:00 2001
From: "Angel P."
Date: Tue, 7 Apr 2026 05:00:22 -0400
Subject: [PATCH 05/22] markdown_preview: Fix HTML alignment styles not being
applied (#53196)
## What This PR Does
This PR adds support for HTML alignment styles to be applied to
Paragraph and Heading elements and their children. Here is what this
looks like before vs after this PR (both images use the same markdown
below):
```markdown
```
**BEFORE:**
**AFTER:**
## Notes
I used `style="text-align: center|left|right;"` instead of
`align="center|right|left"` since `align` has been [deprecated in
HTML5](https://www.w3.org/TR/2011/WD-html5-author-20110809/obsolete.html)
for block-level elements. The issue this PR solves mentioned that github
supports the `align="center|right|left"` attribute, so I'm unsure if the
Zed team would want to have parity there. Feel free to let me know if
that would be something that should be added, however for now I've
decided to follow the HTML5 standard.
Self-Review Checklist:
- [x] I've reviewed my own diff for quality, security, and reliability
- [x] Unsafe blocks (if any) have justifying comments
- [x] The content is consistent with the [UI/UX
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
- [x] Tests cover the new/changed behavior
- [x] Performance impact has been considered and is acceptable
Closes https://github.com/zed-industries/zed/issues/51062
Release Notes:
- Fixed HTML alignment styles not being applied in markdown previews
---------
Co-authored-by: Smit Barmase
---
crates/markdown/src/html/html_parser.rs | 117 ++++++++++++++++++---
crates/markdown/src/html/html_rendering.rs | 18 +++-
crates/markdown/src/markdown.rs | 69 +++++++++---
3 files changed, 172 insertions(+), 32 deletions(-)
diff --git a/crates/markdown/src/html/html_parser.rs b/crates/markdown/src/html/html_parser.rs
index 20338ec2abef2314b7cd6ca91e45ee05be909745..8aa5da0cea7ea160721875fa889a720fe4c8bed1 100644
--- a/crates/markdown/src/html/html_parser.rs
+++ b/crates/markdown/src/html/html_parser.rs
@@ -1,6 +1,6 @@
use std::{cell::RefCell, collections::HashMap, mem, ops::Range};
-use gpui::{DefiniteLength, FontWeight, SharedString, px, relative};
+use gpui::{DefiniteLength, FontWeight, SharedString, TextAlign, px, relative};
use html5ever::{
Attribute, LocalName, ParseOpts, local_name, parse_document, tendril::TendrilSink,
};
@@ -24,10 +24,17 @@ pub(crate) enum ParsedHtmlElement {
List(ParsedHtmlList),
Table(ParsedHtmlTable),
BlockQuote(ParsedHtmlBlockQuote),
- Paragraph(HtmlParagraph),
+ Paragraph(ParsedHtmlParagraph),
Image(HtmlImage),
}
+#[derive(Debug, Clone)]
+#[cfg_attr(test, derive(PartialEq))]
+pub(crate) struct ParsedHtmlParagraph {
+ pub text_align: Option,
+ pub contents: HtmlParagraph,
+}
+
impl ParsedHtmlElement {
pub fn source_range(&self) -> Option> {
Some(match self {
@@ -35,7 +42,7 @@ impl ParsedHtmlElement {
Self::List(list) => list.source_range.clone(),
Self::Table(table) => table.source_range.clone(),
Self::BlockQuote(block_quote) => block_quote.source_range.clone(),
- Self::Paragraph(text) => match text.first()? {
+ Self::Paragraph(paragraph) => match paragraph.contents.first()? {
HtmlParagraphChunk::Text(text) => text.source_range.clone(),
HtmlParagraphChunk::Image(image) => image.source_range.clone(),
},
@@ -83,6 +90,7 @@ pub(crate) struct ParsedHtmlHeading {
pub source_range: Range,
pub level: HeadingLevel,
pub contents: HtmlParagraph,
+ pub text_align: Option,
}
#[derive(Debug, Clone)]
@@ -236,20 +244,21 @@ fn parse_html_node(
consume_children(source_range, node, elements, context);
}
NodeData::Text { contents } => {
- elements.push(ParsedHtmlElement::Paragraph(vec![
- HtmlParagraphChunk::Text(ParsedHtmlText {
+ elements.push(ParsedHtmlElement::Paragraph(ParsedHtmlParagraph {
+ text_align: None,
+ contents: vec![HtmlParagraphChunk::Text(ParsedHtmlText {
source_range,
highlights: Vec::default(),
links: Vec::default(),
contents: contents.borrow().to_string().into(),
- }),
- ]));
+ })],
+ }));
}
NodeData::Comment { .. } => {}
NodeData::Element { name, attrs, .. } => {
- let mut styles = if let Some(styles) =
- html_style_from_html_styles(extract_styles_from_attributes(attrs))
- {
+ let styles_map = extract_styles_from_attributes(attrs);
+ let text_align = text_align_from_attributes(attrs, &styles_map);
+ let mut styles = if let Some(styles) = html_style_from_html_styles(styles_map) {
vec![styles]
} else {
Vec::default()
@@ -270,7 +279,10 @@ fn parse_html_node(
);
if !paragraph.is_empty() {
- elements.push(ParsedHtmlElement::Paragraph(paragraph));
+ elements.push(ParsedHtmlElement::Paragraph(ParsedHtmlParagraph {
+ text_align,
+ contents: paragraph,
+ }));
}
} else if matches!(
name.local,
@@ -303,6 +315,7 @@ fn parse_html_node(
_ => unreachable!(),
},
contents: paragraph,
+ text_align,
}));
}
} else if name.local == local_name!("ul") || name.local == local_name!("ol") {
@@ -589,6 +602,30 @@ fn html_style_from_html_styles(styles: HashMap) -> Option Option {
+ match value.trim().to_ascii_lowercase().as_str() {
+ "left" => Some(TextAlign::Left),
+ "center" => Some(TextAlign::Center),
+ "right" => Some(TextAlign::Right),
+ _ => None,
+ }
+}
+
+fn text_align_from_styles(styles: &HashMap) -> Option {
+ styles
+ .get("text-align")
+ .and_then(|value| parse_text_align(value))
+}
+
+fn text_align_from_attributes(
+ attrs: &RefCell>,
+ styles: &HashMap,
+) -> Option {
+ text_align_from_styles(styles).or_else(|| {
+ attr_value(attrs, local_name!("align")).and_then(|value| parse_text_align(&value))
+ })
+}
+
fn extract_styles_from_attributes(attrs: &RefCell>) -> HashMap {
let mut styles = HashMap::new();
@@ -770,6 +807,7 @@ fn extract_html_table(node: &Node, source_range: Range) -> Optionx
", 0..40).unwrap();
+ let ParsedHtmlElement::Paragraph(paragraph) = &parsed.children[0] else {
+ panic!("expected paragraph");
+ };
+ assert_eq!(paragraph.text_align, Some(TextAlign::Center));
+ }
+
+ #[test]
+ fn parses_heading_text_align_from_style() {
+ let parsed = parse_html_block("Title
", 0..45).unwrap();
+ let ParsedHtmlElement::Heading(heading) = &parsed.children[0] else {
+ panic!("expected heading");
+ };
+ assert_eq!(heading.text_align, Some(TextAlign::Right));
+ }
+
+ #[test]
+ fn parses_paragraph_text_align_from_align_attribute() {
+ let parsed = parse_html_block("x
", 0..24).unwrap();
+ let ParsedHtmlElement::Paragraph(paragraph) = &parsed.children[0] else {
+ panic!("expected paragraph");
+ };
+ assert_eq!(paragraph.text_align, Some(TextAlign::Center));
+ }
+
+ #[test]
+ fn parses_heading_text_align_from_align_attribute() {
+ let parsed = parse_html_block("Title
", 0..30).unwrap();
+ let ParsedHtmlElement::Heading(heading) = &parsed.children[0] else {
+ panic!("expected heading");
+ };
+ assert_eq!(heading.text_align, Some(TextAlign::Right));
+ }
+
+ #[test]
+ fn prefers_style_text_align_over_align_attribute() {
+ let parsed = parse_html_block(
+ "x
",
+ 0..50,
+ )
+ .unwrap();
+ let ParsedHtmlElement::Paragraph(paragraph) = &parsed.children[0] else {
+ panic!("expected paragraph");
+ };
+ assert_eq!(paragraph.text_align, Some(TextAlign::Center));
+ }
}
diff --git a/crates/markdown/src/html/html_rendering.rs b/crates/markdown/src/html/html_rendering.rs
index 103e2a6accb7dce9bc429419aafd27cbdf5080ce..6ae25eff0b4ba2ec8dedde8118ebd8d60e8fce7d 100644
--- a/crates/markdown/src/html/html_rendering.rs
+++ b/crates/markdown/src/html/html_rendering.rs
@@ -79,9 +79,20 @@ impl MarkdownElement {
match element {
ParsedHtmlElement::Paragraph(paragraph) => {
- self.push_markdown_paragraph(builder, &source_range, markdown_end);
- self.render_html_paragraph(paragraph, source_allocator, builder, cx, markdown_end);
- builder.pop_div();
+ self.push_markdown_paragraph(
+ builder,
+ &source_range,
+ markdown_end,
+ paragraph.text_align,
+ );
+ self.render_html_paragraph(
+ ¶graph.contents,
+ source_allocator,
+ builder,
+ cx,
+ markdown_end,
+ );
+ self.pop_markdown_paragraph(builder);
}
ParsedHtmlElement::Heading(heading) => {
self.push_markdown_heading(
@@ -89,6 +100,7 @@ impl MarkdownElement {
heading.level,
&heading.source_range,
markdown_end,
+ heading.text_align,
);
self.render_html_paragraph(
&heading.contents,
diff --git a/crates/markdown/src/markdown.rs b/crates/markdown/src/markdown.rs
index 247c082d223005a7e0bd6d57696751ce76cc4d86..e6ad1b1f2ac9154eaabc6d18dbcb9c8695ae019d 100644
--- a/crates/markdown/src/markdown.rs
+++ b/crates/markdown/src/markdown.rs
@@ -36,8 +36,8 @@ use gpui::{
FocusHandle, Focusable, FontStyle, FontWeight, GlobalElementId, Hitbox, Hsla, Image,
ImageFormat, ImageSource, KeyContext, Length, MouseButton, MouseDownEvent, MouseEvent,
MouseMoveEvent, MouseUpEvent, Point, ScrollHandle, Stateful, StrikethroughStyle,
- StyleRefinement, StyledText, Task, TextLayout, TextRun, TextStyle, TextStyleRefinement,
- actions, img, point, quad,
+ StyleRefinement, StyledText, Task, TextAlign, TextLayout, TextRun, TextStyle,
+ TextStyleRefinement, actions, img, point, quad,
};
use language::{CharClassifier, Language, LanguageRegistry, Rope};
use parser::CodeBlockMetadata;
@@ -1025,8 +1025,17 @@ impl MarkdownElement {
width: Option,
height: Option,
) {
+ let align = builder.text_style().text_align;
builder.modify_current_div(|el| {
- el.items_center().flex().flex_row().child(
+ let mut image_container = el.flex().flex_row().items_center();
+
+ image_container = match align {
+ TextAlign::Left => image_container.justify_start(),
+ TextAlign::Center => image_container.justify_center(),
+ TextAlign::Right => image_container.justify_end(),
+ };
+
+ image_container.child(
img(source)
.max_w_full()
.when_some(height, |this, height| this.h(height))
@@ -1041,14 +1050,29 @@ impl MarkdownElement {
builder: &mut MarkdownElementBuilder,
range: &Range,
markdown_end: usize,
+ text_align_override: Option,
) {
- builder.push_div(
- div().when(!self.style.height_is_multiple_of_line_height, |el| {
- el.mb_2().line_height(rems(1.3))
- }),
- range,
- markdown_end,
- );
+ let align = text_align_override.unwrap_or(self.style.base_text_style.text_align);
+ let mut paragraph = div().when(!self.style.height_is_multiple_of_line_height, |el| {
+ el.mb_2().line_height(rems(1.3))
+ });
+
+ paragraph = match align {
+ TextAlign::Center => paragraph.text_center(),
+ TextAlign::Left => paragraph.text_left(),
+ TextAlign::Right => paragraph.text_right(),
+ };
+
+ builder.push_text_style(TextStyleRefinement {
+ text_align: Some(align),
+ ..Default::default()
+ });
+ builder.push_div(paragraph, range, markdown_end);
+ }
+
+ fn pop_markdown_paragraph(&self, builder: &mut MarkdownElementBuilder) {
+ builder.pop_div();
+ builder.pop_text_style();
}
fn push_markdown_heading(
@@ -1057,15 +1081,26 @@ impl MarkdownElement {
level: pulldown_cmark::HeadingLevel,
range: &Range,
markdown_end: usize,
+ text_align_override: Option,
) {
+ let align = text_align_override.unwrap_or(self.style.base_text_style.text_align);
let mut heading = div().mb_2();
heading = apply_heading_style(heading, level, self.style.heading_level_styles.as_ref());
+ heading = match align {
+ TextAlign::Center => heading.text_center(),
+ TextAlign::Left => heading.text_left(),
+ TextAlign::Right => heading.text_right(),
+ };
+
let mut heading_style = self.style.heading.clone();
let heading_text_style = heading_style.text_style().clone();
heading.style().refine(&heading_style);
- builder.push_text_style(heading_text_style);
+ builder.push_text_style(TextStyleRefinement {
+ text_align: Some(align),
+ ..heading_text_style
+ });
builder.push_div(heading, range, markdown_end);
}
@@ -1571,10 +1606,16 @@ impl Element for MarkdownElement {
}
}
MarkdownTag::Paragraph => {
- self.push_markdown_paragraph(&mut builder, range, markdown_end);
+ self.push_markdown_paragraph(&mut builder, range, markdown_end, None);
}
MarkdownTag::Heading { level, .. } => {
- self.push_markdown_heading(&mut builder, *level, range, markdown_end);
+ self.push_markdown_heading(
+ &mut builder,
+ *level,
+ range,
+ markdown_end,
+ None,
+ );
}
MarkdownTag::BlockQuote => {
self.push_markdown_block_quote(&mut builder, range, markdown_end);
@@ -1826,7 +1867,7 @@ impl Element for MarkdownElement {
current_img_block_range.take();
}
MarkdownTagEnd::Paragraph => {
- builder.pop_div();
+ self.pop_markdown_paragraph(&mut builder);
}
MarkdownTagEnd::Heading(_) => {
self.pop_markdown_heading(&mut builder);
From ccb9e60a6258d57104cc56db87fe03024dd231ef Mon Sep 17 00:00:00 2001
From: Anthony Eid <56899983+Anthony-Eid@users.noreply.github.com>
Date: Tue, 7 Apr 2026 05:21:47 -0400
Subject: [PATCH 06/22] agent_panel: Add new thread git worktree/branch pickers
(#52979)
This PR allows users to create a new thread based on a git worktree
that already exists or has a custom name. Users can also choose which
branch the newly generated worktree should be based on.
The UI still needs some polish, but I'm merging this early to get the
team using this before our preview launch. I'll be active today and
tomorrow before launch to fix any nits we have with the UI.
Functionality of this feature works! And I have a basic test to prevent
regressions
Self-Review Checklist:
- [x] I've reviewed my own diff for quality, security, and reliability
- [x] Unsafe blocks (if any) have justifying comments
- [x] The content is consistent with the [UI/UX
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
- [x] Tests cover the new/changed behavior
- [x] Performance impact has been considered and is acceptable
Closes #ISSUE
Release Notes:
- N/A or Added/Fixed/Improved ...
---------
Co-authored-by: cameron
---
crates/agent_ui/src/agent_panel.rs | 673 +++++++++++------
crates/agent_ui/src/agent_ui.rs | 37 +-
.../src/conversation_view/thread_view.rs | 5 +-
crates/agent_ui/src/thread_branch_picker.rs | 695 ++++++++++++++++++
crates/agent_ui/src/thread_worktree_picker.rs | 485 ++++++++++++
crates/collab/tests/integration/git_tests.rs | 12 +-
.../remote_editing_collaboration_tests.rs | 6 +-
crates/fs/src/fake_git_repo.rs | 113 ++-
crates/fs/tests/integration/fake_git_repo.rs | 12 +-
crates/git/src/repository.rs | 120 ++-
crates/git_ui/src/worktree_picker.rs | 9 +-
crates/project/src/git_store.rs | 102 ++-
crates/project/tests/integration/git_store.rs | 12 +-
crates/proto/proto/git.proto | 1 +
crates/zed/src/visual_test_runner.rs | 18 +-
15 files changed, 1941 insertions(+), 359 deletions(-)
create mode 100644 crates/agent_ui/src/thread_branch_picker.rs
create mode 100644 crates/agent_ui/src/thread_worktree_picker.rs
diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs
index 41900e71e5d3ad7e5327ee7e04f73cb05eed5a5b..8f456e0e955b823a5bbaf2815df3b409441bb0af 100644
--- a/crates/agent_ui/src/agent_panel.rs
+++ b/crates/agent_ui/src/agent_panel.rs
@@ -28,21 +28,20 @@ use zed_actions::agent::{
use crate::thread_metadata_store::ThreadMetadataStore;
use crate::{
AddContextServer, AgentDiffPane, ConversationView, CopyThreadToClipboard, CycleStartThreadIn,
- Follow, InlineAssistant, LoadThreadFromClipboard, NewThread, OpenActiveThreadAsMarkdown,
- OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, StartThreadIn,
- ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu,
+ Follow, InlineAssistant, LoadThreadFromClipboard, NewThread, NewWorktreeBranchTarget,
+ OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell,
+ StartThreadIn, ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu,
agent_configuration::{AgentConfiguration, AssistantConfigurationEvent},
conversation_view::{AcpThreadViewEvent, ThreadView},
+ thread_branch_picker::ThreadBranchPicker,
+ thread_worktree_picker::ThreadWorktreePicker,
ui::EndTrialUpsell,
};
use crate::{
Agent, AgentInitialContent, ExternalSourcePrompt, NewExternalAgentThread,
NewNativeAgentThreadFromSummary,
};
-use crate::{
- DEFAULT_THREAD_TITLE,
- ui::{AcpOnboardingModal, HoldForDefault},
-};
+use crate::{DEFAULT_THREAD_TITLE, ui::AcpOnboardingModal};
use crate::{ExpandMessageEditor, ThreadHistoryView};
use crate::{ManageProfiles, ThreadHistoryViewEvent};
use crate::{ThreadHistory, agent_connection_store::AgentConnectionStore};
@@ -73,8 +72,8 @@ use terminal::terminal_settings::TerminalSettings;
use terminal_view::{TerminalView, terminal_panel::TerminalPanel};
use theme_settings::ThemeSettings;
use ui::{
- Button, Callout, CommonAnimationExt, ContextMenu, ContextMenuEntry, DocumentationSide,
- PopoverMenu, PopoverMenuHandle, Tab, Tooltip, prelude::*, utils::WithRemSize,
+ Button, Callout, CommonAnimationExt, ContextMenu, ContextMenuEntry, PopoverMenu,
+ PopoverMenuHandle, Tab, Tooltip, prelude::*, utils::WithRemSize,
};
use util::{ResultExt as _, debug_panic};
use workspace::{
@@ -620,7 +619,31 @@ impl StartThreadIn {
fn label(&self) -> SharedString {
match self {
Self::LocalProject => "Current Worktree".into(),
- Self::NewWorktree => "New Git Worktree".into(),
+ Self::NewWorktree {
+ worktree_name: Some(worktree_name),
+ ..
+ } => format!("New: {worktree_name}").into(),
+ Self::NewWorktree { .. } => "New Git Worktree".into(),
+ Self::LinkedWorktree { display_name, .. } => format!("From: {}", &display_name).into(),
+ }
+ }
+
+ fn worktree_branch_label(&self, default_branch_label: SharedString) -> Option {
+ match self {
+ Self::NewWorktree { branch_target, .. } => match branch_target {
+ NewWorktreeBranchTarget::CurrentBranch => Some(default_branch_label),
+ NewWorktreeBranchTarget::ExistingBranch { name } => {
+ Some(format!("From: {name}").into())
+ }
+ NewWorktreeBranchTarget::CreateBranch { name, from_ref } => {
+ if let Some(from_ref) = from_ref {
+ Some(format!("From: {from_ref}").into())
+ } else {
+ Some(format!("From: {name}").into())
+ }
+ }
+ },
+ _ => None,
}
}
}
@@ -632,6 +655,17 @@ pub enum WorktreeCreationStatus {
Error(SharedString),
}
+#[derive(Clone, Debug)]
+enum WorktreeCreationArgs {
+ New {
+ worktree_name: Option,
+ branch_target: NewWorktreeBranchTarget,
+ },
+ Linked {
+ worktree_path: PathBuf,
+ },
+}
+
impl ActiveView {
pub fn which_font_size_used(&self) -> WhichFontSize {
match self {
@@ -662,7 +696,8 @@ pub struct AgentPanel {
previous_view: Option,
background_threads: HashMap>,
new_thread_menu_handle: PopoverMenuHandle,
- start_thread_in_menu_handle: PopoverMenuHandle,
+ start_thread_in_menu_handle: PopoverMenuHandle,
+ thread_branch_menu_handle: PopoverMenuHandle,
agent_panel_menu_handle: PopoverMenuHandle,
agent_navigation_menu_handle: PopoverMenuHandle,
agent_navigation_menu: Option>,
@@ -689,7 +724,7 @@ impl AgentPanel {
};
let selected_agent = self.selected_agent.clone();
- let start_thread_in = Some(self.start_thread_in);
+ let start_thread_in = Some(self.start_thread_in.clone());
let last_active_thread = self.active_agent_thread(cx).map(|thread| {
let thread = thread.read(cx);
@@ -794,18 +829,21 @@ impl AgentPanel {
} else if let Some(agent) = global_fallback {
panel.selected_agent = agent;
}
- if let Some(start_thread_in) = serialized_panel.start_thread_in {
+ if let Some(ref start_thread_in) = serialized_panel.start_thread_in {
let is_worktree_flag_enabled =
cx.has_flag::();
let is_valid = match &start_thread_in {
StartThreadIn::LocalProject => true,
- StartThreadIn::NewWorktree => {
+ StartThreadIn::NewWorktree { .. } => {
let project = panel.project.read(cx);
is_worktree_flag_enabled && !project.is_via_collab()
}
+ StartThreadIn::LinkedWorktree { path, .. } => {
+ is_worktree_flag_enabled && path.exists()
+ }
};
if is_valid {
- panel.start_thread_in = start_thread_in;
+ panel.start_thread_in = start_thread_in.clone();
} else {
log::info!(
"deserialized start_thread_in {:?} is no longer valid, falling back to LocalProject",
@@ -979,6 +1017,7 @@ impl AgentPanel {
background_threads: HashMap::default(),
new_thread_menu_handle: PopoverMenuHandle::default(),
start_thread_in_menu_handle: PopoverMenuHandle::default(),
+ thread_branch_menu_handle: PopoverMenuHandle::default(),
agent_panel_menu_handle: PopoverMenuHandle::default(),
agent_navigation_menu_handle: PopoverMenuHandle::default(),
agent_navigation_menu: None,
@@ -1948,24 +1987,43 @@ impl AgentPanel {
window: &mut Window,
cx: &mut Context,
) {
- if matches!(action, StartThreadIn::NewWorktree) && !cx.has_flag::() {
- return;
- }
-
- let new_target = match *action {
+ let new_target = match action {
StartThreadIn::LocalProject => StartThreadIn::LocalProject,
- StartThreadIn::NewWorktree => {
+ StartThreadIn::NewWorktree { .. } => {
+ if !cx.has_flag::() {
+ return;
+ }
+ if !self.project_has_git_repository(cx) {
+ log::error!(
+ "set_start_thread_in: cannot use worktree mode without a git repository"
+ );
+ return;
+ }
+ if self.project.read(cx).is_via_collab() {
+ log::error!(
+ "set_start_thread_in: cannot use worktree mode in a collab project"
+ );
+ return;
+ }
+ action.clone()
+ }
+ StartThreadIn::LinkedWorktree { .. } => {
+ if !cx.has_flag::() {
+ return;
+ }
if !self.project_has_git_repository(cx) {
log::error!(
- "set_start_thread_in: cannot use NewWorktree without a git repository"
+ "set_start_thread_in: cannot use LinkedWorktree without a git repository"
);
return;
}
if self.project.read(cx).is_via_collab() {
- log::error!("set_start_thread_in: cannot use NewWorktree in a collab project");
+ log::error!(
+ "set_start_thread_in: cannot use LinkedWorktree in a collab project"
+ );
return;
}
- StartThreadIn::NewWorktree
+ action.clone()
}
};
self.start_thread_in = new_target;
@@ -1977,9 +2035,14 @@ impl AgentPanel {
}
fn cycle_start_thread_in(&mut self, window: &mut Window, cx: &mut Context) {
- let next = match self.start_thread_in {
- StartThreadIn::LocalProject => StartThreadIn::NewWorktree,
- StartThreadIn::NewWorktree => StartThreadIn::LocalProject,
+ let next = match &self.start_thread_in {
+ StartThreadIn::LocalProject => StartThreadIn::NewWorktree {
+ worktree_name: None,
+ branch_target: NewWorktreeBranchTarget::default(),
+ },
+ StartThreadIn::NewWorktree { .. } | StartThreadIn::LinkedWorktree { .. } => {
+ StartThreadIn::LocalProject
+ }
};
self.set_start_thread_in(&next, window, cx);
}
@@ -1991,7 +2054,10 @@ impl AgentPanel {
NewThreadLocation::LocalProject => StartThreadIn::LocalProject,
NewThreadLocation::NewWorktree => {
if self.project_has_git_repository(cx) {
- StartThreadIn::NewWorktree
+ StartThreadIn::NewWorktree {
+ worktree_name: None,
+ branch_target: NewWorktreeBranchTarget::default(),
+ }
} else {
StartThreadIn::LocalProject
}
@@ -2219,15 +2285,39 @@ impl AgentPanel {
window: &mut Window,
cx: &mut Context,
) {
- if self.start_thread_in == StartThreadIn::NewWorktree {
- self.handle_worktree_creation_requested(content, window, cx);
- } else {
- cx.defer_in(window, move |_this, window, cx| {
- thread_view.update(cx, |thread_view, cx| {
- let editor = thread_view.message_editor.clone();
- thread_view.send_impl(editor, window, cx);
+ match &self.start_thread_in {
+ StartThreadIn::NewWorktree {
+ worktree_name,
+ branch_target,
+ } => {
+ self.handle_worktree_requested(
+ content,
+ WorktreeCreationArgs::New {
+ worktree_name: worktree_name.clone(),
+ branch_target: branch_target.clone(),
+ },
+ window,
+ cx,
+ );
+ }
+ StartThreadIn::LinkedWorktree { path, .. } => {
+ self.handle_worktree_requested(
+ content,
+ WorktreeCreationArgs::Linked {
+ worktree_path: path.clone(),
+ },
+ window,
+ cx,
+ );
+ }
+ StartThreadIn::LocalProject => {
+ cx.defer_in(window, move |_this, window, cx| {
+ thread_view.update(cx, |thread_view, cx| {
+ let editor = thread_view.message_editor.clone();
+ thread_view.send_impl(editor, window, cx);
+ });
});
- });
+ }
}
}
@@ -2289,6 +2379,33 @@ impl AgentPanel {
(git_repos, non_git_paths)
}
+ fn resolve_worktree_branch_target(
+ branch_target: &NewWorktreeBranchTarget,
+ existing_branches: &HashSet<String>,
+ occupied_branches: &HashSet<String>,
+ ) -> Result<(String, bool, Option<String>)> {
+ let generate_branch_name = || -> Result<String> {
+ let refs: Vec<&str> = existing_branches.iter().map(|s| s.as_str()).collect();
+ let mut rng = rand::rng();
+ crate::branch_names::generate_branch_name(&refs, &mut rng)
+ .ok_or_else(|| anyhow!("Failed to generate a unique branch name"))
+ };
+
+ match branch_target {
+ NewWorktreeBranchTarget::CreateBranch { name, from_ref } => {
+ Ok((name.clone(), false, from_ref.clone()))
+ }
+ NewWorktreeBranchTarget::ExistingBranch { name } => {
+ if occupied_branches.contains(name) {
+ Ok((generate_branch_name()?, false, Some(name.clone())))
+ } else {
+ Ok((name.clone(), true, None))
+ }
+ }
+ NewWorktreeBranchTarget::CurrentBranch => Ok((generate_branch_name()?, false, None)),
+ }
+ }
+
/// Kicks off an async git-worktree creation for each repository. Returns:
///
/// - `creation_infos`: a vec of `(repo, new_path, receiver)` tuples—the
@@ -2297,7 +2414,10 @@ impl AgentPanel {
/// later to remap open editor tabs into the new workspace.
fn start_worktree_creations(
git_repos: &[Entity],
+ worktree_name: Option<String>,
branch_name: &str,
+ use_existing_branch: bool,
+ start_point: Option<String>,
worktree_directory_setting: &str,
cx: &mut Context<Self>,
) -> Result<(
@@ -2311,12 +2431,27 @@ impl AgentPanel {
let mut creation_infos = Vec::new();
let mut path_remapping = Vec::new();
+ let worktree_name = worktree_name.unwrap_or_else(|| branch_name.to_string());
+
for repo in git_repos {
let (work_dir, new_path, receiver) = repo.update(cx, |repo, _cx| {
let new_path =
- repo.path_for_new_linked_worktree(branch_name, worktree_directory_setting)?;
- let receiver =
- repo.create_worktree(branch_name.to_string(), new_path.clone(), None);
+ repo.path_for_new_linked_worktree(&worktree_name, worktree_directory_setting)?;
+ let target = if use_existing_branch {
+ debug_assert!(
+ git_repos.len() == 1,
+ "use_existing_branch should only be true for a single repo"
+ );
+ git::repository::CreateWorktreeTarget::ExistingBranch {
+ branch_name: branch_name.to_string(),
+ }
+ } else {
+ git::repository::CreateWorktreeTarget::NewBranch {
+ branch_name: branch_name.to_string(),
+ base_sha: start_point.clone(),
+ }
+ };
+ let receiver = repo.create_worktree(target, new_path.clone());
let work_dir = repo.work_directory_abs_path.clone();
anyhow::Ok((work_dir, new_path, receiver))
})?;
@@ -2419,9 +2554,10 @@ impl AgentPanel {
cx.notify();
}
- fn handle_worktree_creation_requested(
+ fn handle_worktree_requested(
&mut self,
content: Vec,
+ args: WorktreeCreationArgs,
window: &mut Window,
cx: &mut Context,
) {
@@ -2437,7 +2573,7 @@ impl AgentPanel {
let (git_repos, non_git_paths) = self.classify_worktrees(cx);
- if git_repos.is_empty() {
+ if matches!(args, WorktreeCreationArgs::New { .. }) && git_repos.is_empty() {
self.set_worktree_creation_error(
"No git repositories found in the project".into(),
window,
@@ -2446,17 +2582,31 @@ impl AgentPanel {
return;
}
- // Kick off branch listing as early as possible so it can run
- // concurrently with the remaining synchronous setup work.
- let branch_receivers: Vec<_> = git_repos
- .iter()
- .map(|repo| repo.update(cx, |repo, _cx| repo.branches()))
- .collect();
-
- let worktree_directory_setting = ProjectSettings::get_global(cx)
- .git
- .worktree_directory
- .clone();
+ let (branch_receivers, worktree_receivers, worktree_directory_setting) =
+ if matches!(args, WorktreeCreationArgs::New { .. }) {
+ (
+ Some(
+ git_repos
+ .iter()
+ .map(|repo| repo.update(cx, |repo, _cx| repo.branches()))
+ .collect::<Vec<_>>(),
+ ),
+ Some(
+ git_repos
+ .iter()
+ .map(|repo| repo.update(cx, |repo, _cx| repo.worktrees()))
+ .collect::<Vec<_>>(),
+ ),
+ Some(
+ ProjectSettings::get_global(cx)
+ .git
+ .worktree_directory
+ .clone(),
+ ),
+ )
+ } else {
+ (None, None, None)
+ };
let active_file_path = self.workspace.upgrade().and_then(|workspace| {
let workspace = workspace.read(cx);
@@ -2476,77 +2626,124 @@ impl AgentPanel {
let selected_agent = self.selected_agent();
let task = cx.spawn_in(window, async move |this, cx| {
- // Await the branch listings we kicked off earlier.
- let mut existing_branches = Vec::new();
- for result in futures::future::join_all(branch_receivers).await {
- match result {
- Ok(Ok(branches)) => {
- for branch in branches {
- existing_branches.push(branch.name().to_string());
+ let (all_paths, path_remapping, has_non_git) = match args {
+ WorktreeCreationArgs::New {
+ worktree_name,
+ branch_target,
+ } => {
+ let branch_receivers = branch_receivers
+ .expect("branch receivers must be prepared for new worktree creation");
+ let worktree_receivers = worktree_receivers
+ .expect("worktree receivers must be prepared for new worktree creation");
+ let worktree_directory_setting = worktree_directory_setting
+ .expect("worktree directory must be prepared for new worktree creation");
+
+ let mut existing_branches = HashSet::default();
+ for result in futures::future::join_all(branch_receivers).await {
+ match result {
+ Ok(Ok(branches)) => {
+ for branch in branches {
+ existing_branches.insert(branch.name().to_string());
+ }
+ }
+ Ok(Err(err)) => {
+ Err::<(), _>(err).log_err();
+ }
+ Err(_) => {}
}
}
- Ok(Err(err)) => {
- Err::<(), _>(err).log_err();
+
+ let mut occupied_branches = HashSet::default();
+ for result in futures::future::join_all(worktree_receivers).await {
+ match result {
+ Ok(Ok(worktrees)) => {
+ for worktree in worktrees {
+ if let Some(branch_name) = worktree.branch_name() {
+ occupied_branches.insert(branch_name.to_string());
+ }
+ }
+ }
+ Ok(Err(err)) => {
+ Err::<(), _>(err).log_err();
+ }
+ Err(_) => {}
+ }
}
- Err(_) => {}
- }
- }
- let existing_branch_refs: Vec<&str> =
- existing_branches.iter().map(|s| s.as_str()).collect();
- let mut rng = rand::rng();
- let branch_name =
- match crate::branch_names::generate_branch_name(&existing_branch_refs, &mut rng) {
- Some(name) => name,
- None => {
- this.update_in(cx, |this, window, cx| {
- this.set_worktree_creation_error(
- "Failed to generate a unique branch name".into(),
- window,
+ let (branch_name, use_existing_branch, start_point) =
+ match Self::resolve_worktree_branch_target(
+ &branch_target,
+ &existing_branches,
+ &occupied_branches,
+ ) {
+ Ok(target) => target,
+ Err(err) => {
+ this.update_in(cx, |this, window, cx| {
+ this.set_worktree_creation_error(
+ err.to_string().into(),
+ window,
+ cx,
+ );
+ })?;
+ return anyhow::Ok(());
+ }
+ };
+
+ let (creation_infos, path_remapping) =
+ match this.update_in(cx, |_this, _window, cx| {
+ Self::start_worktree_creations(
+ &git_repos,
+ worktree_name,
+ &branch_name,
+ use_existing_branch,
+ start_point,
+ &worktree_directory_setting,
cx,
- );
- })?;
- return anyhow::Ok(());
- }
- };
+ )
+ }) {
+ Ok(Ok(result)) => result,
+ Ok(Err(err)) | Err(err) => {
+ this.update_in(cx, |this, window, cx| {
+ this.set_worktree_creation_error(
+ format!("Failed to validate worktree directory: {err}")
+ .into(),
+ window,
+ cx,
+ );
+ })
+ .log_err();
+ return anyhow::Ok(());
+ }
+ };
- let (creation_infos, path_remapping) = match this.update_in(cx, |_this, _window, cx| {
- Self::start_worktree_creations(
- &git_repos,
- &branch_name,
- &worktree_directory_setting,
- cx,
- )
- }) {
- Ok(Ok(result)) => result,
- Ok(Err(err)) | Err(err) => {
- this.update_in(cx, |this, window, cx| {
- this.set_worktree_creation_error(
- format!("Failed to validate worktree directory: {err}").into(),
- window,
- cx,
- );
- })
- .log_err();
- return anyhow::Ok(());
- }
- };
+ let created_paths =
+ match Self::await_and_rollback_on_failure(creation_infos, cx).await {
+ Ok(paths) => paths,
+ Err(err) => {
+ this.update_in(cx, |this, window, cx| {
+ this.set_worktree_creation_error(
+ format!("{err}").into(),
+ window,
+ cx,
+ );
+ })?;
+ return anyhow::Ok(());
+ }
+ };
- let created_paths = match Self::await_and_rollback_on_failure(creation_infos, cx).await
- {
- Ok(paths) => paths,
- Err(err) => {
- this.update_in(cx, |this, window, cx| {
- this.set_worktree_creation_error(format!("{err}").into(), window, cx);
- })?;
- return anyhow::Ok(());
+ let mut all_paths = created_paths;
+ let has_non_git = !non_git_paths.is_empty();
+ all_paths.extend(non_git_paths.iter().cloned());
+ (all_paths, path_remapping, has_non_git)
+ }
+ WorktreeCreationArgs::Linked { worktree_path } => {
+ let mut all_paths = vec![worktree_path];
+ let has_non_git = !non_git_paths.is_empty();
+ all_paths.extend(non_git_paths.iter().cloned());
+ (all_paths, Vec::new(), has_non_git)
}
};
- let mut all_paths = created_paths;
- let has_non_git = !non_git_paths.is_empty();
- all_paths.extend(non_git_paths.iter().cloned());
-
let app_state = match workspace.upgrade() {
Some(workspace) => cx.update(|_, cx| workspace.read(cx).app_state().clone())?,
None => {
@@ -2562,7 +2759,7 @@ impl AgentPanel {
};
let this_for_error = this.clone();
- if let Err(err) = Self::setup_new_workspace(
+ if let Err(err) = Self::open_worktree_workspace_and_start_thread(
this,
all_paths,
app_state,
@@ -2595,7 +2792,7 @@ impl AgentPanel {
}));
}
- async fn setup_new_workspace(
+ async fn open_worktree_workspace_and_start_thread(
this: WeakEntity,
all_paths: Vec,
app_state: Arc,
@@ -3149,25 +3346,15 @@ impl AgentPanel {
}
fn render_start_thread_in_selector(&self, cx: &mut Context) -> impl IntoElement {
- use settings::{NewThreadLocation, Settings};
-
let focus_handle = self.focus_handle(cx);
- let has_git_repo = self.project_has_git_repository(cx);
- let is_via_collab = self.project.read(cx).is_via_collab();
- let fs = self.fs.clone();
let is_creating = matches!(
self.worktree_creation_status,
Some(WorktreeCreationStatus::Creating)
);
- let current_target = self.start_thread_in;
let trigger_label = self.start_thread_in.label();
- let new_thread_location = AgentSettings::get_global(cx).new_thread_location;
- let is_local_default = new_thread_location == NewThreadLocation::LocalProject;
- let is_new_worktree_default = new_thread_location == NewThreadLocation::NewWorktree;
-
let icon = if self.start_thread_in_menu_handle.is_deployed() {
IconName::ChevronUp
} else {
@@ -3178,13 +3365,9 @@ impl AgentPanel {
.end_icon(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted))
.disabled(is_creating);
- let dock_position = AgentSettings::get_global(cx).dock;
- let documentation_side = match dock_position {
- settings::DockPosition::Left => DocumentationSide::Right,
- settings::DockPosition::Bottom | settings::DockPosition::Right => {
- DocumentationSide::Left
- }
- };
+ let project = self.project.clone();
+ let current_target = self.start_thread_in.clone();
+ let fs = self.fs.clone();
PopoverMenu::new("thread-target-selector")
.trigger_with_tooltip(trigger_button, {
@@ -3198,89 +3381,66 @@ impl AgentPanel {
}
})
.menu(move |window, cx| {
- let is_local_selected = current_target == StartThreadIn::LocalProject;
- let is_new_worktree_selected = current_target == StartThreadIn::NewWorktree;
let fs = fs.clone();
+ Some(cx.new(|cx| {
+ ThreadWorktreePicker::new(project.clone(), &current_target, fs, window, cx)
+ }))
+ })
+ .with_handle(self.start_thread_in_menu_handle.clone())
+ .anchor(Corner::TopLeft)
+ .offset(gpui::Point {
+ x: px(1.0),
+ y: px(1.0),
+ })
+ }
- Some(ContextMenu::build(window, cx, move |menu, _window, _cx| {
- let new_worktree_disabled = !has_git_repo || is_via_collab;
+ fn render_new_worktree_branch_selector(&self, cx: &mut Context) -> impl IntoElement {
+ let is_creating = matches!(
+ self.worktree_creation_status,
+ Some(WorktreeCreationStatus::Creating)
+ );
+ let default_branch_label = if self.project.read(cx).repositories(cx).len() > 1 {
+ SharedString::from("From: current branches")
+ } else {
+ self.project
+ .read(cx)
+ .active_repository(cx)
+ .and_then(|repo| {
+ repo.read(cx)
+ .branch
+ .as_ref()
+ .map(|branch| SharedString::from(format!("From: {}", branch.name())))
+ })
+ .unwrap_or_else(|| SharedString::from("From: HEAD"))
+ };
+ let trigger_label = self
+ .start_thread_in
+ .worktree_branch_label(default_branch_label)
+ .unwrap_or_else(|| SharedString::from("From: HEAD"));
+ let icon = if self.thread_branch_menu_handle.is_deployed() {
+ IconName::ChevronUp
+ } else {
+ IconName::ChevronDown
+ };
+ let trigger_button = Button::new("thread-branch-trigger", trigger_label)
+ .start_icon(
+ Icon::new(IconName::GitBranch)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ )
+ .end_icon(Icon::new(icon).size(IconSize::XSmall).color(Color::Muted))
+ .disabled(is_creating);
+ let project = self.project.clone();
+ let current_target = self.start_thread_in.clone();
- menu.header("Start Thread In…")
- .item(
- ContextMenuEntry::new("Current Worktree")
- .toggleable(IconPosition::End, is_local_selected)
- .documentation_aside(documentation_side, move |_| {
- HoldForDefault::new(is_local_default)
- .more_content(false)
- .into_any_element()
- })
- .handler({
- let fs = fs.clone();
- move |window, cx| {
- if window.modifiers().secondary() {
- update_settings_file(fs.clone(), cx, |settings, _| {
- settings
- .agent
- .get_or_insert_default()
- .set_new_thread_location(
- NewThreadLocation::LocalProject,
- );
- });
- }
- window.dispatch_action(
- Box::new(StartThreadIn::LocalProject),
- cx,
- );
- }
- }),
- )
- .item({
- let entry = ContextMenuEntry::new("New Git Worktree")
- .toggleable(IconPosition::End, is_new_worktree_selected)
- .disabled(new_worktree_disabled)
- .handler({
- let fs = fs.clone();
- move |window, cx| {
- if window.modifiers().secondary() {
- update_settings_file(fs.clone(), cx, |settings, _| {
- settings
- .agent
- .get_or_insert_default()
- .set_new_thread_location(
- NewThreadLocation::NewWorktree,
- );
- });
- }
- window.dispatch_action(
- Box::new(StartThreadIn::NewWorktree),
- cx,
- );
- }
- });
-
- if new_worktree_disabled {
- entry.documentation_aside(documentation_side, move |_| {
- let reason = if !has_git_repo {
- "No git repository found in this project."
- } else {
- "Not available for remote/collab projects yet."
- };
- Label::new(reason)
- .color(Color::Muted)
- .size(LabelSize::Small)
- .into_any_element()
- })
- } else {
- entry.documentation_aside(documentation_side, move |_| {
- HoldForDefault::new(is_new_worktree_default)
- .more_content(false)
- .into_any_element()
- })
- }
- })
+ PopoverMenu::new("thread-branch-selector")
+ .trigger_with_tooltip(trigger_button, Tooltip::text("Choose Worktree Branch…"))
+ .menu(move |window, cx| {
+ Some(cx.new(|cx| {
+ ThreadBranchPicker::new(project.clone(), &current_target, window, cx)
}))
})
- .with_handle(self.start_thread_in_menu_handle.clone())
+ .with_handle(self.thread_branch_menu_handle.clone())
.anchor(Corner::TopLeft)
.offset(gpui::Point {
x: px(1.0),
@@ -3621,6 +3781,14 @@ impl AgentPanel {
.when(
has_visible_worktrees && self.project_has_git_repository(cx),
|this| this.child(self.render_start_thread_in_selector(cx)),
+ )
+ .when(
+ has_v2_flag
+ && matches!(
+ self.start_thread_in,
+ StartThreadIn::NewWorktree { .. }
+ ),
+ |this| this.child(self.render_new_worktree_branch_selector(cx)),
),
)
.child(
@@ -5265,13 +5433,23 @@ mod tests {
// Change thread target to NewWorktree.
panel.update_in(cx, |panel, window, cx| {
- panel.set_start_thread_in(&StartThreadIn::NewWorktree, window, cx);
+ panel.set_start_thread_in(
+ &StartThreadIn::NewWorktree {
+ worktree_name: None,
+ branch_target: NewWorktreeBranchTarget::default(),
+ },
+ window,
+ cx,
+ );
});
panel.read_with(cx, |panel, _cx| {
assert_eq!(
*panel.start_thread_in(),
- StartThreadIn::NewWorktree,
+ StartThreadIn::NewWorktree {
+ worktree_name: None,
+ branch_target: NewWorktreeBranchTarget::default(),
+ },
"thread target should be NewWorktree after set_thread_target"
);
});
@@ -5289,7 +5467,10 @@ mod tests {
loaded_panel.read_with(cx, |panel, _cx| {
assert_eq!(
*panel.start_thread_in(),
- StartThreadIn::NewWorktree,
+ StartThreadIn::NewWorktree {
+ worktree_name: None,
+ branch_target: NewWorktreeBranchTarget::default(),
+ },
"thread target should survive serialization round-trip"
);
});
@@ -5420,6 +5601,53 @@ mod tests {
);
}
+ #[test]
+ fn test_resolve_worktree_branch_target() {
+ let existing_branches = HashSet::from_iter([
+ "main".to_string(),
+ "feature".to_string(),
+ "origin/main".to_string(),
+ ]);
+
+ let resolved = AgentPanel::resolve_worktree_branch_target(
+ &NewWorktreeBranchTarget::CreateBranch {
+ name: "new-branch".to_string(),
+ from_ref: Some("main".to_string()),
+ },
+ &existing_branches,
+ &HashSet::from_iter(["main".to_string()]),
+ )
+ .unwrap();
+ assert_eq!(
+ resolved,
+ ("new-branch".to_string(), false, Some("main".to_string()))
+ );
+
+ let resolved = AgentPanel::resolve_worktree_branch_target(
+ &NewWorktreeBranchTarget::ExistingBranch {
+ name: "feature".to_string(),
+ },
+ &existing_branches,
+ &HashSet::default(),
+ )
+ .unwrap();
+ assert_eq!(resolved, ("feature".to_string(), true, None));
+
+ let resolved = AgentPanel::resolve_worktree_branch_target(
+ &NewWorktreeBranchTarget::ExistingBranch {
+ name: "main".to_string(),
+ },
+ &existing_branches,
+ &HashSet::from_iter(["main".to_string()]),
+ )
+ .unwrap();
+ assert_eq!(resolved.1, false);
+ assert_eq!(resolved.2, Some("main".to_string()));
+ assert_ne!(resolved.0, "main");
+ assert!(existing_branches.contains("main"));
+ assert!(!existing_branches.contains(&resolved.0));
+ }
+
#[gpui::test]
async fn test_worktree_creation_preserves_selected_agent(cx: &mut TestAppContext) {
init_test(cx);
@@ -5513,7 +5741,14 @@ mod tests {
panel.selected_agent = Agent::Custom {
id: CODEX_ID.into(),
};
- panel.set_start_thread_in(&StartThreadIn::NewWorktree, window, cx);
+ panel.set_start_thread_in(
+ &StartThreadIn::NewWorktree {
+ worktree_name: None,
+ branch_target: NewWorktreeBranchTarget::default(),
+ },
+ window,
+ cx,
+ );
});
// Verify the panel has the Codex agent selected.
@@ -5532,7 +5767,15 @@ mod tests {
"Hello from test",
))];
panel.update_in(cx, |panel, window, cx| {
- panel.handle_worktree_creation_requested(content, window, cx);
+ panel.handle_worktree_requested(
+ content,
+ WorktreeCreationArgs::New {
+ worktree_name: None,
+ branch_target: NewWorktreeBranchTarget::default(),
+ },
+ window,
+ cx,
+ );
});
// Let the async worktree creation + workspace setup complete.
diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs
index 5cff5bfc38d4512d659d919c6e7c4ff02fcc0caf..9daa7c6cd83c276aa99adc9e3aae3e6c82c5ba88 100644
--- a/crates/agent_ui/src/agent_ui.rs
+++ b/crates/agent_ui/src/agent_ui.rs
@@ -28,13 +28,16 @@ mod terminal_codegen;
mod terminal_inline_assistant;
#[cfg(any(test, feature = "test-support"))]
pub mod test_support;
+mod thread_branch_picker;
mod thread_history;
mod thread_history_view;
mod thread_import;
pub mod thread_metadata_store;
+mod thread_worktree_picker;
pub mod threads_archive_view;
mod ui;
+use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
@@ -314,16 +317,42 @@ impl Agent {
}
}
+/// Describes which branch to use when creating a new git worktree.
+#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case", tag = "kind")]
+pub enum NewWorktreeBranchTarget {
+ /// Create a new randomly named branch from the current HEAD.
+ /// Will match worktree name if the newly created worktree was also randomly named.
+ #[default]
+ CurrentBranch,
+ /// Check out an existing branch, or create a new branch from it if it's
+ /// already occupied by another worktree.
+ ExistingBranch { name: String },
+ /// Create a new branch with an explicit name, optionally from a specific ref.
+ CreateBranch {
+ name: String,
+ #[serde(default)]
+ from_ref: Option<String>,
+ },
+}
+
/// Sets where new threads will run.
-#[derive(
- Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Action,
-)]
+#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Action)]
#[action(namespace = agent)]
#[serde(rename_all = "snake_case", tag = "kind")]
pub enum StartThreadIn {
#[default]
LocalProject,
- NewWorktree,
+ NewWorktree {
+ /// When this is None, Zed will randomly generate a worktree name
+ /// otherwise, the provided name will be used.
+ #[serde(default)]
+ worktree_name: Option<String>,
+ #[serde(default)]
+ branch_target: NewWorktreeBranchTarget,
+ },
+ /// A linked worktree that already exists on disk.
+ LinkedWorktree { path: PathBuf, display_name: String },
}
/// Content to initialize new external agent with.
diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs
index 685621eb3c93632f1e7410bbbad22b623d5e18c7..ff3dab1170064e058c0ebb44505c0906349517ee 100644
--- a/crates/agent_ui/src/conversation_view/thread_view.rs
+++ b/crates/agent_ui/src/conversation_view/thread_view.rs
@@ -869,7 +869,10 @@ impl ThreadView {
.upgrade()
.and_then(|workspace| workspace.read(cx).panel::<AgentPanel>(cx))
.is_some_and(|panel| {
- panel.read(cx).start_thread_in() == &StartThreadIn::NewWorktree
+ !matches!(
+ panel.read(cx).start_thread_in(),
+ StartThreadIn::LocalProject
+ )
});
if intercept_first_send {
diff --git a/crates/agent_ui/src/thread_branch_picker.rs b/crates/agent_ui/src/thread_branch_picker.rs
new file mode 100644
index 0000000000000000000000000000000000000000..d69cbb4a60054ad83d767928c880f3a43caef4f1
--- /dev/null
+++ b/crates/agent_ui/src/thread_branch_picker.rs
@@ -0,0 +1,695 @@
+use std::collections::{HashMap, HashSet};
+
+use collections::HashSet as CollectionsHashSet;
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use fuzzy::StringMatchCandidate;
+use git::repository::Branch as GitBranch;
+use gpui::{
+ App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, IntoElement,
+ ParentElement, Render, SharedString, Styled, Task, Window, rems,
+};
+use picker::{Picker, PickerDelegate, PickerEditorPosition};
+use project::Project;
+use ui::{
+ HighlightedLabel, Icon, IconName, Label, LabelCommon, ListItem, ListItemSpacing, Tooltip,
+ prelude::*,
+};
+use util::ResultExt as _;
+
+use crate::{NewWorktreeBranchTarget, StartThreadIn};
+
+pub(crate) struct ThreadBranchPicker {
+ picker: Entity<Picker<ThreadBranchPickerDelegate>>,
+ focus_handle: FocusHandle,
+ _subscription: gpui::Subscription,
+}
+
+impl ThreadBranchPicker {
+ pub fn new(
+ project: Entity<Project>,
+ current_target: &StartThreadIn,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Self {
+ let project_worktree_paths: HashSet<PathBuf> = project
+ .read(cx)
+ .visible_worktrees(cx)
+ .map(|worktree| worktree.read(cx).abs_path().to_path_buf())
+ .collect();
+
+ let has_multiple_repositories = project.read(cx).repositories(cx).len() > 1;
+ let current_branch_name = project
+ .read(cx)
+ .active_repository(cx)
+ .and_then(|repo| {
+ repo.read(cx)
+ .branch
+ .as_ref()
+ .map(|branch| branch.name().to_string())
+ })
+ .unwrap_or_else(|| "HEAD".to_string());
+
+ let repository = if has_multiple_repositories {
+ None
+ } else {
+ project.read(cx).active_repository(cx)
+ };
+ let branches_request = repository
+ .clone()
+ .map(|repo| repo.update(cx, |repo, _| repo.branches()));
+ let default_branch_request = repository
+ .clone()
+ .map(|repo| repo.update(cx, |repo, _| repo.default_branch(false)));
+ let worktrees_request = repository.map(|repo| repo.update(cx, |repo, _| repo.worktrees()));
+
+ let (worktree_name, branch_target) = match current_target {
+ StartThreadIn::NewWorktree {
+ worktree_name,
+ branch_target,
+ } => (worktree_name.clone(), branch_target.clone()),
+ _ => (None, NewWorktreeBranchTarget::default()),
+ };
+
+ let delegate = ThreadBranchPickerDelegate {
+ matches: vec![ThreadBranchEntry::CurrentBranch],
+ all_branches: None,
+ occupied_branches: None,
+ selected_index: 0,
+ worktree_name,
+ branch_target,
+ project_worktree_paths,
+ current_branch_name,
+ default_branch_name: None,
+ has_multiple_repositories,
+ };
+
+ let picker = cx.new(|cx| {
+ Picker::list(delegate, window, cx)
+ .list_measure_all()
+ .modal(false)
+ .max_height(Some(rems(20.).into()))
+ });
+
+ let focus_handle = picker.focus_handle(cx);
+
+ if let (Some(branches_request), Some(default_branch_request), Some(worktrees_request)) =
+ (branches_request, default_branch_request, worktrees_request)
+ {
+ let picker_handle = picker.downgrade();
+ cx.spawn_in(window, async move |_this, cx| {
+ let branches = branches_request.await??;
+ let default_branch = default_branch_request.await.ok().and_then(Result::ok).flatten();
+ let worktrees = worktrees_request.await??;
+
+ let remote_upstreams: CollectionsHashSet<_> = branches
+ .iter()
+ .filter_map(|branch| {
+ branch
+ .upstream
+ .as_ref()
+ .filter(|upstream| upstream.is_remote())
+ .map(|upstream| upstream.ref_name.clone())
+ })
+ .collect();
+
+ let mut occupied_branches = HashMap::new();
+ for worktree in worktrees {
+ let Some(branch_name) = worktree.branch_name().map(ToOwned::to_owned) else {
+ continue;
+ };
+
+ let reason = if picker_handle
+ .read_with(cx, |picker, _| {
+ picker
+ .delegate
+ .project_worktree_paths
+ .contains(&worktree.path)
+ })
+ .unwrap_or(false)
+ {
+ format!(
+ "This branch is already checked out in the current project worktree at {}.",
+ worktree.path.display()
+ )
+ } else {
+ format!(
+ "This branch is already checked out in a linked worktree at {}.",
+ worktree.path.display()
+ )
+ };
+
+ occupied_branches.insert(branch_name, reason);
+ }
+
+ let mut all_branches: Vec<_> = branches
+ .into_iter()
+ .filter(|branch| !remote_upstreams.contains(&branch.ref_name))
+ .collect();
+ all_branches.sort_by_key(|branch| {
+ (
+ branch.is_remote(),
+ !branch.is_head,
+ branch
+ .most_recent_commit
+ .as_ref()
+ .map(|commit| 0 - commit.commit_timestamp),
+ )
+ });
+
+ picker_handle.update_in(cx, |picker, window, cx| {
+ picker.delegate.all_branches = Some(all_branches);
+ picker.delegate.occupied_branches = Some(occupied_branches);
+ picker.delegate.default_branch_name = default_branch.map(|branch| branch.to_string());
+ picker.refresh(window, cx);
+ })?;
+
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+
+ let subscription = cx.subscribe(&picker, |_, _, _, cx| {
+ cx.emit(DismissEvent);
+ });
+
+ Self {
+ picker,
+ focus_handle,
+ _subscription: subscription,
+ }
+ }
+}
+
+impl Focusable for ThreadBranchPicker {
+ fn focus_handle(&self, _cx: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl EventEmitter<DismissEvent> for ThreadBranchPicker {}
+
+impl Render for ThreadBranchPicker {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ v_flex()
+ .w(rems(22.))
+ .elevation_3(cx)
+ .child(self.picker.clone())
+ .on_mouse_down_out(cx.listener(|_, _, _, cx| {
+ cx.emit(DismissEvent);
+ }))
+ }
+}
+
+#[derive(Clone)]
+enum ThreadBranchEntry {
+ CurrentBranch,
+ DefaultBranch,
+ ExistingBranch {
+ branch: GitBranch,
+ positions: Vec<usize>,
+ occupied_reason: Option<String>,
+ },
+ CreateNamed {
+ name: String,
+ },
+}
+
+pub(crate) struct ThreadBranchPickerDelegate {
+ matches: Vec<ThreadBranchEntry>,
+ all_branches: Option<Vec<GitBranch>>,
+ occupied_branches: Option<HashMap<String, String>>,
+ selected_index: usize,
+ worktree_name: Option<String>,
+ branch_target: NewWorktreeBranchTarget,
+ project_worktree_paths: HashSet<PathBuf>,
+ current_branch_name: String,
+ default_branch_name: Option<String>,
+ has_multiple_repositories: bool,
+}
+
+impl ThreadBranchPickerDelegate {
+ fn new_worktree_action(&self, branch_target: NewWorktreeBranchTarget) -> StartThreadIn {
+ StartThreadIn::NewWorktree {
+ worktree_name: self.worktree_name.clone(),
+ branch_target,
+ }
+ }
+
+ fn selected_entry_name(&self) -> Option<&str> {
+ match &self.branch_target {
+ NewWorktreeBranchTarget::CurrentBranch => None,
+ NewWorktreeBranchTarget::ExistingBranch { name } => Some(name),
+ NewWorktreeBranchTarget::CreateBranch {
+ from_ref: Some(from_ref),
+ ..
+ } => Some(from_ref),
+ NewWorktreeBranchTarget::CreateBranch { name, .. } => Some(name),
+ }
+ }
+
+ fn prefer_create_entry(&self) -> bool {
+ matches!(
+ &self.branch_target,
+ NewWorktreeBranchTarget::CreateBranch { from_ref: None, .. }
+ )
+ }
+
+ fn fixed_matches(&self) -> Vec<ThreadBranchEntry> {
+ let mut matches = vec![ThreadBranchEntry::CurrentBranch];
+ if !self.has_multiple_repositories
+ && self
+ .default_branch_name
+ .as_ref()
+ .is_some_and(|default_branch_name| default_branch_name != &self.current_branch_name)
+ {
+ matches.push(ThreadBranchEntry::DefaultBranch);
+ }
+ matches
+ }
+
+ fn current_branch_label(&self) -> SharedString {
+ if self.has_multiple_repositories {
+ SharedString::from("New branch from: current branches")
+ } else {
+ SharedString::from(format!("New branch from: {}", self.current_branch_name))
+ }
+ }
+
+ fn default_branch_label(&self) -> Option<SharedString> {
+ let default_branch_name = self
+ .default_branch_name
+ .as_ref()
+ .filter(|name| *name != &self.current_branch_name)?;
+ let is_occupied = self
+ .occupied_branches
+ .as_ref()
+ .is_some_and(|occupied| occupied.contains_key(default_branch_name));
+ let prefix = if is_occupied {
+ "New branch from"
+ } else {
+ "From"
+ };
+ Some(SharedString::from(format!(
+ "{prefix}: {default_branch_name}"
+ )))
+ }
+
+ fn branch_label_prefix(&self, branch_name: &str) -> &'static str {
+ let is_occupied = self
+ .occupied_branches
+ .as_ref()
+ .is_some_and(|occupied| occupied.contains_key(branch_name));
+ if is_occupied {
+ "New branch from: "
+ } else {
+ "From: "
+ }
+ }
+
+ fn sync_selected_index(&mut self) {
+ let selected_entry_name = self.selected_entry_name().map(ToOwned::to_owned);
+ let prefer_create = self.prefer_create_entry();
+
+ if prefer_create {
+ if let Some(ref selected_entry_name) = selected_entry_name {
+ if let Some(index) = self.matches.iter().position(|entry| {
+ matches!(
+ entry,
+ ThreadBranchEntry::CreateNamed { name } if name == selected_entry_name
+ )
+ }) {
+ self.selected_index = index;
+ return;
+ }
+ }
+ } else if let Some(ref selected_entry_name) = selected_entry_name {
+ if selected_entry_name == &self.current_branch_name {
+ if let Some(index) = self
+ .matches
+ .iter()
+ .position(|entry| matches!(entry, ThreadBranchEntry::CurrentBranch))
+ {
+ self.selected_index = index;
+ return;
+ }
+ }
+
+ if self
+ .default_branch_name
+ .as_ref()
+ .is_some_and(|default_branch_name| default_branch_name == selected_entry_name)
+ {
+ if let Some(index) = self
+ .matches
+ .iter()
+ .position(|entry| matches!(entry, ThreadBranchEntry::DefaultBranch))
+ {
+ self.selected_index = index;
+ return;
+ }
+ }
+
+ if let Some(index) = self.matches.iter().position(|entry| {
+ matches!(
+ entry,
+ ThreadBranchEntry::ExistingBranch { branch, .. }
+ if branch.name() == selected_entry_name.as_str()
+ )
+ }) {
+ self.selected_index = index;
+ return;
+ }
+ }
+
+ if self.matches.len() > 1
+ && self
+ .matches
+ .iter()
+ .skip(1)
+ .all(|entry| matches!(entry, ThreadBranchEntry::CreateNamed { .. }))
+ {
+ self.selected_index = 1;
+ return;
+ }
+
+ self.selected_index = 0;
+ }
+}
+
+impl PickerDelegate for ThreadBranchPickerDelegate {
+ type ListItem = ListItem;
+
+ fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
+ "Search branches…".into()
+ }
+
+ fn editor_position(&self) -> PickerEditorPosition {
+ PickerEditorPosition::Start
+ }
+
+ fn match_count(&self) -> usize {
+ self.matches.len()
+ }
+
+ fn selected_index(&self) -> usize {
+ self.selected_index
+ }
+
+ fn set_selected_index(
+ &mut self,
+ ix: usize,
+ _window: &mut Window,
+ _cx: &mut Context<Picker<Self>>,
+ ) {
+ self.selected_index = ix;
+ }
+
+ fn update_matches(
+ &mut self,
+ query: String,
+ window: &mut Window,
+ cx: &mut Context<Picker<Self>>,
+ ) -> Task<()> {
+ if self.has_multiple_repositories {
+ let mut matches = self.fixed_matches();
+
+ if query.is_empty() {
+ if let Some(name) = self.selected_entry_name().map(ToOwned::to_owned) {
+ if self.prefer_create_entry() {
+ matches.push(ThreadBranchEntry::CreateNamed { name });
+ }
+ }
+ } else {
+ matches.push(ThreadBranchEntry::CreateNamed {
+ name: query.replace(' ', "-"),
+ });
+ }
+
+ self.matches = matches;
+ self.sync_selected_index();
+ return Task::ready(());
+ }
+
+ let Some(all_branches) = self.all_branches.clone() else {
+ self.matches = self.fixed_matches();
+ self.selected_index = 0;
+ return Task::ready(());
+ };
+ let occupied_branches = self.occupied_branches.clone().unwrap_or_default();
+
+ if query.is_empty() {
+ let mut matches = self.fixed_matches();
+ for branch in all_branches.into_iter().filter(|branch| {
+ branch.name() != self.current_branch_name
+ && self
+ .default_branch_name
+ .as_ref()
+ .is_none_or(|default_branch_name| branch.name() != default_branch_name)
+ }) {
+ matches.push(ThreadBranchEntry::ExistingBranch {
+ occupied_reason: occupied_branches.get(branch.name()).cloned(),
+ branch,
+ positions: Vec::new(),
+ });
+ }
+
+ if let Some(selected_entry_name) = self.selected_entry_name().map(ToOwned::to_owned) {
+ let has_existing = matches.iter().any(|entry| {
+ matches!(
+ entry,
+ ThreadBranchEntry::ExistingBranch { branch, .. }
+ if branch.name() == selected_entry_name
+ )
+ });
+ if self.prefer_create_entry() && !has_existing {
+ matches.push(ThreadBranchEntry::CreateNamed {
+ name: selected_entry_name,
+ });
+ }
+ }
+
+ self.matches = matches;
+ self.sync_selected_index();
+ return Task::ready(());
+ }
+
+ let candidates: Vec<_> = all_branches
+ .iter()
+ .enumerate()
+ .map(|(ix, branch)| StringMatchCandidate::new(ix, branch.name()))
+ .collect();
+ let executor = cx.background_executor().clone();
+ let query_clone = query.clone();
+ let normalized_query = query.replace(' ', "-");
+
+ let task = cx.background_executor().spawn(async move {
+ fuzzy::match_strings(
+ &candidates,
+ &query_clone,
+ true,
+ true,
+ 10000,
+ &Default::default(),
+ executor,
+ )
+ .await
+ });
+
+ let all_branches_clone = all_branches;
+ cx.spawn_in(window, async move |picker, cx| {
+ let fuzzy_matches = task.await;
+
+ picker
+ .update_in(cx, |picker, _window, cx| {
+ let mut matches = picker.delegate.fixed_matches();
+
+ for candidate in &fuzzy_matches {
+ let branch = all_branches_clone[candidate.candidate_id].clone();
+ if branch.name() == picker.delegate.current_branch_name
+ || picker.delegate.default_branch_name.as_ref().is_some_and(
+ |default_branch_name| branch.name() == default_branch_name,
+ )
+ {
+ continue;
+ }
+ let occupied_reason = occupied_branches.get(branch.name()).cloned();
+ matches.push(ThreadBranchEntry::ExistingBranch {
+ branch,
+ positions: candidate.positions.clone(),
+ occupied_reason,
+ });
+ }
+
+ if fuzzy_matches.is_empty() {
+ matches.push(ThreadBranchEntry::CreateNamed {
+ name: normalized_query.clone(),
+ });
+ }
+
+ picker.delegate.matches = matches;
+ if let Some(index) =
+ picker.delegate.matches.iter().position(|entry| {
+ matches!(entry, ThreadBranchEntry::ExistingBranch { .. })
+ })
+ {
+ picker.delegate.selected_index = index;
+ } else if !fuzzy_matches.is_empty() {
+ picker.delegate.selected_index = 0;
+ } else if let Some(index) =
+ picker.delegate.matches.iter().position(|entry| {
+ matches!(entry, ThreadBranchEntry::CreateNamed { .. })
+ })
+ {
+ picker.delegate.selected_index = index;
+ } else {
+ picker.delegate.sync_selected_index();
+ }
+ cx.notify();
+ })
+ .log_err();
+ })
+ }
+
+ fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context>) {
+ let Some(entry) = self.matches.get(self.selected_index) else {
+ return;
+ };
+
+ match entry {
+ ThreadBranchEntry::CurrentBranch => {
+ window.dispatch_action(
+ Box::new(self.new_worktree_action(NewWorktreeBranchTarget::CurrentBranch)),
+ cx,
+ );
+ }
+ ThreadBranchEntry::DefaultBranch => {
+ let Some(default_branch_name) = self.default_branch_name.clone() else {
+ return;
+ };
+ window.dispatch_action(
+ Box::new(
+ self.new_worktree_action(NewWorktreeBranchTarget::ExistingBranch {
+ name: default_branch_name,
+ }),
+ ),
+ cx,
+ );
+ }
+ ThreadBranchEntry::ExistingBranch { branch, .. } => {
+ let branch_target = if branch.is_remote() {
+ let branch_name = branch
+ .ref_name
+ .as_ref()
+ .strip_prefix("refs/remotes/")
+ .and_then(|stripped| stripped.split_once('/').map(|(_, name)| name))
+ .unwrap_or(branch.name())
+ .to_string();
+ NewWorktreeBranchTarget::CreateBranch {
+ name: branch_name,
+ from_ref: Some(branch.name().to_string()),
+ }
+ } else {
+ NewWorktreeBranchTarget::ExistingBranch {
+ name: branch.name().to_string(),
+ }
+ };
+ window.dispatch_action(Box::new(self.new_worktree_action(branch_target)), cx);
+ }
+ ThreadBranchEntry::CreateNamed { name } => {
+ window.dispatch_action(
+ Box::new(
+ self.new_worktree_action(NewWorktreeBranchTarget::CreateBranch {
+ name: name.clone(),
+ from_ref: None,
+ }),
+ ),
+ cx,
+ );
+ }
+ }
+
+ cx.emit(DismissEvent);
+ }
+
+ fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {}
+
+ fn separators_after_indices(&self) -> Vec {
+ let fixed_count = self.fixed_matches().len();
+ if self.matches.len() > fixed_count {
+ vec![fixed_count - 1]
+ } else {
+ Vec::new()
+ }
+ }
+
+ fn render_match(
+ &self,
+ ix: usize,
+ selected: bool,
+ _window: &mut Window,
+ _cx: &mut Context>,
+ ) -> Option {
+ let entry = self.matches.get(ix)?;
+
+ match entry {
+ ThreadBranchEntry::CurrentBranch => Some(
+ ListItem::new("current-branch")
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .toggle_state(selected)
+ .start_slot(Icon::new(IconName::GitBranch).color(Color::Muted))
+ .child(Label::new(self.current_branch_label())),
+ ),
+ ThreadBranchEntry::DefaultBranch => Some(
+ ListItem::new("default-branch")
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .toggle_state(selected)
+ .start_slot(Icon::new(IconName::GitBranch).color(Color::Muted))
+ .child(Label::new(self.default_branch_label()?)),
+ ),
+ ThreadBranchEntry::ExistingBranch {
+ branch,
+ positions,
+ occupied_reason,
+ } => {
+ let prefix = self.branch_label_prefix(branch.name());
+ let branch_name = branch.name().to_string();
+ let full_label = format!("{prefix}{branch_name}");
+ let adjusted_positions: Vec =
+ positions.iter().map(|&p| p + prefix.len()).collect();
+
+ let item = ListItem::new(SharedString::from(format!("branch-{ix}")))
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .toggle_state(selected)
+ .start_slot(Icon::new(IconName::GitBranch).color(Color::Muted))
+ .child(HighlightedLabel::new(full_label, adjusted_positions).truncate());
+
+ Some(if let Some(reason) = occupied_reason.clone() {
+ item.tooltip(Tooltip::text(reason))
+ } else if branch.is_remote() {
+ item.tooltip(Tooltip::text(
+ "Create a new local branch from this remote branch",
+ ))
+ } else {
+ item
+ })
+ }
+ ThreadBranchEntry::CreateNamed { name } => Some(
+ ListItem::new("create-named-branch")
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .toggle_state(selected)
+ .start_slot(Icon::new(IconName::Plus).color(Color::Accent))
+ .child(Label::new(format!("Create Branch: \"{name}\"…"))),
+ ),
+ }
+ }
+
+ fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option {
+ None
+ }
+}
diff --git a/crates/agent_ui/src/thread_worktree_picker.rs b/crates/agent_ui/src/thread_worktree_picker.rs
new file mode 100644
index 0000000000000000000000000000000000000000..47a6a12d71822e13ab3523a3a6b0bb1ee57c7b4b
--- /dev/null
+++ b/crates/agent_ui/src/thread_worktree_picker.rs
@@ -0,0 +1,485 @@
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use agent_settings::AgentSettings;
+use fs::Fs;
+use fuzzy::StringMatchCandidate;
+use git::repository::Worktree as GitWorktree;
+use gpui::{
+ App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, IntoElement,
+ ParentElement, Render, SharedString, Styled, Task, Window, rems,
+};
+use picker::{Picker, PickerDelegate, PickerEditorPosition};
+use project::{Project, git_store::RepositoryId};
+use settings::{NewThreadLocation, Settings, update_settings_file};
+use ui::{
+ HighlightedLabel, Icon, IconName, Label, LabelCommon, ListItem, ListItemSpacing, Tooltip,
+ prelude::*,
+};
+use util::ResultExt as _;
+
+use crate::ui::HoldForDefault;
+use crate::{NewWorktreeBranchTarget, StartThreadIn};
+
+pub(crate) struct ThreadWorktreePicker {
+ picker: Entity>,
+ focus_handle: FocusHandle,
+ _subscription: gpui::Subscription,
+}
+
+impl ThreadWorktreePicker {
+ pub fn new(
+ project: Entity,
+ current_target: &StartThreadIn,
+ fs: Arc,
+ window: &mut Window,
+ cx: &mut Context,
+ ) -> Self {
+ let project_worktree_paths: Vec = project
+ .read(cx)
+ .visible_worktrees(cx)
+ .map(|wt| wt.read(cx).abs_path().to_path_buf())
+ .collect();
+
+ let preserved_branch_target = match current_target {
+ StartThreadIn::NewWorktree { branch_target, .. } => branch_target.clone(),
+ _ => NewWorktreeBranchTarget::default(),
+ };
+
+ let delegate = ThreadWorktreePickerDelegate {
+ matches: vec![
+ ThreadWorktreeEntry::CurrentWorktree,
+ ThreadWorktreeEntry::NewWorktree,
+ ],
+ all_worktrees: project
+ .read(cx)
+ .repositories(cx)
+ .iter()
+ .map(|(repo_id, repo)| (*repo_id, repo.read(cx).linked_worktrees.clone()))
+ .collect(),
+ project_worktree_paths,
+ selected_index: match current_target {
+ StartThreadIn::LocalProject => 0,
+ StartThreadIn::NewWorktree { .. } => 1,
+ _ => 0,
+ },
+ project: project.clone(),
+ preserved_branch_target,
+ fs,
+ };
+
+ let picker = cx.new(|cx| {
+ Picker::list(delegate, window, cx)
+ .list_measure_all()
+ .modal(false)
+ .max_height(Some(rems(20.).into()))
+ });
+
+ let subscription = cx.subscribe(&picker, |_, _, _, cx| {
+ cx.emit(DismissEvent);
+ });
+
+ Self {
+ focus_handle: picker.focus_handle(cx),
+ picker,
+ _subscription: subscription,
+ }
+ }
+}
+
+impl Focusable for ThreadWorktreePicker {
+ fn focus_handle(&self, _cx: &App) -> FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl EventEmitter for ThreadWorktreePicker {}
+
+impl Render for ThreadWorktreePicker {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context) -> impl IntoElement {
+ v_flex()
+ .w(rems(20.))
+ .elevation_3(cx)
+ .child(self.picker.clone())
+ .on_mouse_down_out(cx.listener(|_, _, _, cx| {
+ cx.emit(DismissEvent);
+ }))
+ }
+}
+
+#[derive(Clone)]
+enum ThreadWorktreeEntry {
+ CurrentWorktree,
+ NewWorktree,
+ LinkedWorktree {
+ worktree: GitWorktree,
+ positions: Vec,
+ },
+ CreateNamed {
+ name: String,
+ disabled_reason: Option,
+ },
+}
+
+pub(crate) struct ThreadWorktreePickerDelegate {
+ matches: Vec,
+ all_worktrees: Vec<(RepositoryId, Arc<[GitWorktree]>)>,
+ project_worktree_paths: Vec,
+ selected_index: usize,
+ preserved_branch_target: NewWorktreeBranchTarget,
+ project: Entity,
+ fs: Arc,
+}
+
+impl ThreadWorktreePickerDelegate {
+ fn new_worktree_action(&self, worktree_name: Option) -> StartThreadIn {
+ StartThreadIn::NewWorktree {
+ worktree_name,
+ branch_target: self.preserved_branch_target.clone(),
+ }
+ }
+
+ fn sync_selected_index(&mut self) {
+ if let Some(index) = self
+ .matches
+ .iter()
+ .position(|entry| matches!(entry, ThreadWorktreeEntry::LinkedWorktree { .. }))
+ {
+ self.selected_index = index;
+ } else if let Some(index) = self
+ .matches
+ .iter()
+ .position(|entry| matches!(entry, ThreadWorktreeEntry::CreateNamed { .. }))
+ {
+ self.selected_index = index;
+ } else {
+ self.selected_index = 0;
+ }
+ }
+}
+
+impl PickerDelegate for ThreadWorktreePickerDelegate {
+ type ListItem = ListItem;
+
+ fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc {
+ "Search or create worktrees…".into()
+ }
+
+ fn editor_position(&self) -> PickerEditorPosition {
+ PickerEditorPosition::Start
+ }
+
+ fn match_count(&self) -> usize {
+ self.matches.len()
+ }
+
+ fn selected_index(&self) -> usize {
+ self.selected_index
+ }
+
+ fn set_selected_index(
+ &mut self,
+ ix: usize,
+ _window: &mut Window,
+ _cx: &mut Context>,
+ ) {
+ self.selected_index = ix;
+ }
+
+ fn separators_after_indices(&self) -> Vec {
+ if self.matches.len() > 2 {
+ vec![1]
+ } else {
+ Vec::new()
+ }
+ }
+
+ fn update_matches(
+ &mut self,
+ query: String,
+ window: &mut Window,
+ cx: &mut Context>,
+ ) -> Task<()> {
+ let has_multiple_repositories = self.all_worktrees.len() > 1;
+
+ let linked_worktrees: Vec<_> = if has_multiple_repositories {
+ Vec::new()
+ } else {
+ self.all_worktrees
+ .iter()
+ .flat_map(|(_, worktrees)| worktrees.iter())
+ .filter(|worktree| {
+ !self
+ .project_worktree_paths
+ .iter()
+ .any(|project_path| project_path == &worktree.path)
+ })
+ .cloned()
+ .collect()
+ };
+
+ let normalized_query = query.replace(' ', "-");
+ let has_named_worktree = self.all_worktrees.iter().any(|(_, worktrees)| {
+ worktrees
+ .iter()
+ .any(|worktree| worktree.display_name() == normalized_query)
+ });
+ let create_named_disabled_reason = if has_multiple_repositories {
+ Some("Cannot create a named worktree in a project with multiple repositories".into())
+ } else if has_named_worktree {
+ Some("A worktree with this name already exists".into())
+ } else {
+ None
+ };
+
+ let mut matches = vec![
+ ThreadWorktreeEntry::CurrentWorktree,
+ ThreadWorktreeEntry::NewWorktree,
+ ];
+
+ if query.is_empty() {
+ for worktree in &linked_worktrees {
+ matches.push(ThreadWorktreeEntry::LinkedWorktree {
+ worktree: worktree.clone(),
+ positions: Vec::new(),
+ });
+ }
+ } else if linked_worktrees.is_empty() {
+ matches.push(ThreadWorktreeEntry::CreateNamed {
+ name: normalized_query,
+ disabled_reason: create_named_disabled_reason,
+ });
+ } else {
+ let candidates: Vec<_> = linked_worktrees
+ .iter()
+ .enumerate()
+ .map(|(ix, worktree)| StringMatchCandidate::new(ix, worktree.display_name()))
+ .collect();
+
+ let executor = cx.background_executor().clone();
+ let query_clone = query.clone();
+
+ let task = cx.background_executor().spawn(async move {
+ fuzzy::match_strings(
+ &candidates,
+ &query_clone,
+ true,
+ true,
+ 10000,
+ &Default::default(),
+ executor,
+ )
+ .await
+ });
+
+ let linked_worktrees_clone = linked_worktrees;
+ return cx.spawn_in(window, async move |picker, cx| {
+ let fuzzy_matches = task.await;
+
+ picker
+ .update_in(cx, |picker, _window, cx| {
+ let mut new_matches = vec![
+ ThreadWorktreeEntry::CurrentWorktree,
+ ThreadWorktreeEntry::NewWorktree,
+ ];
+
+ for candidate in &fuzzy_matches {
+ new_matches.push(ThreadWorktreeEntry::LinkedWorktree {
+ worktree: linked_worktrees_clone[candidate.candidate_id].clone(),
+ positions: candidate.positions.clone(),
+ });
+ }
+
+ let has_exact_match = linked_worktrees_clone
+ .iter()
+ .any(|worktree| worktree.display_name() == query);
+
+ if !has_exact_match {
+ new_matches.push(ThreadWorktreeEntry::CreateNamed {
+ name: normalized_query.clone(),
+ disabled_reason: create_named_disabled_reason.clone(),
+ });
+ }
+
+ picker.delegate.matches = new_matches;
+ picker.delegate.sync_selected_index();
+
+ cx.notify();
+ })
+ .log_err();
+ });
+ }
+
+ self.matches = matches;
+ self.sync_selected_index();
+
+ Task::ready(())
+ }
+
+ fn confirm(&mut self, secondary: bool, window: &mut Window, cx: &mut Context>) {
+ let Some(entry) = self.matches.get(self.selected_index) else {
+ return;
+ };
+
+ match entry {
+ ThreadWorktreeEntry::CurrentWorktree => {
+ if secondary {
+ update_settings_file(self.fs.clone(), cx, |settings, _| {
+ settings
+ .agent
+ .get_or_insert_default()
+ .set_new_thread_location(NewThreadLocation::LocalProject);
+ });
+ }
+ window.dispatch_action(Box::new(StartThreadIn::LocalProject), cx);
+ }
+ ThreadWorktreeEntry::NewWorktree => {
+ if secondary {
+ update_settings_file(self.fs.clone(), cx, |settings, _| {
+ settings
+ .agent
+ .get_or_insert_default()
+ .set_new_thread_location(NewThreadLocation::NewWorktree);
+ });
+ }
+ window.dispatch_action(Box::new(self.new_worktree_action(None)), cx);
+ }
+ ThreadWorktreeEntry::LinkedWorktree { worktree, .. } => {
+ window.dispatch_action(
+ Box::new(StartThreadIn::LinkedWorktree {
+ path: worktree.path.clone(),
+ display_name: worktree.display_name().to_string(),
+ }),
+ cx,
+ );
+ }
+ ThreadWorktreeEntry::CreateNamed {
+ name,
+ disabled_reason: None,
+ } => {
+ window.dispatch_action(Box::new(self.new_worktree_action(Some(name.clone()))), cx);
+ }
+ ThreadWorktreeEntry::CreateNamed {
+ disabled_reason: Some(_),
+ ..
+ } => {
+ return;
+ }
+ }
+
+ cx.emit(DismissEvent);
+ }
+
+ fn dismissed(&mut self, _window: &mut Window, _cx: &mut Context>) {}
+
+ fn render_match(
+ &self,
+ ix: usize,
+ selected: bool,
+ _window: &mut Window,
+ cx: &mut Context>,
+ ) -> Option {
+ let entry = self.matches.get(ix)?;
+ let project = self.project.read(cx);
+ let is_new_worktree_disabled =
+ project.repositories(cx).is_empty() || project.is_via_collab();
+ let new_thread_location = AgentSettings::get_global(cx).new_thread_location;
+ let is_local_default = new_thread_location == NewThreadLocation::LocalProject;
+ let is_new_worktree_default = new_thread_location == NewThreadLocation::NewWorktree;
+
+ match entry {
+ ThreadWorktreeEntry::CurrentWorktree => Some(
+ ListItem::new("current-worktree")
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .toggle_state(selected)
+ .start_slot(Icon::new(IconName::Folder).color(Color::Muted))
+ .child(Label::new("Current Worktree"))
+ .end_slot(HoldForDefault::new(is_local_default).more_content(false))
+ .tooltip(Tooltip::text("Use the current project worktree")),
+ ),
+ ThreadWorktreeEntry::NewWorktree => {
+ let item = ListItem::new("new-worktree")
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .toggle_state(selected)
+ .disabled(is_new_worktree_disabled)
+ .start_slot(
+ Icon::new(IconName::Plus).color(if is_new_worktree_disabled {
+ Color::Disabled
+ } else {
+ Color::Muted
+ }),
+ )
+ .child(
+ Label::new("New Git Worktree").color(if is_new_worktree_disabled {
+ Color::Disabled
+ } else {
+ Color::Default
+ }),
+ );
+
+ Some(if is_new_worktree_disabled {
+ item.tooltip(Tooltip::text("Requires a Git repository in the project"))
+ } else {
+ item.end_slot(HoldForDefault::new(is_new_worktree_default).more_content(false))
+ .tooltip(Tooltip::text("Start a thread in a new Git worktree"))
+ })
+ }
+ ThreadWorktreeEntry::LinkedWorktree {
+ worktree,
+ positions,
+ } => {
+ let display_name = worktree.display_name();
+ let first_line = display_name.lines().next().unwrap_or(display_name);
+ let positions: Vec<_> = positions
+ .iter()
+ .copied()
+ .filter(|&pos| pos < first_line.len())
+ .collect();
+
+ Some(
+ ListItem::new(SharedString::from(format!("linked-worktree-{ix}")))
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .toggle_state(selected)
+ .start_slot(Icon::new(IconName::GitWorktree).color(Color::Muted))
+ .child(HighlightedLabel::new(first_line.to_owned(), positions).truncate()),
+ )
+ }
+ ThreadWorktreeEntry::CreateNamed {
+ name,
+ disabled_reason,
+ } => {
+ let is_disabled = disabled_reason.is_some();
+ let item = ListItem::new("create-named-worktree")
+ .inset(true)
+ .spacing(ListItemSpacing::Sparse)
+ .toggle_state(selected)
+ .disabled(is_disabled)
+ .start_slot(Icon::new(IconName::Plus).color(if is_disabled {
+ Color::Disabled
+ } else {
+ Color::Accent
+ }))
+ .child(Label::new(format!("Create Worktree: \"{name}\"…")).color(
+ if is_disabled {
+ Color::Disabled
+ } else {
+ Color::Default
+ },
+ ));
+
+ Some(if let Some(reason) = disabled_reason.clone() {
+ item.tooltip(Tooltip::text(reason))
+ } else {
+ item
+ })
+ }
+ }
+ }
+
+ fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option {
+ None
+ }
+}
diff --git a/crates/collab/tests/integration/git_tests.rs b/crates/collab/tests/integration/git_tests.rs
index 2fa67b072f1c3d49ef5ca1b90056fd08d57df1ba..c273005264d0a53b6a083a4013f7597a56919016 100644
--- a/crates/collab/tests/integration/git_tests.rs
+++ b/crates/collab/tests/integration/git_tests.rs
@@ -269,9 +269,11 @@ async fn test_remote_git_worktrees(
cx_b.update(|cx| {
repo_b.update(cx, |repository, _| {
repository.create_worktree(
- "feature-branch".to_string(),
+ git::repository::CreateWorktreeTarget::NewBranch {
+ branch_name: "feature-branch".to_string(),
+ base_sha: Some("abc123".to_string()),
+ },
worktree_directory.join("feature-branch"),
- Some("abc123".to_string()),
)
})
})
@@ -323,9 +325,11 @@ async fn test_remote_git_worktrees(
cx_b.update(|cx| {
repo_b.update(cx, |repository, _| {
repository.create_worktree(
- "bugfix-branch".to_string(),
+ git::repository::CreateWorktreeTarget::NewBranch {
+ branch_name: "bugfix-branch".to_string(),
+ base_sha: None,
+ },
worktree_directory.join("bugfix-branch"),
- None,
)
})
})
diff --git a/crates/collab/tests/integration/remote_editing_collaboration_tests.rs b/crates/collab/tests/integration/remote_editing_collaboration_tests.rs
index 0796323fc5b3d8f6b1cbcb0e108a7d573240f446..d478402a9d66ca9fba4e8f9517cb62898754e677 100644
--- a/crates/collab/tests/integration/remote_editing_collaboration_tests.rs
+++ b/crates/collab/tests/integration/remote_editing_collaboration_tests.rs
@@ -473,9 +473,11 @@ async fn test_ssh_collaboration_git_worktrees(
cx_b.update(|cx| {
repo_b.update(cx, |repo, _| {
repo.create_worktree(
- "feature-branch".to_string(),
+ git::repository::CreateWorktreeTarget::NewBranch {
+ branch_name: "feature-branch".to_string(),
+ base_sha: Some("abc123".to_string()),
+ },
worktree_directory.join("feature-branch"),
- Some("abc123".to_string()),
)
})
})
diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs
index 751796fb83164b78dc5d6789f0ae7870eff16ce1..fbebeabf0ac15dde80016958eb358f792f46dd50 100644
--- a/crates/fs/src/fake_git_repo.rs
+++ b/crates/fs/src/fake_git_repo.rs
@@ -6,9 +6,10 @@ use git::{
Oid, RunHook,
blame::Blame,
repository::{
- AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions, FetchOptions,
- GRAPH_CHUNK_SIZE, GitRepository, GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder,
- LogSource, PushOptions, Remote, RepoPath, ResetMode, SearchCommitArgs, Worktree,
+ AskPassDelegate, Branch, CommitDataReader, CommitDetails, CommitOptions,
+ CreateWorktreeTarget, FetchOptions, GRAPH_CHUNK_SIZE, GitRepository,
+ GitRepositoryCheckpoint, InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote,
+ RepoPath, ResetMode, SearchCommitArgs, Worktree,
},
stash::GitStash,
status::{
@@ -540,9 +541,8 @@ impl GitRepository for FakeGitRepository {
fn create_worktree(
&self,
- branch_name: Option,
+ target: CreateWorktreeTarget,
path: PathBuf,
- from_commit: Option,
) -> BoxFuture<'_, Result<()>> {
let fs = self.fs.clone();
let executor = self.executor.clone();
@@ -550,30 +550,82 @@ impl GitRepository for FakeGitRepository {
let common_dir_path = self.common_dir_path.clone();
async move {
executor.simulate_random_delay().await;
- // Check for simulated error and duplicate branch before any side effects.
- fs.with_git_state(&dot_git_path, false, |state| {
- if let Some(message) = &state.simulated_create_worktree_error {
- anyhow::bail!("{message}");
- }
- if let Some(ref name) = branch_name {
- if state.branches.contains(name) {
- bail!("a branch named '{}' already exists", name);
+
+ let branch_name = target.branch_name().map(ToOwned::to_owned);
+ let create_branch_ref = matches!(target, CreateWorktreeTarget::NewBranch { .. });
+
+ // Check for simulated error and validate branch state before any side effects.
+ fs.with_git_state(&dot_git_path, false, {
+ let branch_name = branch_name.clone();
+ move |state| {
+ if let Some(message) = &state.simulated_create_worktree_error {
+ anyhow::bail!("{message}");
}
+
+ match (create_branch_ref, branch_name.as_ref()) {
+ (true, Some(branch_name)) => {
+ if state.branches.contains(branch_name) {
+ bail!("a branch named '{}' already exists", branch_name);
+ }
+ }
+ (false, Some(branch_name)) => {
+ if !state.branches.contains(branch_name) {
+ bail!("no branch named '{}' exists", branch_name);
+ }
+ }
+ (false, None) => {}
+ (true, None) => bail!("branch name is required to create a branch"),
+ }
+
+ Ok(())
}
- Ok(())
})??;
+ let (branch_name, sha, create_branch_ref) = match target {
+ CreateWorktreeTarget::ExistingBranch { branch_name } => {
+ let ref_name = format!("refs/heads/{branch_name}");
+ let sha = fs.with_git_state(&dot_git_path, false, {
+ move |state| {
+ Ok::<_, anyhow::Error>(
+ state
+ .refs
+ .get(&ref_name)
+ .cloned()
+ .unwrap_or_else(|| "fake-sha".to_string()),
+ )
+ }
+ })??;
+ (Some(branch_name), sha, false)
+ }
+ CreateWorktreeTarget::NewBranch {
+ branch_name,
+ base_sha: start_point,
+ } => (
+ Some(branch_name),
+ start_point.unwrap_or_else(|| "fake-sha".to_string()),
+ true,
+ ),
+ CreateWorktreeTarget::Detached {
+ base_sha: start_point,
+ } => (
+ None,
+ start_point.unwrap_or_else(|| "fake-sha".to_string()),
+ false,
+ ),
+ };
+
// Create the worktree checkout directory.
fs.create_dir(&path).await?;
// Create .git/worktrees// directory with HEAD, commondir, gitdir.
- let worktree_entry_name = branch_name
- .as_deref()
- .unwrap_or_else(|| path.file_name().unwrap().to_str().unwrap());
+ let worktree_entry_name = branch_name.as_deref().unwrap_or_else(|| {
+ path.file_name()
+ .and_then(|name| name.to_str())
+ .unwrap_or("detached")
+ });
let worktrees_entry_dir = common_dir_path.join("worktrees").join(worktree_entry_name);
fs.create_dir(&worktrees_entry_dir).await?;
- let sha = from_commit.unwrap_or_else(|| "fake-sha".to_string());
let head_content = if let Some(ref branch_name) = branch_name {
let ref_name = format!("refs/heads/{branch_name}");
format!("ref: {ref_name}")
@@ -604,15 +656,22 @@ impl GitRepository for FakeGitRepository {
false,
)?;
- // Update git state: add ref and branch.
- fs.with_git_state(&dot_git_path, true, move |state| {
- if let Some(branch_name) = branch_name {
- let ref_name = format!("refs/heads/{branch_name}");
- state.refs.insert(ref_name, sha);
- state.branches.insert(branch_name);
- }
- Ok::<(), anyhow::Error>(())
- })??;
+ // Update git state for newly created branches.
+ if create_branch_ref {
+ fs.with_git_state(&dot_git_path, true, {
+ let branch_name = branch_name.clone();
+ let sha = sha.clone();
+ move |state| {
+ if let Some(branch_name) = branch_name {
+ let ref_name = format!("refs/heads/{branch_name}");
+ state.refs.insert(ref_name, sha);
+ state.branches.insert(branch_name);
+ }
+ Ok::<(), anyhow::Error>(())
+ }
+ })??;
+ }
+
Ok(())
}
.boxed()
diff --git a/crates/fs/tests/integration/fake_git_repo.rs b/crates/fs/tests/integration/fake_git_repo.rs
index f4192a22bb42f88f8769ef59f817b2bf2a288fb9..3be81ad7301e6fc4ee6f4529ce8bb587de3b4565 100644
--- a/crates/fs/tests/integration/fake_git_repo.rs
+++ b/crates/fs/tests/integration/fake_git_repo.rs
@@ -24,9 +24,11 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
// Create a worktree
let worktree_1_dir = worktrees_dir.join("feature-branch");
repo.create_worktree(
- Some("feature-branch".to_string()),
+ git::repository::CreateWorktreeTarget::NewBranch {
+ branch_name: "feature-branch".to_string(),
+ base_sha: Some("abc123".to_string()),
+ },
worktree_1_dir.clone(),
- Some("abc123".to_string()),
)
.await
.unwrap();
@@ -48,9 +50,11 @@ async fn test_fake_worktree_lifecycle(cx: &mut TestAppContext) {
// Create a second worktree (without explicit commit)
let worktree_2_dir = worktrees_dir.join("bugfix-branch");
repo.create_worktree(
- Some("bugfix-branch".to_string()),
+ git::repository::CreateWorktreeTarget::NewBranch {
+ branch_name: "bugfix-branch".to_string(),
+ base_sha: None,
+ },
worktree_2_dir.clone(),
- None,
)
.await
.unwrap();
diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs
index c42d2e28cf041e40404c1b8276ddcf5d10ca5f01..ba717d00c5e40374f5315d3ee8bc12e671f09552 100644
--- a/crates/git/src/repository.rs
+++ b/crates/git/src/repository.rs
@@ -241,20 +241,57 @@ pub struct Worktree {
pub is_main: bool,
}
+/// Describes how a new worktree should choose or create its checked-out HEAD.
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub enum CreateWorktreeTarget {
+ /// Check out an existing local branch in the new worktree.
+ ExistingBranch {
+ /// The existing local branch to check out.
+ branch_name: String,
+ },
+ /// Create a new local branch for the new worktree.
+ NewBranch {
+ /// The new local branch to create and check out.
+ branch_name: String,
+ /// The commit or ref to create the branch from. Uses `HEAD` when `None`.
+ base_sha: Option,
+ },
+ /// Check out a commit or ref in detached HEAD state.
+ Detached {
+ /// The commit or ref to check out. Uses `HEAD` when `None`.
+ base_sha: Option,
+ },
+}
+
+impl CreateWorktreeTarget {
+ pub fn branch_name(&self) -> Option<&str> {
+ match self {
+ Self::ExistingBranch { branch_name } | Self::NewBranch { branch_name, .. } => {
+ Some(branch_name)
+ }
+ Self::Detached { .. } => None,
+ }
+ }
+}
+
impl Worktree {
+ /// Returns the branch name if the worktree is attached to a branch.
+ pub fn branch_name(&self) -> Option<&str> {
+ self.ref_name.as_ref().map(|ref_name| {
+ ref_name
+ .strip_prefix("refs/heads/")
+ .or_else(|| ref_name.strip_prefix("refs/remotes/"))
+ .unwrap_or(ref_name)
+ })
+ }
+
/// Returns a display name for the worktree, suitable for use in the UI.
///
/// If the worktree is attached to a branch, returns the branch name.
/// Otherwise, returns the short SHA of the worktree's HEAD commit.
pub fn display_name(&self) -> &str {
- match self.ref_name {
- Some(ref ref_name) => ref_name
- .strip_prefix("refs/heads/")
- .or_else(|| ref_name.strip_prefix("refs/remotes/"))
- .unwrap_or(ref_name),
- // Detached HEAD — show the short SHA as a fallback.
- None => &self.sha[..self.sha.len().min(SHORT_SHA_LENGTH)],
- }
+ self.branch_name()
+ .unwrap_or(&self.sha[..self.sha.len().min(SHORT_SHA_LENGTH)])
}
}
@@ -716,9 +753,8 @@ pub trait GitRepository: Send + Sync {
fn create_worktree(
&self,
- branch_name: Option,
+ target: CreateWorktreeTarget,
path: PathBuf,
- from_commit: Option,
) -> BoxFuture<'_, Result<()>>;
fn remove_worktree(&self, path: PathBuf, force: bool) -> BoxFuture<'_, Result<()>>;
@@ -1667,24 +1703,36 @@ impl GitRepository for RealGitRepository {
fn create_worktree(
&self,
- branch_name: Option,
+ target: CreateWorktreeTarget,
path: PathBuf,
- from_commit: Option,
) -> BoxFuture<'_, Result<()>> {
let git_binary = self.git_binary();
let mut args = vec![OsString::from("worktree"), OsString::from("add")];
- if let Some(branch_name) = &branch_name {
- args.push(OsString::from("-b"));
- args.push(OsString::from(branch_name.as_str()));
- } else {
- args.push(OsString::from("--detach"));
- }
- args.push(OsString::from("--"));
- args.push(OsString::from(path.as_os_str()));
- if let Some(from_commit) = from_commit {
- args.push(OsString::from(from_commit));
- } else {
- args.push(OsString::from("HEAD"));
+
+ match &target {
+ CreateWorktreeTarget::ExistingBranch { branch_name } => {
+ args.push(OsString::from("--"));
+ args.push(OsString::from(path.as_os_str()));
+ args.push(OsString::from(branch_name));
+ }
+ CreateWorktreeTarget::NewBranch {
+ branch_name,
+ base_sha: start_point,
+ } => {
+ args.push(OsString::from("-b"));
+ args.push(OsString::from(branch_name));
+ args.push(OsString::from("--"));
+ args.push(OsString::from(path.as_os_str()));
+ args.push(OsString::from(start_point.as_deref().unwrap_or("HEAD")));
+ }
+ CreateWorktreeTarget::Detached {
+ base_sha: start_point,
+ } => {
+ args.push(OsString::from("--detach"));
+ args.push(OsString::from("--"));
+ args.push(OsString::from(path.as_os_str()));
+ args.push(OsString::from(start_point.as_deref().unwrap_or("HEAD")));
+ }
}
self.executor
@@ -4054,9 +4102,11 @@ mod tests {
// Create a new worktree
repo.create_worktree(
- Some("test-branch".to_string()),
+ CreateWorktreeTarget::NewBranch {
+ branch_name: "test-branch".to_string(),
+ base_sha: Some("HEAD".to_string()),
+ },
worktree_path.clone(),
- Some("HEAD".to_string()),
)
.await
.unwrap();
@@ -4113,9 +4163,11 @@ mod tests {
// Create a worktree
let worktree_path = worktrees_dir.join("worktree-to-remove");
repo.create_worktree(
- Some("to-remove".to_string()),
+ CreateWorktreeTarget::NewBranch {
+ branch_name: "to-remove".to_string(),
+ base_sha: Some("HEAD".to_string()),
+ },
worktree_path.clone(),
- Some("HEAD".to_string()),
)
.await
.unwrap();
@@ -4137,9 +4189,11 @@ mod tests {
// Create a worktree
let worktree_path = worktrees_dir.join("dirty-wt");
repo.create_worktree(
- Some("dirty-wt".to_string()),
+ CreateWorktreeTarget::NewBranch {
+ branch_name: "dirty-wt".to_string(),
+ base_sha: Some("HEAD".to_string()),
+ },
worktree_path.clone(),
- Some("HEAD".to_string()),
)
.await
.unwrap();
@@ -4207,9 +4261,11 @@ mod tests {
// Create a worktree
let old_path = worktrees_dir.join("old-worktree-name");
repo.create_worktree(
- Some("old-name".to_string()),
+ CreateWorktreeTarget::NewBranch {
+ branch_name: "old-name".to_string(),
+ base_sha: Some("HEAD".to_string()),
+ },
old_path.clone(),
- Some("HEAD".to_string()),
)
.await
.unwrap();
diff --git a/crates/git_ui/src/worktree_picker.rs b/crates/git_ui/src/worktree_picker.rs
index 1b4497be1f4ea96bd4f0431c97bb538eda9faa57..bd1d694fa30bb914569fbb5e6e3c67de3e3d86a0 100644
--- a/crates/git_ui/src/worktree_picker.rs
+++ b/crates/git_ui/src/worktree_picker.rs
@@ -318,8 +318,13 @@ impl WorktreeListDelegate {
.clone();
let new_worktree_path =
repo.path_for_new_linked_worktree(&branch, &worktree_directory_setting)?;
- let receiver =
- repo.create_worktree(branch.clone(), new_worktree_path.clone(), commit);
+ let receiver = repo.create_worktree(
+ git::repository::CreateWorktreeTarget::NewBranch {
+ branch_name: branch.clone(),
+ base_sha: commit,
+ },
+ new_worktree_path.clone(),
+ );
anyhow::Ok((receiver, new_worktree_path))
})?;
receiver.await??;
diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs
index e7e84ffe673881d898a56b64892887b9c8d6c809..8da5a14e41d9cb97865d78f4dfc2ed79f76faebd 100644
--- a/crates/project/src/git_store.rs
+++ b/crates/project/src/git_store.rs
@@ -32,10 +32,10 @@ use git::{
blame::Blame,
parse_git_remote_url,
repository::{
- Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
- GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
- LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode, SearchCommitArgs,
- UpstreamTrackingStatus, Worktree as GitWorktree,
+ Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, CreateWorktreeTarget,
+ DiffType, FetchOptions, GitRepository, GitRepositoryCheckpoint, GraphCommitData,
+ InitialGraphCommitData, LogOrder, LogSource, PushOptions, Remote, RemoteCommandOutput,
+ RepoPath, ResetMode, SearchCommitArgs, UpstreamTrackingStatus, Worktree as GitWorktree,
},
stash::{GitStash, StashEntry},
status::{
@@ -329,12 +329,6 @@ pub struct GraphDataResponse<'a> {
pub error: Option,
}
-#[derive(Clone, Debug)]
-enum CreateWorktreeStartPoint {
- Detached,
- Branched { name: String },
-}
-
pub struct Repository {
this: WeakEntity,
snapshot: RepositorySnapshot,
@@ -2414,18 +2408,23 @@ impl GitStore {
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
let directory = PathBuf::from(envelope.payload.directory);
- let start_point = if envelope.payload.name.is_empty() {
- CreateWorktreeStartPoint::Detached
+ let name = envelope.payload.name;
+ let commit = envelope.payload.commit;
+ let use_existing_branch = envelope.payload.use_existing_branch;
+ let target = if name.is_empty() {
+ CreateWorktreeTarget::Detached { base_sha: commit }
+ } else if use_existing_branch {
+ CreateWorktreeTarget::ExistingBranch { branch_name: name }
} else {
- CreateWorktreeStartPoint::Branched {
- name: envelope.payload.name,
+ CreateWorktreeTarget::NewBranch {
+ branch_name: name,
+ base_sha: commit,
}
};
- let commit = envelope.payload.commit;
repository_handle
.update(&mut cx, |repository_handle, _| {
- repository_handle.create_worktree_with_start_point(start_point, directory, commit)
+ repository_handle.create_worktree(target, directory)
})
.await??;
@@ -6004,50 +6003,43 @@ impl Repository {
})
}
- fn create_worktree_with_start_point(
+ pub fn create_worktree(
&mut self,
- start_point: CreateWorktreeStartPoint,
+ target: CreateWorktreeTarget,
path: PathBuf,
- commit: Option,
) -> oneshot::Receiver> {
- if matches!(
- &start_point,
- CreateWorktreeStartPoint::Branched { name } if name.is_empty()
- ) {
- let (sender, receiver) = oneshot::channel();
- sender
- .send(Err(anyhow!("branch name cannot be empty")))
- .ok();
- return receiver;
- }
-
let id = self.id;
- let message = match &start_point {
- CreateWorktreeStartPoint::Detached => "git worktree add (detached)".into(),
- CreateWorktreeStartPoint::Branched { name } => {
- format!("git worktree add: {name}").into()
- }
+ let job_description = match target.branch_name() {
+ Some(branch_name) => format!("git worktree add: {branch_name}"),
+ None => "git worktree add (detached)".to_string(),
};
-
- self.send_job(Some(message), move |repo, _cx| async move {
- let branch_name = match start_point {
- CreateWorktreeStartPoint::Detached => None,
- CreateWorktreeStartPoint::Branched { name } => Some(name),
- };
- let remote_name = branch_name.clone().unwrap_or_default();
-
+ self.send_job(Some(job_description.into()), move |repo, _cx| async move {
match repo {
RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
- backend.create_worktree(branch_name, path, commit).await
+ backend.create_worktree(target, path).await
}
RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
+ let (name, commit, use_existing_branch) = match target {
+ CreateWorktreeTarget::ExistingBranch { branch_name } => {
+ (branch_name, None, true)
+ }
+ CreateWorktreeTarget::NewBranch {
+ branch_name,
+ base_sha: start_point,
+ } => (branch_name, start_point, false),
+ CreateWorktreeTarget::Detached {
+ base_sha: start_point,
+ } => (String::new(), start_point, false),
+ };
+
client
.request(proto::GitCreateWorktree {
project_id: project_id.0,
repository_id: id.to_proto(),
- name: remote_name,
+ name,
directory: path.to_string_lossy().to_string(),
commit,
+ use_existing_branch,
})
.await?;
@@ -6057,28 +6049,16 @@ impl Repository {
})
}
- pub fn create_worktree(
- &mut self,
- branch_name: String,
- path: PathBuf,
- commit: Option,
- ) -> oneshot::Receiver> {
- self.create_worktree_with_start_point(
- CreateWorktreeStartPoint::Branched { name: branch_name },
- path,
- commit,
- )
- }
-
pub fn create_worktree_detached(
&mut self,
path: PathBuf,
commit: String,
) -> oneshot::Receiver> {
- self.create_worktree_with_start_point(
- CreateWorktreeStartPoint::Detached,
+ self.create_worktree(
+ CreateWorktreeTarget::Detached {
+ base_sha: Some(commit),
+ },
path,
- Some(commit),
)
}
diff --git a/crates/project/tests/integration/git_store.rs b/crates/project/tests/integration/git_store.rs
index 02f752b28b24a8135e2cba9307a5eacdc16f0fa3..bbe5c64d7cf7f5b2ffa9160df6130cd88ddc5d69 100644
--- a/crates/project/tests/integration/git_store.rs
+++ b/crates/project/tests/integration/git_store.rs
@@ -1267,9 +1267,11 @@ mod git_worktrees {
cx.update(|cx| {
repository.update(cx, |repository, _| {
repository.create_worktree(
- "feature-branch".to_string(),
+ git::repository::CreateWorktreeTarget::NewBranch {
+ branch_name: "feature-branch".to_string(),
+ base_sha: Some("abc123".to_string()),
+ },
worktree_1_directory.clone(),
- Some("abc123".to_string()),
)
})
})
@@ -1297,9 +1299,11 @@ mod git_worktrees {
cx.update(|cx| {
repository.update(cx, |repository, _| {
repository.create_worktree(
- "bugfix-branch".to_string(),
+ git::repository::CreateWorktreeTarget::NewBranch {
+ branch_name: "bugfix-branch".to_string(),
+ base_sha: None,
+ },
worktree_2_directory.clone(),
- None,
)
})
})
diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto
index 9324feb21b1f50ac1041ed0afc8b59cb9b7fe2c6..d0a594a2817ec50d9d35383587619e311f2950d8 100644
--- a/crates/proto/proto/git.proto
+++ b/crates/proto/proto/git.proto
@@ -594,6 +594,7 @@ message GitCreateWorktree {
string name = 3;
string directory = 4;
optional string commit = 5;
+ bool use_existing_branch = 6;
}
message GitCreateCheckpoint {
diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs
index b59123a1a159487f802210f3916e16856daf8e61..9f69cd3458c194228f37cfdeedcf0c9023b9b7bd 100644
--- a/crates/zed/src/visual_test_runner.rs
+++ b/crates/zed/src/visual_test_runner.rs
@@ -3080,7 +3080,7 @@ fn run_start_thread_in_selector_visual_tests(
cx: &mut VisualTestAppContext,
update_baseline: bool,
) -> Result {
- use agent_ui::{AgentPanel, StartThreadIn, WorktreeCreationStatus};
+ use agent_ui::{AgentPanel, NewWorktreeBranchTarget, StartThreadIn, WorktreeCreationStatus};
// Enable feature flags so the thread target selector renders
cx.update(|cx| {
@@ -3401,7 +3401,13 @@ edition = "2021"
cx.update_window(workspace_window.into(), |_, _window, cx| {
panel.update(cx, |panel, cx| {
- panel.set_start_thread_in_for_tests(StartThreadIn::NewWorktree, cx);
+ panel.set_start_thread_in_for_tests(
+ StartThreadIn::NewWorktree {
+ worktree_name: None,
+ branch_target: NewWorktreeBranchTarget::default(),
+ },
+ cx,
+ );
});
})?;
cx.run_until_parked();
@@ -3474,7 +3480,13 @@ edition = "2021"
cx.run_until_parked();
cx.update_window(workspace_window.into(), |_, window, cx| {
- window.dispatch_action(Box::new(StartThreadIn::NewWorktree), cx);
+ window.dispatch_action(
+ Box::new(StartThreadIn::NewWorktree {
+ worktree_name: None,
+ branch_target: NewWorktreeBranchTarget::default(),
+ }),
+ cx,
+ );
})?;
cx.run_until_parked();
From 9c731640c7f5a4d91a94b3e68fa92eb8bc5e38ee Mon Sep 17 00:00:00 2001
From: Shardul Vaidya <31039336+5herlocked@users.noreply.github.com>
Date: Tue, 7 Apr 2026 05:59:12 -0400
Subject: [PATCH 07/22] bedrock: Add new Bedrock models (NVIDIA, Z.AI, Mistral,
MiniMax) (#53043)
Add 9 new models across 2 new providers (NVIDIA, Z.AI) and expanded
coverage for existing providers (Mistral, MiniMax):
- NVIDIA Nemotron Super 3 120B, Nemotron Nano 3 30B
- Mistral Devstral 2 123B, Ministral 14B
- MiniMax M2.1, M2.5
- Z.AI GLM 5, GLM 4.7, GLM 4.7 Flash
Self-Review Checklist:
- [x] I've reviewed my own diff for quality, security, and reliability
- [x] Unsafe blocks (if any) have justifying comments
- [x] The content is consistent with the [UI/UX
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
- [x] Tests cover the new/changed behavior
- [x] Performance impact has been considered and is acceptable
Closes #ISSUE
Release Notes:
- bedrock: Added 9 new models across 2 new providers (NVIDIA, Z.AI) and
expanded coverage for existing providers (Mistral, MiniMax)
---
crates/bedrock/src/models.rs | 64 ++++++++++++++++++++++++++++++++++--
1 file changed, 61 insertions(+), 3 deletions(-)
diff --git a/crates/bedrock/src/models.rs b/crates/bedrock/src/models.rs
index 8b6113e4d5521fb3c7e27a7f2f6547c7a9db86ce..7c1e6e0e4e6ef873345c30c0af4c9e8842699c77 100644
--- a/crates/bedrock/src/models.rs
+++ b/crates/bedrock/src/models.rs
@@ -113,6 +113,10 @@ pub enum Model {
MistralLarge3,
#[serde(rename = "pixtral-large")]
PixtralLarge,
+ #[serde(rename = "devstral-2-123b")]
+ Devstral2_123B,
+ #[serde(rename = "ministral-14b")]
+ Ministral14B,
// Qwen models
#[serde(rename = "qwen3-32b")]
@@ -146,9 +150,27 @@ pub enum Model {
#[serde(rename = "gpt-oss-120b")]
GptOss120B,
+ // NVIDIA Nemotron models
+ #[serde(rename = "nemotron-super-3-120b")]
+ NemotronSuper3_120B,
+ #[serde(rename = "nemotron-nano-3-30b")]
+ NemotronNano3_30B,
+
// MiniMax models
#[serde(rename = "minimax-m2")]
MiniMaxM2,
+ #[serde(rename = "minimax-m2-1")]
+ MiniMaxM2_1,
+ #[serde(rename = "minimax-m2-5")]
+ MiniMaxM2_5,
+
+ // Z.AI GLM models
+ #[serde(rename = "glm-5")]
+ GLM5,
+ #[serde(rename = "glm-4-7")]
+ GLM4_7,
+ #[serde(rename = "glm-4-7-flash")]
+ GLM4_7Flash,
// Moonshot models
#[serde(rename = "kimi-k2-thinking")]
@@ -217,6 +239,8 @@ impl Model {
Self::MagistralSmall => "magistral-small",
Self::MistralLarge3 => "mistral-large-3",
Self::PixtralLarge => "pixtral-large",
+ Self::Devstral2_123B => "devstral-2-123b",
+ Self::Ministral14B => "ministral-14b",
Self::Qwen3_32B => "qwen3-32b",
Self::Qwen3VL235B => "qwen3-vl-235b",
Self::Qwen3_235B => "qwen3-235b",
@@ -230,7 +254,14 @@ impl Model {
Self::Nova2Lite => "nova-2-lite",
Self::GptOss20B => "gpt-oss-20b",
Self::GptOss120B => "gpt-oss-120b",
+ Self::NemotronSuper3_120B => "nemotron-super-3-120b",
+ Self::NemotronNano3_30B => "nemotron-nano-3-30b",
Self::MiniMaxM2 => "minimax-m2",
+ Self::MiniMaxM2_1 => "minimax-m2-1",
+ Self::MiniMaxM2_5 => "minimax-m2-5",
+ Self::GLM5 => "glm-5",
+ Self::GLM4_7 => "glm-4-7",
+ Self::GLM4_7Flash => "glm-4-7-flash",
Self::KimiK2Thinking => "kimi-k2-thinking",
Self::KimiK2_5 => "kimi-k2-5",
Self::DeepSeekR1 => "deepseek-r1",
@@ -257,6 +288,8 @@ impl Model {
Self::MagistralSmall => "mistral.magistral-small-2509",
Self::MistralLarge3 => "mistral.mistral-large-3-675b-instruct",
Self::PixtralLarge => "mistral.pixtral-large-2502-v1:0",
+ Self::Devstral2_123B => "mistral.devstral-2-123b",
+ Self::Ministral14B => "mistral.ministral-3-14b-instruct",
Self::Qwen3VL235B => "qwen.qwen3-vl-235b-a22b",
Self::Qwen3_32B => "qwen.qwen3-32b-v1:0",
Self::Qwen3_235B => "qwen.qwen3-235b-a22b-2507-v1:0",
@@ -270,7 +303,14 @@ impl Model {
Self::Nova2Lite => "amazon.nova-2-lite-v1:0",
Self::GptOss20B => "openai.gpt-oss-20b-1:0",
Self::GptOss120B => "openai.gpt-oss-120b-1:0",
+ Self::NemotronSuper3_120B => "nvidia.nemotron-super-3-120b",
+ Self::NemotronNano3_30B => "nvidia.nemotron-nano-3-30b",
Self::MiniMaxM2 => "minimax.minimax-m2",
+ Self::MiniMaxM2_1 => "minimax.minimax-m2.1",
+ Self::MiniMaxM2_5 => "minimax.minimax-m2.5",
+ Self::GLM5 => "zai.glm-5",
+ Self::GLM4_7 => "zai.glm-4.7",
+ Self::GLM4_7Flash => "zai.glm-4.7-flash",
Self::KimiK2Thinking => "moonshot.kimi-k2-thinking",
Self::KimiK2_5 => "moonshotai.kimi-k2.5",
Self::DeepSeekR1 => "deepseek.r1-v1:0",
@@ -297,6 +337,8 @@ impl Model {
Self::MagistralSmall => "Magistral Small",
Self::MistralLarge3 => "Mistral Large 3",
Self::PixtralLarge => "Pixtral Large",
+ Self::Devstral2_123B => "Devstral 2 123B",
+ Self::Ministral14B => "Ministral 14B",
Self::Qwen3VL235B => "Qwen3 VL 235B",
Self::Qwen3_32B => "Qwen3 32B",
Self::Qwen3_235B => "Qwen3 235B",
@@ -310,7 +352,14 @@ impl Model {
Self::Nova2Lite => "Amazon Nova 2 Lite",
Self::GptOss20B => "GPT OSS 20B",
Self::GptOss120B => "GPT OSS 120B",
+ Self::NemotronSuper3_120B => "Nemotron Super 3 120B",
+ Self::NemotronNano3_30B => "Nemotron Nano 3 30B",
Self::MiniMaxM2 => "MiniMax M2",
+ Self::MiniMaxM2_1 => "MiniMax M2.1",
+ Self::MiniMaxM2_5 => "MiniMax M2.5",
+ Self::GLM5 => "GLM 5",
+ Self::GLM4_7 => "GLM 4.7",
+ Self::GLM4_7Flash => "GLM 4.7 Flash",
Self::KimiK2Thinking => "Kimi K2 Thinking",
Self::KimiK2_5 => "Kimi K2.5",
Self::DeepSeekR1 => "DeepSeek R1",
@@ -338,6 +387,7 @@ impl Model {
Self::Llama4Scout17B | Self::Llama4Maverick17B => 128_000,
Self::Gemma3_4B | Self::Gemma3_12B | Self::Gemma3_27B => 128_000,
Self::MagistralSmall | Self::MistralLarge3 | Self::PixtralLarge => 128_000,
+ Self::Devstral2_123B | Self::Ministral14B => 256_000,
Self::Qwen3_32B
| Self::Qwen3VL235B
| Self::Qwen3_235B
@@ -349,7 +399,9 @@ impl Model {
Self::NovaPremier => 1_000_000,
Self::Nova2Lite => 300_000,
Self::GptOss20B | Self::GptOss120B => 128_000,
- Self::MiniMaxM2 => 128_000,
+ Self::NemotronSuper3_120B | Self::NemotronNano3_30B => 262_000,
+ Self::MiniMaxM2 | Self::MiniMaxM2_1 | Self::MiniMaxM2_5 => 196_000,
+ Self::GLM5 | Self::GLM4_7 | Self::GLM4_7Flash => 203_000,
Self::KimiK2Thinking | Self::KimiK2_5 => 128_000,
Self::DeepSeekR1 | Self::DeepSeekV3_1 | Self::DeepSeekV3_2 => 128_000,
Self::Custom { max_tokens, .. } => *max_tokens,
@@ -373,6 +425,7 @@ impl Model {
| Self::MagistralSmall
| Self::MistralLarge3
| Self::PixtralLarge => 8_192,
+ Self::Devstral2_123B | Self::Ministral14B => 131_000,
Self::Qwen3_32B
| Self::Qwen3VL235B
| Self::Qwen3_235B
@@ -382,7 +435,9 @@ impl Model {
| Self::Qwen3Coder480B => 8_192,
Self::NovaLite | Self::NovaPro | Self::NovaPremier | Self::Nova2Lite => 5_000,
Self::GptOss20B | Self::GptOss120B => 16_000,
- Self::MiniMaxM2 => 16_000,
+ Self::NemotronSuper3_120B | Self::NemotronNano3_30B => 131_000,
+ Self::MiniMaxM2 | Self::MiniMaxM2_1 | Self::MiniMaxM2_5 => 98_000,
+ Self::GLM5 | Self::GLM4_7 | Self::GLM4_7Flash => 101_000,
Self::KimiK2Thinking | Self::KimiK2_5 => 16_000,
Self::DeepSeekR1 | Self::DeepSeekV3_1 | Self::DeepSeekV3_2 => 16_000,
Self::Custom {
@@ -419,6 +474,7 @@ impl Model {
| Self::ClaudeSonnet4_6 => true,
Self::NovaLite | Self::NovaPro | Self::NovaPremier | Self::Nova2Lite => true,
Self::MistralLarge3 | Self::PixtralLarge | Self::MagistralSmall => true,
+ Self::Devstral2_123B | Self::Ministral14B => true,
// Gemma accepts toolConfig without error but produces unreliable tool
// calls -- malformed JSON args, hallucinated tool names, dropped calls.
Self::Qwen3_32B
@@ -428,7 +484,9 @@ impl Model {
| Self::Qwen3Coder30B
| Self::Qwen3CoderNext
| Self::Qwen3Coder480B => true,
- Self::MiniMaxM2 => true,
+ Self::MiniMaxM2 | Self::MiniMaxM2_1 | Self::MiniMaxM2_5 => true,
+ Self::NemotronSuper3_120B | Self::NemotronNano3_30B => true,
+ Self::GLM5 | Self::GLM4_7 | Self::GLM4_7Flash => true,
Self::KimiK2Thinking | Self::KimiK2_5 => true,
Self::DeepSeekR1 | Self::DeepSeekV3_1 | Self::DeepSeekV3_2 => true,
_ => false,
From 93438829c75f7f73dc14bba3c79b4626709a4b4e Mon Sep 17 00:00:00 2001
From: Bhuminjay Soni
Date: Tue, 7 Apr 2026 15:35:02 +0530
Subject: [PATCH 08/22] Add fuzzy_nucleo crate for order independent file
finder search (#51164)
Closes #14428
Before you mark this PR as ready for review, make sure that you have:
- [ ] Added a solid test coverage and/or screenshots from doing manual
testing
https://github.com/user-attachments/assets/7e0d67ff-cc4e-4609-880d-5c1794c64dcf
- [x] Done a self-review taking into account security and performance
aspects
- [x] Aligned any UI changes with the [UI
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
Release Notes:
- Adds a new `fuzzy_nucleo` crate that implements order-independent path
matching using the `nucleo` library. It is currently integrated for the
file finder.
---------
Signed-off-by: Bhuminjay
Signed-off-by: 11happy
---
Cargo.lock | 32 ++
Cargo.toml | 3 +
crates/file_finder/Cargo.toml | 1 +
crates/file_finder/src/file_finder.rs | 69 ++--
crates/file_finder/src/file_finder_tests.rs | 230 +++++++++++++
crates/fuzzy_nucleo/Cargo.toml | 21 ++
crates/fuzzy_nucleo/LICENSE-GPL | 1 +
crates/fuzzy_nucleo/src/fuzzy_nucleo.rs | 5 +
crates/fuzzy_nucleo/src/matcher.rs | 39 +++
crates/fuzzy_nucleo/src/paths.rs | 352 ++++++++++++++++++++
crates/project/Cargo.toml | 1 +
crates/project/src/project.rs | 70 ++++
12 files changed, 774 insertions(+), 50 deletions(-)
create mode 100644 crates/fuzzy_nucleo/Cargo.toml
create mode 120000 crates/fuzzy_nucleo/LICENSE-GPL
create mode 100644 crates/fuzzy_nucleo/src/fuzzy_nucleo.rs
create mode 100644 crates/fuzzy_nucleo/src/matcher.rs
create mode 100644 crates/fuzzy_nucleo/src/paths.rs
diff --git a/Cargo.lock b/Cargo.lock
index 97412711a55667a4976a35313eb6c0388acc74ef..cbc494f9dc0fc1858a846fabe168b3538de4dbe5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -6183,6 +6183,7 @@ dependencies = [
"file_icons",
"futures 0.3.32",
"fuzzy",
+ "fuzzy_nucleo",
"gpui",
"menu",
"open_path_prompt",
@@ -6740,6 +6741,15 @@ dependencies = [
"thread_local",
]
+[[package]]
+name = "fuzzy_nucleo"
+version = "0.1.0"
+dependencies = [
+ "gpui",
+ "nucleo",
+ "util",
+]
+
[[package]]
name = "gaoya"
version = "0.2.0"
@@ -11063,6 +11073,27 @@ dependencies = [
"windows-sys 0.61.2",
]
+[[package]]
+name = "nucleo"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5262af4c94921c2646c5ac6ff7900c2af9cbb08dc26a797e18130a7019c039d4"
+dependencies = [
+ "nucleo-matcher",
+ "parking_lot",
+ "rayon",
+]
+
+[[package]]
+name = "nucleo-matcher"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf33f538733d1a5a3494b836ba913207f14d9d4a1d3cd67030c5061bdd2cac85"
+dependencies = [
+ "memchr",
+ "unicode-segmentation",
+]
+
[[package]]
name = "num"
version = "0.4.3"
@@ -13203,6 +13234,7 @@ dependencies = [
"fs",
"futures 0.3.32",
"fuzzy",
+ "fuzzy_nucleo",
"git",
"git2",
"git_hosting_providers",
diff --git a/Cargo.toml b/Cargo.toml
index 5cb5b991b645ec1b78b16f48493c7c8dc1426344..4c75dafae5df4d63815e0da5cabb95ccdad25e9d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -78,6 +78,7 @@ members = [
"crates/fs",
"crates/fs_benchmarks",
"crates/fuzzy",
+ "crates/fuzzy_nucleo",
"crates/git",
"crates/git_graph",
"crates/git_hosting_providers",
@@ -325,6 +326,7 @@ file_finder = { path = "crates/file_finder" }
file_icons = { path = "crates/file_icons" }
fs = { path = "crates/fs" }
fuzzy = { path = "crates/fuzzy" }
+fuzzy_nucleo = { path = "crates/fuzzy_nucleo" }
git = { path = "crates/git" }
git_graph = { path = "crates/git_graph" }
git_hosting_providers = { path = "crates/git_hosting_providers" }
@@ -609,6 +611,7 @@ naga = { version = "29.0", features = ["wgsl-in"] }
nanoid = "0.4"
nbformat = "1.2.0"
nix = "0.29"
+nucleo = "0.5"
num-format = "0.4.4"
objc = "0.2"
objc2-app-kit = { version = "0.3", default-features = false, features = [ "NSGraphics" ] }
diff --git a/crates/file_finder/Cargo.toml b/crates/file_finder/Cargo.toml
index 5eb36f0f5150263629b407dbe07dc73b6eff31cf..67ebab62295e8db90a12f99cbc05e9b9e56c2c6b 100644
--- a/crates/file_finder/Cargo.toml
+++ b/crates/file_finder/Cargo.toml
@@ -21,6 +21,7 @@ editor.workspace = true
file_icons.workspace = true
futures.workspace = true
fuzzy.workspace = true
+fuzzy_nucleo.workspace = true
gpui.workspace = true
menu.workspace = true
open_path_prompt.workspace = true
diff --git a/crates/file_finder/src/file_finder.rs b/crates/file_finder/src/file_finder.rs
index 4302669ddc11c94f7df128534217d00c27ef083a..a4d9ea042dea898b9dd9db7d40354cf960d210d5 100644
--- a/crates/file_finder/src/file_finder.rs
+++ b/crates/file_finder/src/file_finder.rs
@@ -9,7 +9,8 @@ use client::ChannelId;
use collections::HashMap;
use editor::Editor;
use file_icons::FileIcons;
-use fuzzy::{CharBag, PathMatch, PathMatchCandidate, StringMatch, StringMatchCandidate};
+use fuzzy::{StringMatch, StringMatchCandidate};
+use fuzzy_nucleo::{PathMatch, PathMatchCandidate};
use gpui::{
Action, AnyElement, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
KeyContext, Modifiers, ModifiersChangedEvent, ParentElement, Render, Styled, Task, WeakEntity,
@@ -663,15 +664,6 @@ impl Matches {
// For file-vs-file matches, use the existing detailed comparison.
if let (Some(a_panel), Some(b_panel)) = (a.panel_match(), b.panel_match()) {
- let a_in_filename = Self::is_filename_match(a_panel);
- let b_in_filename = Self::is_filename_match(b_panel);
-
- match (a_in_filename, b_in_filename) {
- (true, false) => return cmp::Ordering::Greater,
- (false, true) => return cmp::Ordering::Less,
- _ => {}
- }
-
return a_panel.cmp(b_panel);
}
@@ -691,32 +683,6 @@ impl Matches {
Match::CreateNew(_) => 0.0,
}
}
-
- /// Determines if the match occurred within the filename rather than in the path
- fn is_filename_match(panel_match: &ProjectPanelOrdMatch) -> bool {
- if panel_match.0.positions.is_empty() {
- return false;
- }
-
- if let Some(filename) = panel_match.0.path.file_name() {
- let path_str = panel_match.0.path.as_unix_str();
-
- if let Some(filename_pos) = path_str.rfind(filename)
- && panel_match.0.positions[0] >= filename_pos
- {
- let mut prev_position = panel_match.0.positions[0];
- for p in &panel_match.0.positions[1..] {
- if *p != prev_position + 1 {
- return false;
- }
- prev_position = *p;
- }
- return true;
- }
- }
-
- false
- }
}
fn matching_history_items<'a>(
@@ -731,25 +697,16 @@ fn matching_history_items<'a>(
let history_items_by_worktrees = history_items
.into_iter()
.chain(currently_opened)
- .filter_map(|found_path| {
+ .map(|found_path| {
let candidate = PathMatchCandidate {
is_dir: false, // You can't open directories as project items
path: &found_path.project.path,
// Only match history items names, otherwise their paths may match too many queries, producing false positives.
// E.g. `foo` would match both `something/foo/bar.rs` and `something/foo/foo.rs` and if the former is a history item,
// it would be shown first always, despite the latter being a better match.
- char_bag: CharBag::from_iter(
- found_path
- .project
- .path
- .file_name()?
- .to_string()
- .to_lowercase()
- .chars(),
- ),
};
candidates_paths.insert(&found_path.project, found_path);
- Some((found_path.project.worktree_id, candidate))
+ (found_path.project.worktree_id, candidate)
})
.fold(
HashMap::default(),
@@ -767,8 +724,9 @@ fn matching_history_items<'a>(
let worktree_root_name = worktree_name_by_id
.as_ref()
.and_then(|w| w.get(&worktree).cloned());
+
matching_history_paths.extend(
- fuzzy::match_fixed_path_set(
+ fuzzy_nucleo::match_fixed_path_set(
candidates,
worktree.to_usize(),
worktree_root_name,
@@ -778,6 +736,18 @@ fn matching_history_items<'a>(
path_style,
)
.into_iter()
+ // filter matches where at least one matched position is in filename portion, to prevent directory matches, nucleo scores them higher as history items are matched against their full path
+ .filter(|path_match| {
+ if let Some(filename) = path_match.path.file_name() {
+ let filename_start = path_match.path.as_unix_str().len() - filename.len();
+ path_match
+ .positions
+ .iter()
+ .any(|&pos| pos >= filename_start)
+ } else {
+ true
+ }
+ })
.filter_map(|path_match| {
candidates_paths
.remove_entry(&ProjectPath {
@@ -940,7 +910,7 @@ impl FileFinderDelegate {
self.cancel_flag = Arc::new(AtomicBool::new(false));
let cancel_flag = self.cancel_flag.clone();
cx.spawn_in(window, async move |picker, cx| {
- let matches = fuzzy::match_path_sets(
+ let matches = fuzzy_nucleo::match_path_sets(
candidate_sets.as_slice(),
query.path_query(),
&relative_to,
@@ -1452,7 +1422,6 @@ impl PickerDelegate for FileFinderDelegate {
window: &mut Window,
cx: &mut Context>,
) -> Task<()> {
- let raw_query = raw_query.replace(' ', "");
let raw_query = raw_query.trim();
let raw_query = match &raw_query.get(0..2) {
diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs
index cd9cdeee1ff266717d380aeaecf7cbeb66ec8309..7a17202a5e4ba96b001ea46ed310518d02baf1ff 100644
--- a/crates/file_finder/src/file_finder_tests.rs
+++ b/crates/file_finder/src/file_finder_tests.rs
@@ -4161,3 +4161,233 @@ async fn test_clear_navigation_history(cx: &mut TestAppContext) {
"Should have no history items after clearing"
);
}
+
+#[gpui::test]
+async fn test_order_independent_search(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/src",
+ json!({
+ "internal": {
+ "auth": {
+ "login.rs": "",
+ }
+ }
+ }),
+ )
+ .await;
+ let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let (picker, _, cx) = build_find_picker(project, cx);
+
+ // forward order
+ picker
+ .update_in(cx, |picker, window, cx| {
+ picker
+ .delegate
+ .spawn_search(test_path_position("auth internal"), window, cx)
+ })
+ .await;
+ picker.update(cx, |picker, _| {
+ let matches = collect_search_matches(picker).search_matches_only();
+ assert_eq!(matches.len(), 1);
+ assert_eq!(matches[0].path.as_unix_str(), "internal/auth/login.rs");
+ });
+
+ // reverse order should give same result
+ picker
+ .update_in(cx, |picker, window, cx| {
+ picker
+ .delegate
+ .spawn_search(test_path_position("internal auth"), window, cx)
+ })
+ .await;
+ picker.update(cx, |picker, _| {
+ let matches = collect_search_matches(picker).search_matches_only();
+ assert_eq!(matches.len(), 1);
+ assert_eq!(matches[0].path.as_unix_str(), "internal/auth/login.rs");
+ });
+}
+
+#[gpui::test]
+async fn test_filename_preferred_over_directory_match(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/src",
+ json!({
+ "crates": {
+ "settings_ui": {
+ "src": {
+ "pages": {
+ "audio_test_window.rs": "",
+ "audio_input_output_setup.rs": "",
+ }
+ }
+ },
+ "audio": {
+ "src": {
+ "audio_settings.rs": "",
+ }
+ }
+ }
+ }),
+ )
+ .await;
+ let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let (picker, _, cx) = build_find_picker(project, cx);
+
+ picker
+ .update_in(cx, |picker, window, cx| {
+ picker
+ .delegate
+ .spawn_search(test_path_position("settings audio"), window, cx)
+ })
+ .await;
+ picker.update(cx, |picker, _| {
+ let matches = collect_search_matches(picker).search_matches_only();
+ assert!(!matches.is_empty(),);
+ assert_eq!(
+ matches[0].path.as_unix_str(),
+ "crates/audio/src/audio_settings.rs"
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_start_of_word_preferred_over_scattered_match(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/src",
+ json!({
+ "crates": {
+ "livekit_client": {
+ "src": {
+ "livekit_client": {
+ "playback.rs": "",
+ }
+ }
+ },
+ "vim": {
+ "test_data": {
+ "test_record_replay_interleaved.json": "",
+ }
+ }
+ }
+ }),
+ )
+ .await;
+ let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let (picker, _, cx) = build_find_picker(project, cx);
+
+ picker
+ .update_in(cx, |picker, window, cx| {
+ picker
+ .delegate
+ .spawn_search(test_path_position("live pla"), window, cx)
+ })
+ .await;
+ picker.update(cx, |picker, _| {
+ let matches = collect_search_matches(picker).search_matches_only();
+ assert!(!matches.is_empty(),);
+ assert_eq!(
+ matches[0].path.as_unix_str(),
+ "crates/livekit_client/src/livekit_client/playback.rs",
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_exact_filename_stem_preferred(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/src",
+ json!({
+ "assets": {
+ "icons": {
+ "file_icons": {
+ "nix.svg": "",
+ }
+ }
+ },
+ "crates": {
+ "zed": {
+ "resources": {
+ "app-icon-nightly@2x.png": "",
+ "app-icon-preview@2x.png": "",
+ }
+ }
+ }
+ }),
+ )
+ .await;
+ let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let (picker, _, cx) = build_find_picker(project, cx);
+
+ picker
+ .update_in(cx, |picker, window, cx| {
+ picker
+ .delegate
+ .spawn_search(test_path_position("nix icon"), window, cx)
+ })
+ .await;
+ picker.update(cx, |picker, _| {
+ let matches = collect_search_matches(picker).search_matches_only();
+ assert!(!matches.is_empty(),);
+ assert_eq!(
+ matches[0].path.as_unix_str(),
+ "assets/icons/file_icons/nix.svg",
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_exact_filename_with_directory_token(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/src",
+ json!({
+ "crates": {
+ "agent_servers": {
+ "src": {
+ "acp.rs": "",
+ "agent_server.rs": "",
+ "custom.rs": "",
+ }
+ }
+ }
+ }),
+ )
+ .await;
+ let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ let (picker, _, cx) = build_find_picker(project, cx);
+
+ picker
+ .update_in(cx, |picker, window, cx| {
+ picker
+ .delegate
+ .spawn_search(test_path_position("acp server"), window, cx)
+ })
+ .await;
+ picker.update(cx, |picker, _| {
+ let matches = collect_search_matches(picker).search_matches_only();
+ assert!(!matches.is_empty(),);
+ assert_eq!(
+ matches[0].path.as_unix_str(),
+ "crates/agent_servers/src/acp.rs",
+ );
+ });
+}
diff --git a/crates/fuzzy_nucleo/Cargo.toml b/crates/fuzzy_nucleo/Cargo.toml
new file mode 100644
index 0000000000000000000000000000000000000000..59e8b642524777f449f79edba85093eef069ebff
--- /dev/null
+++ b/crates/fuzzy_nucleo/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "fuzzy_nucleo"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[lib]
+path = "src/fuzzy_nucleo.rs"
+doctest = false
+
+[dependencies]
+nucleo.workspace = true
+gpui.workspace = true
+util.workspace = true
+
+[dev-dependencies]
+util = {workspace = true, features = ["test-support"]}
diff --git a/crates/fuzzy_nucleo/LICENSE-GPL b/crates/fuzzy_nucleo/LICENSE-GPL
new file mode 120000
index 0000000000000000000000000000000000000000..89e542f750cd3860a0598eff0dc34b56d7336dc4
--- /dev/null
+++ b/crates/fuzzy_nucleo/LICENSE-GPL
@@ -0,0 +1 @@
+../../LICENSE-GPL
\ No newline at end of file
diff --git a/crates/fuzzy_nucleo/src/fuzzy_nucleo.rs b/crates/fuzzy_nucleo/src/fuzzy_nucleo.rs
new file mode 100644
index 0000000000000000000000000000000000000000..ddaa5c3489cf55d41d31440f037214b1dce0358c
--- /dev/null
+++ b/crates/fuzzy_nucleo/src/fuzzy_nucleo.rs
@@ -0,0 +1,5 @@
+mod matcher;
+mod paths;
+pub use paths::{
+ PathMatch, PathMatchCandidate, PathMatchCandidateSet, match_fixed_path_set, match_path_sets,
+};
diff --git a/crates/fuzzy_nucleo/src/matcher.rs b/crates/fuzzy_nucleo/src/matcher.rs
new file mode 100644
index 0000000000000000000000000000000000000000..b31da011106341420095bcffbfd012f40014ad6c
--- /dev/null
+++ b/crates/fuzzy_nucleo/src/matcher.rs
@@ -0,0 +1,39 @@
+use std::sync::Mutex;
+
+static MATCHERS: Mutex<Vec<nucleo::Matcher>> = Mutex::new(Vec::new());
+
+pub const LENGTH_PENALTY: f64 = 0.01;
+
+pub fn get_matcher(config: nucleo::Config) -> nucleo::Matcher {
+ let mut matchers = MATCHERS.lock().unwrap();
+ match matchers.pop() {
+ Some(mut matcher) => {
+ matcher.config = config;
+ matcher
+ }
+ None => nucleo::Matcher::new(config),
+ }
+}
+
+pub fn return_matcher(matcher: nucleo::Matcher) {
+ MATCHERS.lock().unwrap().push(matcher);
+}
+
+pub fn get_matchers(n: usize, config: nucleo::Config) -> Vec<nucleo::Matcher> {
+ let mut matchers: Vec<_> = {
+ let mut pool = MATCHERS.lock().unwrap();
+ let available = pool.len().min(n);
+ pool.drain(..available)
+ .map(|mut matcher| {
+ matcher.config = config.clone();
+ matcher
+ })
+ .collect()
+ };
+ matchers.resize_with(n, || nucleo::Matcher::new(config.clone()));
+ matchers
+}
+
+pub fn return_matchers(mut matchers: Vec<nucleo::Matcher>) {
+ MATCHERS.lock().unwrap().append(&mut matchers);
+}
diff --git a/crates/fuzzy_nucleo/src/paths.rs b/crates/fuzzy_nucleo/src/paths.rs
new file mode 100644
index 0000000000000000000000000000000000000000..ac766622c9d12c6e2a119fbcd7dd7fe7a3b5a90d
--- /dev/null
+++ b/crates/fuzzy_nucleo/src/paths.rs
@@ -0,0 +1,352 @@
+use gpui::BackgroundExecutor;
+use std::{
+ cmp::Ordering,
+ sync::{
+ Arc,
+ atomic::{self, AtomicBool},
+ },
+};
+use util::{paths::PathStyle, rel_path::RelPath};
+
+use nucleo::Utf32Str;
+use nucleo::pattern::{Atom, AtomKind, CaseMatching, Normalization};
+
+use crate::matcher::{self, LENGTH_PENALTY};
+
+#[derive(Clone, Debug)]
+pub struct PathMatchCandidate<'a> {
+ pub is_dir: bool,
+ pub path: &'a RelPath,
+}
+
+#[derive(Clone, Debug)]
+pub struct PathMatch {
+ pub score: f64,
+ pub positions: Vec<usize>,
+ pub worktree_id: usize,
+ pub path: Arc<RelPath>,
+ pub path_prefix: Arc<RelPath>,
+ pub is_dir: bool,
+ /// Number of steps removed from a shared parent with the relative path
+ /// Used to order closer paths first in the search list
+ pub distance_to_relative_ancestor: usize,
+}
+
+pub trait PathMatchCandidateSet<'a>: Send + Sync {
+ type Candidates: Iterator<Item = PathMatchCandidate<'a>>;
+ fn id(&self) -> usize;
+ fn len(&self) -> usize;
+ fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+ fn root_is_file(&self) -> bool;
+ fn prefix(&self) -> Arc<RelPath>;
+ fn candidates(&'a self, start: usize) -> Self::Candidates;
+ fn path_style(&self) -> PathStyle;
+}
+
+impl PartialEq for PathMatch {
+ fn eq(&self, other: &Self) -> bool {
+ self.cmp(other).is_eq()
+ }
+}
+
+impl Eq for PathMatch {}
+
+impl PartialOrd for PathMatch {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for PathMatch {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.score
+ .partial_cmp(&other.score)
+ .unwrap_or(Ordering::Equal)
+ .then_with(|| self.worktree_id.cmp(&other.worktree_id))
+ .then_with(|| {
+ other
+ .distance_to_relative_ancestor
+ .cmp(&self.distance_to_relative_ancestor)
+ })
+ .then_with(|| self.path.cmp(&other.path))
+ }
+}
+
+fn make_atoms(query: &str, smart_case: bool) -> Vec<Atom> {
+ let case = if smart_case {
+ CaseMatching::Smart
+ } else {
+ CaseMatching::Ignore
+ };
+ query
+ .split_whitespace()
+ .map(|word| Atom::new(word, case, Normalization::Smart, AtomKind::Fuzzy, false))
+ .collect()
+}
+
+pub(crate) fn distance_between_paths(path: &RelPath, relative_to: &RelPath) -> usize {
+ let mut path_components = path.components();
+ let mut relative_components = relative_to.components();
+
+ while path_components
+ .next()
+ .zip(relative_components.next())
+ .map(|(path_component, relative_component)| path_component == relative_component)
+ .unwrap_or_default()
+ {}
+ path_components.count() + relative_components.count() + 1
+}
+
+fn get_filename_match_bonus(
+ candidate_buf: &str,
+ query_atoms: &[Atom],
+ matcher: &mut nucleo::Matcher,
+) -> f64 {
+ let filename = match std::path::Path::new(candidate_buf).file_name() {
+ Some(f) => f.to_str().unwrap_or(""),
+ None => return 0.0,
+ };
+ if filename.is_empty() || query_atoms.is_empty() {
+ return 0.0;
+ }
+ let mut buf = Vec::new();
+ let haystack = Utf32Str::new(filename, &mut buf);
+ let mut total_score = 0u32;
+ for atom in query_atoms {
+ if let Some(score) = atom.score(haystack, matcher) {
+ total_score = total_score.saturating_add(score as u32);
+ }
+ }
+ total_score as f64 / filename.len().max(1) as f64
+}
+struct Cancelled;
+
+fn path_match_helper<'a>(
+ matcher: &mut nucleo::Matcher,
+ atoms: &[Atom],
+ candidates: impl Iterator<Item = PathMatchCandidate<'a>>,
+ results: &mut Vec<PathMatch>,
+ worktree_id: usize,
+ path_prefix: &Arc<RelPath>,
+ root_is_file: bool,
+ relative_to: &Option<Arc<RelPath>>,
+ path_style: PathStyle,
+ cancel_flag: &AtomicBool,
+) -> Result<(), Cancelled> {
+ let mut candidate_buf = if !path_prefix.is_empty() && !root_is_file {
+ let mut s = path_prefix.display(path_style).to_string();
+ s.push_str(path_style.primary_separator());
+ s
+ } else {
+ String::new()
+ };
+ let path_prefix_len = candidate_buf.len();
+ let mut buf = Vec::new();
+ let mut matched_chars: Vec<u32> = Vec::new();
+ let mut atom_matched_chars = Vec::new();
+ for candidate in candidates {
+ buf.clear();
+ matched_chars.clear();
+ if cancel_flag.load(atomic::Ordering::Relaxed) {
+ return Err(Cancelled);
+ }
+
+ candidate_buf.truncate(path_prefix_len);
+ if root_is_file {
+ candidate_buf.push_str(path_prefix.as_unix_str());
+ } else {
+ candidate_buf.push_str(candidate.path.as_unix_str());
+ }
+
+ let haystack = Utf32Str::new(&candidate_buf, &mut buf);
+
+ let mut total_score: u32 = 0;
+ let mut all_matched = true;
+
+ for atom in atoms {
+ atom_matched_chars.clear();
+ if let Some(score) = atom.indices(haystack, matcher, &mut atom_matched_chars) {
+ total_score = total_score.saturating_add(score as u32);
+ matched_chars.extend_from_slice(&atom_matched_chars);
+ } else {
+ all_matched = false;
+ break;
+ }
+ }
+
+ if all_matched && !atoms.is_empty() {
+ matched_chars.sort_unstable();
+ matched_chars.dedup();
+
+ let length_penalty = candidate_buf.len() as f64 * LENGTH_PENALTY;
+ let filename_bonus = get_filename_match_bonus(&candidate_buf, atoms, matcher);
+ let adjusted_score = total_score as f64 + filename_bonus - length_penalty;
+ let mut positions: Vec<usize> = candidate_buf
+ .char_indices()
+ .enumerate()
+ .filter_map(|(char_offset, (byte_offset, _))| {
+ matched_chars
+ .contains(&(char_offset as u32))
+ .then_some(byte_offset)
+ })
+ .collect();
+ positions.sort_unstable();
+
+ results.push(PathMatch {
+ score: adjusted_score,
+ positions,
+ worktree_id,
+ path: if root_is_file {
+ Arc::clone(path_prefix)
+ } else {
+ candidate.path.into()
+ },
+ path_prefix: if root_is_file {
+ RelPath::empty().into()
+ } else {
+ Arc::clone(path_prefix)
+ },
+ is_dir: candidate.is_dir,
+ distance_to_relative_ancestor: relative_to
+ .as_ref()
+ .map_or(usize::MAX, |relative_to| {
+ distance_between_paths(candidate.path, relative_to.as_ref())
+ }),
+ });
+ }
+ }
+ Ok(())
+}
+
+pub fn match_fixed_path_set(
+ candidates: Vec<PathMatchCandidate<'_>>,
+ worktree_id: usize,
+ worktree_root_name: Option<Arc<RelPath>>,
+ query: &str,
+ smart_case: bool,
+ max_results: usize,
+ path_style: PathStyle,
+) -> Vec<PathMatch> {
+ let mut config = nucleo::Config::DEFAULT;
+ config.set_match_paths();
+ let mut matcher = matcher::get_matcher(config);
+
+ let atoms = make_atoms(query, smart_case);
+
+ let root_is_file = worktree_root_name.is_some() && candidates.iter().all(|c| c.path.is_empty());
+
+ let path_prefix = worktree_root_name.unwrap_or_else(|| RelPath::empty().into());
+
+ let mut results = Vec::new();
+
+ path_match_helper(
+ &mut matcher,
+ &atoms,
+ candidates.into_iter(),
+ &mut results,
+ worktree_id,
+ &path_prefix,
+ root_is_file,
+ &None,
+ path_style,
+ &AtomicBool::new(false),
+ )
+ .ok();
+ util::truncate_to_bottom_n_sorted_by(&mut results, max_results, &|a, b| b.cmp(a));
+ matcher::return_matcher(matcher);
+ results
+}
+
+pub async fn match_path_sets<'a, Set: PathMatchCandidateSet<'a>>(
+ candidate_sets: &'a [Set],
+ query: &str,
+ relative_to: &Option<Arc<RelPath>>,
+ smart_case: bool,
+ max_results: usize,
+ cancel_flag: &AtomicBool,
+ executor: BackgroundExecutor,
+) -> Vec<PathMatch> {
+ let path_count: usize = candidate_sets.iter().map(|s| s.len()).sum();
+ if path_count == 0 {
+ return Vec::new();
+ }
+
+ let path_style = candidate_sets[0].path_style();
+
+ let query = if path_style.is_windows() {
+ query.replace('\\', "/")
+ } else {
+ query.to_owned()
+ };
+
+ let atoms = make_atoms(&query, smart_case);
+
+ let num_cpus = executor.num_cpus().min(path_count);
+ let segment_size = path_count.div_ceil(num_cpus);
+ let mut segment_results = (0..num_cpus)
+ .map(|_| Vec::with_capacity(max_results))
+ .collect::<Vec<_>>();
+ let mut config = nucleo::Config::DEFAULT;
+ config.set_match_paths();
+ let mut matchers = matcher::get_matchers(num_cpus, config);
+ executor
+ .scoped(|scope| {
+ for (segment_idx, (results, matcher)) in segment_results
+ .iter_mut()
+ .zip(matchers.iter_mut())
+ .enumerate()
+ {
+ let atoms = atoms.clone();
+ let relative_to = relative_to.clone();
+ scope.spawn(async move {
+ let segment_start = segment_idx * segment_size;
+ let segment_end = segment_start + segment_size;
+
+ let mut tree_start = 0;
+ for candidate_set in candidate_sets {
+ let tree_end = tree_start + candidate_set.len();
+
+ if tree_start < segment_end && segment_start < tree_end {
+ let start = tree_start.max(segment_start) - tree_start;
+ let end = tree_end.min(segment_end) - tree_start;
+ let candidates = candidate_set.candidates(start).take(end - start);
+
+ if path_match_helper(
+ matcher,
+ &atoms,
+ candidates,
+ results,
+ candidate_set.id(),
+ &candidate_set.prefix(),
+ candidate_set.root_is_file(),
+ &relative_to,
+ path_style,
+ cancel_flag,
+ )
+ .is_err()
+ {
+ break;
+ }
+ }
+
+ if tree_end >= segment_end {
+ break;
+ }
+ tree_start = tree_end;
+ }
+ });
+ }
+ })
+ .await;
+
+ matcher::return_matchers(matchers);
+ if cancel_flag.load(atomic::Ordering::Acquire) {
+ return Vec::new();
+ }
+
+ let mut results = segment_results.concat();
+ util::truncate_to_bottom_n_sorted_by(&mut results, max_results, &|a, b| b.cmp(a));
+ results
+}
diff --git a/crates/project/Cargo.toml b/crates/project/Cargo.toml
index cd037786a399eb979fd5d9053c57efe3100dd473..628e979aab939a74bb4838477ae3e3657e2c91bc 100644
--- a/crates/project/Cargo.toml
+++ b/crates/project/Cargo.toml
@@ -52,6 +52,7 @@ fancy-regex.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
+fuzzy_nucleo.workspace = true
git.workspace = true
git_hosting_providers.workspace = true
globset.workspace = true
diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs
index 0ec3366ca8f9f6c6e4e3cbd411e1894de4d0f2b8..b90972b3489c25f8a2bf10d7dbdb6d6cfe0c4c6c 100644
--- a/crates/project/src/project.rs
+++ b/crates/project/src/project.rs
@@ -6186,6 +6186,76 @@ impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
}
}
+impl<'a> fuzzy_nucleo::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
+ type Candidates = PathMatchCandidateSetNucleoIter<'a>;
+ fn id(&self) -> usize {
+ self.snapshot.id().to_usize()
+ }
+ fn len(&self) -> usize {
+ match self.candidates {
+ Candidates::Files => {
+ if self.include_ignored {
+ self.snapshot.file_count()
+ } else {
+ self.snapshot.visible_file_count()
+ }
+ }
+ Candidates::Directories => {
+ if self.include_ignored {
+ self.snapshot.dir_count()
+ } else {
+ self.snapshot.visible_dir_count()
+ }
+ }
+ Candidates::Entries => {
+ if self.include_ignored {
+ self.snapshot.entry_count()
+ } else {
+ self.snapshot.visible_entry_count()
+ }
+ }
+ }
+ }
+ fn prefix(&self) -> Arc<RelPath> {
+ if self.snapshot.root_entry().is_some_and(|e| e.is_file()) || self.include_root_name {
+ self.snapshot.root_name().into()
+ } else {
+ RelPath::empty().into()
+ }
+ }
+ fn root_is_file(&self) -> bool {
+ self.snapshot.root_entry().is_some_and(|f| f.is_file())
+ }
+ fn path_style(&self) -> PathStyle {
+ self.snapshot.path_style()
+ }
+ fn candidates(&'a self, start: usize) -> Self::Candidates {
+ PathMatchCandidateSetNucleoIter {
+ traversal: match self.candidates {
+ Candidates::Directories => self.snapshot.directories(self.include_ignored, start),
+ Candidates::Files => self.snapshot.files(self.include_ignored, start),
+ Candidates::Entries => self.snapshot.entries(self.include_ignored, start),
+ },
+ }
+ }
+}
+
+pub struct PathMatchCandidateSetNucleoIter<'a> {
+ traversal: Traversal<'a>,
+}
+
+impl<'a> Iterator for PathMatchCandidateSetNucleoIter<'a> {
+ type Item = fuzzy_nucleo::PathMatchCandidate<'a>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.traversal
+ .next()
+ .map(|entry| fuzzy_nucleo::PathMatchCandidate {
+ is_dir: entry.kind.is_dir(),
+ path: &entry.path,
+ })
+ }
+}
+
impl EventEmitter for Project {}
impl<'a> From<&'a ProjectPath> for SettingsLocation<'a> {
From 1dc3bb90e96be26cab72e7392c4042e1e5d0d71a Mon Sep 17 00:00:00 2001
From: Pratik Karki
Date: Tue, 7 Apr 2026 17:10:55 +0545
Subject: [PATCH 09/22] Fix pane::RevealInProjectPanel to focus/open project
panel for non-project buffers (#51246)
Update how `workspace::pane::Pane` handles the `RevealInProjectPanel`
action so as to display a notification when the user attempts to reveal
an unsaved buffer or a file that does not belong to any of the open
projects.
Closes #23967
Release Notes:
- Update `pane: reveal in project panel` to display a notification when
the user attempts to use it with an unsaved buffer or a file that is not
part of the open projects
---------
Signed-off-by: Pratik Karki
Co-authored-by: dino
---
.../project_panel/src/project_panel_tests.rs | 146 +++++++++++++++++-
crates/workspace/src/pane.rs | 66 +++++++-
2 files changed, 203 insertions(+), 9 deletions(-)
diff --git a/crates/project_panel/src/project_panel_tests.rs b/crates/project_panel/src/project_panel_tests.rs
index 55b53cde8b6252f8b9732cf4effc35ea53c073e0..603cfd892a218d866383f485d058296ad179da05 100644
--- a/crates/project_panel/src/project_panel_tests.rs
+++ b/crates/project_panel/src/project_panel_tests.rs
@@ -11,7 +11,7 @@ use std::path::{Path, PathBuf};
use util::{path, paths::PathStyle, rel_path::rel_path};
use workspace::{
AppState, ItemHandle, MultiWorkspace, Pane, Workspace,
- item::{Item, ProjectItem},
+ item::{Item, ProjectItem, test::TestItem},
register_project_item,
};
@@ -6015,6 +6015,150 @@ async fn test_explicit_reveal(cx: &mut gpui::TestAppContext) {
);
}
+#[gpui::test]
+async fn test_reveal_in_project_panel_notifications(cx: &mut gpui::TestAppContext) {
+ init_test_with_editor(cx);
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/workspace",
+ json!({
+ "README.md": ""
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), ["/workspace".as_ref()], cx).await;
+ let window = cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+ let workspace = window
+ .read_with(cx, |mw, _| mw.workspace().clone())
+ .unwrap();
+ let cx = &mut VisualTestContext::from_window(window.into(), cx);
+ let panel = workspace.update_in(cx, ProjectPanel::new);
+ cx.run_until_parked();
+
+ // Ensure that, attempting to run `pane: reveal in project panel` without
+ // any active item does nothing, i.e., does not focus the project panel but
+ // it also does not show a notification.
+ cx.dispatch_action(workspace::RevealInProjectPanel::default());
+ cx.run_until_parked();
+
+ panel.update_in(cx, |panel, window, cx| {
+ assert!(
+ !panel.focus_handle(cx).is_focused(window),
+ "Project panel should not be focused after attempting to reveal an invisible worktree entry"
+ );
+
+ panel.workspace.update(cx, |workspace, cx| {
+ assert!(
+ workspace.active_item(cx).is_none(),
+ "Workspace should not have an active item"
+ );
+ assert_eq!(
+ workspace.notification_ids(),
+ vec![],
+ "No notification should be shown when there's no active item"
+ );
+ }).unwrap();
+ });
+
+ // Create a file in a different folder than the one in the project so we can
+ // later open it and ensure that, attempting to reveal it in the project
+ // panel shows a notification and does not focus the project panel.
+ fs.insert_tree(
+ "/external",
+ json!({
+ "file.txt": "External File",
+ }),
+ )
+ .await;
+
+ let (worktree, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_worktree("/external/file.txt", false, cx)
+ })
+ .await
+ .unwrap();
+
+ workspace
+ .update_in(cx, |workspace, window, cx| {
+ let worktree_id = worktree.read(cx).id();
+ let path = rel_path("").into();
+ let project_path = ProjectPath { worktree_id, path };
+
+ workspace.open_path(project_path, None, true, window, cx)
+ })
+ .await
+ .unwrap();
+ cx.run_until_parked();
+
+ cx.dispatch_action(workspace::RevealInProjectPanel::default());
+ cx.run_until_parked();
+
+ panel.update_in(cx, |panel, window, cx| {
+ assert!(
+ !panel.focus_handle(cx).is_focused(window),
+ "Project panel should not be focused after attempting to reveal an invisible worktree entry"
+ );
+
+ panel.workspace.update(cx, |workspace, cx| {
+ assert!(
+ workspace.active_item(cx).is_some(),
+ "Workspace should have an active item"
+ );
+
+ let notification_ids = workspace.notification_ids();
+ assert_eq!(
+ notification_ids.len(),
+ 1,
+ "A notification should be shown when trying to reveal an invisible worktree entry"
+ );
+
+ workspace.dismiss_notification(&notification_ids[0], cx);
+ assert_eq!(
+ workspace.notification_ids().len(),
+ 0,
+ "No notifications should be left after dismissing"
+ );
+ }).unwrap();
+ });
+
+ // Create an empty buffer so we can ensure that, attempting to reveal it in
+ // the project panel shows a notification and does not focus the project
+ // panel.
+ let pane = workspace.update(cx, |workspace, _| workspace.active_pane().clone());
+ pane.update_in(cx, |pane, window, cx| {
+ let item = cx.new(|cx| TestItem::new(cx).with_label("Unsaved buffer"));
+ pane.add_item(Box::new(item), false, false, None, window, cx);
+ });
+
+ cx.dispatch_action(workspace::RevealInProjectPanel::default());
+ cx.run_until_parked();
+
+ panel.update_in(cx, |panel, window, cx| {
+ assert!(
+ !panel.focus_handle(cx).is_focused(window),
+ "Project panel should not be focused after attempting to reveal an unsaved buffer"
+ );
+
+ panel
+ .workspace
+ .update(cx, |workspace, cx| {
+ assert!(
+ workspace.active_item(cx).is_some(),
+ "Workspace should have an active item"
+ );
+
+ let notification_ids = workspace.notification_ids();
+ assert_eq!(
+ notification_ids.len(),
+ 1,
+ "A notification should be shown when trying to reveal an unsaved buffer"
+ );
+ })
+ .unwrap();
+ });
+}
+
#[gpui::test]
async fn test_creating_excluded_entries(cx: &mut gpui::TestAppContext) {
init_test(cx);
diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs
index 27cc96ae80a010db2dd5357a9a0bc037ca762875..a09ba73add7e94fbe6910eb400b1364bd21cd313 100644
--- a/crates/workspace/src/pane.rs
+++ b/crates/workspace/src/pane.rs
@@ -10,7 +10,10 @@ use crate::{
TabContentParams, TabTooltipContent, WeakItemHandle,
},
move_item,
- notifications::NotifyResultExt,
+ notifications::{
+ NotificationId, NotifyResultExt, show_app_notification,
+ simple_message_notification::MessageNotification,
+ },
toolbar::Toolbar,
workspace_settings::{AutosaveSetting, FocusFollowsMouse, TabBarSettings, WorkspaceSettings},
};
@@ -4400,17 +4403,64 @@ impl Render for Pane {
))
.on_action(
cx.listener(|pane: &mut Self, action: &RevealInProjectPanel, _, cx| {
+ let Some(active_item) = pane.active_item() else {
+ return;
+ };
+
let entry_id = action
.entry_id
.map(ProjectEntryId::from_proto)
- .or_else(|| pane.active_item()?.project_entry_ids(cx).first().copied());
- if let Some(entry_id) = entry_id {
- pane.project
- .update(cx, |_, cx| {
- cx.emit(project::Event::RevealInProjectPanel(entry_id))
- })
- .ok();
+ .or_else(|| active_item.project_entry_ids(cx).first().copied());
+
+ let show_reveal_error_toast = |display_name: &str, cx: &mut App| {
+ let notification_id = NotificationId::unique::();
+ let message = SharedString::from(format!(
+ "\"{display_name}\" is not part of any open projects."
+ ));
+
+ show_app_notification(notification_id, cx, move |cx| {
+ let message = message.clone();
+ cx.new(|cx| MessageNotification::new(message, cx))
+ });
+ };
+
+ let Some(entry_id) = entry_id else {
+ // When working with an unsaved buffer, display a toast
+ // informing the user that the buffer is not present in
+ // any of the open projects and stop execution, as we
+ // don't want to open the project panel.
+ let display_name = active_item
+ .tab_tooltip_text(cx)
+ .unwrap_or_else(|| active_item.tab_content_text(0, cx));
+
+ return show_reveal_error_toast(&display_name, cx);
+ };
+
+ // We'll now check whether the entry belongs to a visible
+ // worktree and, if that's not the case, it means the user
+ // is interacting with a file that does not belong to any of
+ // the open projects, so we'll show a toast informing them
+ // of this and stop execution.
+ let display_name = pane
+ .project
+ .read_with(cx, |project, cx| {
+ project
+ .worktree_for_entry(entry_id, cx)
+ .filter(|worktree| !worktree.read(cx).is_visible())
+ .map(|worktree| worktree.read(cx).root_name_str().to_string())
+ })
+ .ok()
+ .flatten();
+
+ if let Some(display_name) = display_name {
+ return show_reveal_error_toast(&display_name, cx);
}
+
+ pane.project
+ .update(cx, |_, cx| {
+ cx.emit(project::Event::RevealInProjectPanel(entry_id))
+ })
+ .log_err();
}),
)
.on_action(cx.listener(|_, _: &menu::Cancel, window, cx| {
From eaf14d028a6c9cca193f725871116cd05a21c305 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=A3o=20Soares?=
<37777652+Dnreikronos@users.noreply.github.com>
Date: Tue, 7 Apr 2026 09:12:30 -0300
Subject: [PATCH 10/22] gpui: Fix SVG renderer not rendering text when system
fonts are unavailable (#51623)
Closes #51466
Before you mark this PR as ready for review, make sure that you have:
- [x] Added a solid test coverage and/or screenshots from doing manual
testing
- [x] Done a self-review taking into account security and performance
aspects
- [ ] Aligned any UI changes with the [UI
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
Release Notes:
- Fixed mermaid diagrams not showing text in markdown preview by
bundling fallback fonts and fixing generic font family resolution in the
SVG renderer.
---
crates/gpui/src/svg_renderer.rs | 127 ++++++++++++++++++++++++++++++--
1 file changed, 120 insertions(+), 7 deletions(-)
diff --git a/crates/gpui/src/svg_renderer.rs b/crates/gpui/src/svg_renderer.rs
index 8653ab9b162031772ab29367b60ff988e33cd823..a766a25cc1ef66039f5b2a1d0aeaab51ace89578 100644
--- a/crates/gpui/src/svg_renderer.rs
+++ b/crates/gpui/src/svg_renderer.rs
@@ -105,18 +105,36 @@ pub enum SvgSize {
impl SvgRenderer {
/// Creates a new SVG renderer with the provided asset source.
 pub fn new(asset_source: Arc<dyn AssetSource>) -> Self {
- static FONT_DB: LazyLock<Arc<usvg::fontdb::Database>> = LazyLock::new(|| {
+ static SYSTEM_FONT_DB: LazyLock<Arc<usvg::fontdb::Database>> = LazyLock::new(|| {
let mut db = usvg::fontdb::Database::new();
db.load_system_fonts();
Arc::new(db)
});
+
+ let fontdb = {
+ let mut db = (**SYSTEM_FONT_DB).clone();
+ load_bundled_fonts(&*asset_source, &mut db);
+ fix_generic_font_families(&mut db);
+ Arc::new(db)
+ };
+
let default_font_resolver = usvg::FontResolver::default_font_selector();
let font_resolver = Box::new(
 move |font: &usvg::Font, db: &mut Arc<usvg::fontdb::Database>| {
if db.is_empty() {
- *db = FONT_DB.clone();
+ *db = fontdb.clone();
+ }
+ if let Some(id) = default_font_resolver(font, db) {
+ return Some(id);
}
- default_font_resolver(font, db)
+ // fontdb doesn't recognize CSS system font keywords like "system-ui"
+ // or "ui-sans-serif", so fall back to sans-serif before any face.
+ let sans_query = usvg::fontdb::Query {
+ families: &[usvg::fontdb::Family::SansSerif],
+ ..Default::default()
+ };
+ db.query(&sans_query)
+ .or_else(|| db.faces().next().map(|f| f.id))
},
);
let default_fallback_selection = usvg::FontResolver::default_fallback_selector();
@@ -226,14 +244,69 @@ impl SvgRenderer {
}
}
+fn load_bundled_fonts(asset_source: &dyn AssetSource, db: &mut usvg::fontdb::Database) {
+ let font_paths = [
+ "fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf",
+ "fonts/lilex/Lilex-Regular.ttf",
+ ];
+ for path in font_paths {
+ match asset_source.load(path) {
+ Ok(Some(data)) => db.load_font_data(data.into_owned()),
+ Ok(None) => log::warn!("Bundled font not found: {path}"),
+ Err(error) => log::warn!("Failed to load bundled font {path}: {error}"),
+ }
+ }
+}
+
+// fontdb defaults generic families to Microsoft fonts ("Arial", "Times New Roman")
+// which aren't installed on most Linux systems. fontconfig normally overrides these,
+// but when it fails the defaults remain and all generic family queries return None.
+fn fix_generic_font_families(db: &mut usvg::fontdb::Database) {
+ use usvg::fontdb::{Family, Query};
+
+ let families_and_fallbacks: &[(Family<'_>, &str)] = &[
+ (Family::SansSerif, "IBM Plex Sans"),
+ // No serif font bundled; use sans-serif as best available fallback.
+ (Family::Serif, "IBM Plex Sans"),
+ (Family::Monospace, "Lilex"),
+ (Family::Cursive, "IBM Plex Sans"),
+ (Family::Fantasy, "IBM Plex Sans"),
+ ];
+
+ for (family, fallback_name) in families_and_fallbacks {
+ let query = Query {
+ families: &[*family],
+ ..Default::default()
+ };
+ if db.query(&query).is_none() {
+ match family {
+ Family::SansSerif => db.set_sans_serif_family(*fallback_name),
+ Family::Serif => db.set_serif_family(*fallback_name),
+ Family::Monospace => db.set_monospace_family(*fallback_name),
+ Family::Cursive => db.set_cursive_family(*fallback_name),
+ Family::Fantasy => db.set_fantasy_family(*fallback_name),
+ _ => {}
+ }
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
+ use usvg::fontdb::{Database, Family, Query};
const IBM_PLEX_REGULAR: &[u8] =
include_bytes!("../../../assets/fonts/ibm-plex-sans/IBMPlexSans-Regular.ttf");
const LILEX_REGULAR: &[u8] = include_bytes!("../../../assets/fonts/lilex/Lilex-Regular.ttf");
+ fn db_with_bundled_fonts() -> Database {
+ let mut db = Database::new();
+ db.load_font_data(IBM_PLEX_REGULAR.to_vec());
+ db.load_font_data(LILEX_REGULAR.to_vec());
+ db
+ }
+
#[test]
fn test_is_emoji_presentation() {
let cases = [
@@ -266,11 +339,33 @@ mod tests {
}
#[test]
- fn test_select_emoji_font_skips_family_without_glyph() {
- let mut db = usvg::fontdb::Database::new();
+ fn fix_generic_font_families_sets_all_families() {
+ let mut db = db_with_bundled_fonts();
+ fix_generic_font_families(&mut db);
+
+ let families = [
+ Family::SansSerif,
+ Family::Serif,
+ Family::Monospace,
+ Family::Cursive,
+ Family::Fantasy,
+ ];
- db.load_font_data(IBM_PLEX_REGULAR.to_vec());
- db.load_font_data(LILEX_REGULAR.to_vec());
+ for family in families {
+ let query = Query {
+ families: &[family],
+ ..Default::default()
+ };
+ assert!(
+ db.query(&query).is_some(),
+ "Expected generic family {family:?} to resolve after fix_generic_font_families"
+ );
+ }
+ }
+
+ #[test]
+ fn test_select_emoji_font_skips_family_without_glyph() {
+ let mut db = db_with_bundled_fonts();
let ibm_plex_sans = db
.query(&usvg::fontdb::Query {
@@ -294,4 +389,22 @@ mod tests {
assert!(!font_has_char(&db, ibm_plex_sans, '│'));
assert!(font_has_char(&db, selected, '│'));
}
+
+ #[test]
+ fn fix_generic_font_families_monospace_resolves_to_lilex() {
+ let mut db = db_with_bundled_fonts();
+ fix_generic_font_families(&mut db);
+
+ let query = Query {
+ families: &[Family::Monospace],
+ ..Default::default()
+ };
+ let id = db.query(&query).expect("Monospace should resolve");
+ let face = db.face(id).expect("Face should exist");
+ assert!(
+ face.families.iter().any(|(name, _)| name.contains("Lilex")),
+ "Monospace should map to Lilex, got {:?}",
+ face.families
+ );
+ }
}
From 0bde5094f695c9ddf4e5fa591712baab546d3b4b Mon Sep 17 00:00:00 2001
From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com>
Date: Tue, 7 Apr 2026 09:13:05 -0300
Subject: [PATCH 11/22] agent_ui: Set max-width for thread view content
(#52730)
This PR adds a configurable max-width to the agent panel. This will be
particularly useful when opting into an agentic-first layout where the
thread will be at the center of the UI (with the panel most likely
full-screen'ed, which is why I'm also adding here the button to make it
full screen in the toolbar). The default max-width is 850, which is a
bit bigger than the one generally considered as a standard (~66
characters wide, which usually sums up to 750 pixels).
Release Notes:
- Agent: Added a max-width to the thread view for better readability,
particularly when the panel is zoomed in.
---
assets/settings/default.json | 3 +
crates/agent/src/tool_permissions.rs | 1 +
crates/agent_settings/src/agent_settings.rs | 2 +
crates/agent_ui/src/agent_panel.rs | 85 +++---
crates/agent_ui/src/agent_ui.rs | 1 +
.../src/conversation_view/thread_view.rs | 242 ++++++++++--------
crates/settings_content/src/agent.rs | 6 +
crates/settings_ui/src/page_data.rs | 20 +-
8 files changed, 203 insertions(+), 157 deletions(-)
diff --git a/assets/settings/default.json b/assets/settings/default.json
index 63e906e3b11206fc458f8d7353f3ecba0abeb825..a32e1b27aee08bf2676922fea3790a99b7d7844b 100644
--- a/assets/settings/default.json
+++ b/assets/settings/default.json
@@ -965,6 +965,9 @@
"default_width": 640,
// Default height when the agent panel is docked to the bottom.
"default_height": 320,
+ // Maximum content width when the agent panel is wider than this value.
+ // Content will be centered within the panel.
+ "max_content_width": 850,
// The default model to use when creating new threads.
"default_model": {
// The provider to use.
diff --git a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs
index 58e779da59aef176464839ed6f2d6a5c16e4bc12..ff9e735b6c4181588ed5cddbd6dada7fbae5f18f 100644
--- a/crates/agent/src/tool_permissions.rs
+++ b/crates/agent/src/tool_permissions.rs
@@ -574,6 +574,7 @@ mod tests {
flexible: true,
default_width: px(300.),
default_height: px(600.),
+ max_content_width: px(850.),
default_model: None,
inline_assistant_model: None,
inline_assistant_use_streaming_tools: false,
diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs
index 0c68d2f25d54f966d1cc0a93476457bbba79c959..5d6dca9322482daecf7525f79ead63b4471b7a53 100644
--- a/crates/agent_settings/src/agent_settings.rs
+++ b/crates/agent_settings/src/agent_settings.rs
@@ -154,6 +154,7 @@ pub struct AgentSettings {
pub sidebar_side: SidebarDockPosition,
pub default_width: Pixels,
pub default_height: Pixels,
+ pub max_content_width: Pixels,
pub default_model: Option,
pub inline_assistant_model: Option,
pub inline_assistant_use_streaming_tools: bool,
@@ -600,6 +601,7 @@ impl Settings for AgentSettings {
sidebar_side: agent.sidebar_side.unwrap(),
default_width: px(agent.default_width.unwrap()),
default_height: px(agent.default_height.unwrap()),
+ max_content_width: px(agent.max_content_width.unwrap()),
flexible: agent.flexible.unwrap(),
default_model: Some(agent.default_model.unwrap()),
inline_assistant_model: agent.inline_assistant_model,
diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs
index 8f456e0e955b823a5bbaf2815df3b409441bb0af..01b897fc63da76247b5624f8316ea06b2c1f85e5 100644
--- a/crates/agent_ui/src/agent_panel.rs
+++ b/crates/agent_ui/src/agent_panel.rs
@@ -3186,17 +3186,11 @@ impl AgentPanel {
fn render_panel_options_menu(
&self,
- window: &mut Window,
+ _window: &mut Window,
cx: &mut Context,
) -> impl IntoElement {
let focus_handle = self.focus_handle(cx);
- let full_screen_label = if self.is_zoomed(window, cx) {
- "Disable Full Screen"
- } else {
- "Enable Full Screen"
- };
-
let conversation_view = match &self.active_view {
ActiveView::AgentThread { conversation_view } => Some(conversation_view.clone()),
_ => None,
@@ -3272,8 +3266,7 @@ impl AgentPanel {
.action("Profiles", Box::new(ManageProfiles::default()))
.action("Settings", Box::new(OpenSettings))
.separator()
- .action("Toggle Threads Sidebar", Box::new(ToggleWorkspaceSidebar))
- .action(full_screen_label, Box::new(ToggleZoom));
+ .action("Toggle Threads Sidebar", Box::new(ToggleWorkspaceSidebar));
if has_auth_methods {
menu = menu.action("Reauthenticate", Box::new(ReauthenticateAgent))
@@ -3709,21 +3702,37 @@ impl AgentPanel {
);
let is_full_screen = self.is_zoomed(window, cx);
+ let full_screen_button = if is_full_screen {
+ IconButton::new("disable-full-screen", IconName::Minimize)
+ .icon_size(IconSize::Small)
+ .tooltip(move |_, cx| Tooltip::for_action("Disable Full Screen", &ToggleZoom, cx))
+ .on_click(cx.listener(move |this, _, window, cx| {
+ this.toggle_zoom(&ToggleZoom, window, cx);
+ }))
+ } else {
+ IconButton::new("enable-full-screen", IconName::Maximize)
+ .icon_size(IconSize::Small)
+ .tooltip(move |_, cx| Tooltip::for_action("Enable Full Screen", &ToggleZoom, cx))
+ .on_click(cx.listener(move |this, _, window, cx| {
+ this.toggle_zoom(&ToggleZoom, window, cx);
+ }))
+ };
let use_v2_empty_toolbar = has_v2_flag && is_empty_state && !is_in_history_or_config;
+ let max_content_width = AgentSettings::get_global(cx).max_content_width;
+
let base_container = h_flex()
- .id("agent-panel-toolbar")
- .h(Tab::container_height(cx))
- .max_w_full()
+ .size_full()
+ // TODO: This is only until we remove Agent settings from the panel.
+ .when(!is_in_history_or_config, |this| {
+ this.max_w(max_content_width).mx_auto()
+ })
.flex_none()
.justify_between()
- .gap_2()
- .bg(cx.theme().colors().tab_bar_background)
- .border_b_1()
- .border_color(cx.theme().colors().border);
+ .gap_2();
- if use_v2_empty_toolbar {
+ let toolbar_content = if use_v2_empty_toolbar {
let (chevron_icon, icon_color, label_color) =
if self.new_thread_menu_handle.is_deployed() {
(IconName::ChevronUp, Color::Accent, Color::Accent)
@@ -3805,20 +3814,7 @@ impl AgentPanel {
cx,
))
})
- .when(is_full_screen, |this| {
- this.child(
- IconButton::new("disable-full-screen", IconName::Minimize)
- .icon_size(IconSize::Small)
- .tooltip(move |_, cx| {
- Tooltip::for_action("Disable Full Screen", &ToggleZoom, cx)
- })
- .on_click({
- cx.listener(move |_, _, window, cx| {
- window.dispatch_action(ToggleZoom.boxed_clone(), cx);
- })
- }),
- )
- })
+ .child(full_screen_button)
.child(self.render_panel_options_menu(window, cx)),
)
.into_any_element()
@@ -3871,24 +3867,21 @@ impl AgentPanel {
cx,
))
})
- .when(is_full_screen, |this| {
- this.child(
- IconButton::new("disable-full-screen", IconName::Minimize)
- .icon_size(IconSize::Small)
- .tooltip(move |_, cx| {
- Tooltip::for_action("Disable Full Screen", &ToggleZoom, cx)
- })
- .on_click({
- cx.listener(move |_, _, window, cx| {
- window.dispatch_action(ToggleZoom.boxed_clone(), cx);
- })
- }),
- )
- })
+ .child(full_screen_button)
.child(self.render_panel_options_menu(window, cx)),
)
.into_any_element()
- }
+ };
+
+ h_flex()
+ .id("agent-panel-toolbar")
+ .h(Tab::container_height(cx))
+ .flex_shrink_0()
+ .max_w_full()
+ .bg(cx.theme().colors().tab_bar_background)
+ .border_b_1()
+ .border_color(cx.theme().colors().border)
+ .child(toolbar_content)
}
fn render_worktree_creation_status(&self, cx: &mut Context) -> Option {
diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs
index 9daa7c6cd83c276aa99adc9e3aae3e6c82c5ba88..58b52d9ea2eb10a4f7f483402b98c4be4b08924f 100644
--- a/crates/agent_ui/src/agent_ui.rs
+++ b/crates/agent_ui/src/agent_ui.rs
@@ -742,6 +742,7 @@ mod tests {
flexible: true,
default_width: px(300.),
default_height: px(600.),
+ max_content_width: px(850.),
default_model: None,
inline_assistant_model: None,
inline_assistant_use_streaming_tools: false,
diff --git a/crates/agent_ui/src/conversation_view/thread_view.rs b/crates/agent_ui/src/conversation_view/thread_view.rs
index ff3dab1170064e058c0ebb44505c0906349517ee..27ebadade8047db5f2b4de63c5c3731708d9af59 100644
--- a/crates/agent_ui/src/conversation_view/thread_view.rs
+++ b/crates/agent_ui/src/conversation_view/thread_view.rs
@@ -3014,14 +3014,12 @@ impl ThreadView {
let is_done = thread.read(cx).status() == ThreadStatus::Idle;
let is_canceled_or_failed = self.is_subagent_canceled_or_failed(cx);
+ let max_content_width = AgentSettings::get_global(cx).max_content_width;
+
Some(
h_flex()
- .h(Tab::container_height(cx))
- .pl_2()
- .pr_1p5()
.w_full()
- .justify_between()
- .gap_1()
+ .h(Tab::container_height(cx))
.border_b_1()
.when(is_done && is_canceled_or_failed, |this| {
this.border_dashed()
@@ -3030,50 +3028,61 @@ impl ThreadView {
.bg(cx.theme().colors().editor_background.opacity(0.2))
.child(
h_flex()
- .flex_1()
- .gap_2()
+ .size_full()
+ .max_w(max_content_width)
+ .mx_auto()
+ .pl_2()
+ .pr_1()
+ .flex_shrink_0()
+ .justify_between()
+ .gap_1()
.child(
- Icon::new(IconName::ForwardArrowUp)
- .size(IconSize::Small)
- .color(Color::Muted),
+ h_flex()
+ .flex_1()
+ .gap_2()
+ .child(
+ Icon::new(IconName::ForwardArrowUp)
+ .size(IconSize::Small)
+ .color(Color::Muted),
+ )
+ .child(self.title_editor.clone())
+ .when(is_done && is_canceled_or_failed, |this| {
+ this.child(Icon::new(IconName::Close).color(Color::Error))
+ })
+ .when(is_done && !is_canceled_or_failed, |this| {
+ this.child(Icon::new(IconName::Check).color(Color::Success))
+ }),
)
- .child(self.title_editor.clone())
- .when(is_done && is_canceled_or_failed, |this| {
- this.child(Icon::new(IconName::Close).color(Color::Error))
- })
- .when(is_done && !is_canceled_or_failed, |this| {
- this.child(Icon::new(IconName::Check).color(Color::Success))
- }),
- )
- .child(
- h_flex()
- .gap_0p5()
- .when(!is_done, |this| {
- this.child(
- IconButton::new("stop_subagent", IconName::Stop)
- .icon_size(IconSize::Small)
- .icon_color(Color::Error)
- .tooltip(Tooltip::text("Stop Subagent"))
- .on_click(move |_, _, cx| {
- thread.update(cx, |thread, cx| {
- thread.cancel(cx).detach();
- });
- }),
- )
- })
.child(
- IconButton::new("minimize_subagent", IconName::Minimize)
- .icon_size(IconSize::Small)
- .tooltip(Tooltip::text("Minimize Subagent"))
- .on_click(move |_, window, cx| {
- let _ = server_view.update(cx, |server_view, cx| {
- server_view.navigate_to_session(
- parent_session_id.clone(),
- window,
- cx,
- );
- });
- }),
+ h_flex()
+ .gap_0p5()
+ .when(!is_done, |this| {
+ this.child(
+ IconButton::new("stop_subagent", IconName::Stop)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Error)
+ .tooltip(Tooltip::text("Stop Subagent"))
+ .on_click(move |_, _, cx| {
+ thread.update(cx, |thread, cx| {
+ thread.cancel(cx).detach();
+ });
+ }),
+ )
+ })
+ .child(
+ IconButton::new("minimize_subagent", IconName::Dash)
+ .icon_size(IconSize::Small)
+ .tooltip(Tooltip::text("Minimize Subagent"))
+ .on_click(move |_, window, cx| {
+ let _ = server_view.update(cx, |server_view, cx| {
+ server_view.navigate_to_session(
+ parent_session_id.clone(),
+ window,
+ cx,
+ );
+ });
+ }),
+ ),
),
),
)
@@ -3099,6 +3108,8 @@ impl ThreadView {
(IconName::Maximize, "Expand Message Editor")
};
+ let max_content_width = AgentSettings::get_global(cx).max_content_width;
+
v_flex()
.on_action(cx.listener(Self::expand_message_editor))
.p_2()
@@ -3113,73 +3124,80 @@ impl ThreadView {
})
.child(
v_flex()
- .relative()
- .size_full()
- .when(v2_empty_state, |this| this.flex_1())
- .pt_1()
- .pr_2p5()
- .child(self.message_editor.clone())
- .when(!v2_empty_state, |this| {
- this.child(
- h_flex()
- .absolute()
- .top_0()
- .right_0()
- .opacity(0.5)
- .hover(|this| this.opacity(1.0))
- .child(
- IconButton::new("toggle-height", expand_icon)
- .icon_size(IconSize::Small)
- .icon_color(Color::Muted)
- .tooltip({
- move |_window, cx| {
- Tooltip::for_action_in(
- expand_tooltip,
- &ExpandMessageEditor,
- &focus_handle,
- cx,
- )
- }
- })
- .on_click(cx.listener(|this, _, window, cx| {
- this.expand_message_editor(
- &ExpandMessageEditor,
- window,
- cx,
- );
- })),
- ),
- )
- }),
- )
- .child(
- h_flex()
- .flex_none()
- .flex_wrap()
- .justify_between()
+ .flex_1()
+ .w_full()
+ .max_w(max_content_width)
+ .mx_auto()
.child(
- h_flex()
- .gap_0p5()
- .child(self.render_add_context_button(cx))
- .child(self.render_follow_toggle(cx))
- .children(self.render_fast_mode_control(cx))
- .children(self.render_thinking_control(cx)),
+ v_flex()
+ .relative()
+ .size_full()
+ .when(v2_empty_state, |this| this.flex_1())
+ .pt_1()
+ .pr_2p5()
+ .child(self.message_editor.clone())
+ .when(!v2_empty_state, |this| {
+ this.child(
+ h_flex()
+ .absolute()
+ .top_0()
+ .right_0()
+ .opacity(0.5)
+ .hover(|this| this.opacity(1.0))
+ .child(
+ IconButton::new("toggle-height", expand_icon)
+ .icon_size(IconSize::Small)
+ .icon_color(Color::Muted)
+ .tooltip({
+ move |_window, cx| {
+ Tooltip::for_action_in(
+ expand_tooltip,
+ &ExpandMessageEditor,
+ &focus_handle,
+ cx,
+ )
+ }
+ })
+ .on_click(cx.listener(|this, _, window, cx| {
+ this.expand_message_editor(
+ &ExpandMessageEditor,
+ window,
+ cx,
+ );
+ })),
+ ),
+ )
+ }),
)
.child(
h_flex()
- .gap_1()
- .children(self.render_token_usage(cx))
- .children(self.profile_selector.clone())
- .map(|this| {
- // Either config_options_view OR (mode_selector + model_selector)
- match self.config_options_view.clone() {
- Some(config_view) => this.child(config_view),
- None => this
- .children(self.mode_selector.clone())
- .children(self.model_selector.clone()),
- }
- })
- .child(self.render_send_button(cx)),
+ .flex_none()
+ .flex_wrap()
+ .justify_between()
+ .child(
+ h_flex()
+ .gap_0p5()
+ .child(self.render_add_context_button(cx))
+ .child(self.render_follow_toggle(cx))
+ .children(self.render_fast_mode_control(cx))
+ .children(self.render_thinking_control(cx)),
+ )
+ .child(
+ h_flex()
+ .gap_1()
+ .children(self.render_token_usage(cx))
+ .children(self.profile_selector.clone())
+ .map(|this| {
+ // Either config_options_view OR (mode_selector + model_selector)
+ match self.config_options_view.clone() {
+ Some(config_view) => this.child(config_view),
+ None => this
+ .children(self.mode_selector.clone())
+ .children(self.model_selector.clone()),
+ }
+ })
+ .child(self.render_send_button(cx)),
+ ),
),
)
.into_any()
@@ -8559,8 +8577,12 @@ impl Render for ThreadView {
let has_messages = self.list_state.item_count() > 0;
let v2_empty_state = cx.has_flag::() && !has_messages;
+ let max_content_width = AgentSettings::get_global(cx).max_content_width;
+
let conversation = v_flex()
- .when(!v2_empty_state, |this| this.flex_1())
+ .mx_auto()
+ .max_w(max_content_width)
+ .when(!v2_empty_state, |this| this.flex_1().size_full())
.map(|this| {
let this = this.when(self.resumed_without_history, |this| {
this.child(Self::render_resume_notice(cx))
diff --git a/crates/settings_content/src/agent.rs b/crates/settings_content/src/agent.rs
index 5b1b3c014f8c538cb0dff506e05d84a80dc863d1..7a9a1ddb16ac91f90f73e17b3972cd31536d7a66 100644
--- a/crates/settings_content/src/agent.rs
+++ b/crates/settings_content/src/agent.rs
@@ -128,6 +128,12 @@ pub struct AgentSettingsContent {
/// Default: 320
#[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
pub default_height: Option,
+ /// Maximum content width in pixels for the agent panel. Content will be
+ /// centered when the panel is wider than this value.
+ ///
+ /// Default: 850
+ #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")]
+ pub max_content_width: Option,
/// The default model to use when creating new chats and for other features when a specific model is not specified.
pub default_model: Option,
/// Favorite models to show at the top of the model selector.
diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs
index 9978832c05bb29c97f118fccbe301214d81fa0c6..259ee2cf261f9e435a5431ddf3c470640daf41f9 100644
--- a/crates/settings_ui/src/page_data.rs
+++ b/crates/settings_ui/src/page_data.rs
@@ -5737,7 +5737,7 @@ fn panels_page() -> SettingsPage {
]
}
- fn agent_panel_section() -> [SettingsPageItem; 6] {
+ fn agent_panel_section() -> [SettingsPageItem; 7] {
[
SettingsPageItem::SectionHeader("Agent Panel"),
SettingsPageItem::SettingItem(SettingItem {
@@ -5812,6 +5812,24 @@ fn panels_page() -> SettingsPage {
metadata: None,
files: USER,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Agent Panel Max Content Width",
+ description: "Maximum content width in pixels. Content will be centered when the panel is wider than this value.",
+ field: Box::new(SettingField {
+ json_path: Some("agent.max_content_width"),
+ pick: |settings_content| {
+ settings_content.agent.as_ref()?.max_content_width.as_ref()
+ },
+ write: |settings_content, value| {
+ settings_content
+ .agent
+ .get_or_insert_default()
+ .max_content_width = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
]
}
From 8292ab440d87172c6663e2dffa1fad33d10ddb11 Mon Sep 17 00:00:00 2001
From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com>
Date: Tue, 7 Apr 2026 09:26:09 -0300
Subject: [PATCH 12/22] collab_panel: Make channel items have a fixed height
(#53304)
Follow-up to https://github.com/zed-industries/zed/pull/53290
This PR fixes a mistake I pushed before of making the `ListItem`'s
height method take pixels instead of a scalable unit like rems. Now, it
takes `DefiniteLength`, which can accommodate both kinds of values, meaning
we are free to set an explicit height for all of these items while still
preserving font-size scaling.
Release Notes:
- N/A
---
crates/collab_ui/src/collab_panel.rs | 7 +++++++
crates/ui/src/components/list/list_item.rs | 6 +++---
2 files changed, 10 insertions(+), 3 deletions(-)
diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs
index 1e1aab3b9d4aa0e48ad4a84ec77bdc6dff51c7f5..7dc807998760a8e65d373164eec5c7663171e5d0 100644
--- a/crates/collab_ui/src/collab_panel.rs
+++ b/crates/collab_ui/src/collab_panel.rs
@@ -1181,6 +1181,7 @@ impl CollabPanel {
.into();
ListItem::new(project_id as usize)
+ .height(rems_from_px(24.))
.toggle_state(is_selected)
.on_click(cx.listener(move |this, _, window, cx| {
this.workspace
@@ -1221,6 +1222,7 @@ impl CollabPanel {
let id = peer_id.map_or(usize::MAX, |id| id.as_u64() as usize);
ListItem::new(("screen", id))
+ .height(rems_from_px(24.))
.toggle_state(is_selected)
.start_slot(
h_flex()
@@ -1267,6 +1269,7 @@ impl CollabPanel {
let has_channel_buffer_changed = channel_store.has_channel_buffer_changed(channel_id);
ListItem::new("channel-notes")
+ .height(rems_from_px(24.))
.toggle_state(is_selected)
.on_click(cx.listener(move |this, _, window, cx| {
this.open_channel_notes(channel_id, window, cx);
@@ -3207,9 +3210,12 @@ impl CollabPanel {
(IconName::Star, Color::Default, "Add to Favorites")
};
+ let height = rems_from_px(24.);
+
h_flex()
.id(ix)
.group("")
+ .h(height)
.w_full()
.overflow_hidden()
.when(!channel.is_root_channel(), |el| {
@@ -3239,6 +3245,7 @@ impl CollabPanel {
)
.child(
ListItem::new(ix)
+ .height(height)
// Add one level of depth for the disclosure arrow.
.indent_level(depth + 1)
.indent_step_size(px(20.))
diff --git a/crates/ui/src/components/list/list_item.rs b/crates/ui/src/components/list/list_item.rs
index 9a764efd58cfd3365d92e534a715a0f23ce46e90..ece1fd3c61ec486c090808891a8eec662138b1b4 100644
--- a/crates/ui/src/components/list/list_item.rs
+++ b/crates/ui/src/components/list/list_item.rs
@@ -52,7 +52,7 @@ pub struct ListItem {
overflow_x: bool,
focused: Option,
docked_right: bool,
- height: Option,
+ height: Option,
}
impl ListItem {
@@ -207,8 +207,8 @@ impl ListItem {
self
}
- pub fn height(mut self, height: Pixels) -> Self {
- self.height = Some(height);
+ pub fn height(mut self, height: impl Into) -> Self {
+ self.height = Some(height.into());
self
}
}
From 3ed1c32bf9a1ebb485e3da6cabc8b3c0a423beea Mon Sep 17 00:00:00 2001
From: Xin Zhao
Date: Tue, 7 Apr 2026 22:15:33 +0800
Subject: [PATCH 13/22] editor: Fix diagnostic rendering when semantic tokens
set to full (#53008)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Self-Review Checklist:
- [x] I've reviewed my own diff for quality, security, and reliability
- [x] Unsafe blocks (if any) have justifying comments
- [x] The content is consistent with the [UI/UX
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
- [ ] Tests cover the new/changed behavior
- [x] Performance impact has been considered and is acceptable
Closes #50212
There are two instances of unreasonable coupling that account for this issue:
the coupling of `use_tree_sitter` with `language_aware` in
https://github.com/zed-industries/zed/blob/7892b932795911516f26f3c1c1c72249ed181ba8/crates/editor/src/element.rs#L3820-L3822
and the coupling of `language_aware` with `diagnostics` in
https://github.com/zed-industries/zed/blob/7892b932795911516f26f3c1c1c72249ed181ba8/crates/language/src/buffer.rs#L3736-L3746
Because of these couplings, when the editor stops using Tree-sitter
highlighting when `"semantic_tokens"` is set to `"full"`, it also
accidentally stops fetching diagnostic information. This is why error
and warning underlines disappear.
I’ve fixed this by adding a separate `use_tree_sitter` parameter to
`highlighted_chunks`. This way, we can keep `language_aware` true to get
the diagnostic data we need, but still decide whether or not to apply
Tree-sitter highlights. I chose to fix this at the `highlighted_chunks`
level because I’m worried that changing the logic in the deeper layers
of the DisplayMap or Buffer might have too many side effects that are
hard to predict. This approach feels like a safer way to solve the
problem.
Release Notes:
- Fixed a bug where diagnostic underlines disappeared when
"semantic_tokens" is set to "full"
---------
Co-authored-by: Kirill Bulatov
---
crates/editor/src/display_map.rs | 48 +++++--
crates/editor/src/display_map/block_map.rs | 14 +-
.../src/display_map/custom_highlights.rs | 9 +-
crates/editor/src/display_map/fold_map.rs | 33 ++++-
crates/editor/src/display_map/inlay_map.rs | 37 +++--
crates/editor/src/display_map/tab_map.rs | 71 ++++++++--
crates/editor/src/display_map/wrap_map.rs | 19 ++-
crates/editor/src/editor.rs | 22 ++-
crates/editor/src/element.rs | 17 ++-
crates/editor/src/semantic_tokens.rs | 132 +++++++++++++++++-
crates/language/src/buffer.rs | 43 +++++-
crates/language/src/buffer_tests.rs | 8 +-
crates/multi_buffer/src/multi_buffer.rs | 33 +++--
crates/multi_buffer/src/multi_buffer_tests.rs | 24 +++-
crates/outline_panel/src/outline_panel.rs | 13 +-
crates/project/src/lsp_store.rs | 15 +-
.../tests/integration/project_tests.rs | 15 +-
crates/vim/src/state.rs | 12 +-
18 files changed, 468 insertions(+), 97 deletions(-)
diff --git a/crates/editor/src/display_map.rs b/crates/editor/src/display_map.rs
index f95f1030276015af4825119fc98ac68b876d0e5f..7cb8040e282a47d27cf5d7b33e5453295b4f645f 100644
--- a/crates/editor/src/display_map.rs
+++ b/crates/editor/src/display_map.rs
@@ -98,7 +98,7 @@ use gpui::{
WeakEntity,
};
use language::{
- Point, Subscription as BufferSubscription,
+ LanguageAwareStyling, Point, Subscription as BufferSubscription,
language_settings::{AllLanguageSettings, LanguageSettings},
};
@@ -1769,7 +1769,10 @@ impl DisplaySnapshot {
self.block_snapshot
.chunks(
BlockRow(display_row.0)..BlockRow(self.max_point().row().next_row().0),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
self.masked,
Highlights::default(),
)
@@ -1783,7 +1786,10 @@ impl DisplaySnapshot {
self.block_snapshot
.chunks(
BlockRow(row)..BlockRow(row + 1),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
self.masked,
Highlights::default(),
)
@@ -1798,7 +1804,7 @@ impl DisplaySnapshot {
pub fn chunks(
&self,
display_rows: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
highlight_styles: HighlightStyles,
) -> DisplayChunks<'_> {
self.block_snapshot.chunks(
@@ -1818,7 +1824,7 @@ impl DisplaySnapshot {
pub fn highlighted_chunks<'a>(
&'a self,
display_rows: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
editor_style: &'a EditorStyle,
) -> impl Iterator
- > {
self.chunks(
@@ -1910,7 +1916,10 @@ impl DisplaySnapshot {
let chunks = custom_highlights::CustomHighlightsChunks::new(
multibuffer_range,
- true,
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
None,
Some(&self.semantic_token_highlights),
multibuffer,
@@ -1961,7 +1970,14 @@ impl DisplaySnapshot {
let mut line = String::new();
let range = display_row..display_row.next_row();
- for chunk in self.highlighted_chunks(range, false, editor_style) {
+ for chunk in self.highlighted_chunks(
+ range,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ editor_style,
+ ) {
line.push_str(chunk.text);
let text_style = if let Some(style) = chunk.style {
@@ -3388,7 +3404,14 @@ pub mod tests {
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
let mut chunks = Vec::<(String, Option, Rgba)>::new();
- for chunk in snapshot.chunks(DisplayRow(0)..DisplayRow(5), true, Default::default()) {
+ for chunk in snapshot.chunks(
+ DisplayRow(0)..DisplayRow(5),
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
+ Default::default(),
+ ) {
let color = chunk
.highlight_style
.and_then(|style| style.color)
@@ -3940,7 +3963,14 @@ pub mod tests {
) -> Vec<(String, Option, Option)> {
let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
let mut chunks: Vec<(String, Option, Option)> = Vec::new();
- for chunk in snapshot.chunks(rows, true, HighlightStyles::default()) {
+ for chunk in snapshot.chunks(
+ rows,
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
+ HighlightStyles::default(),
+ ) {
let syntax_color = chunk
.syntax_highlight_id
.and_then(|id| theme.get(id)?.color);
diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs
index 67318e3300e73085fe40c2e22edfcd06778902c8..17fa7e3de4a361f6728664e76368583788053cfd 100644
--- a/crates/editor/src/display_map/block_map.rs
+++ b/crates/editor/src/display_map/block_map.rs
@@ -9,7 +9,7 @@ use crate::{
};
use collections::{Bound, HashMap, HashSet};
use gpui::{AnyElement, App, EntityId, Pixels, Window};
-use language::{Patch, Point};
+use language::{LanguageAwareStyling, Patch, Point};
use multi_buffer::{
Anchor, ExcerptBoundaryInfo, MultiBuffer, MultiBufferOffset, MultiBufferPoint, MultiBufferRow,
MultiBufferSnapshot, RowInfo, ToOffset, ToPoint as _,
@@ -2140,7 +2140,10 @@ impl BlockSnapshot {
pub fn text(&self) -> String {
self.chunks(
BlockRow(0)..self.transforms.summary().output_rows,
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
false,
Highlights::default(),
)
@@ -2152,7 +2155,7 @@ impl BlockSnapshot {
pub(crate) fn chunks<'a>(
&'a self,
rows: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
masked: bool,
highlights: Highlights<'a>,
) -> BlockChunks<'a> {
@@ -4300,7 +4303,10 @@ mod tests {
let actual_text = blocks_snapshot
.chunks(
BlockRow(start_row as u32)..BlockRow(end_row as u32),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
false,
Highlights::default(),
)
diff --git a/crates/editor/src/display_map/custom_highlights.rs b/crates/editor/src/display_map/custom_highlights.rs
index 39eabef2f9627b8088dc826ec64379bf76a6c9fa..6e93e562172decb0843da35c7f55fafd92ed21cc 100644
--- a/crates/editor/src/display_map/custom_highlights.rs
+++ b/crates/editor/src/display_map/custom_highlights.rs
@@ -1,6 +1,6 @@
use collections::BTreeMap;
use gpui::HighlightStyle;
-use language::Chunk;
+use language::{Chunk, LanguageAwareStyling};
use multi_buffer::{MultiBufferChunks, MultiBufferOffset, MultiBufferSnapshot, ToOffset as _};
use std::{
cmp,
@@ -34,7 +34,7 @@ impl<'a> CustomHighlightsChunks<'a> {
#[ztracing::instrument(skip_all)]
pub fn new(
range: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
text_highlights: Option<&'a TextHighlights>,
semantic_token_highlights: Option<&'a SemanticTokensHighlights>,
multibuffer_snapshot: &'a MultiBufferSnapshot,
@@ -308,7 +308,10 @@ mod tests {
// Get all chunks and verify their bitmaps
let chunks = CustomHighlightsChunks::new(
MultiBufferOffset(0)..buffer_snapshot.len(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
None,
None,
&buffer_snapshot,
diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs
index 1554bb96dab0e2f76a17df1396bd945f332af208..4c6c04b86cc3e2fb9ef10be58c14faae623dc65f 100644
--- a/crates/editor/src/display_map/fold_map.rs
+++ b/crates/editor/src/display_map/fold_map.rs
@@ -5,7 +5,7 @@ use super::{
inlay_map::{InlayBufferRows, InlayChunks, InlayEdit, InlayOffset, InlayPoint, InlaySnapshot},
};
use gpui::{AnyElement, App, ElementId, HighlightStyle, Pixels, SharedString, Stateful, Window};
-use language::{Edit, HighlightId, Point};
+use language::{Edit, HighlightId, LanguageAwareStyling, Point};
use multi_buffer::{
Anchor, AnchorRangeExt, MBTextSummary, MultiBufferOffset, MultiBufferRow, MultiBufferSnapshot,
RowInfo, ToOffset,
@@ -707,7 +707,10 @@ impl FoldSnapshot {
pub fn text(&self) -> String {
self.chunks(
FoldOffset(MultiBufferOffset(0))..self.len(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
)
.map(|c| c.text)
@@ -909,7 +912,7 @@ impl FoldSnapshot {
pub(crate) fn chunks<'a>(
&'a self,
range: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
highlights: Highlights<'a>,
) -> FoldChunks<'a> {
let mut transform_cursor = self
@@ -954,7 +957,10 @@ impl FoldSnapshot {
pub fn chars_at(&self, start: FoldPoint) -> impl '_ + Iterator
- {
self.chunks(
start.to_offset(self)..self.len(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
)
.flat_map(|chunk| chunk.text.chars())
@@ -964,7 +970,10 @@ impl FoldSnapshot {
pub fn chunks_at(&self, start: FoldPoint) -> FoldChunks<'_> {
self.chunks(
start.to_offset(self)..self.len(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
)
}
@@ -2131,7 +2140,14 @@ mod tests {
let text = &expected_text[start.0.0..end.0.0];
assert_eq!(
snapshot
- .chunks(start..end, false, Highlights::default())
+ .chunks(
+ start..end,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ Highlights::default()
+ )
.map(|c| c.text)
.collect::(),
text,
@@ -2303,7 +2319,10 @@ mod tests {
// Get all chunks and verify their bitmaps
let chunks = snapshot.chunks(
FoldOffset(MultiBufferOffset(0))..FoldOffset(snapshot.len().0),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
);
diff --git a/crates/editor/src/display_map/inlay_map.rs b/crates/editor/src/display_map/inlay_map.rs
index 47ca295ccb1a08768ce129b92d10506294a9cf78..698b58682d7ef7682094e7728f419348fd5d32d9 100644
--- a/crates/editor/src/display_map/inlay_map.rs
+++ b/crates/editor/src/display_map/inlay_map.rs
@@ -10,7 +10,7 @@ use crate::{
inlays::{Inlay, InlayContent},
};
use collections::BTreeSet;
-use language::{Chunk, Edit, Point, TextSummary};
+use language::{Chunk, Edit, LanguageAwareStyling, Point, TextSummary};
use multi_buffer::{
MBTextSummary, MultiBufferOffset, MultiBufferRow, MultiBufferRows, MultiBufferSnapshot,
RowInfo, ToOffset,
@@ -1200,7 +1200,7 @@ impl InlaySnapshot {
pub(crate) fn chunks<'a>(
&'a self,
range: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
highlights: Highlights<'a>,
) -> InlayChunks<'a> {
let mut cursor = self
@@ -1234,9 +1234,16 @@ impl InlaySnapshot {
#[cfg(test)]
#[ztracing::instrument(skip_all)]
pub fn text(&self) -> String {
- self.chunks(Default::default()..self.len(), false, Highlights::default())
- .map(|chunk| chunk.chunk.text)
- .collect()
+ self.chunks(
+ Default::default()..self.len(),
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ Highlights::default(),
+ )
+ .map(|chunk| chunk.chunk.text)
+ .collect()
}
#[ztracing::instrument(skip_all)]
@@ -1979,7 +1986,10 @@ mod tests {
let actual_text = inlay_snapshot
.chunks(
range,
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights {
text_highlights: Some(&text_highlights),
inlay_highlights: Some(&inlay_highlights),
@@ -2158,7 +2168,10 @@ mod tests {
// Get all chunks and verify their bitmaps
let chunks = snapshot.chunks(
InlayOffset(MultiBufferOffset(0))..snapshot.len(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
);
@@ -2293,7 +2306,10 @@ mod tests {
let chunks: Vec<_> = inlay_snapshot
.chunks(
InlayOffset(MultiBufferOffset(0))..inlay_snapshot.len(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
highlights,
)
.collect();
@@ -2408,7 +2424,10 @@ mod tests {
let chunks: Vec<_> = inlay_snapshot
.chunks(
InlayOffset(MultiBufferOffset(0))..inlay_snapshot.len(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
highlights,
)
.collect();
diff --git a/crates/editor/src/display_map/tab_map.rs b/crates/editor/src/display_map/tab_map.rs
index 187ed8614e01ddb8dcdae930fd484de9594cf63f..bb0e642df380e04fcfa9b9533f027be7171b4975 100644
--- a/crates/editor/src/display_map/tab_map.rs
+++ b/crates/editor/src/display_map/tab_map.rs
@@ -3,7 +3,7 @@ use super::{
fold_map::{self, Chunk, FoldChunks, FoldEdit, FoldPoint, FoldSnapshot},
};
-use language::Point;
+use language::{LanguageAwareStyling, Point};
use multi_buffer::MultiBufferSnapshot;
use std::{cmp, num::NonZeroU32, ops::Range};
use sum_tree::Bias;
@@ -101,7 +101,10 @@ impl TabMap {
let mut last_tab_with_changed_expansion_offset = None;
'outer: for chunk in old_snapshot.fold_snapshot.chunks(
fold_edit.old.end..old_end_row_successor_offset,
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
) {
let mut remaining_tabs = chunk.tabs;
@@ -244,7 +247,14 @@ impl TabSnapshot {
self.max_point()
};
let first_line_chars = self
- .chunks(range.start..line_end, false, Highlights::default())
+ .chunks(
+ range.start..line_end,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ Highlights::default(),
+ )
.flat_map(|chunk| chunk.text.chars())
.take_while(|&c| c != '\n')
.count() as u32;
@@ -254,7 +264,10 @@ impl TabSnapshot {
} else {
self.chunks(
TabPoint::new(range.end.row(), 0)..range.end,
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
)
.flat_map(|chunk| chunk.text.chars())
@@ -274,7 +287,7 @@ impl TabSnapshot {
pub(crate) fn chunks<'a>(
&'a self,
range: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
highlights: Highlights<'a>,
) -> TabChunks<'a> {
let (input_start, expanded_char_column, to_next_stop) =
@@ -324,7 +337,10 @@ impl TabSnapshot {
pub fn text(&self) -> String {
self.chunks(
TabPoint::zero()..self.max_point(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
)
.map(|chunk| chunk.text)
@@ -1170,7 +1186,10 @@ mod tests {
tab_snapshot
.chunks(
TabPoint::new(0, ix as u32)..tab_snapshot.max_point(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
)
.map(|c| c.text)
@@ -1246,8 +1265,14 @@ mod tests {
let mut chunks = Vec::new();
let mut was_tab = false;
let mut text = String::new();
- for chunk in snapshot.chunks(start..snapshot.max_point(), false, Highlights::default())
- {
+ for chunk in snapshot.chunks(
+ start..snapshot.max_point(),
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ Highlights::default(),
+ ) {
if chunk.is_tab != was_tab {
if !text.is_empty() {
chunks.push((mem::take(&mut text), was_tab));
@@ -1296,7 +1321,14 @@ mod tests {
// This should not panic.
let result: String = tab_snapshot
- .chunks(start..end, false, Highlights::default())
+ .chunks(
+ start..end,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ Highlights::default(),
+ )
.map(|c| c.text)
.collect();
assert!(!result.is_empty());
@@ -1354,7 +1386,14 @@ mod tests {
let expected_summary = TextSummary::from(expected_text.as_str());
assert_eq!(
tabs_snapshot
- .chunks(start..end, false, Highlights::default())
+ .chunks(
+ start..end,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ Highlights::default()
+ )
.map(|c| c.text)
.collect::(),
expected_text,
@@ -1436,7 +1475,10 @@ mod tests {
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let chunks = fold_snapshot.chunks(
FoldOffset(MultiBufferOffset(0))..fold_snapshot.len(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Default::default(),
);
let mut cursor = TabStopCursor::new(chunks);
@@ -1598,7 +1640,10 @@ mod tests {
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
let chunks = fold_snapshot.chunks(
FoldOffset(MultiBufferOffset(0))..fold_snapshot.len(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Default::default(),
);
let mut cursor = TabStopCursor::new(chunks);
diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs
index d21642977ed923e15a583dfe767fd566e78c5de9..4ff11b1ef67971c5159a81278a5afaaaea171a28 100644
--- a/crates/editor/src/display_map/wrap_map.rs
+++ b/crates/editor/src/display_map/wrap_map.rs
@@ -5,7 +5,7 @@ use super::{
tab_map::{self, TabEdit, TabPoint, TabSnapshot},
};
use gpui::{App, AppContext as _, Context, Entity, Font, LineWrapper, Pixels, Task};
-use language::Point;
+use language::{LanguageAwareStyling, Point};
use multi_buffer::{MultiBufferSnapshot, RowInfo};
use smol::future::yield_now;
use std::{cmp, collections::VecDeque, mem, ops::Range, sync::LazyLock, time::Duration};
@@ -513,7 +513,10 @@ impl WrapSnapshot {
let mut remaining = None;
let mut chunks = new_tab_snapshot.chunks(
TabPoint::new(edit.new_rows.start, 0)..new_tab_snapshot.max_point(),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
);
let mut edit_transforms = Vec::::new();
@@ -656,7 +659,7 @@ impl WrapSnapshot {
pub(crate) fn chunks<'a>(
&'a self,
rows: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
highlights: Highlights<'a>,
) -> WrapChunks<'a> {
let output_start = WrapPoint::new(rows.start, 0);
@@ -960,7 +963,10 @@ impl WrapSnapshot {
pub fn text_chunks(&self, wrap_row: WrapRow) -> impl Iterator
- {
self.chunks(
wrap_row..self.max_point().row() + WrapRow(1),
- false,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
Highlights::default(),
)
.map(|h| h.text)
@@ -1719,7 +1725,10 @@ mod tests {
let actual_text = self
.chunks(
WrapRow(start_row)..WrapRow(end_row),
- true,
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
Highlights::default(),
)
.map(|c| c.text)
diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs
index 6550d79c9f73799d37ccf6433db38f2719636ee6..ae852b1055b33f151b402ee999ce50ba064788a4 100644
--- a/crates/editor/src/editor.rs
+++ b/crates/editor/src/editor.rs
@@ -132,9 +132,9 @@ use language::{
AutoindentMode, BlockCommentConfig, BracketMatch, BracketPair, Buffer, BufferRow,
BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape,
DiagnosticEntryRef, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind,
- IndentSize, Language, LanguageName, LanguageRegistry, LanguageScope, LocalFile, OffsetRangeExt,
- OutlineItem, Point, Selection, SelectionGoal, TextObject, TransactionId, TreeSitterOptions,
- WordsQuery,
+ IndentSize, Language, LanguageAwareStyling, LanguageName, LanguageRegistry, LanguageScope,
+ LocalFile, OffsetRangeExt, OutlineItem, Point, Selection, SelectionGoal, TextObject,
+ TransactionId, TreeSitterOptions, WordsQuery,
language_settings::{
self, AllLanguageSettings, LanguageSettings, LspInsertMode, RewrapBehavior,
WordsCompletionMode, all_language_settings,
@@ -19147,7 +19147,13 @@ impl Editor {
let range = buffer.anchor_before(rename_start)..buffer.anchor_after(rename_end);
let mut old_highlight_id = None;
let old_name: Arc = buffer
- .chunks(rename_start..rename_end, true)
+ .chunks(
+ rename_start..rename_end,
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
+ )
.map(|chunk| {
if old_highlight_id.is_none() {
old_highlight_id = chunk.syntax_highlight_id;
@@ -25005,7 +25011,13 @@ impl Editor {
selection.range()
};
- let chunks = snapshot.chunks(range, true);
+ let chunks = snapshot.chunks(
+ range,
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
+ );
let mut lines = Vec::new();
let mut line: VecDeque = VecDeque::new();
diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs
index 7a532dc7a75ea3583456be6611ef072cd7692bc7..512fbb8855aa11d8c540065a55eb296919012821 100644
--- a/crates/editor/src/element.rs
+++ b/crates/editor/src/element.rs
@@ -51,7 +51,10 @@ use gpui::{
pattern_slash, point, px, quad, relative, size, solid_background, transparent_black,
};
use itertools::Itertools;
-use language::{HighlightedText, IndentGuideSettings, language_settings::ShowWhitespaceSetting};
+use language::{
+ HighlightedText, IndentGuideSettings, LanguageAwareStyling,
+ language_settings::ShowWhitespaceSetting,
+};
use markdown::Markdown;
use multi_buffer::{
Anchor, ExcerptBoundaryInfo, ExpandExcerptDirection, ExpandInfo, MultiBufferPoint,
@@ -3819,7 +3822,11 @@ impl EditorElement {
} else {
let use_tree_sitter = !snapshot.semantic_tokens_enabled
|| snapshot.use_tree_sitter_for_syntax(rows.start, cx);
- let chunks = snapshot.highlighted_chunks(rows.clone(), use_tree_sitter, style);
+ let language_aware = LanguageAwareStyling {
+ tree_sitter: use_tree_sitter,
+ diagnostics: true,
+ };
+ let chunks = snapshot.highlighted_chunks(rows.clone(), language_aware, style);
LineWithInvisibles::from_chunks(
chunks,
style,
@@ -11999,7 +12006,11 @@ pub fn layout_line(
) -> LineWithInvisibles {
let use_tree_sitter =
!snapshot.semantic_tokens_enabled || snapshot.use_tree_sitter_for_syntax(row, cx);
- let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), use_tree_sitter, style);
+ let language_aware = LanguageAwareStyling {
+ tree_sitter: use_tree_sitter,
+ diagnostics: true,
+ };
+ let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), language_aware, style);
LineWithInvisibles::from_chunks(
chunks,
style,
diff --git a/crates/editor/src/semantic_tokens.rs b/crates/editor/src/semantic_tokens.rs
index 5e78be70d5627bd4f484a3efd44b13519b31b400..d485cfa70237fed542a240f202a8dc47b07467c4 100644
--- a/crates/editor/src/semantic_tokens.rs
+++ b/crates/editor/src/semantic_tokens.rs
@@ -475,13 +475,17 @@ mod tests {
use gpui::{
AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _,
};
- use language::{Language, LanguageConfig, LanguageMatcher};
+ use language::{
+ Diagnostic, DiagnosticEntry, DiagnosticSet, Language, LanguageAwareStyling, LanguageConfig,
+ LanguageMatcher,
+ };
use languages::FakeLspAdapter;
+ use lsp::LanguageServerId;
use multi_buffer::{
AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
};
use project::Project;
- use rope::Point;
+ use rope::{Point, PointUtf16};
use serde_json::json;
use settings::{
GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules,
@@ -2088,6 +2092,130 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_diagnostics_visible_when_semantic_token_set_to_full(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ update_test_language_settings(cx, &|language_settings| {
+ language_settings.languages.0.insert(
+ "Rust".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..LanguageSettingsContent::default()
+ },
+ );
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: vec!["function".into()],
+ token_modifiers: Vec::new(),
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..lsp::SemanticTokensOptions::default()
+ },
+ ),
+ ),
+ ..lsp::ServerCapabilities::default()
+ },
+ cx,
+ )
+ .await;
+
+ let mut full_request = cx
+ .set_request_handler::(
+ move |_, _, _| {
+ async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![
+ 0, // delta_line
+ 3, // delta_start
+ 4, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ result_id: Some("a".into()),
+ },
+ )))
+ }
+ },
+ );
+
+ cx.set_state("ˇfn main() {}");
+ assert!(full_request.next().await.is_some());
+
+ let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
+ task.await;
+
+ cx.update_buffer(|buffer, cx| {
+ buffer.update_diagnostics(
+ LanguageServerId(0),
+ DiagnosticSet::new(
+ [DiagnosticEntry {
+ range: PointUtf16::new(0, 3)..PointUtf16::new(0, 7),
+ diagnostic: Diagnostic {
+ severity: lsp::DiagnosticSeverity::ERROR,
+ group_id: 1,
+ message: "unused function".into(),
+ ..Default::default()
+ },
+ }],
+ buffer,
+ ),
+ cx,
+ )
+ });
+
+ cx.run_until_parked();
+ let chunks = cx.update_editor(|editor, window, cx| {
+ editor
+ .snapshot(window, cx)
+ .display_snapshot
+ .chunks(
+ crate::display_map::DisplayRow(0)..crate::display_map::DisplayRow(1),
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: true,
+ },
+ crate::HighlightStyles::default(),
+ )
+ .map(|chunk| {
+ (
+ chunk.text.to_string(),
+ chunk.diagnostic_severity,
+ chunk.highlight_style,
+ )
+ })
+ .collect::>()
+ });
+
+ assert_eq!(
+ extract_semantic_highlights(&cx.editor, &cx),
+ vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
+ );
+
+ assert!(
+ chunks.iter().any(
+ |(text, severity, style): &(
+ String,
+ Option,
+ Option
+ )| {
+ text == "main"
+ && *severity == Some(lsp::DiagnosticSeverity::ERROR)
+ && style.is_some()
+ }
+ ),
+ "expected 'main' chunk to have both diagnostic and semantic styling: {:?}",
+ chunks
+ );
+ }
+
fn extract_semantic_highlight_styles(
editor: &Entity,
cx: &TestAppContext,
diff --git a/crates/language/src/buffer.rs b/crates/language/src/buffer.rs
index a467cd789555d39a32ad4e1d7b21da7b14df9c25..1e54134efcab4f0074a73b241f8e0d04cfbcbcdd 100644
--- a/crates/language/src/buffer.rs
+++ b/crates/language/src/buffer.rs
@@ -3733,16 +3733,24 @@ impl BufferSnapshot {
/// returned in chunks where each chunk has a single syntax highlighting style and
/// diagnostic status.
#[ztracing::instrument(skip_all)]
- pub fn chunks(&self, range: Range, language_aware: bool) -> BufferChunks<'_> {
+ pub fn chunks(
+ &self,
+ range: Range,
+ language_aware: LanguageAwareStyling,
+ ) -> BufferChunks<'_> {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut syntax = None;
- if language_aware {
+ if language_aware.tree_sitter {
syntax = Some(self.get_highlights(range.clone()));
}
- // We want to look at diagnostic spans only when iterating over language-annotated chunks.
- let diagnostics = language_aware;
- BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
+ BufferChunks::new(
+ self.text.as_rope(),
+ range,
+ syntax,
+ language_aware.diagnostics,
+ Some(self),
+ )
}
pub fn highlighted_text_for_range(
@@ -4477,7 +4485,13 @@ impl BufferSnapshot {
let mut text = String::new();
let mut highlight_ranges = Vec::new();
let mut name_ranges = Vec::new();
- let mut chunks = self.chunks(source_range_for_text.clone(), true);
+ let mut chunks = self.chunks(
+ source_range_for_text.clone(),
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
+ );
let mut last_buffer_range_end = 0;
for (buffer_range, is_name) in buffer_ranges {
let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
@@ -5402,7 +5416,13 @@ impl BufferSnapshot {
let mut words = BTreeMap::default();
let mut current_word_start_ix = None;
let mut chunk_ix = query.range.start;
- for chunk in self.chunks(query.range, false) {
+ for chunk in self.chunks(
+ query.range,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ ) {
for (i, c) in chunk.text.char_indices() {
let ix = chunk_ix + i;
if classifier.is_word(c) {
@@ -5441,6 +5461,15 @@ impl BufferSnapshot {
}
}
+/// A configuration to use when producing styled text chunks.
+#[derive(Clone, Copy)]
+pub struct LanguageAwareStyling {
+ /// Whether to highlight text chunks using tree-sitter.
+ pub tree_sitter: bool,
+ /// Whether to highlight text chunks based on the diagnostics data.
+ pub diagnostics: bool,
+}
+
pub struct WordsQuery<'a> {
/// Only returns words with all chars from the fuzzy string in them.
pub fuzzy_contents: Option<&'a str>,
diff --git a/crates/language/src/buffer_tests.rs b/crates/language/src/buffer_tests.rs
index 9308ee6f0a0ee207b30be9e6fafa73ba9452d94c..9f4562bf547f389c5ecc5ca29470ac4e49da0e04 100644
--- a/crates/language/src/buffer_tests.rs
+++ b/crates/language/src/buffer_tests.rs
@@ -4102,7 +4102,13 @@ fn test_random_chunk_bitmaps(cx: &mut App, mut rng: StdRng) {
let snapshot = buffer.read(cx).snapshot();
// Get all chunks and verify their bitmaps
- let chunks = snapshot.chunks(0..snapshot.len(), false);
+ let chunks = snapshot.chunks(
+ 0..snapshot.len(),
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ );
for chunk in chunks {
let chunk_text = chunk.text;
diff --git a/crates/multi_buffer/src/multi_buffer.rs b/crates/multi_buffer/src/multi_buffer.rs
index a54ff64af028f44adced1758933f794e9a002c5a..47c1288c8f9baeebf4afd54dd0597bfe5a41d15f 100644
--- a/crates/multi_buffer/src/multi_buffer.rs
+++ b/crates/multi_buffer/src/multi_buffer.rs
@@ -21,9 +21,9 @@ use itertools::Itertools;
use language::{
AutoindentMode, Buffer, BufferChunks, BufferRow, BufferSnapshot, Capability, CharClassifier,
CharKind, CharScopeContext, Chunk, CursorShape, DiagnosticEntryRef, File, IndentGuideSettings,
- IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16, Outline, OutlineItem, Point,
- PointUtf16, Selection, TextDimension, TextObject, ToOffset as _, ToPoint as _, TransactionId,
- TreeSitterOptions, Unclipped,
+ IndentSize, Language, LanguageAwareStyling, LanguageScope, OffsetRangeExt, OffsetUtf16,
+ Outline, OutlineItem, Point, PointUtf16, Selection, TextDimension, TextObject, ToOffset as _,
+ ToPoint as _, TransactionId, TreeSitterOptions, Unclipped,
language_settings::{AllLanguageSettings, LanguageSettings},
};
@@ -1072,7 +1072,7 @@ pub struct MultiBufferChunks<'a> {
range: Range,
excerpt_offset_range: Range,
excerpt_chunks: Option>,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
snapshot: &'a MultiBufferSnapshot,
}
@@ -3340,9 +3340,15 @@ impl EventEmitter for MultiBuffer {}
impl MultiBufferSnapshot {
pub fn text(&self) -> String {
- self.chunks(MultiBufferOffset::ZERO..self.len(), false)
- .map(|chunk| chunk.text)
- .collect()
+ self.chunks(
+ MultiBufferOffset::ZERO..self.len(),
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ )
+ .map(|chunk| chunk.text)
+ .collect()
}
pub fn reversed_chars_at(&self, position: T) -> impl Iterator
- + '_ {
@@ -3378,7 +3384,14 @@ impl MultiBufferSnapshot {
}
pub fn text_for_range(&self, range: Range) -> impl Iterator
- + '_ {
- self.chunks(range, false).map(|chunk| chunk.text)
+ self.chunks(
+ range,
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ )
+ .map(|chunk| chunk.text)
}
pub fn is_line_blank(&self, row: MultiBufferRow) -> bool {
@@ -4178,7 +4191,7 @@ impl MultiBufferSnapshot {
pub fn chunks(
&self,
range: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
) -> MultiBufferChunks<'_> {
let mut chunks = MultiBufferChunks {
excerpt_offset_range: ExcerptDimension(MultiBufferOffset::ZERO)
@@ -7227,7 +7240,7 @@ impl Excerpt {
fn chunks_in_range<'a>(
&'a self,
range: Range,
- language_aware: bool,
+ language_aware: LanguageAwareStyling,
snapshot: &'a MultiBufferSnapshot,
) -> ExcerptChunks<'a> {
let buffer = self.buffer_snapshot(snapshot);
diff --git a/crates/multi_buffer/src/multi_buffer_tests.rs b/crates/multi_buffer/src/multi_buffer_tests.rs
index bc904d1a05488ee365ebddf36c3b30accdfb9301..cebc9073e9d87a3c6eaf71d78e181d3e833ad56a 100644
--- a/crates/multi_buffer/src/multi_buffer_tests.rs
+++ b/crates/multi_buffer/src/multi_buffer_tests.rs
@@ -5039,7 +5039,13 @@ fn check_edits(
fn assert_chunks_in_ranges(snapshot: &MultiBufferSnapshot) {
let full_text = snapshot.text();
for ix in 0..full_text.len() {
- let mut chunks = snapshot.chunks(MultiBufferOffset(0)..snapshot.len(), false);
+ let mut chunks = snapshot.chunks(
+ MultiBufferOffset(0)..snapshot.len(),
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ );
chunks.seek(MultiBufferOffset(ix)..snapshot.len());
let tail = chunks.map(|chunk| chunk.text).collect::();
assert_eq!(tail, &full_text[ix..], "seek to range: {:?}", ix..);
@@ -5300,7 +5306,13 @@ fn test_random_chunk_bitmaps(cx: &mut App, mut rng: StdRng) {
let snapshot = multibuffer.read(cx).snapshot(cx);
- let chunks = snapshot.chunks(MultiBufferOffset(0)..snapshot.len(), false);
+ let chunks = snapshot.chunks(
+ MultiBufferOffset(0)..snapshot.len(),
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ );
for chunk in chunks {
let chunk_text = chunk.text;
@@ -5466,7 +5478,13 @@ fn test_random_chunk_bitmaps_with_diffs(cx: &mut App, mut rng: StdRng) {
let snapshot = multibuffer.read(cx).snapshot(cx);
- let chunks = snapshot.chunks(MultiBufferOffset(0)..snapshot.len(), false);
+ let chunks = snapshot.chunks(
+ MultiBufferOffset(0)..snapshot.len(),
+ LanguageAwareStyling {
+ tree_sitter: false,
+ diagnostics: false,
+ },
+ );
for chunk in chunks {
let chunk_text = chunk.text;
diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs
index b7d5afcb687c017fdf253717a9dae2c95c55b53b..fa23b805cd48461dabaddbb7670155cdfe1ba8b0 100644
--- a/crates/outline_panel/src/outline_panel.rs
+++ b/crates/outline_panel/src/outline_panel.rs
@@ -23,8 +23,8 @@ use gpui::{
uniform_list,
};
use itertools::Itertools;
-use language::language_settings::LanguageSettings;
use language::{Anchor, BufferId, BufferSnapshot, OffsetRangeExt, OutlineItem};
+use language::{LanguageAwareStyling, language_settings::LanguageSettings};
use menu::{Cancel, SelectFirst, SelectLast, SelectNext, SelectPrevious};
use std::{
@@ -217,10 +217,13 @@ impl SearchState {
let mut offset = context_offset_range.start;
let mut context_text = String::new();
let mut highlight_ranges = Vec::new();
- for mut chunk in highlight_arguments
- .multi_buffer_snapshot
- .chunks(context_offset_range.start..context_offset_range.end, true)
- {
+ for mut chunk in highlight_arguments.multi_buffer_snapshot.chunks(
+ context_offset_range.start..context_offset_range.end,
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
+ ) {
if !non_whitespace_symbol_occurred {
for c in chunk.text.chars() {
if c.is_whitespace() {
diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs
index 2f579f5a724db143bbd4b0f9853a217bd6b14655..9ea50fdc8f12b68147c1073219625c4fd257afd3 100644
--- a/crates/project/src/lsp_store.rs
+++ b/crates/project/src/lsp_store.rs
@@ -72,9 +72,10 @@ use itertools::Itertools as _;
use language::{
Bias, BinaryStatus, Buffer, BufferRow, BufferSnapshot, CachedLspAdapter, Capability, CodeLabel,
CodeLabelExt, Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, Diff,
- File as _, Language, LanguageName, LanguageRegistry, LocalFile, LspAdapter, LspAdapterDelegate,
- LspInstaller, ManifestDelegate, ManifestName, ModelineSettings, OffsetUtf16, Patch, PointUtf16,
- TextBufferSnapshot, ToOffset, ToOffsetUtf16, ToPointUtf16, Toolchain, Transaction, Unclipped,
+ File as _, Language, LanguageAwareStyling, LanguageName, LanguageRegistry, LocalFile,
+ LspAdapter, LspAdapterDelegate, LspInstaller, ManifestDelegate, ManifestName, ModelineSettings,
+ OffsetUtf16, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToOffsetUtf16, ToPointUtf16,
+ Toolchain, Transaction, Unclipped,
language_settings::{
AllLanguageSettings, FormatOnSave, Formatter, LanguageSettings, all_language_settings,
},
@@ -13527,7 +13528,13 @@ fn resolve_word_completion(snapshot: &BufferSnapshot, completion: &mut Completio
}
let mut offset = 0;
- for chunk in snapshot.chunks(word_range.clone(), true) {
+ for chunk in snapshot.chunks(
+ word_range.clone(),
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
+ ) {
let end_offset = offset + chunk.text.len();
if let Some(highlight_id) = chunk.syntax_highlight_id {
completion
diff --git a/crates/project/tests/integration/project_tests.rs b/crates/project/tests/integration/project_tests.rs
index d6c2ce37c9e60e17bd43c3f6c3ad10cde52b4bec..f680ccee78e997064af2647f68d8aa3631fa4bd3 100644
--- a/crates/project/tests/integration/project_tests.rs
+++ b/crates/project/tests/integration/project_tests.rs
@@ -41,9 +41,10 @@ use gpui::{
use itertools::Itertools;
use language::{
Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
- DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
- LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
- ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
+ DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageAwareStyling,
+ LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
+ ManifestQuery, OffsetRangeExt, Point, ToPoint, Toolchain, ToolchainList, ToolchainLister,
+ ToolchainMetadata,
language_settings::{LanguageSettings, LanguageSettingsContent},
markdown_lang, rust_lang, tree_sitter_typescript,
};
@@ -4382,7 +4383,13 @@ fn chunks_with_diagnostics(
range: Range,
) -> Vec<(String, Option)> {
let mut chunks: Vec<(String, Option)> = Vec::new();
- for chunk in buffer.snapshot().chunks(range, true) {
+ for chunk in buffer.snapshot().chunks(
+ range,
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
+ ) {
if chunks
.last()
.is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
diff --git a/crates/vim/src/state.rs b/crates/vim/src/state.rs
index 4dd557199ab9aebe0a2b26438bdaa0e321a956b2..9e9b42d31900e0ceb160df4ad4dd3ce3a530e155 100644
--- a/crates/vim/src/state.rs
+++ b/crates/vim/src/state.rs
@@ -17,7 +17,7 @@ use gpui::{
Action, App, AppContext, BorrowAppContext, ClipboardEntry, ClipboardItem, DismissEvent, Entity,
EntityId, Global, HighlightStyle, StyledText, Subscription, Task, TextStyle, WeakEntity,
};
-use language::{Buffer, BufferEvent, BufferId, Chunk, Point};
+use language::{Buffer, BufferEvent, BufferId, Chunk, LanguageAwareStyling, Point};
use multi_buffer::MultiBufferRow;
use picker::{Picker, PickerDelegate};
@@ -1504,7 +1504,10 @@ impl PickerDelegate for MarksViewDelegate {
position.row,
snapshot.line_len(MultiBufferRow(position.row)),
),
- true,
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
);
matches.push(MarksMatch {
name: name.clone(),
@@ -1530,7 +1533,10 @@ impl PickerDelegate for MarksViewDelegate {
let chunks = snapshot.chunks(
Point::new(position.row, 0)
..Point::new(position.row, snapshot.line_len(position.row)),
- true,
+ LanguageAwareStyling {
+ tree_sitter: true,
+ diagnostics: true,
+ },
);
matches.push(MarksMatch {
From a856093ccafa7b422080f3073097560b04e8918d Mon Sep 17 00:00:00 2001
From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com>
Date: Tue, 7 Apr 2026 11:51:46 -0300
Subject: [PATCH 14/22] sidebar: Fix focus movement while toggling it on and
off (#53283)
I was testing out the changes in
https://github.com/zed-industries/zed/pull/52730 and realized that the
agent panel, when full screen, would be auto-dismissed if I toggled the
sidebar off. Turns out this happens because we were "hard-coding" the
focus back to the center pane, which was automatically dismissing zoomed
items. So, in this PR, I am essentially copying the ModalLayer approach
of storing whatever was focused before, so we can return focus to it
if possible.
Release Notes:
- N/A
---
crates/workspace/src/multi_workspace.rs | 36 ++++++++++++++++++-------
1 file changed, 27 insertions(+), 9 deletions(-)
diff --git a/crates/workspace/src/multi_workspace.rs b/crates/workspace/src/multi_workspace.rs
index a61ad3576c57ecd8b1811363d6b5607ead737821..1b057e3fb1e3b5e0639e4a44462fc7528f6db85d 100644
--- a/crates/workspace/src/multi_workspace.rs
+++ b/crates/workspace/src/multi_workspace.rs
@@ -276,6 +276,7 @@ pub struct MultiWorkspace {
pending_removal_tasks: Vec>,
_serialize_task: Option>,
_subscriptions: Vec,
+ previous_focus_handle: Option,
}
impl EventEmitter for MultiWorkspace {}
@@ -333,6 +334,7 @@ impl MultiWorkspace {
quit_subscription,
settings_subscription,
],
+ previous_focus_handle: None,
}
}
@@ -387,6 +389,7 @@ impl MultiWorkspace {
if self.sidebar_open() {
self.close_sidebar(window, cx);
} else {
+ self.previous_focus_handle = window.focused(cx);
self.open_sidebar(cx);
if let Some(sidebar) = &self.sidebar {
sidebar.prepare_for_focus(window, cx);
@@ -417,14 +420,16 @@ impl MultiWorkspace {
.is_some_and(|s| s.focus_handle(cx).contains_focused(window, cx));
if sidebar_is_focused {
- let pane = self.workspace().read(cx).active_pane().clone();
- let pane_focus = pane.read(cx).focus_handle(cx);
- window.focus(&pane_focus, cx);
- } else if let Some(sidebar) = &self.sidebar {
- sidebar.prepare_for_focus(window, cx);
- sidebar.focus(window, cx);
+ self.restore_previous_focus(false, window, cx);
+ } else {
+ self.previous_focus_handle = window.focused(cx);
+ if let Some(sidebar) = &self.sidebar {
+ sidebar.prepare_for_focus(window, cx);
+ sidebar.focus(window, cx);
+ }
}
} else {
+ self.previous_focus_handle = window.focused(cx);
self.open_sidebar(cx);
if let Some(sidebar) = &self.sidebar {
sidebar.prepare_for_focus(window, cx);
@@ -457,13 +462,26 @@ impl MultiWorkspace {
workspace.set_sidebar_focus_handle(None);
});
}
- let pane = self.workspace().read(cx).active_pane().clone();
- let pane_focus = pane.read(cx).focus_handle(cx);
- window.focus(&pane_focus, cx);
+ self.restore_previous_focus(true, window, cx);
self.serialize(cx);
cx.notify();
}
+ fn restore_previous_focus(&mut self, clear: bool, window: &mut Window, cx: &mut Context) {
+ let focus_handle = if clear {
+ self.previous_focus_handle.take()
+ } else {
+ self.previous_focus_handle.clone()
+ };
+
+ if let Some(previous_focus) = focus_handle {
+ previous_focus.focus(window, cx);
+ } else {
+ let pane = self.workspace().read(cx).active_pane().clone();
+ window.focus(&pane.read(cx).focus_handle(cx), cx);
+ }
+ }
+
pub fn close_window(&mut self, _: &CloseWindow, window: &mut Window, cx: &mut Context) {
cx.spawn_in(window, async move |this, cx| {
let workspaces = this.update(cx, |multi_workspace, _cx| {
From 98c17ca1607a9bb223f831af6e221b3e7d47b28c Mon Sep 17 00:00:00 2001
From: Agus Zubiaga
Date: Tue, 7 Apr 2026 12:28:19 -0300
Subject: [PATCH 15/22] language_models: Refactor deps and extract cloud
(#53270)
- `language_model` no longer depends on provider-specific crates such as
`anthropic` and `open_ai` (inverted dependency)
- `language_model_core` was extracted from `language_model` which
contains the types for the provider-specific crates to convert to/from.
- `gpui::SharedString` has been extracted into its own crate (still
exposed by `gpui`), so `language_model_core` and provider API crates
don't have to depend on `gpui`.
- Removes some unnecessary `&'static str` | `SharedString` -> `String`
-> `SharedString` conversions across the codebase.
- Extracts the core logic of the cloud `LanguageModelProvider` into its
own crate with simpler dependencies.
Release Notes:
- N/A
---------
Co-authored-by: John Tur
---
Cargo.lock | 89 +-
Cargo.toml | 6 +
crates/agent/src/tools/read_file_tool.rs | 2 +-
crates/agent_servers/src/acp.rs | 2 +-
crates/agent_ui/src/agent_registry_ui.rs | 2 +-
crates/agent_ui/src/mention_set.rs | 2 +-
crates/anthropic/Cargo.toml | 4 +
crates/anthropic/src/anthropic.rs | 84 +
crates/anthropic/src/completion.rs | 765 +++++++
crates/client/Cargo.toml | 1 -
crates/client/src/client.rs | 2 +-
crates/client/src/llm_token.rs | 2 +-
crates/cloud_api_client/Cargo.toml | 1 +
.../cloud_api_client/src/cloud_api_client.rs | 3 +
crates/cloud_api_client/src/llm_token.rs | 74 +
crates/cloud_llm_client/Cargo.toml | 3 +-
.../cloud_llm_client/src/cloud_llm_client.rs | 1 +
crates/collab_ui/src/collab_panel.rs | 4 +-
crates/edit_prediction/Cargo.toml | 3 +-
crates/edit_prediction/src/edit_prediction.rs | 2 +-
crates/edit_prediction/src/ollama.rs | 2 +-
.../src/zed_edit_prediction_delegate.rs | 4 +-
crates/edit_prediction_cli/Cargo.toml | 2 +-
crates/env_var/Cargo.toml | 2 +-
crates/env_var/src/env_var.rs | 2 +-
crates/git_ui/src/branch_picker.rs | 2 +-
crates/google_ai/Cargo.toml | 4 +-
crates/google_ai/src/completion.rs | 492 +++++
crates/google_ai/src/google_ai.rs | 3 +-
crates/gpui/Cargo.toml | 1 +
crates/gpui/src/gpui.rs | 3 +-
crates/gpui/src/text_system/line.rs | 2 +-
crates/gpui_shared_string/Cargo.toml | 17 +
crates/gpui_shared_string/LICENSE-APACHE | 1 +
.../gpui_shared_string.rs} | 0
crates/language_core/Cargo.toml | 4 +-
crates/language_core/src/diagnostic.rs | 2 +-
crates/language_core/src/grammar.rs | 2 +-
crates/language_core/src/language_config.rs | 2 +-
crates/language_core/src/language_name.rs | 2 +-
crates/language_core/src/lsp_adapter.rs | 2 +-
crates/language_core/src/manifest.rs | 2 +-
crates/language_core/src/toolchain.rs | 2 +-
crates/language_model/Cargo.toml | 9 +-
crates/language_model/src/fake_provider.rs | 3 +-
crates/language_model/src/language_model.rs | 633 +-----
.../language_model/src/model/cloud_model.rs | 73 -
crates/language_model/src/provider.rs | 12 -
.../language_model/src/provider/anthropic.rs | 80 -
crates/language_model/src/provider/google.rs | 5 -
crates/language_model/src/provider/open_ai.rs | 28 -
.../src/provider/open_router.rs | 69 -
crates/language_model/src/provider/x_ai.rs | 4 -
crates/language_model/src/provider/zed.rs | 5 -
crates/language_model/src/registry.rs | 4 +-
crates/language_model/src/request.rs | 626 +-----
crates/language_model_core/Cargo.toml | 27 +
crates/language_model_core/LICENSE-GPL | 1 +
.../src/language_model_core.rs | 658 ++++++
crates/language_model_core/src/provider.rs | 21 +
.../src/rate_limiter.rs | 0
crates/language_model_core/src/request.rs | 463 +++++
.../src/role.rs | 0
.../src/tool_schema.rs | 12 -
.../src}/util.rs | 18 +-
crates/language_models/Cargo.toml | 7 +-
crates/language_models/src/provider.rs | 2 +-
.../language_models/src/provider/anthropic.rs | 779 +-------
.../language_models/src/provider/bedrock.rs | 2 +-
crates/language_models/src/provider/cloud.rs | 1159 +----------
.../src/provider/copilot_chat.rs | 8 +-
.../language_models/src/provider/deepseek.rs | 2 +-
crates/language_models/src/provider/google.rs | 805 +-------
.../language_models/src/provider/lmstudio.rs | 2 +-
.../language_models/src/provider/mistral.rs | 2 +-
.../language_models/src/provider/open_ai.rs | 1756 +----------------
.../src/provider/open_ai_compatible.rs | 4 +-
.../src/provider/open_router.rs | 2 +-
crates/language_models/src/provider/x_ai.rs | 40 +-
crates/language_models_cloud/Cargo.toml | 33 +
crates/language_models_cloud/LICENSE-GPL | 1 +
.../src/language_models_cloud.rs | 1059 ++++++++++
crates/open_ai/Cargo.toml | 7 +-
crates/open_ai/src/completion.rs | 1693 ++++++++++++++++
crates/open_ai/src/open_ai.rs | 26 +-
crates/open_router/Cargo.toml | 1 +
crates/open_router/src/open_router.rs | 68 +
crates/project/src/prettier_store.rs | 2 +-
crates/settings_content/Cargo.toml | 1 +
crates/settings_content/src/language_model.rs | 34 +-
crates/web_search_providers/Cargo.toml | 1 +
crates/web_search_providers/src/cloud.rs | 2 +-
crates/x_ai/Cargo.toml | 2 +
crates/x_ai/src/completion.rs | 30 +
crates/x_ai/src/x_ai.rs | 2 +
95 files changed, 5895 insertions(+), 5995 deletions(-)
create mode 100644 crates/anthropic/src/completion.rs
create mode 100644 crates/cloud_api_client/src/llm_token.rs
create mode 100644 crates/google_ai/src/completion.rs
create mode 100644 crates/gpui_shared_string/Cargo.toml
create mode 120000 crates/gpui_shared_string/LICENSE-APACHE
rename crates/{gpui/src/shared_string.rs => gpui_shared_string/gpui_shared_string.rs} (100%)
delete mode 100644 crates/language_model/src/provider.rs
delete mode 100644 crates/language_model/src/provider/anthropic.rs
delete mode 100644 crates/language_model/src/provider/google.rs
delete mode 100644 crates/language_model/src/provider/open_ai.rs
delete mode 100644 crates/language_model/src/provider/open_router.rs
delete mode 100644 crates/language_model/src/provider/x_ai.rs
delete mode 100644 crates/language_model/src/provider/zed.rs
create mode 100644 crates/language_model_core/Cargo.toml
create mode 120000 crates/language_model_core/LICENSE-GPL
create mode 100644 crates/language_model_core/src/language_model_core.rs
create mode 100644 crates/language_model_core/src/provider.rs
rename crates/{language_model => language_model_core}/src/rate_limiter.rs (100%)
create mode 100644 crates/language_model_core/src/request.rs
rename crates/{language_model => language_model_core}/src/role.rs (100%)
rename crates/{language_model => language_model_core}/src/tool_schema.rs (92%)
rename crates/{language_models/src/provider => language_model_core/src}/util.rs (88%)
create mode 100644 crates/language_models_cloud/Cargo.toml
create mode 120000 crates/language_models_cloud/LICENSE-GPL
create mode 100644 crates/language_models_cloud/src/language_models_cloud.rs
create mode 100644 crates/open_ai/src/completion.rs
create mode 100644 crates/x_ai/src/completion.rs
diff --git a/Cargo.lock b/Cargo.lock
index cbc494f9dc0fc1858a846fabe168b3538de4dbe5..3fccd850ae697925330d15ed6b72804c39f4795e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -629,13 +629,17 @@ version = "0.1.0"
dependencies = [
"anyhow",
"chrono",
+ "collections",
"futures 0.3.32",
"http_client",
+ "language_model_core",
+ "log",
"schemars",
"serde",
"serde_json",
"strum 0.27.2",
"thiserror 2.0.17",
+ "tiktoken-rs",
]
[[package]]
@@ -2903,7 +2907,6 @@ dependencies = [
"http_client",
"http_client_tls",
"httparse",
- "language_model",
"log",
"objc2-foundation",
"parking_lot",
@@ -2959,6 +2962,7 @@ dependencies = [
"http_client",
"parking_lot",
"serde_json",
+ "smol",
"thiserror 2.0.17",
"yawc",
]
@@ -5162,6 +5166,7 @@ dependencies = [
"buffer_diff",
"client",
"clock",
+ "cloud_api_client",
"cloud_api_types",
"cloud_llm_client",
"collections",
@@ -5641,7 +5646,7 @@ dependencies = [
name = "env_var"
version = "0.1.0"
dependencies = [
- "gpui",
+ "gpui_shared_string",
]
[[package]]
@@ -7468,11 +7473,13 @@ dependencies = [
"anyhow",
"futures 0.3.32",
"http_client",
+ "language_model_core",
+ "log",
"schemars",
"serde",
"serde_json",
- "settings",
"strum 0.27.2",
+ "tiktoken-rs",
]
[[package]]
@@ -7541,6 +7548,7 @@ dependencies = [
"getrandom 0.3.4",
"gpui_macros",
"gpui_platform",
+ "gpui_shared_string",
"gpui_util",
"gpui_web",
"http_client",
@@ -7710,6 +7718,16 @@ dependencies = [
"gpui_windows",
]
+[[package]]
+name = "gpui_shared_string"
+version = "0.1.0"
+dependencies = [
+ "derive_more",
+ "gpui_util",
+ "schemars",
+ "serde",
+]
+
[[package]]
name = "gpui_tokio"
version = "0.1.0"
@@ -9358,7 +9376,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
- "gpui",
+ "gpui_shared_string",
"log",
"lsp",
"parking_lot",
@@ -9397,12 +9415,8 @@ dependencies = [
name = "language_model"
version = "0.1.0"
dependencies = [
- "anthropic",
"anyhow",
"base64 0.22.1",
- "cloud_api_client",
- "cloud_api_types",
- "cloud_llm_client",
"collections",
"credentials_provider",
"env_var",
@@ -9411,16 +9425,31 @@ dependencies = [
"http_client",
"icons",
"image",
+ "language_model_core",
"log",
- "open_ai",
- "open_router",
"parking_lot",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.17",
+ "util",
+]
+
+[[package]]
+name = "language_model_core"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "cloud_llm_client",
+ "futures 0.3.32",
+ "gpui_shared_string",
+ "http_client",
+ "partial-json-fixer",
"schemars",
"serde",
"serde_json",
"smol",
+ "strum 0.27.2",
"thiserror 2.0.17",
- "util",
]
[[package]]
@@ -9436,8 +9465,8 @@ dependencies = [
"base64 0.22.1",
"bedrock",
"client",
+ "cloud_api_client",
"cloud_api_types",
- "cloud_llm_client",
"collections",
"component",
"convert_case 0.8.0",
@@ -9456,6 +9485,7 @@ dependencies = [
"http_client",
"language",
"language_model",
+ "language_models_cloud",
"lmstudio",
"log",
"menu",
@@ -9464,17 +9494,14 @@ dependencies = [
"open_ai",
"open_router",
"opencode",
- "partial-json-fixer",
"pretty_assertions",
"release_channel",
"schemars",
- "semver",
"serde",
"serde_json",
"settings",
"smol",
"strum 0.27.2",
- "thiserror 2.0.17",
"tiktoken-rs",
"tokio",
"ui",
@@ -9484,6 +9511,28 @@ dependencies = [
"x_ai",
]
+[[package]]
+name = "language_models_cloud"
+version = "0.1.0"
+dependencies = [
+ "anthropic",
+ "anyhow",
+ "cloud_llm_client",
+ "futures 0.3.32",
+ "google_ai",
+ "gpui",
+ "http_client",
+ "language_model",
+ "open_ai",
+ "schemars",
+ "semver",
+ "serde",
+ "serde_json",
+ "smol",
+ "thiserror 2.0.17",
+ "x_ai",
+]
+
[[package]]
name = "language_onboarding"
version = "0.1.0"
@@ -11631,16 +11680,19 @@ name = "open_ai"
version = "0.1.0"
dependencies = [
"anyhow",
+ "collections",
"futures 0.3.32",
"http_client",
+ "language_model_core",
"log",
+ "pretty_assertions",
"rand 0.9.2",
"schemars",
"serde",
"serde_json",
- "settings",
"strum 0.27.2",
"thiserror 2.0.17",
+ "tiktoken-rs",
]
[[package]]
@@ -11672,6 +11724,7 @@ dependencies = [
"anyhow",
"futures 0.3.32",
"http_client",
+ "language_model_core",
"schemars",
"serde",
"serde_json",
@@ -15801,6 +15854,7 @@ dependencies = [
"collections",
"derive_more",
"gpui",
+ "language_model_core",
"log",
"schemars",
"serde",
@@ -20180,6 +20234,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"client",
+ "cloud_api_client",
"cloud_api_types",
"cloud_llm_client",
"futures 0.3.32",
@@ -21783,9 +21838,11 @@ name = "x_ai"
version = "0.1.0"
dependencies = [
"anyhow",
+ "language_model_core",
"schemars",
"serde",
"strum 0.27.2",
+ "tiktoken-rs",
]
[[package]]
diff --git a/Cargo.toml b/Cargo.toml
index 4c75dafae5df4d63815e0da5cabb95ccdad25e9d..5a7fc9caaf982953168855671bebbcf4f010df03 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -87,6 +87,7 @@ members = [
"crates/google_ai",
"crates/grammars",
"crates/gpui",
+ "crates/gpui_shared_string",
"crates/gpui_linux",
"crates/gpui_macos",
"crates/gpui_macros",
@@ -110,7 +111,9 @@ members = [
"crates/language_core",
"crates/language_extension",
"crates/language_model",
+ "crates/language_model_core",
"crates/language_models",
+ "crates/language_models_cloud",
"crates/language_onboarding",
"crates/language_selector",
"crates/language_tools",
@@ -335,6 +338,7 @@ go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
grammars = { path = "crates/grammars" }
gpui = { path = "crates/gpui", default-features = false }
+gpui_shared_string = { path = "crates/gpui_shared_string" }
gpui_linux = { path = "crates/gpui_linux", default-features = false }
gpui_macos = { path = "crates/gpui_macos", default-features = false }
gpui_macros = { path = "crates/gpui_macros" }
@@ -361,7 +365,9 @@ language = { path = "crates/language" }
language_core = { path = "crates/language_core" }
language_extension = { path = "crates/language_extension" }
language_model = { path = "crates/language_model" }
+language_model_core = { path = "crates/language_model_core" }
language_models = { path = "crates/language_models" }
+language_models_cloud = { path = "crates/language_models_cloud" }
language_onboarding = { path = "crates/language_onboarding" }
language_selector = { path = "crates/language_selector" }
language_tools = { path = "crates/language_tools" }
diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs
index 0086a82f4e79c9924502202873ceb2b25d2e66fb..9b013f111e7eaa981652d8868dfcf3c098d9dc7e 100644
--- a/crates/agent/src/tools/read_file_tool.rs
+++ b/crates/agent/src/tools/read_file_tool.rs
@@ -5,7 +5,7 @@ use futures::FutureExt as _;
use gpui::{App, Entity, SharedString, Task};
use indoc::formatdoc;
use language::Point;
-use language_model::{LanguageModelImage, LanguageModelToolResultContent};
+use language_model::{LanguageModelImage, LanguageModelImageExt, LanguageModelToolResultContent};
use project::{AgentLocation, ImageItem, Project, WorktreeSettings, image_store};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs
index 5f452bc9c0e2e9c2322042583295894a5866b053..e56db9df927ab3cdf838587f1cb4f9514eb5a758 100644
--- a/crates/agent_servers/src/acp.rs
+++ b/crates/agent_servers/src/acp.rs
@@ -325,7 +325,7 @@ impl AcpConnection {
// Use the one the agent provides if we have one
.map(|info| info.name.into())
// Otherwise, just use the name
- .unwrap_or_else(|| agent_id.0.to_string().into());
+ .unwrap_or_else(|| agent_id.0.clone());
let session_list = if response
.agent_capabilities
diff --git a/crates/agent_ui/src/agent_registry_ui.rs b/crates/agent_ui/src/agent_registry_ui.rs
index 78b4e3a5a3965c72b96d4ec201139b1d8e510fb2..e19afdecc390268cefbd7be4e5d0759aa2a29c19 100644
--- a/crates/agent_ui/src/agent_registry_ui.rs
+++ b/crates/agent_ui/src/agent_registry_ui.rs
@@ -382,7 +382,7 @@ impl AgentRegistryPage {
self.install_button(agent, install_status, supports_current_platform, cx);
let repository_button = agent.repository().map(|repository| {
- let repository_for_tooltip: SharedString = repository.to_string().into();
+ let repository_for_tooltip = repository.clone();
let repository_for_click = repository.to_string();
IconButton::new(
diff --git a/crates/agent_ui/src/mention_set.rs b/crates/agent_ui/src/mention_set.rs
index 1b2ec0ad2fd460b4eec5a8b757bdd3058d4a3704..880257e3f942bf71d1d51b1e661d911474aa786b 100644
--- a/crates/agent_ui/src/mention_set.rs
+++ b/crates/agent_ui/src/mention_set.rs
@@ -18,7 +18,7 @@ use gpui::{
use http_client::{AsyncBody, HttpClientWithUrl};
use itertools::Either;
use language::Buffer;
-use language_model::LanguageModelImage;
+use language_model::{LanguageModelImage, LanguageModelImageExt};
use multi_buffer::MultiBufferRow;
use postage::stream::Stream as _;
use project::{Project, ProjectItem, ProjectPath, Worktree};
diff --git a/crates/anthropic/Cargo.toml b/crates/anthropic/Cargo.toml
index 1e2587435489dea6952c697b0e0a4cf627226728..458f9bfae7da4736c4e54e42f08b5e3a926ed30a 100644
--- a/crates/anthropic/Cargo.toml
+++ b/crates/anthropic/Cargo.toml
@@ -18,12 +18,16 @@ path = "src/anthropic.rs"
[dependencies]
anyhow.workspace = true
chrono.workspace = true
+collections.workspace = true
futures.workspace = true
http_client.workspace = true
+language_model_core.workspace = true
+log.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
strum.workspace = true
thiserror.workspace = true
+tiktoken-rs.workspace = true
diff --git a/crates/anthropic/src/anthropic.rs b/crates/anthropic/src/anthropic.rs
index 5d7790b86b09853e22436252fcde1bebf5feff9b..48fa318d7c1d87e63725cef836baf9c945966206 100644
--- a/crates/anthropic/src/anthropic.rs
+++ b/crates/anthropic/src/anthropic.rs
@@ -12,6 +12,7 @@ use strum::{EnumIter, EnumString};
use thiserror::Error;
pub mod batches;
+pub mod completion;
pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";
@@ -1026,6 +1027,89 @@ pub async fn count_tokens(
}
}
+// -- Conversions from/to `language_model_core` types --
+
+impl From<language_model_core::Speed> for Speed {
+ fn from(speed: language_model_core::Speed) -> Self {
+ match speed {
+ language_model_core::Speed::Standard => Speed::Standard,
+ language_model_core::Speed::Fast => Speed::Fast,
+ }
+ }
+}
+
+impl From<AnthropicError> for language_model_core::LanguageModelCompletionError {
+ fn from(error: AnthropicError) -> Self {
+ let provider = language_model_core::ANTHROPIC_PROVIDER_NAME;
+ match error {
+ AnthropicError::SerializeRequest(error) => Self::SerializeRequest { provider, error },
+ AnthropicError::BuildRequestBody(error) => Self::BuildRequestBody { provider, error },
+ AnthropicError::HttpSend(error) => Self::HttpSend { provider, error },
+ AnthropicError::DeserializeResponse(error) => {
+ Self::DeserializeResponse { provider, error }
+ }
+ AnthropicError::ReadResponse(error) => Self::ApiReadResponseError { provider, error },
+ AnthropicError::HttpResponseError {
+ status_code,
+ message,
+ } => Self::HttpResponseError {
+ provider,
+ status_code,
+ message,
+ },
+ AnthropicError::RateLimit { retry_after } => Self::RateLimitExceeded {
+ provider,
+ retry_after: Some(retry_after),
+ },
+ AnthropicError::ServerOverloaded { retry_after } => Self::ServerOverloaded {
+ provider,
+ retry_after,
+ },
+ AnthropicError::ApiError(api_error) => api_error.into(),
+ }
+ }
+}
+
+impl From<ApiError> for language_model_core::LanguageModelCompletionError {
+ fn from(error: ApiError) -> Self {
+ use ApiErrorCode::*;
+ let provider = language_model_core::ANTHROPIC_PROVIDER_NAME;
+ match error.code() {
+ Some(code) => match code {
+ InvalidRequestError => Self::BadRequestFormat {
+ provider,
+ message: error.message,
+ },
+ AuthenticationError => Self::AuthenticationError {
+ provider,
+ message: error.message,
+ },
+ PermissionError => Self::PermissionError {
+ provider,
+ message: error.message,
+ },
+ NotFoundError => Self::ApiEndpointNotFound { provider },
+ RequestTooLarge => Self::PromptTooLarge {
+ tokens: language_model_core::parse_prompt_too_long(&error.message),
+ },
+ RateLimitError => Self::RateLimitExceeded {
+ provider,
+ retry_after: None,
+ },
+ ApiError => Self::ApiInternalServerError {
+ provider,
+ message: error.message,
+ },
+ OverloadedError => Self::ServerOverloaded {
+ provider,
+ retry_after: None,
+ },
+ },
+ None => Self::Other(error.into()),
+ }
+ }
+}
+
#[test]
fn test_match_window_exceeded() {
let error = ApiError {
diff --git a/crates/anthropic/src/completion.rs b/crates/anthropic/src/completion.rs
new file mode 100644
index 0000000000000000000000000000000000000000..a6175a4f7c24b3b724734b2edef48ef8acfaa159
--- /dev/null
+++ b/crates/anthropic/src/completion.rs
@@ -0,0 +1,765 @@
+use anyhow::Result;
+use collections::HashMap;
+use futures::{Stream, StreamExt};
+use language_model_core::{
+ LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelRequest,
+ LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent,
+ Role, StopReason, TokenUsage,
+ util::{fix_streamed_json, parse_tool_arguments},
+};
+use std::pin::Pin;
+use std::str::FromStr;
+
+use crate::{
+ AnthropicError, AnthropicModelMode, CacheControl, CacheControlType, ContentDelta,
+ CountTokensRequest, Event, ImageSource, Message, RequestContent, ResponseContent,
+ StringOrContents, Thinking, Tool, ToolChoice, ToolResultContent, ToolResultPart, Usage,
+};
+
+fn to_anthropic_content(content: MessageContent) -> Option<RequestContent> {
+ match content {
+ MessageContent::Text(text) => {
+ let text = if text.chars().last().is_some_and(|c| c.is_whitespace()) {
+ text.trim_end().to_string()
+ } else {
+ text
+ };
+ if !text.is_empty() {
+ Some(RequestContent::Text {
+ text,
+ cache_control: None,
+ })
+ } else {
+ None
+ }
+ }
+ MessageContent::Thinking {
+ text: thinking,
+ signature,
+ } => {
+ if let Some(signature) = signature
+ && !thinking.is_empty()
+ {
+ Some(RequestContent::Thinking {
+ thinking,
+ signature,
+ cache_control: None,
+ })
+ } else {
+ None
+ }
+ }
+ MessageContent::RedactedThinking(data) => {
+ if !data.is_empty() {
+ Some(RequestContent::RedactedThinking { data })
+ } else {
+ None
+ }
+ }
+ MessageContent::Image(image) => Some(RequestContent::Image {
+ source: ImageSource {
+ source_type: "base64".to_string(),
+ media_type: "image/png".to_string(),
+ data: image.source.to_string(),
+ },
+ cache_control: None,
+ }),
+ MessageContent::ToolUse(tool_use) => Some(RequestContent::ToolUse {
+ id: tool_use.id.to_string(),
+ name: tool_use.name.to_string(),
+ input: tool_use.input,
+ cache_control: None,
+ }),
+ MessageContent::ToolResult(tool_result) => Some(RequestContent::ToolResult {
+ tool_use_id: tool_result.tool_use_id.to_string(),
+ is_error: tool_result.is_error,
+ content: match tool_result.content {
+ LanguageModelToolResultContent::Text(text) => {
+ ToolResultContent::Plain(text.to_string())
+ }
+ LanguageModelToolResultContent::Image(image) => {
+ ToolResultContent::Multipart(vec![ToolResultPart::Image {
+ source: ImageSource {
+ source_type: "base64".to_string(),
+ media_type: "image/png".to_string(),
+ data: image.source.to_string(),
+ },
+ }])
+ }
+ },
+ cache_control: None,
+ }),
+ }
+}
+
+/// Convert a LanguageModelRequest to an Anthropic CountTokensRequest.
+pub fn into_anthropic_count_tokens_request(
+ request: LanguageModelRequest,
+ model: String,
+ mode: AnthropicModelMode,
+) -> CountTokensRequest {
+    let mut new_messages: Vec<Message> = Vec::new();
+ let mut system_message = String::new();
+
+ for message in request.messages {
+ if message.contents_empty() {
+ continue;
+ }
+
+ match message.role {
+ Role::User | Role::Assistant => {
+                let anthropic_message_content: Vec<RequestContent> = message
+ .content
+ .into_iter()
+ .filter_map(to_anthropic_content)
+ .collect();
+ let anthropic_role = match message.role {
+ Role::User => crate::Role::User,
+ Role::Assistant => crate::Role::Assistant,
+ Role::System => unreachable!("System role should never occur here"),
+ };
+ if anthropic_message_content.is_empty() {
+ continue;
+ }
+
+ if let Some(last_message) = new_messages.last_mut()
+ && last_message.role == anthropic_role
+ {
+ last_message.content.extend(anthropic_message_content);
+ continue;
+ }
+
+ new_messages.push(Message {
+ role: anthropic_role,
+ content: anthropic_message_content,
+ });
+ }
+ Role::System => {
+ if !system_message.is_empty() {
+ system_message.push_str("\n\n");
+ }
+ system_message.push_str(&message.string_contents());
+ }
+ }
+ }
+
+ CountTokensRequest {
+ model,
+ messages: new_messages,
+ system: if system_message.is_empty() {
+ None
+ } else {
+ Some(StringOrContents::String(system_message))
+ },
+ thinking: if request.thinking_allowed {
+ match mode {
+ AnthropicModelMode::Thinking { budget_tokens } => {
+ Some(Thinking::Enabled { budget_tokens })
+ }
+ AnthropicModelMode::AdaptiveThinking => Some(Thinking::Adaptive),
+ AnthropicModelMode::Default => None,
+ }
+ } else {
+ None
+ },
+ tools: request
+ .tools
+ .into_iter()
+ .map(|tool| Tool {
+ name: tool.name,
+ description: tool.description,
+ input_schema: tool.input_schema,
+ eager_input_streaming: tool.use_input_streaming,
+ })
+ .collect(),
+ tool_choice: request.tool_choice.map(|choice| match choice {
+ LanguageModelToolChoice::Auto => ToolChoice::Auto,
+ LanguageModelToolChoice::Any => ToolChoice::Any,
+ LanguageModelToolChoice::None => ToolChoice::None,
+ }),
+ }
+}
+
+/// Estimate tokens using tiktoken. Used as a fallback when the API is unavailable,
+/// or by providers (like Zed Cloud) that don't have direct Anthropic API access.
+pub fn count_anthropic_tokens_with_tiktoken(request: LanguageModelRequest) -> Result<u64> {
+ let messages = request.messages;
+ let mut tokens_from_images = 0;
+ let mut string_messages = Vec::with_capacity(messages.len());
+
+ for message in messages {
+ let mut string_contents = String::new();
+
+ for content in message.content {
+ match content {
+ MessageContent::Text(text) => {
+ string_contents.push_str(&text);
+ }
+ MessageContent::Thinking { .. } => {
+ // Thinking blocks are not included in the input token count.
+ }
+ MessageContent::RedactedThinking(_) => {
+ // Thinking blocks are not included in the input token count.
+ }
+ MessageContent::Image(image) => {
+ tokens_from_images += image.estimate_tokens();
+ }
+ MessageContent::ToolUse(_tool_use) => {
+ // TODO: Estimate token usage from tool uses.
+ }
+ MessageContent::ToolResult(tool_result) => match &tool_result.content {
+ LanguageModelToolResultContent::Text(text) => {
+ string_contents.push_str(text);
+ }
+ LanguageModelToolResultContent::Image(image) => {
+ tokens_from_images += image.estimate_tokens();
+ }
+ },
+ }
+ }
+
+ if !string_contents.is_empty() {
+ string_messages.push(tiktoken_rs::ChatCompletionRequestMessage {
+ role: match message.role {
+ Role::User => "user".into(),
+ Role::Assistant => "assistant".into(),
+ Role::System => "system".into(),
+ },
+ content: Some(string_contents),
+ name: None,
+ function_call: None,
+ });
+ }
+ }
+
+ // Tiktoken doesn't yet support these models, so we manually use the
+ // same tokenizer as GPT-4.
+ tiktoken_rs::num_tokens_from_messages("gpt-4", &string_messages)
+ .map(|tokens| (tokens + tokens_from_images) as u64)
+}
+
+pub fn into_anthropic(
+ request: LanguageModelRequest,
+ model: String,
+ default_temperature: f32,
+ max_output_tokens: u64,
+ mode: AnthropicModelMode,
+) -> crate::Request {
+    let mut new_messages: Vec<Message> = Vec::new();
+ let mut system_message = String::new();
+
+ for message in request.messages {
+ if message.contents_empty() {
+ continue;
+ }
+
+ match message.role {
+ Role::User | Role::Assistant => {
+                let mut anthropic_message_content: Vec<RequestContent> = message
+ .content
+ .into_iter()
+ .filter_map(to_anthropic_content)
+ .collect();
+ let anthropic_role = match message.role {
+ Role::User => crate::Role::User,
+ Role::Assistant => crate::Role::Assistant,
+ Role::System => unreachable!("System role should never occur here"),
+ };
+ if anthropic_message_content.is_empty() {
+ continue;
+ }
+
+ if let Some(last_message) = new_messages.last_mut()
+ && last_message.role == anthropic_role
+ {
+ last_message.content.extend(anthropic_message_content);
+ continue;
+ }
+
+ // Mark the last segment of the message as cached
+ if message.cache {
+ let cache_control_value = Some(CacheControl {
+ cache_type: CacheControlType::Ephemeral,
+ });
+ for message_content in anthropic_message_content.iter_mut().rev() {
+ match message_content {
+ RequestContent::RedactedThinking { .. } => {
+ // Caching is not possible, fallback to next message
+ }
+ RequestContent::Text { cache_control, .. }
+ | RequestContent::Thinking { cache_control, .. }
+ | RequestContent::Image { cache_control, .. }
+ | RequestContent::ToolUse { cache_control, .. }
+ | RequestContent::ToolResult { cache_control, .. } => {
+ *cache_control = cache_control_value;
+ break;
+ }
+ }
+ }
+ }
+
+ new_messages.push(Message {
+ role: anthropic_role,
+ content: anthropic_message_content,
+ });
+ }
+ Role::System => {
+ if !system_message.is_empty() {
+ system_message.push_str("\n\n");
+ }
+ system_message.push_str(&message.string_contents());
+ }
+ }
+ }
+
+ crate::Request {
+ model,
+ messages: new_messages,
+ max_tokens: max_output_tokens,
+ system: if system_message.is_empty() {
+ None
+ } else {
+ Some(StringOrContents::String(system_message))
+ },
+ thinking: if request.thinking_allowed {
+ match mode {
+ AnthropicModelMode::Thinking { budget_tokens } => {
+ Some(Thinking::Enabled { budget_tokens })
+ }
+ AnthropicModelMode::AdaptiveThinking => Some(Thinking::Adaptive),
+ AnthropicModelMode::Default => None,
+ }
+ } else {
+ None
+ },
+ tools: request
+ .tools
+ .into_iter()
+ .map(|tool| Tool {
+ name: tool.name,
+ description: tool.description,
+ input_schema: tool.input_schema,
+ eager_input_streaming: tool.use_input_streaming,
+ })
+ .collect(),
+ tool_choice: request.tool_choice.map(|choice| match choice {
+ LanguageModelToolChoice::Auto => ToolChoice::Auto,
+ LanguageModelToolChoice::Any => ToolChoice::Any,
+ LanguageModelToolChoice::None => ToolChoice::None,
+ }),
+ metadata: None,
+ output_config: if request.thinking_allowed
+ && matches!(mode, AnthropicModelMode::AdaptiveThinking)
+ {
+ request.thinking_effort.as_deref().and_then(|effort| {
+ let effort = match effort {
+ "low" => Some(crate::Effort::Low),
+ "medium" => Some(crate::Effort::Medium),
+ "high" => Some(crate::Effort::High),
+ "max" => Some(crate::Effort::Max),
+ _ => None,
+ };
+ effort.map(|effort| crate::OutputConfig {
+ effort: Some(effort),
+ })
+ })
+ } else {
+ None
+ },
+ stop_sequences: Vec::new(),
+ speed: request.speed.map(Into::into),
+ temperature: request.temperature.or(Some(default_temperature)),
+ top_k: None,
+ top_p: None,
+ }
+}
+
+pub struct AnthropicEventMapper {
+    tool_uses_by_index: HashMap<usize, RawToolUse>,
+ usage: Usage,
+ stop_reason: StopReason,
+}
+
+impl AnthropicEventMapper {
+ pub fn new() -> Self {
+ Self {
+ tool_uses_by_index: HashMap::default(),
+ usage: Usage::default(),
+ stop_reason: StopReason::EndTurn,
+ }
+ }
+
+ pub fn map_stream(
+ mut self,
+        events: Pin<Box<dyn Send + Stream<Item = Result<Event, AnthropicError>>>>,
+    ) -> impl Stream<
+        Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>,
+    > {
+ events.flat_map(move |event| {
+ futures::stream::iter(match event {
+ Ok(event) => self.map_event(event),
+ Err(error) => vec![Err(error.into())],
+ })
+ })
+ }
+
+ pub fn map_event(
+ &mut self,
+ event: Event,
+    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
+ match event {
+ Event::ContentBlockStart {
+ index,
+ content_block,
+ } => match content_block {
+ ResponseContent::Text { text } => {
+ vec![Ok(LanguageModelCompletionEvent::Text(text))]
+ }
+ ResponseContent::Thinking { thinking } => {
+ vec![Ok(LanguageModelCompletionEvent::Thinking {
+ text: thinking,
+ signature: None,
+ })]
+ }
+ ResponseContent::RedactedThinking { data } => {
+ vec![Ok(LanguageModelCompletionEvent::RedactedThinking { data })]
+ }
+ ResponseContent::ToolUse { id, name, .. } => {
+ self.tool_uses_by_index.insert(
+ index,
+ RawToolUse {
+ id,
+ name,
+ input_json: String::new(),
+ },
+ );
+ Vec::new()
+ }
+ },
+ Event::ContentBlockDelta { index, delta } => match delta {
+ ContentDelta::TextDelta { text } => {
+ vec![Ok(LanguageModelCompletionEvent::Text(text))]
+ }
+ ContentDelta::ThinkingDelta { thinking } => {
+ vec![Ok(LanguageModelCompletionEvent::Thinking {
+ text: thinking,
+ signature: None,
+ })]
+ }
+ ContentDelta::SignatureDelta { signature } => {
+ vec![Ok(LanguageModelCompletionEvent::Thinking {
+ text: "".to_string(),
+ signature: Some(signature),
+ })]
+ }
+ ContentDelta::InputJsonDelta { partial_json } => {
+ if let Some(tool_use) = self.tool_uses_by_index.get_mut(&index) {
+ tool_use.input_json.push_str(&partial_json);
+
+ // Try to convert invalid (incomplete) JSON into
+ // valid JSON that serde can accept, e.g. by closing
+ // unclosed delimiters. This way, we can update the
+ // UI with whatever has been streamed back so far.
+ if let Ok(input) =
+ serde_json::Value::from_str(&fix_streamed_json(&tool_use.input_json))
+ {
+ return vec![Ok(LanguageModelCompletionEvent::ToolUse(
+ LanguageModelToolUse {
+ id: tool_use.id.clone().into(),
+ name: tool_use.name.clone().into(),
+ is_input_complete: false,
+ raw_input: tool_use.input_json.clone(),
+ input,
+ thought_signature: None,
+ },
+ ))];
+ }
+ }
+ vec![]
+ }
+ },
+ Event::ContentBlockStop { index } => {
+ if let Some(tool_use) = self.tool_uses_by_index.remove(&index) {
+ let input_json = tool_use.input_json.trim();
+ let event_result = match parse_tool_arguments(input_json) {
+ Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
+ LanguageModelToolUse {
+ id: tool_use.id.into(),
+ name: tool_use.name.into(),
+ is_input_complete: true,
+ input,
+ raw_input: tool_use.input_json.clone(),
+ thought_signature: None,
+ },
+ )),
+ Err(json_parse_err) => {
+ Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
+ id: tool_use.id.into(),
+ tool_name: tool_use.name.into(),
+ raw_input: input_json.into(),
+ json_parse_error: json_parse_err.to_string(),
+ })
+ }
+ };
+
+ vec![event_result]
+ } else {
+ Vec::new()
+ }
+ }
+ Event::MessageStart { message } => {
+ update_usage(&mut self.usage, &message.usage);
+ vec![
+ Ok(LanguageModelCompletionEvent::UsageUpdate(convert_usage(
+ &self.usage,
+ ))),
+ Ok(LanguageModelCompletionEvent::StartMessage {
+ message_id: message.id,
+ }),
+ ]
+ }
+ Event::MessageDelta { delta, usage } => {
+ update_usage(&mut self.usage, &usage);
+ if let Some(stop_reason) = delta.stop_reason.as_deref() {
+ self.stop_reason = match stop_reason {
+ "end_turn" => StopReason::EndTurn,
+ "max_tokens" => StopReason::MaxTokens,
+ "tool_use" => StopReason::ToolUse,
+ "refusal" => StopReason::Refusal,
+ _ => {
+ log::error!("Unexpected anthropic stop_reason: {stop_reason}");
+ StopReason::EndTurn
+ }
+ };
+ }
+ vec![Ok(LanguageModelCompletionEvent::UsageUpdate(
+ convert_usage(&self.usage),
+ ))]
+ }
+ Event::MessageStop => {
+ vec![Ok(LanguageModelCompletionEvent::Stop(self.stop_reason))]
+ }
+ Event::Error { error } => {
+ vec![Err(error.into())]
+ }
+ _ => Vec::new(),
+ }
+ }
+}
+
+struct RawToolUse {
+ id: String,
+ name: String,
+ input_json: String,
+}
+
+/// Updates usage data by preferring counts from `new`.
+fn update_usage(usage: &mut Usage, new: &Usage) {
+ if let Some(input_tokens) = new.input_tokens {
+ usage.input_tokens = Some(input_tokens);
+ }
+ if let Some(output_tokens) = new.output_tokens {
+ usage.output_tokens = Some(output_tokens);
+ }
+ if let Some(cache_creation_input_tokens) = new.cache_creation_input_tokens {
+ usage.cache_creation_input_tokens = Some(cache_creation_input_tokens);
+ }
+ if let Some(cache_read_input_tokens) = new.cache_read_input_tokens {
+ usage.cache_read_input_tokens = Some(cache_read_input_tokens);
+ }
+}
+
+fn convert_usage(usage: &Usage) -> TokenUsage {
+ TokenUsage {
+ input_tokens: usage.input_tokens.unwrap_or(0),
+ output_tokens: usage.output_tokens.unwrap_or(0),
+ cache_creation_input_tokens: usage.cache_creation_input_tokens.unwrap_or(0),
+ cache_read_input_tokens: usage.cache_read_input_tokens.unwrap_or(0),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::AnthropicModelMode;
+ use language_model_core::{LanguageModelImage, LanguageModelRequestMessage, MessageContent};
+
+ #[test]
+ fn test_cache_control_only_on_last_segment() {
+ let request = LanguageModelRequest {
+ messages: vec![LanguageModelRequestMessage {
+ role: Role::User,
+ content: vec![
+ MessageContent::Text("Some prompt".to_string()),
+ MessageContent::Image(LanguageModelImage::empty()),
+ MessageContent::Image(LanguageModelImage::empty()),
+ MessageContent::Image(LanguageModelImage::empty()),
+ MessageContent::Image(LanguageModelImage::empty()),
+ ],
+ cache: true,
+ reasoning_details: None,
+ }],
+ thread_id: None,
+ prompt_id: None,
+ intent: None,
+ stop: vec![],
+ temperature: None,
+ tools: vec![],
+ tool_choice: None,
+ thinking_allowed: true,
+ thinking_effort: None,
+ speed: None,
+ };
+
+ let anthropic_request = into_anthropic(
+ request,
+ "claude-3-5-sonnet".to_string(),
+ 0.7,
+ 4096,
+ AnthropicModelMode::Default,
+ );
+
+ assert_eq!(anthropic_request.messages.len(), 1);
+
+ let message = &anthropic_request.messages[0];
+ assert_eq!(message.content.len(), 5);
+
+ assert!(matches!(
+ message.content[0],
+ RequestContent::Text {
+ cache_control: None,
+ ..
+ }
+ ));
+ for i in 1..3 {
+ assert!(matches!(
+ message.content[i],
+ RequestContent::Image {
+ cache_control: None,
+ ..
+ }
+ ));
+ }
+
+ assert!(matches!(
+ message.content[4],
+ RequestContent::Image {
+ cache_control: Some(CacheControl {
+ cache_type: CacheControlType::Ephemeral,
+ }),
+ ..
+ }
+ ));
+ }
+
+    fn request_with_assistant_content(assistant_content: Vec<MessageContent>) -> crate::Request {
+ let mut request = LanguageModelRequest {
+ messages: vec![LanguageModelRequestMessage {
+ role: Role::User,
+ content: vec![MessageContent::Text("Hello".to_string())],
+ cache: false,
+ reasoning_details: None,
+ }],
+ thinking_effort: None,
+ thread_id: None,
+ prompt_id: None,
+ intent: None,
+ stop: vec![],
+ temperature: None,
+ tools: vec![],
+ tool_choice: None,
+ thinking_allowed: true,
+ speed: None,
+ };
+ request.messages.push(LanguageModelRequestMessage {
+ role: Role::Assistant,
+ content: assistant_content,
+ cache: false,
+ reasoning_details: None,
+ });
+ into_anthropic(
+ request,
+ "claude-sonnet-4-5".to_string(),
+ 1.0,
+ 16000,
+ AnthropicModelMode::Thinking {
+ budget_tokens: Some(10000),
+ },
+ )
+ }
+
+ #[test]
+ fn test_unsigned_thinking_blocks_stripped() {
+ let result = request_with_assistant_content(vec![
+ MessageContent::Thinking {
+ text: "Cancelled mid-think, no signature".to_string(),
+ signature: None,
+ },
+ MessageContent::Text("Some response text".to_string()),
+ ]);
+
+ let assistant_message = result
+ .messages
+ .iter()
+ .find(|m| m.role == crate::Role::Assistant)
+ .expect("assistant message should still exist");
+
+ assert_eq!(
+ assistant_message.content.len(),
+ 1,
+ "Only the text content should remain; unsigned thinking block should be stripped"
+ );
+ assert!(matches!(
+ &assistant_message.content[0],
+ RequestContent::Text { text, .. } if text == "Some response text"
+ ));
+ }
+
+ #[test]
+ fn test_signed_thinking_blocks_preserved() {
+ let result = request_with_assistant_content(vec![
+ MessageContent::Thinking {
+ text: "Completed thinking".to_string(),
+ signature: Some("valid-signature".to_string()),
+ },
+ MessageContent::Text("Response".to_string()),
+ ]);
+
+ let assistant_message = result
+ .messages
+ .iter()
+ .find(|m| m.role == crate::Role::Assistant)
+ .expect("assistant message should exist");
+
+ assert_eq!(
+ assistant_message.content.len(),
+ 2,
+ "Both the signed thinking block and text should be preserved"
+ );
+ assert!(matches!(
+ &assistant_message.content[0],
+ RequestContent::Thinking { thinking, signature, .. }
+ if thinking == "Completed thinking" && signature == "valid-signature"
+ ));
+ }
+
+ #[test]
+ fn test_only_unsigned_thinking_block_omits_entire_message() {
+ let result = request_with_assistant_content(vec![MessageContent::Thinking {
+ text: "Cancelled before any text or signature".to_string(),
+ signature: None,
+ }]);
+
+ let assistant_messages: Vec<_> = result
+ .messages
+ .iter()
+ .filter(|m| m.role == crate::Role::Assistant)
+ .collect();
+
+ assert_eq!(
+ assistant_messages.len(),
+ 0,
+ "An assistant message whose only content was an unsigned thinking block \
+ should be omitted entirely"
+ );
+ }
+}
diff --git a/crates/client/Cargo.toml b/crates/client/Cargo.toml
index 7bbaccb22e0e6c7508240186103e216f83be2f0c..532fe38f7df1f686730ed862a81806e9a531e156 100644
--- a/crates/client/Cargo.toml
+++ b/crates/client/Cargo.toml
@@ -36,7 +36,6 @@ gpui_tokio.workspace = true
http_client.workspace = true
http_client_tls.workspace = true
httparse = "1.10"
-language_model.workspace = true
log.workspace = true
parking_lot.workspace = true
paths.workspace = true
diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs
index dfd9963a0ee52d167f8d4edb0b850f4debed7fd4..05ca974f80438542b232262dd375e0e38ab4327c 100644
--- a/crates/client/src/client.rs
+++ b/crates/client/src/client.rs
@@ -14,6 +14,7 @@ use async_tungstenite::tungstenite::{
http::{HeaderValue, Request, StatusCode},
};
use clock::SystemClock;
+use cloud_api_client::LlmApiToken;
use cloud_api_client::websocket_protocol::MessageToClient;
use cloud_api_client::{ClientApiError, CloudApiClient};
use cloud_api_types::OrganizationId;
@@ -26,7 +27,6 @@ use futures::{
};
use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions};
use http_client::{HttpClient, HttpClientWithUrl, http, read_proxy_from_env};
-use language_model::LlmApiToken;
use parking_lot::{Mutex, RwLock};
use postage::watch;
use proxy::connect_proxy_stream;
diff --git a/crates/client/src/llm_token.rs b/crates/client/src/llm_token.rs
index f62aa6dd4dc3462bc3a0f6f46c35f0e4e5499816..70457679e4b965e3251ae4861d3052bfa41fd65a 100644
--- a/crates/client/src/llm_token.rs
+++ b/crates/client/src/llm_token.rs
@@ -1,10 +1,10 @@
use super::{Client, UserStore};
+use cloud_api_client::LlmApiToken;
use cloud_api_types::websocket_protocol::MessageToClient;
use cloud_llm_client::{EXPIRED_LLM_TOKEN_HEADER_NAME, OUTDATED_LLM_TOKEN_HEADER_NAME};
use gpui::{
App, AppContext as _, Context, Entity, EventEmitter, Global, ReadGlobal as _, Subscription,
};
-use language_model::LlmApiToken;
use std::sync::Arc;
pub trait NeedsLlmTokenRefresh {
diff --git a/crates/cloud_api_client/Cargo.toml b/crates/cloud_api_client/Cargo.toml
index 78c684e3e54ee29a5f3f3ae5620d4a52b445f92e..cf293d83f848e1266dec977c0925af7f66608ce6 100644
--- a/crates/cloud_api_client/Cargo.toml
+++ b/crates/cloud_api_client/Cargo.toml
@@ -20,5 +20,6 @@ gpui_tokio.workspace = true
http_client.workspace = true
parking_lot.workspace = true
serde_json.workspace = true
+smol.workspace = true
thiserror.workspace = true
yawc.workspace = true
diff --git a/crates/cloud_api_client/src/cloud_api_client.rs b/crates/cloud_api_client/src/cloud_api_client.rs
index 13d67838b216f4990f15ec22c1701aa7aef9dbf2..8c605bb3490ef5c7aea6e96045680338e8344a83 100644
--- a/crates/cloud_api_client/src/cloud_api_client.rs
+++ b/crates/cloud_api_client/src/cloud_api_client.rs
@@ -1,3 +1,4 @@
+mod llm_token;
mod websocket;
use std::sync::Arc;
@@ -18,6 +19,8 @@ use yawc::WebSocket;
use crate::websocket::Connection;
+pub use llm_token::LlmApiToken;
+
struct Credentials {
user_id: u32,
access_token: String,
diff --git a/crates/cloud_api_client/src/llm_token.rs b/crates/cloud_api_client/src/llm_token.rs
new file mode 100644
index 0000000000000000000000000000000000000000..711e0d51b89bf34db255d7cb1e58483c9de340fc
--- /dev/null
+++ b/crates/cloud_api_client/src/llm_token.rs
@@ -0,0 +1,74 @@
+use std::sync::Arc;
+
+use cloud_api_types::OrganizationId;
+use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
+
+use crate::{ClientApiError, CloudApiClient};
+
+#[derive(Clone, Default)]
+pub struct LlmApiToken(Arc<RwLock<Option<String>>>);
+
+impl LlmApiToken {
+ pub async fn acquire(
+ &self,
+ client: &CloudApiClient,
+ system_id: Option<String>,
+ organization_id: Option<OrganizationId>,
+ ) -> Result<String, ClientApiError> {
+ let lock = self.0.upgradable_read().await;
+ if let Some(token) = lock.as_ref() {
+ Ok(token.to_string())
+ } else {
+ Self::fetch(
+ RwLockUpgradableReadGuard::upgrade(lock).await,
+ client,
+ system_id,
+ organization_id,
+ )
+ .await
+ }
+ }
+
+ pub async fn refresh(
+ &self,
+ client: &CloudApiClient,
+ system_id: Option<String>,
+ organization_id: Option<OrganizationId>,
+ ) -> Result<String, ClientApiError> {
+ Self::fetch(self.0.write().await, client, system_id, organization_id).await
+ }
+
+ /// Clears the existing token before attempting to fetch a new one.
+ ///
+ /// Used when switching organizations so that a failed refresh doesn't
+ /// leave a token for the wrong organization.
+ pub async fn clear_and_refresh(
+ &self,
+ client: &CloudApiClient,
+ system_id: Option<String>,
+ organization_id: Option<OrganizationId>,
+ ) -> Result<String, ClientApiError> {
+ let mut lock = self.0.write().await;
+ *lock = None;
+ Self::fetch(lock, client, system_id, organization_id).await
+ }
+
+ async fn fetch(
+ mut lock: RwLockWriteGuard<'_, Option<String>>,
+ client: &CloudApiClient,
+ system_id: Option<String>,
+ organization_id: Option<OrganizationId>,
+ ) -> Result<String, ClientApiError> {
+ let result = client.create_llm_token(system_id, organization_id).await;
+ match result {
+ Ok(response) => {
+ *lock = Some(response.token.0.clone());
+ Ok(response.token.0)
+ }
+ Err(err) => {
+ *lock = None;
+ Err(err)
+ }
+ }
+ }
+}
diff --git a/crates/cloud_llm_client/Cargo.toml b/crates/cloud_llm_client/Cargo.toml
index a7b4f925a9302296e8fe25a14177a583e5f44b33..7cc59f255abeb27c6e35a2064654d8eca1a581fe 100644
--- a/crates/cloud_llm_client/Cargo.toml
+++ b/crates/cloud_llm_client/Cargo.toml
@@ -7,6 +7,7 @@ license = "Apache-2.0"
[features]
test-support = []
+predict-edits = ["dep:zeta_prompt"]
[lints]
workspace = true
@@ -20,6 +21,6 @@ serde = { workspace = true, features = ["derive", "rc"] }
serde_json.workspace = true
strum = { workspace = true, features = ["derive"] }
uuid = { workspace = true, features = ["serde"] }
-zeta_prompt.workspace = true
+zeta_prompt = { workspace = true, optional = true }
diff --git a/crates/cloud_llm_client/src/cloud_llm_client.rs b/crates/cloud_llm_client/src/cloud_llm_client.rs
index 35eb3f2b80dd400558b1f027781f5b8cf63bb6cb..ac8bdd462a9c4754ef42a6afa41f1bef8b5bbe6a 100644
--- a/crates/cloud_llm_client/src/cloud_llm_client.rs
+++ b/crates/cloud_llm_client/src/cloud_llm_client.rs
@@ -1,3 +1,4 @@
+#[cfg(feature = "predict-edits")]
pub mod predict_edits_v3;
use std::str::FromStr;
diff --git a/crates/collab_ui/src/collab_panel.rs b/crates/collab_ui/src/collab_panel.rs
index 7dc807998760a8e65d373164eec5c7663171e5d0..327ef1cf6003eb959bd0926d67d2b0ed3b4ab0ba 100644
--- a/crates/collab_ui/src/collab_panel.rs
+++ b/crates/collab_ui/src/collab_panel.rs
@@ -2846,11 +2846,11 @@ impl CollabPanel {
}
};
- Some(channel.name.as_ref())
+ Some(channel.name.clone())
});
if let Some(name) = channel_name {
- SharedString::from(name.to_string())
+ name
} else {
SharedString::from("Current Call")
}
diff --git a/crates/edit_prediction/Cargo.toml b/crates/edit_prediction/Cargo.toml
index eabb1641fd4fbec7b2f8ef0ba399a8fe9600dfa3..87ad4e42e7826cdda4fc6a8c31a27afe888830f0 100644
--- a/crates/edit_prediction/Cargo.toml
+++ b/crates/edit_prediction/Cargo.toml
@@ -21,8 +21,9 @@ heapless.workspace = true
buffer_diff.workspace = true
client.workspace = true
clock.workspace = true
+cloud_api_client.workspace = true
cloud_api_types.workspace = true
-cloud_llm_client.workspace = true
+cloud_llm_client = { workspace = true, features = ["predict-edits"] }
collections.workspace = true
copilot.workspace = true
copilot_ui.workspace = true
diff --git a/crates/edit_prediction/src/edit_prediction.rs b/crates/edit_prediction/src/edit_prediction.rs
index 280427df006b510e1854ffb40cd7f995fcd9fdc6..2d90e13fb9b45aedd354f753502cd4e616ae3bcd 100644
--- a/crates/edit_prediction/src/edit_prediction.rs
+++ b/crates/edit_prediction/src/edit_prediction.rs
@@ -1,5 +1,6 @@
use anyhow::Result;
use client::{Client, EditPredictionUsage, NeedsLlmTokenRefresh, UserStore, global_llm_token};
+use cloud_api_client::LlmApiToken;
use cloud_api_types::{OrganizationId, SubmitEditPredictionFeedbackBody};
use cloud_llm_client::predict_edits_v3::{
PredictEditsV3Request, PredictEditsV3Response, RawCompletionRequest, RawCompletionResponse,
@@ -31,7 +32,6 @@ use heapless::Vec as ArrayVec;
use language::language_settings::all_language_settings;
use language::{Anchor, Buffer, File, Point, TextBufferSnapshot, ToOffset, ToPoint};
use language::{BufferSnapshot, OffsetRangeExt};
-use language_model::LlmApiToken;
use project::{DisableAiSettings, Project, ProjectPath, WorktreeId};
use release_channel::AppVersion;
use semver::Version;
diff --git a/crates/edit_prediction/src/ollama.rs b/crates/edit_prediction/src/ollama.rs
index 0250ec44a46cf081c6badc6fa11a9c34ebb65c4a..0ae90dd9f6eca4bfe9f87950a5a66916d8894df4 100644
--- a/crates/edit_prediction/src/ollama.rs
+++ b/crates/edit_prediction/src/ollama.rs
@@ -57,7 +57,7 @@ pub fn fetch_models(cx: &mut App) -> Vec<SharedString> {
let mut models: Vec<SharedString> = provider
.provided_models(cx)
.into_iter()
- .map(|model| SharedString::from(model.id().0.to_string()))
+ .map(|model| model.id().0)
.collect();
models.sort();
models
diff --git a/crates/edit_prediction/src/zed_edit_prediction_delegate.rs b/crates/edit_prediction/src/zed_edit_prediction_delegate.rs
index c5e97fd87eaad9b98aeb9b946a9a69b1c1071db2..1a574e9389715ce888f8b8c5ec8be921ceab4a38 100644
--- a/crates/edit_prediction/src/zed_edit_prediction_delegate.rs
+++ b/crates/edit_prediction/src/zed_edit_prediction_delegate.rs
@@ -177,7 +177,7 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
BufferEditPrediction::Local { prediction } => prediction,
BufferEditPrediction::Jump { prediction } => {
return Some(edit_prediction_types::EditPrediction::Jump {
- id: Some(prediction.id.to_string().into()),
+ id: Some(prediction.id.0.clone()),
snapshot: prediction.snapshot.clone(),
target: prediction.edits.first().unwrap().0.start,
});
@@ -228,7 +228,7 @@ impl EditPredictionDelegate for ZedEditPredictionDelegate {
}
Some(edit_prediction_types::EditPrediction::Local {
- id: Some(prediction.id.to_string().into()),
+ id: Some(prediction.id.0.clone()),
edits: edits[edit_start_ix..edit_end_ix].to_vec(),
cursor_position: prediction.cursor_position,
edit_preview: Some(prediction.edit_preview.clone()),
diff --git a/crates/edit_prediction_cli/Cargo.toml b/crates/edit_prediction_cli/Cargo.toml
index 323ee3de41902b2140f95da22b0e37fb98d31fd5..a999fed2baf990273f0801bac15573b3aed0cc78 100644
--- a/crates/edit_prediction_cli/Cargo.toml
+++ b/crates/edit_prediction_cli/Cargo.toml
@@ -22,7 +22,7 @@ http_client.workspace = true
chrono.workspace = true
clap = "4"
client.workspace = true
-cloud_llm_client.workspace= true
+cloud_llm_client = { workspace = true, features = ["predict-edits"] }
collections.workspace = true
db.workspace = true
debug_adapter_extension.workspace = true
diff --git a/crates/env_var/Cargo.toml b/crates/env_var/Cargo.toml
index 2cbbd08c7833d3e57a09766d42ffffe35c620a93..3c879a2f49184e19a131046320d767931e1ca8ec 100644
--- a/crates/env_var/Cargo.toml
+++ b/crates/env_var/Cargo.toml
@@ -12,4 +12,4 @@ workspace = true
path = "src/env_var.rs"
[dependencies]
-gpui.workspace = true
+gpui_shared_string.workspace = true
diff --git a/crates/env_var/src/env_var.rs b/crates/env_var/src/env_var.rs
index 79f671e0147ebfaad4ab76a123cc477dc7e55cb7..cb436e95e0e734e4b7d8d271199246e1558a074d 100644
--- a/crates/env_var/src/env_var.rs
+++ b/crates/env_var/src/env_var.rs
@@ -1,4 +1,4 @@
-use gpui::SharedString;
+use gpui_shared_string::SharedString;
#[derive(Clone)]
pub struct EnvVar {
diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs
index 83c8119a077ac1c024dbb3b3df948f762b072ec1..2bf4a1991f7a302ed73fe098e8914fedd0f9eb2a 100644
--- a/crates/git_ui/src/branch_picker.rs
+++ b/crates/git_ui/src/branch_picker.rs
@@ -1906,7 +1906,7 @@ mod tests {
assert_eq!(
remotes,
vec![Remote {
- name: SharedString::from("my_new_remote".to_string())
+ name: SharedString::from("my_new_remote")
}]
);
}
diff --git a/crates/google_ai/Cargo.toml b/crates/google_ai/Cargo.toml
index 81e05e4836529e9b73b58b72683a7e72a4d5c984..d91d28851997723835ba85be343a453918301c71 100644
--- a/crates/google_ai/Cargo.toml
+++ b/crates/google_ai/Cargo.toml
@@ -18,8 +18,10 @@ schemars = ["dep:schemars"]
anyhow.workspace = true
futures.workspace = true
http_client.workspace = true
+language_model_core.workspace = true
+log.workspace = true
schemars = { workspace = true, optional = true }
serde.workspace = true
serde_json.workspace = true
-settings.workspace = true
strum.workspace = true
+tiktoken-rs.workspace = true
diff --git a/crates/google_ai/src/completion.rs b/crates/google_ai/src/completion.rs
new file mode 100644
index 0000000000000000000000000000000000000000..3a15fdaa0187e52cb82dc8c71b5b861eb797f1a8
--- /dev/null
+++ b/crates/google_ai/src/completion.rs
@@ -0,0 +1,492 @@
+use anyhow::Result;
+use futures::{Stream, StreamExt};
+use language_model_core::{
+ LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelRequest,
+ LanguageModelToolChoice, LanguageModelToolUse, LanguageModelToolUseId, MessageContent, Role,
+ StopReason, TokenUsage,
+};
+use std::pin::Pin;
+use std::sync::Arc;
+use std::sync::atomic::{self, AtomicU64};
+
+use crate::{
+ Content, FunctionCallingConfig, FunctionCallingMode, FunctionDeclaration,
+ GenerateContentResponse, GenerationConfig, GenerativeContentBlob, GoogleModelMode,
+ InlineDataPart, ModelName, Part, SystemInstruction, TextPart, ThinkingConfig, ToolConfig,
+ UsageMetadata,
+};
+
+pub fn into_google(
+ mut request: LanguageModelRequest,
+ model_id: String,
+ mode: GoogleModelMode,
+) -> crate::GenerateContentRequest {
+ fn map_content(content: Vec<MessageContent>) -> Vec<Part> {
+ content
+ .into_iter()
+ .flat_map(|content| match content {
+ MessageContent::Text(text) => {
+ if !text.is_empty() {
+ vec![Part::TextPart(TextPart { text })]
+ } else {
+ vec![]
+ }
+ }
+ MessageContent::Thinking {
+ text: _,
+ signature: Some(signature),
+ } => {
+ if !signature.is_empty() {
+ vec![Part::ThoughtPart(crate::ThoughtPart {
+ thought: true,
+ thought_signature: signature,
+ })]
+ } else {
+ vec![]
+ }
+ }
+ MessageContent::Thinking { .. } => {
+ vec![]
+ }
+ MessageContent::RedactedThinking(_) => vec![],
+ MessageContent::Image(image) => {
+ vec![Part::InlineDataPart(InlineDataPart {
+ inline_data: GenerativeContentBlob {
+ mime_type: "image/png".to_string(),
+ data: image.source.to_string(),
+ },
+ })]
+ }
+ MessageContent::ToolUse(tool_use) => {
+ // Normalize empty string signatures to None
+ let thought_signature = tool_use.thought_signature.filter(|s| !s.is_empty());
+
+ vec![Part::FunctionCallPart(crate::FunctionCallPart {
+ function_call: crate::FunctionCall {
+ name: tool_use.name.to_string(),
+ args: tool_use.input,
+ },
+ thought_signature,
+ })]
+ }
+ MessageContent::ToolResult(tool_result) => {
+ match tool_result.content {
+ language_model_core::LanguageModelToolResultContent::Text(text) => {
+ vec![Part::FunctionResponsePart(crate::FunctionResponsePart {
+ function_response: crate::FunctionResponse {
+ name: tool_result.tool_name.to_string(),
+ // The API expects a valid JSON object
+ response: serde_json::json!({
+ "output": text
+ }),
+ },
+ })]
+ }
+ language_model_core::LanguageModelToolResultContent::Image(image) => {
+ vec![
+ Part::FunctionResponsePart(crate::FunctionResponsePart {
+ function_response: crate::FunctionResponse {
+ name: tool_result.tool_name.to_string(),
+ // The API expects a valid JSON object
+ response: serde_json::json!({
+ "output": "Tool responded with an image"
+ }),
+ },
+ }),
+ Part::InlineDataPart(InlineDataPart {
+ inline_data: GenerativeContentBlob {
+ mime_type: "image/png".to_string(),
+ data: image.source.to_string(),
+ },
+ }),
+ ]
+ }
+ }
+ }
+ })
+ .collect()
+ }
+
+ let system_instructions = if request
+ .messages
+ .first()
+ .is_some_and(|msg| matches!(msg.role, Role::System))
+ {
+ let message = request.messages.remove(0);
+ Some(SystemInstruction {
+ parts: map_content(message.content),
+ })
+ } else {
+ None
+ };
+
+ crate::GenerateContentRequest {
+ model: ModelName { model_id },
+ system_instruction: system_instructions,
+ contents: request
+ .messages
+ .into_iter()
+ .filter_map(|message| {
+ let parts = map_content(message.content);
+ if parts.is_empty() {
+ None
+ } else {
+ Some(Content {
+ parts,
+ role: match message.role {
+ Role::User => crate::Role::User,
+ Role::Assistant => crate::Role::Model,
+ Role::System => crate::Role::User, // Google AI doesn't have a system role
+ },
+ })
+ }
+ })
+ .collect(),
+ generation_config: Some(GenerationConfig {
+ candidate_count: Some(1),
+ stop_sequences: Some(request.stop),
+ max_output_tokens: None,
+ temperature: request.temperature.map(|t| t as f64).or(Some(1.0)),
+ thinking_config: match (request.thinking_allowed, mode) {
+ (true, GoogleModelMode::Thinking { budget_tokens }) => {
+ budget_tokens.map(|thinking_budget| ThinkingConfig { thinking_budget })
+ }
+ _ => None,
+ },
+ top_p: None,
+ top_k: None,
+ }),
+ safety_settings: None,
+ tools: (!request.tools.is_empty()).then(|| {
+ vec![crate::Tool {
+ function_declarations: request
+ .tools
+ .into_iter()
+ .map(|tool| FunctionDeclaration {
+ name: tool.name,
+ description: tool.description,
+ parameters: tool.input_schema,
+ })
+ .collect(),
+ }]
+ }),
+ tool_config: request.tool_choice.map(|choice| ToolConfig {
+ function_calling_config: FunctionCallingConfig {
+ mode: match choice {
+ LanguageModelToolChoice::Auto => FunctionCallingMode::Auto,
+ LanguageModelToolChoice::Any => FunctionCallingMode::Any,
+ LanguageModelToolChoice::None => FunctionCallingMode::None,
+ },
+ allowed_function_names: None,
+ },
+ }),
+ }
+}
+
+pub struct GoogleEventMapper {
+ usage: UsageMetadata,
+ stop_reason: StopReason,
+}
+
+impl GoogleEventMapper {
+ pub fn new() -> Self {
+ Self {
+ usage: UsageMetadata::default(),
+ stop_reason: StopReason::EndTurn,
+ }
+ }
+
+ pub fn map_stream(
+ mut self,
+ events: Pin<Box<dyn Send + Stream<Item = Result<GenerateContentResponse>>>>,
+ ) -> impl Stream