From c60d31a726b014e1780f294a93208f4d3e6c60b3 Mon Sep 17 00:00:00 2001 From: Mayank Verma Date: Mon, 10 Nov 2025 07:54:20 +0530 Subject: [PATCH 01/74] git: Track worktree references to resolve stale repository state (#41592) Closes #35997 Closes #38018 Closes #41516 Release Notes: - Fixes stale git repositories persisting after removal --- crates/project/src/git_store.rs | 59 ++++++++++++++- crates/project/src/project_tests.rs | 113 ++++++++++++++++++++++++++++ 2 files changed, 170 insertions(+), 2 deletions(-) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 3ea02d9e49a5edbd951a46a55eed8f48953c12b7..8767e5e2931288519d78739f67292728b6d5b77d 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -55,9 +55,10 @@ use rpc::{ proto::{self, git_reset, split_repository_update}, }; use serde::Deserialize; +use settings::WorktreeId; use std::{ cmp::Ordering, - collections::{BTreeSet, VecDeque}, + collections::{BTreeSet, HashSet, VecDeque}, future::Future, mem, ops::Range, @@ -89,6 +90,7 @@ pub struct GitStore { buffer_store: Entity, worktree_store: Entity, repositories: HashMap>, + worktree_ids: HashMap>, active_repo_id: Option, #[allow(clippy::type_complexity)] loading_diffs: @@ -409,6 +411,7 @@ impl GitStore { buffer_store, worktree_store, repositories: HashMap::default(), + worktree_ids: HashMap::default(), active_repo_id: None, _subscriptions, loading_diffs: HashMap::default(), @@ -1167,6 +1170,7 @@ impl GitStore { return; } self.update_repositories_from_worktree( + *worktree_id, project_environment.clone(), next_repository_id.clone(), downstream @@ -1178,6 +1182,45 @@ impl GitStore { ); self.local_worktree_git_repos_changed(worktree, changed_repos, cx); } + WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => { + let repos_without_worktree: Vec = self + .worktree_ids + .iter_mut() + .filter_map(|(repo_id, worktree_ids)| { + worktree_ids.remove(worktree_id); + if worktree_ids.is_empty() { + Some(*repo_id) + } else { + None + } + }) + .collect(); + let is_active_repo_removed = repos_without_worktree + .iter() + .any(|repo_id| self.active_repo_id == Some(*repo_id)); + + for repo_id in repos_without_worktree { + self.repositories.remove(&repo_id); + self.worktree_ids.remove(&repo_id); + if let Some(updates_tx) = + downstream.as_ref().map(|downstream| &downstream.updates_tx) + { + updates_tx + .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id)) + .ok(); + } + } + + if is_active_repo_removed { + if let Some((&repo_id, _)) = self.repositories.iter().next() { + self.active_repo_id = Some(repo_id); + cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id))); + } else { + self.active_repo_id = None; + cx.emit(GitStoreEvent::ActiveRepositoryChanged(None)); + } + } + } _ => {} } } @@ -1228,6 +1271,7 @@ impl GitStore { /// Update our list of repositories and schedule git scans in response to a notification from a worktree, fn update_repositories_from_worktree( &mut self, + worktree_id: WorktreeId, project_environment: Entity, next_repository_id: Arc, updates_tx: Option>, @@ -1245,15 +1289,25 @@ impl GitStore { || Some(&existing_work_directory_abs_path) == update.new_work_directory_abs_path.as_ref() }) { + let repo_id = *id; if let Some(new_work_directory_abs_path) = update.new_work_directory_abs_path.clone() { + self.worktree_ids + .entry(repo_id) + .or_insert_with(HashSet::new) + .insert(worktree_id); existing.update(cx, |existing, cx| { existing.snapshot.work_directory_abs_path = new_work_directory_abs_path; 
existing.schedule_scan(updates_tx.clone(), cx); }); } else { - removed_ids.push(*id); + if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) { + worktree_ids.remove(&worktree_id); + if worktree_ids.is_empty() { + removed_ids.push(repo_id); + } + } } } else if let UpdatedGitRepository { new_work_directory_abs_path: Some(work_directory_abs_path), @@ -1291,6 +1345,7 @@ impl GitStore { self._subscriptions .push(cx.subscribe(&repo, Self::on_jobs_updated)); self.repositories.insert(id, repo); + self.worktree_ids.insert(id, HashSet::from([worktree_id])); cx.emit(GitStoreEvent::RepositoryAdded); self.active_repo_id.get_or_insert_with(|| { cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id))); diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 65a7803d64a122c4f9a3774a09a47ec91455da5f..1f76b905be2843605b32918e6d3bf1a037ced636 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -10477,3 +10477,116 @@ async fn test_find_project_path_abs( ); }); } + +#[gpui::test] +async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/root"), + json!({ + "a": { + ".git": {}, + "src": { + "main.rs": "fn main() {}", + } + }, + "b": { + ".git": {}, + "src": { + "main.rs": "fn main() {}", + }, + "script": { + "run.sh": "#!/bin/bash" + } + } + }), + ) + .await; + + let project = Project::test( + fs.clone(), + [ + path!("/root/a").as_ref(), + path!("/root/b/script").as_ref(), + path!("/root/b").as_ref(), + ], + cx, + ) + .await; + let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx)); + scan_complete.await; + + let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::>()); + assert_eq!(worktrees.len(), 3); + + let worktree_id_by_abs_path = worktrees + .into_iter() + .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id()))) + .collect::>(); + let worktree_id = worktree_id_by_abs_path + .get(Path::new(path!("/root/b/script"))) + .unwrap(); + + let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone()); + assert_eq!(repos.len(), 2); + + project.update(cx, |project, cx| { + project.remove_worktree(*worktree_id, cx); + }); + cx.run_until_parked(); + + let mut repo_paths = project + .update(cx, |p, cx| p.git_store().read(cx).repositories().clone()) + .values() + .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone())) + .collect::>(); + repo_paths.sort(); + + pretty_assertions::assert_eq!( + repo_paths, + [ + Path::new(path!("/root/a")).into(), + Path::new(path!("/root/b")).into(), + ] + ); + + let active_repo_path = project + .read_with(cx, |p, cx| { + p.active_repository(cx) + .map(|r| r.read(cx).work_directory_abs_path.clone()) + }) + .unwrap(); + assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a"))); + + let worktree_id = worktree_id_by_abs_path + .get(Path::new(path!("/root/a"))) + .unwrap(); + project.update(cx, |project, cx| { + project.remove_worktree(*worktree_id, cx); + }); + cx.run_until_parked(); + + let active_repo_path = project + .read_with(cx, |p, cx| { + p.active_repository(cx) + .map(|r| r.read(cx).work_directory_abs_path.clone()) + }) + .unwrap(); + assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b"))); + + let worktree_id = worktree_id_by_abs_path + .get(Path::new(path!("/root/b"))) + .unwrap(); + project.update(cx, |project, cx| { + project.remove_worktree(*worktree_id, cx); + }); 
+ cx.run_until_parked(); + + let active_repo_path = project.read_with(cx, |p, cx| { + p.active_repository(cx) + .map(|r| r.read(cx).work_directory_abs_path.clone()) + }); + assert!(active_repo_path.is_none()); +} From e025ee6a11729985666faf2507739c2522ba9f5a Mon Sep 17 00:00:00 2001 From: Ayush Chandekar Date: Mon, 10 Nov 2025 08:05:29 +0530 Subject: [PATCH 02/74] git: Add base branch support to create_branch (#42151) Closes [#41674](https://github.com/zed-industries/zed/issues/41674) Description: Creating a branch from a base requires switching to the base branch first, then creating the new branch and checking out to it, which requires multiple operations. Add base_branch parameter to create_branch to allow a new branch from a base branch in one operation which is synonymous to the command `git switch -c `. Below is the video after solving the issue: (`master` branch is the default branch here, and I create a branch `new-branch-2` based off the `master` branch. I also show the error which used to appear before the fix.) [Screencast from 2025-11-07 05-14-32.webm](https://github.com/user-attachments/assets/d37d1b58-af5f-44e8-b867-2aa5d4ef3d90) Release Notes: - Fixed the branch-picking error by replacing multiple sequential switch operations with just one switch operation. Signed-off-by: ayu-ch --- crates/collab/src/tests/integration_tests.rs | 2 +- .../remote_editing_collaboration_tests.rs | 2 +- crates/fs/src/fake_git_repo.rs | 6 ++- crates/git/src/repository.rs | 29 ++++++++--- crates/git_ui/src/branch_picker.rs | 12 +---- crates/project/src/git_store.rs | 48 +++++++++++-------- .../remote_server/src/remote_editing_tests.rs | 2 +- 7 files changed, 59 insertions(+), 42 deletions(-) diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index 4fa32b6c9ba55e6962547510f52251f16fc9be81..a4c8dc0e5b7e5eb01f099c11f29a5d651da09303 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -7065,7 +7065,7 @@ async fn test_remote_git_branches( // Also try creating a new branch cx_b.update(|cx| { repo_b.update(cx, |repository, _cx| { - repository.create_branch("totally-new-branch".to_string()) + repository.create_branch("totally-new-branch".to_string(), None) }) }) .await diff --git a/crates/collab/src/tests/remote_editing_collaboration_tests.rs b/crates/collab/src/tests/remote_editing_collaboration_tests.rs index 883bfa18725b02e4359aba371710c6e96efef73b..e5cc506bbca8b0a4a2fca972df61d373a288702c 100644 --- a/crates/collab/src/tests/remote_editing_collaboration_tests.rs +++ b/crates/collab/src/tests/remote_editing_collaboration_tests.rs @@ -326,7 +326,7 @@ async fn test_ssh_collaboration_git_branches( // Also try creating a new branch cx_b.update(|cx| { repo_b.update(cx, |repo_b, _cx| { - repo_b.create_branch("totally-new-branch".to_string()) + repo_b.create_branch("totally-new-branch".to_string(), None) }) }) .await diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index 4652e0fdb098a8b7ee6bfdb5ac9f3215831afb97..aeaed1d6fc2947e55551026d518da18952cc051a 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -407,7 +407,11 @@ impl GitRepository for FakeGitRepository { }) } - fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { + fn create_branch( + &self, + name: String, + _base_branch: Option, + ) -> BoxFuture<'_, Result<()>> { self.with_state_async(true, move |state| { state.branches.insert(name); Ok(()) diff --git 
a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 225ab81abdb3e5299880731951832eee000d8e5c..6fcf285e384f4a03a0f3fe8d2a613a56ace4666e 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -431,7 +431,8 @@ pub trait GitRepository: Send + Sync { fn branches(&self) -> BoxFuture<'_, Result>>; fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>>; - fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>>; + fn create_branch(&self, name: String, base_branch: Option) + -> BoxFuture<'_, Result<()>>; fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>>; fn worktrees(&self) -> BoxFuture<'_, Result>>; @@ -1358,14 +1359,28 @@ impl GitRepository for RealGitRepository { .boxed() } - fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>> { - let repo = self.repository.clone(); + fn create_branch( + &self, + name: String, + base_branch: Option, + ) -> BoxFuture<'_, Result<()>> { + let git_binary_path = self.any_git_binary_path.clone(); + let working_directory = self.working_directory(); + let executor = self.executor.clone(); + self.executor .spawn(async move { - let repo = repo.lock(); - let current_commit = repo.head()?.peel_to_commit()?; - repo.branch(&name, ¤t_commit, false)?; - Ok(()) + let mut args = vec!["switch", "-c", &name]; + let base_branch_str; + if let Some(ref base) = base_branch { + base_branch_str = base.clone(); + args.push(&base_branch_str); + } + + GitBinary::new(git_binary_path, working_directory?, executor) + .run(&args) + .await?; + anyhow::Ok(()) }) .boxed() } diff --git a/crates/git_ui/src/branch_picker.rs b/crates/git_ui/src/branch_picker.rs index e10568ff37aaa47924632558228294feca84ac61..3ae9059b2a12f178931a5271b92c5fdf44f319d4 100644 --- a/crates/git_ui/src/branch_picker.rs +++ b/crates/git_ui/src/branch_picker.rs @@ -241,18 +241,10 @@ impl BranchListDelegate { return; }; let new_branch_name = new_branch_name.to_string().replace(' ', "-"); + let base_branch = from_branch.map(|b| b.to_string()); cx.spawn(async move |_, cx| { - if let Some(based_branch) = from_branch { - repo.update(cx, |repo, _| repo.change_branch(based_branch.to_string()))? - .await??; - } - - repo.update(cx, |repo, _| { - repo.create_branch(new_branch_name.to_string()) - })? - .await??; repo.update(cx, |repo, _| { - repo.change_branch(new_branch_name.to_string()) + repo.create_branch(new_branch_name, base_branch) })? .await??; diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 8767e5e2931288519d78739f67292728b6d5b77d..90d76f51be27c66894519ea22ddcaa19baedc9c4 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -2094,7 +2094,7 @@ impl GitStore { repository_handle .update(&mut cx, |repository_handle, _| { - repository_handle.create_branch(branch_name) + repository_handle.create_branch(branch_name, None) })? .await??; @@ -4747,29 +4747,35 @@ impl Repository { }) } - pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver> { + pub fn create_branch( + &mut self, + branch_name: String, + base_branch: Option, + ) -> oneshot::Receiver> { let id = self.id; - self.send_job( - Some(format!("git switch -c {branch_name}").into()), - move |repo, _cx| async move { - match repo { - RepositoryState::Local { backend, .. 
} => { - backend.create_branch(branch_name).await - } - RepositoryState::Remote { project_id, client } => { - client - .request(proto::GitCreateBranch { - project_id: project_id.0, - repository_id: id.to_proto(), - branch_name, - }) - .await?; + let status_msg = if let Some(ref base) = base_branch { + format!("git switch -c {branch_name} {base}").into() + } else { + format!("git switch -c {branch_name}").into() + }; + self.send_job(Some(status_msg), move |repo, _cx| async move { + match repo { + RepositoryState::Local { backend, .. } => { + backend.create_branch(branch_name, base_branch).await + } + RepositoryState::Remote { project_id, client } => { + client + .request(proto::GitCreateBranch { + project_id: project_id.0, + repository_id: id.to_proto(), + branch_name, + }) + .await?; - Ok(()) - } + Ok(()) } - }, - ) + } + }) } pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver> { diff --git a/crates/remote_server/src/remote_editing_tests.rs b/crates/remote_server/src/remote_editing_tests.rs index de0d58e50dbc036723365ca6099efabff1f8449d..98a0aab70bcb4e5590f477f6e6de9aebd512b3c2 100644 --- a/crates/remote_server/src/remote_editing_tests.rs +++ b/crates/remote_server/src/remote_editing_tests.rs @@ -1662,7 +1662,7 @@ async fn test_remote_git_branches(cx: &mut TestAppContext, server_cx: &mut TestA // Also try creating a new branch cx.update(|cx| { repository.update(cx, |repo, _cx| { - repo.create_branch("totally-new-branch".to_string()) + repo.create_branch("totally-new-branch".to_string(), None) }) }) .await From 35c58151eb0a1cbb9ebcc9be605b00b9fedfd5ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E1=B4=80=E1=B4=8D=E1=B4=9B=E1=B4=8F=E1=B4=80=E1=B4=87?= =?UTF-8?q?=CA=80?= Date: Mon, 10 Nov 2025 10:37:22 +0800 Subject: [PATCH 03/74] git: Fix support for self-hosted Bitbucket (#42002) Closes #41995 Release Notes: - Fixed support for self-hosted Bitbucket --- .../src/git_hosting_providers.rs | 2 + .../src/providers/bitbucket.rs | 205 +++++++++++++++++- 2 files changed, 200 insertions(+), 7 deletions(-) diff --git a/crates/git_hosting_providers/src/git_hosting_providers.rs b/crates/git_hosting_providers/src/git_hosting_providers.rs index 307db49a8ebd33228e6c1bccfbffc065b5f563b3..6940ea382a1a21dbb3e97b55d74ee2489a1691ba 100644 --- a/crates/git_hosting_providers/src/git_hosting_providers.rs +++ b/crates/git_hosting_providers/src/git_hosting_providers.rs @@ -49,6 +49,8 @@ pub fn register_additional_providers( provider_registry.register_hosting_provider(Arc::new(forgejo_self_hosted)); } else if let Ok(gitea_self_hosted) = Gitea::from_remote_url(&origin_url) { provider_registry.register_hosting_provider(Arc::new(gitea_self_hosted)); + } else if let Ok(bitbucket_self_hosted) = Bitbucket::from_remote_url(&origin_url) { + provider_registry.register_hosting_provider(Arc::new(bitbucket_self_hosted)); } } diff --git a/crates/git_hosting_providers/src/providers/bitbucket.rs b/crates/git_hosting_providers/src/providers/bitbucket.rs index a6bb83b0f9d6025301db309c4d64ea39ade42076..0c30a13758a8339087ebb146f0029baee0d3ea7e 100644 --- a/crates/git_hosting_providers/src/providers/bitbucket.rs +++ b/crates/git_hosting_providers/src/providers/bitbucket.rs @@ -1,6 +1,7 @@ use std::str::FromStr; use std::sync::LazyLock; +use anyhow::{Result, bail}; use regex::Regex; use url::Url; @@ -9,6 +10,8 @@ use git::{ PullRequest, RemoteUrl, }; +use crate::get_host_from_git_remote_url; + fn pull_request_regex() -> &'static Regex { static PULL_REQUEST_REGEX: LazyLock = LazyLock::new(|| { // This matches 
Bitbucket PR reference pattern: (pull request #xxx) @@ -33,6 +36,31 @@ impl Bitbucket { pub fn public_instance() -> Self { Self::new("Bitbucket", Url::parse("https://bitbucket.org").unwrap()) } + + pub fn from_remote_url(remote_url: &str) -> Result { + let host = get_host_from_git_remote_url(remote_url)?; + if host == "bitbucket.org" { + bail!("the BitBucket instance is not self-hosted"); + } + + // TODO: detecting self hosted instances by checking whether "bitbucket" is in the url or not + // is not very reliable. See https://github.com/zed-industries/zed/issues/26393 for more + // information. + if !host.contains("bitbucket") { + bail!("not a BitBucket URL"); + } + + Ok(Self::new( + "BitBucket Self-Hosted", + Url::parse(&format!("https://{}", host))?, + )) + } + + fn is_self_hosted(&self) -> bool { + self.base_url + .host_str() + .is_some_and(|host| host != "bitbucket.org") + } } impl GitHostingProvider for Bitbucket { @@ -49,10 +77,16 @@ impl GitHostingProvider for Bitbucket { } fn format_line_number(&self, line: u32) -> String { + if self.is_self_hosted() { + return format!("{line}"); + } format!("lines-{line}") } fn format_line_numbers(&self, start_line: u32, end_line: u32) -> String { + if self.is_self_hosted() { + return format!("{start_line}-{end_line}"); + } format!("lines-{start_line}:{end_line}") } @@ -60,7 +94,7 @@ impl GitHostingProvider for Bitbucket { let url = RemoteUrl::from_str(url).ok()?; let host = url.host_str()?; - if host != "bitbucket.org" { + if host != self.base_url.host_str()? { return None; } @@ -81,7 +115,12 @@ impl GitHostingProvider for Bitbucket { ) -> Url { let BuildCommitPermalinkParams { sha } = params; let ParsedGitRemote { owner, repo } = remote; - + if self.is_self_hosted() { + return self + .base_url() + .join(&format!("projects/{owner}/repos/{repo}/commits/{sha}")) + .unwrap(); + } self.base_url() .join(&format!("{owner}/{repo}/commits/{sha}")) .unwrap() @@ -95,10 +134,18 @@ impl GitHostingProvider for Bitbucket { selection, } = params; - let mut permalink = self - .base_url() - .join(&format!("{owner}/{repo}/src/{sha}/{path}")) - .unwrap(); + let mut permalink = if self.is_self_hosted() { + self.base_url() + .join(&format!( + "projects/{owner}/repos/{repo}/browse/{path}?at={sha}" + )) + .unwrap() + } else { + self.base_url() + .join(&format!("{owner}/{repo}/src/{sha}/{path}")) + .unwrap() + }; + permalink.set_fragment( selection .map(|selection| self.line_fragment(&selection)) @@ -117,7 +164,14 @@ impl GitHostingProvider for Bitbucket { // Construct the PR URL in Bitbucket format let mut url = self.base_url(); - let path = format!("/{}/{}/pull-requests/{}", remote.owner, remote.repo, number); + let path = if self.is_self_hosted() { + format!( + "/projects/{}/repos/{}/pull-requests/{}", + remote.owner, remote.repo, number + ) + } else { + format!("/{}/{}/pull-requests/{}", remote.owner, remote.repo, number) + }; url.set_path(&path); Some(PullRequest { number, url }) @@ -176,6 +230,60 @@ mod tests { ); } + #[test] + fn test_parse_remote_url_given_self_hosted_ssh_url() { + let remote_url = "git@bitbucket.company.com:zed-industries/zed.git"; + + let parsed_remote = Bitbucket::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_self_hosted_https_url() { + let remote_url = "https://bitbucket.company.com/zed-industries/zed.git"; + + let parsed_remote = 
Bitbucket::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + + #[test] + fn test_parse_remote_url_given_self_hosted_https_url_with_username() { + let remote_url = "https://thorstenballzed@bitbucket.company.com/zed-industries/zed.git"; + + let parsed_remote = Bitbucket::from_remote_url(remote_url) + .unwrap() + .parse_remote_url(remote_url) + .unwrap(); + + assert_eq!( + parsed_remote, + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + } + ); + } + #[test] fn test_build_bitbucket_permalink() { let permalink = Bitbucket::public_instance().build_permalink( @@ -190,6 +298,23 @@ mod tests { assert_eq!(permalink.to_string(), expected_url.to_string()) } + #[test] + fn test_build_bitbucket_self_hosted_permalink() { + let permalink = + Bitbucket::from_remote_url("git@bitbucket.company.com:zed-industries/zed.git") + .unwrap() + .build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, + BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), None), + ); + + let expected_url = "https://bitbucket.company.com/projects/zed-industries/repos/zed/browse/main.rs?at=f00b4r"; + assert_eq!(permalink.to_string(), expected_url.to_string()) + } + #[test] fn test_build_bitbucket_permalink_with_single_line_selection() { let permalink = Bitbucket::public_instance().build_permalink( @@ -204,6 +329,23 @@ mod tests { assert_eq!(permalink.to_string(), expected_url.to_string()) } + #[test] + fn test_build_bitbucket_self_hosted_permalink_with_single_line_selection() { + let permalink = + Bitbucket::from_remote_url("https://bitbucket.company.com/zed-industries/zed.git") + .unwrap() + .build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, + BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), Some(6..6)), + ); + + let expected_url = "https://bitbucket.company.com/projects/zed-industries/repos/zed/browse/main.rs?at=f00b4r#7"; + assert_eq!(permalink.to_string(), expected_url.to_string()) + } + #[test] fn test_build_bitbucket_permalink_with_multi_line_selection() { let permalink = Bitbucket::public_instance().build_permalink( @@ -219,6 +361,23 @@ mod tests { assert_eq!(permalink.to_string(), expected_url.to_string()) } + #[test] + fn test_build_bitbucket_self_hosted_permalink_with_multi_line_selection() { + let permalink = + Bitbucket::from_remote_url("git@bitbucket.company.com:zed-industries/zed.git") + .unwrap() + .build_permalink( + ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }, + BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), Some(23..47)), + ); + + let expected_url = "https://bitbucket.company.com/projects/zed-industries/repos/zed/browse/main.rs?at=f00b4r#24-48"; + assert_eq!(permalink.to_string(), expected_url.to_string()) + } + #[test] fn test_bitbucket_pull_requests() { use indoc::indoc; @@ -248,4 +407,36 @@ mod tests { "https://bitbucket.org/zed-industries/zed/pull-requests/123" ); } + + #[test] + fn test_bitbucket_self_hosted_pull_requests() { + use indoc::indoc; + + let remote = ParsedGitRemote { + owner: "zed-industries".into(), + repo: "zed".into(), + }; + + let bitbucket = + Bitbucket::from_remote_url("https://bitbucket.company.com/zed-industries/zed.git") + .unwrap(); + + // Test message without PR reference + let message = "This does not contain a pull request"; + 
assert!(bitbucket.extract_pull_request(&remote, message).is_none()); + + // Pull request number at end of first line + let message = indoc! {r#" + Merged in feature-branch (pull request #123) + + Some detailed description of the changes. + "#}; + + let pr = bitbucket.extract_pull_request(&remote, message).unwrap(); + assert_eq!(pr.number, 123); + assert_eq!( + pr.url.as_str(), + "https://bitbucket.company.com/projects/zed-industries/repos/zed/pull-requests/123" + ); + } } From b8081ad7a66c6a8b444bae1cd1f195e9daa739c7 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Sun, 9 Nov 2025 21:10:36 -0800 Subject: [PATCH 04/74] Make it easy to point zeta2 at ollama (#42329) I wanted to be able to work offline, so I made it a little bit more convenient to point zeta2 at ollama. * For zeta2, don't require that request ids be UUIDs * Add an env var `ZED_ZETA2_OLLAMA` that sets the edit prediction URL and model id to work w/ ollama. Release Notes: - N/A --- .../cloud_llm_client/src/cloud_llm_client.rs | 4 +- crates/zeta/src/zeta.rs | 6 ++- crates/zeta2/src/prediction.rs | 17 +++------ crates/zeta2/src/zeta2.rs | 37 ++++++++++++++----- 4 files changed, 39 insertions(+), 25 deletions(-) diff --git a/crates/cloud_llm_client/src/cloud_llm_client.rs b/crates/cloud_llm_client/src/cloud_llm_client.rs index bb77c3a5b7f8009093cbf7bc427160ed535e6c62..ff8275fe40eae6945691a7b8d315414617be0235 100644 --- a/crates/cloud_llm_client/src/cloud_llm_client.rs +++ b/crates/cloud_llm_client/src/cloud_llm_client.rs @@ -183,13 +183,13 @@ pub struct PredictEditsGitInfo { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct PredictEditsResponse { - pub request_id: Uuid, + pub request_id: String, pub output_excerpt: String, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct AcceptEditPredictionBody { - pub request_id: Uuid, + pub request_id: String, } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)] diff --git a/crates/zeta/src/zeta.rs b/crates/zeta/src/zeta.rs index 0b939bdee27851a9ec9975b586c9a7bcad67484f..708a53ff47bd2c60e6b9620e8bed30b16419ba14 100644 --- a/crates/zeta/src/zeta.rs +++ b/crates/zeta/src/zeta.rs @@ -652,7 +652,7 @@ impl Zeta { .header(ZED_VERSION_HEADER_NAME, app_version.to_string()) .body( serde_json::to_string(&AcceptEditPredictionBody { - request_id: request_id.0, + request_id: request_id.0.to_string(), })? .into(), )?) 
@@ -735,6 +735,8 @@ impl Zeta { return anyhow::Ok(None); }; + let request_id = Uuid::from_str(&request_id).context("failed to parse request id")?; + let edit_preview = edit_preview.await; Ok(Some(EditPrediction { @@ -2162,7 +2164,7 @@ mod tests { .status(200) .body( serde_json::to_string(&PredictEditsResponse { - request_id: Uuid::new_v4(), + request_id: Uuid::new_v4().to_string(), output_excerpt: completion_response.lock().clone(), }) .unwrap() diff --git a/crates/zeta2/src/prediction.rs b/crates/zeta2/src/prediction.rs index 54a6987b3f781a48fe928636dc3537117ee6a401..e9f726ce00c36b5235919c0e185876996f4fda03 100644 --- a/crates/zeta2/src/prediction.rs +++ b/crates/zeta2/src/prediction.rs @@ -1,21 +1,14 @@ use std::{ops::Range, sync::Arc}; -use gpui::{AsyncApp, Entity}; +use gpui::{AsyncApp, Entity, SharedString}; use language::{Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, TextBufferSnapshot}; -use uuid::Uuid; -#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)] -pub struct EditPredictionId(pub Uuid); - -impl Into for EditPredictionId { - fn into(self) -> Uuid { - self.0 - } -} +#[derive(Clone, Default, Debug, PartialEq, Eq, Hash)] +pub struct EditPredictionId(pub SharedString); impl From for gpui::ElementId { fn from(value: EditPredictionId) -> Self { - gpui::ElementId::Uuid(value.0) + gpui::ElementId::Name(value.0) } } @@ -149,7 +142,7 @@ mod tests { .await; let prediction = EditPrediction { - id: EditPredictionId(Uuid::new_v4()), + id: EditPredictionId("prediction-1".into()), edits, snapshot: cx.read(|cx| buffer.read(cx).snapshot()), buffer: buffer.clone(), diff --git a/crates/zeta2/src/zeta2.rs b/crates/zeta2/src/zeta2.rs index ff0ff4f1ba2af59f32cddee96e4b9c0dd25af22d..3a51f9975ccbcf3fb325712f7aafadc5187da541 100644 --- a/crates/zeta2/src/zeta2.rs +++ b/crates/zeta2/src/zeta2.rs @@ -30,8 +30,8 @@ use project::Project; use release_channel::AppVersion; use serde::de::DeserializeOwned; use std::collections::{VecDeque, hash_map}; -use uuid::Uuid; +use std::env; use std::ops::Range; use std::path::Path; use std::str::FromStr as _; @@ -88,8 +88,24 @@ pub const DEFAULT_OPTIONS: ZetaOptions = ZetaOptions { buffer_change_grouping_interval: Duration::from_secs(1), }; -static MODEL_ID: LazyLock = - LazyLock::new(|| std::env::var("ZED_ZETA2_MODEL").unwrap_or("yqvev8r3".to_string())); +static USE_OLLAMA: LazyLock = + LazyLock::new(|| env::var("ZED_ZETA2_OLLAMA").is_ok_and(|var| !var.is_empty())); +static MODEL_ID: LazyLock = LazyLock::new(|| { + env::var("ZED_ZETA2_MODEL").unwrap_or(if *USE_OLLAMA { + "qwen3-coder:30b".to_string() + } else { + "yqvev8r3".to_string() + }) +}); +static PREDICT_EDITS_URL: LazyLock> = LazyLock::new(|| { + env::var("ZED_PREDICT_EDITS_URL").ok().or_else(|| { + if *USE_OLLAMA { + Some("http://localhost:11434/v1/chat/completions".into()) + } else { + None + } + }) +}); pub struct Zeta2FeatureFlag; @@ -567,13 +583,13 @@ impl Zeta { let Some(prediction) = project_state.current_prediction.take() else { return; }; - let request_id = prediction.prediction.id.into(); + let request_id = prediction.prediction.id.to_string(); let client = self.client.clone(); let llm_token = self.llm_token.clone(); let app_version = AppVersion::global(cx); cx.spawn(async move |this, cx| { - let url = if let Ok(predict_edits_url) = std::env::var("ZED_ACCEPT_PREDICTION_URL") { + let url = if let Ok(predict_edits_url) = env::var("ZED_ACCEPT_PREDICTION_URL") { http_client::Url::parse(&predict_edits_url)? 
} else { client @@ -585,7 +601,10 @@ impl Zeta { .background_spawn(Self::send_api_request::<()>( move |builder| { let req = builder.uri(url.as_ref()).body( - serde_json::to_string(&AcceptEditPredictionBody { request_id })?.into(), + serde_json::to_string(&AcceptEditPredictionBody { + request_id: request_id.clone(), + })? + .into(), ); Ok(req?) }, @@ -875,7 +894,7 @@ impl Zeta { None }; - if cfg!(debug_assertions) && std::env::var("ZED_ZETA2_SKIP_REQUEST").is_ok() { + if cfg!(debug_assertions) && env::var("ZED_ZETA2_SKIP_REQUEST").is_ok() { if let Some(debug_response_tx) = debug_response_tx { debug_response_tx .send((Err("Request skipped".to_string()), TimeDelta::zero())) @@ -923,7 +942,7 @@ impl Zeta { } let (res, usage) = response?; - let request_id = EditPredictionId(Uuid::from_str(&res.id)?); + let request_id = EditPredictionId(res.id.clone().into()); let Some(output_text) = text_from_response(res) else { return Ok((None, usage)) }; @@ -980,7 +999,7 @@ impl Zeta { app_version: SemanticVersion, request: open_ai::Request, ) -> Result<(open_ai::Response, Option)> { - let url = if let Ok(predict_edits_url) = std::env::var("ZED_PREDICT_EDITS_URL") { + let url = if let Some(predict_edits_url) = PREDICT_EDITS_URL.as_ref() { http_client::Url::parse(&predict_edits_url)? } else { client From 359160c8b15a276d11af5d269d9fb1f0ebcd1765 Mon Sep 17 00:00:00 2001 From: Jakub Konka Date: Mon, 10 Nov 2025 08:57:50 +0100 Subject: [PATCH 05/74] git: Add askpass delegate to git-commit handlers (#42239) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In my local setup, I always enforce git-commit signing with GPG/SSH which automatically enforces `git commit -S` when committing. This changeset will now show a modal to the user for them to specify the passphrase (if any) so that they can unlock their private key for signing when committing in Zed. Screenshot 2025-11-07 at 11 09
09 PM Release Notes: - Handle automatic git-commit signing by presenting the user with an askpass modal --- crates/fs/src/fake_git_repo.rs | 1 + crates/git/src/repository.rs | 70 +++++++++++++++++++++------------ crates/git_ui/src/git_panel.rs | 5 ++- crates/project/src/git_store.rs | 21 +++++++++- crates/proto/proto/git.proto | 1 + 5 files changed, 69 insertions(+), 29 deletions(-) diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index aeaed1d6fc2947e55551026d518da18952cc051a..e3d58f3001c407f8d4deea115b460000bc666574 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -526,6 +526,7 @@ impl GitRepository for FakeGitRepository { _message: gpui::SharedString, _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>, _options: CommitOptions, + _askpass: AskPassDelegate, _env: Arc>, ) -> BoxFuture<'_, Result<()>> { unimplemented!() diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 6fcf285e384f4a03a0f3fe8d2a613a56ace4666e..1ad21d993607c75777302ddb6f4ec1964a916ad0 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -491,6 +491,7 @@ pub trait GitRepository: Send + Sync { message: SharedString, name_and_email: Option<(SharedString, SharedString)>, options: CommitOptions, + askpass: AskPassDelegate, env: Arc>, ) -> BoxFuture<'_, Result<()>>; @@ -1630,41 +1631,39 @@ impl GitRepository for RealGitRepository { message: SharedString, name_and_email: Option<(SharedString, SharedString)>, options: CommitOptions, + ask_pass: AskPassDelegate, env: Arc>, ) -> BoxFuture<'_, Result<()>> { let working_directory = self.working_directory(); let git_binary_path = self.any_git_binary_path.clone(); - self.executor - .spawn(async move { - let mut cmd = new_smol_command(git_binary_path); - cmd.current_dir(&working_directory?) - .envs(env.iter()) - .args(["commit", "--quiet", "-m"]) - .arg(&message.to_string()) - .arg("--cleanup=strip"); + let executor = self.executor.clone(); + async move { + let mut cmd = new_smol_command(git_binary_path); + cmd.current_dir(&working_directory?) 
+ .envs(env.iter()) + .args(["commit", "--quiet", "-m"]) + .arg(&message.to_string()) + .arg("--cleanup=strip") + .stdout(smol::process::Stdio::piped()) + .stderr(smol::process::Stdio::piped()); - if options.amend { - cmd.arg("--amend"); - } + if options.amend { + cmd.arg("--amend"); + } - if options.signoff { - cmd.arg("--signoff"); - } + if options.signoff { + cmd.arg("--signoff"); + } - if let Some((name, email)) = name_and_email { - cmd.arg("--author").arg(&format!("{name} <{email}>")); - } + if let Some((name, email)) = name_and_email { + cmd.arg("--author").arg(&format!("{name} <{email}>")); + } - let output = cmd.output().await?; + run_git_command(env, ask_pass, cmd, &executor).await?; - anyhow::ensure!( - output.status.success(), - "Failed to commit:\n{}", - String::from_utf8_lossy(&output.stderr) - ); - Ok(()) - }) - .boxed() + Ok(()) + } + .boxed() } fn push( @@ -2469,8 +2468,17 @@ mod tests { use super::*; use gpui::TestAppContext; + fn disable_git_global_config() { + unsafe { + std::env::set_var("GIT_CONFIG_GLOBAL", ""); + std::env::set_var("GIT_CONFIG_SYSTEM", ""); + } + } + #[gpui::test] async fn test_checkpoint_basic(cx: &mut TestAppContext) { + disable_git_global_config(); + cx.executor().allow_parking(); let repo_dir = tempfile::tempdir().unwrap(); @@ -2486,6 +2494,7 @@ mod tests { cx.executor(), ) .unwrap(); + repo.stage_paths(vec![repo_path("file")], Arc::new(HashMap::default())) .await .unwrap(); @@ -2493,6 +2502,7 @@ mod tests { "Initial commit".into(), None, CommitOptions::default(), + AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}), Arc::new(checkpoint_author_envs()), ) .await @@ -2519,6 +2529,7 @@ mod tests { "Commit after checkpoint".into(), None, CommitOptions::default(), + AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}), Arc::new(checkpoint_author_envs()), ) .await @@ -2556,6 +2567,8 @@ mod tests { #[gpui::test] async fn test_checkpoint_empty_repo(cx: &mut TestAppContext) { + disable_git_global_config(); + cx.executor().allow_parking(); let repo_dir = tempfile::tempdir().unwrap(); @@ -2600,6 +2613,8 @@ mod tests { #[gpui::test] async fn test_compare_checkpoints(cx: &mut TestAppContext) { + disable_git_global_config(); + cx.executor().allow_parking(); let repo_dir = tempfile::tempdir().unwrap(); @@ -2639,6 +2654,8 @@ mod tests { #[gpui::test] async fn test_checkpoint_exclude_binary_files(cx: &mut TestAppContext) { + disable_git_global_config(); + cx.executor().allow_parking(); let repo_dir = tempfile::tempdir().unwrap(); @@ -2669,6 +2686,7 @@ mod tests { "Initial commit".into(), None, CommitOptions::default(), + AskPassDelegate::new(&mut cx.to_async(), |_, _, _| {}), Arc::new(checkpoint_author_envs()), ) .await diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index cb292a62bf6ebf4ae3e3c90f269bd839b971e22e..aec36e1730b282e94e5dba6847eb55ab464beb00 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -1585,6 +1585,7 @@ impl GitPanel { return; } + let askpass = self.askpass_delegate("git commit", window, cx); let commit_message = self.custom_or_suggested_commit_message(window, cx); let Some(mut message) = commit_message else { @@ -1599,7 +1600,7 @@ impl GitPanel { let task = if self.has_staged_changes() { // Repository serializes all git operations, so we can just send a commit immediately let commit_task = active_repository.update(cx, |repo, cx| { - repo.commit(message.into(), None, options, cx) + repo.commit(message.into(), None, options, askpass, cx) }); cx.background_spawn(async move { 
commit_task.await? }) } else { @@ -1621,7 +1622,7 @@ impl GitPanel { cx.spawn(async move |_, cx| { stage_task.await?; let commit_task = active_repository.update(cx, |repo, cx| { - repo.commit(message.into(), None, options, cx) + repo.commit(message.into(), None, options, askpass, cx) })?; commit_task.await? }) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 90d76f51be27c66894519ea22ddcaa19baedc9c4..b4b353faadcaa359f85d263e63d4e370aaec1e4a 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -1957,6 +1957,15 @@ impl GitStore { ) -> Result { let repository_id = RepositoryId::from_proto(envelope.payload.repository_id); let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?; + let askpass_id = envelope.payload.askpass_id; + + let askpass = make_remote_delegate( + this, + envelope.payload.project_id, + repository_id, + askpass_id, + &mut cx, + ); let message = SharedString::from(envelope.payload.message); let name = envelope.payload.name.map(SharedString::from); @@ -1972,6 +1981,7 @@ impl GitStore { amend: options.amend, signoff: options.signoff, }, + askpass, cx, ) })? @@ -4216,9 +4226,12 @@ impl Repository { message: SharedString, name_and_email: Option<(SharedString, SharedString)>, options: CommitOptions, + askpass: AskPassDelegate, _cx: &mut App, ) -> oneshot::Receiver> { let id = self.id; + let askpass_delegates = self.askpass_delegates.clone(); + let askpass_id = util::post_inc(&mut self.latest_askpass_id); self.send_job(Some("git commit".into()), move |git_repo, _cx| async move { match git_repo { @@ -4228,10 +4241,15 @@ impl Repository { .. } => { backend - .commit(message, name_and_email, options, environment) + .commit(message, name_and_email, options, askpass, environment) .await } RepositoryState::Remote { project_id, client } => { + askpass_delegates.lock().insert(askpass_id, askpass); + let _defer = util::defer(|| { + let askpass_delegate = askpass_delegates.lock().remove(&askpass_id); + debug_assert!(askpass_delegate.is_some()); + }); let (name, email) = name_and_email.unzip(); client .request(proto::Commit { @@ -4244,6 +4262,7 @@ impl Repository { amend: options.amend, signoff: options.signoff, }), + askpass_id, }) .await .context("sending commit request")?; diff --git a/crates/proto/proto/git.proto b/crates/proto/proto/git.proto index d8ce1d2a75633eb4f31378ee574ffe043f956e05..efbd7f616f9e75c4e0409f4dc73c67f9eb1836e0 100644 --- a/crates/proto/proto/git.proto +++ b/crates/proto/proto/git.proto @@ -347,6 +347,7 @@ message Commit { string message = 6; optional CommitOptions options = 7; reserved 8; + uint64 askpass_id = 9; message CommitOptions { bool amend = 1; From 0149de4b54c55df8ec9ebc6a1da5b43c68e407e9 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 10 Nov 2025 10:27:51 +0100 Subject: [PATCH 06/74] git: Fix panic in `git2` due to empty repo paths (#42304) Fixes ZED-1VR Release Notes: - Fixed sporadic panic in git features --- crates/agent_servers/src/acp.rs | 2 +- crates/editor/src/test/editor_test_context.rs | 6 +- crates/fs/src/fake_git_repo.rs | 4 +- crates/fs/src/fs.rs | 3 +- crates/git/src/repository.rs | 72 +++++++++---------- crates/git/src/status.rs | 4 +- crates/git_ui/src/commit_view.rs | 2 +- crates/git_ui/src/git_panel.rs | 8 +-- crates/git_ui/src/project_diff.rs | 4 +- crates/project/src/git_store.rs | 31 ++++---- crates/project/src/git_store/conflict_set.rs | 4 +- crates/project/src/git_store/pending_op.rs | 4 +- crates/project/src/project_tests.rs | 26 
+++---- 13 files changed, 88 insertions(+), 82 deletions(-) diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index 4e41b247599e511b6345882701a34a6b66f3c418..15f56bf2ed4ee100fd22dc0d7df73f2e8a3274ea 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -136,7 +136,7 @@ impl AcpConnection { while let Ok(n) = stderr.read_line(&mut line).await && n > 0 { - log::warn!("agent stderr: {}", &line); + log::warn!("agent stderr: {}", line.trim()); line.clear(); } Ok(()) diff --git a/crates/editor/src/test/editor_test_context.rs b/crates/editor/src/test/editor_test_context.rs index c6779d1e564deb57233dd9e4719ca87f8d6a2da1..7f5bb227fb98d1ebe5df51d59bdae22825bc4fef 100644 --- a/crates/editor/src/test/editor_test_context.rs +++ b/crates/editor/src/test/editor_test_context.rs @@ -6,6 +6,7 @@ use buffer_diff::DiffHunkStatusKind; use collections::BTreeMap; use futures::Future; +use git::repository::RepoPath; use gpui::{ AnyWindowHandle, App, Context, Entity, Focusable as _, Keystroke, Pixels, Point, VisualTestContext, Window, WindowHandle, prelude::*, @@ -334,7 +335,10 @@ impl EditorTestContext { let path = self.update_buffer(|buffer, _| buffer.file().unwrap().path().clone()); let mut found = None; fs.with_git_state(&Self::root_path().join(".git"), false, |git_state| { - found = git_state.index_contents.get(&path.into()).cloned(); + found = git_state + .index_contents + .get(&RepoPath::from_rel_path(&path)) + .cloned(); }) .unwrap(); assert_eq!(expected, found.as_deref()); diff --git a/crates/fs/src/fake_git_repo.rs b/crates/fs/src/fake_git_repo.rs index e3d58f3001c407f8d4deea115b460000bc666574..97cd13d185817453c369356bdc60cbc1517bf1e1 100644 --- a/crates/fs/src/fake_git_repo.rs +++ b/crates/fs/src/fake_git_repo.rs @@ -272,7 +272,7 @@ impl GitRepository for FakeGitRepository { .ok() .map(|content| String::from_utf8(content).unwrap())?; let repo_path = RelPath::new(repo_path, PathStyle::local()).ok()?; - Some((repo_path.into(), (content, is_ignored))) + Some((RepoPath::from_rel_path(&repo_path), (content, is_ignored))) }) .collect(); @@ -436,7 +436,7 @@ impl GitRepository for FakeGitRepository { state .blames .get(&path) - .with_context(|| format!("failed to get blame for {:?}", path.0)) + .with_context(|| format!("failed to get blame for {:?}", path)) .cloned() }) } diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 0202b2134f4fd0d3f983b2c67e97414a44457143..53af6ba6afc50cb0e568a01e25d1af22c02d9e36 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -1792,7 +1792,8 @@ impl FakeFs { for (path, content) in workdir_contents { use util::{paths::PathStyle, rel_path::RelPath}; - let repo_path: RepoPath = RelPath::new(path.strip_prefix(&workdir_path).unwrap(), PathStyle::local()).unwrap().into(); + let repo_path = RelPath::new(path.strip_prefix(&workdir_path).unwrap(), PathStyle::local()).unwrap(); + let repo_path = RepoPath::from_rel_path(&repo_path); let status = statuses .iter() .find_map(|(p, status)| (*p == repo_path.as_unix_str()).then_some(status)); diff --git a/crates/git/src/repository.rs b/crates/git/src/repository.rs index 1ad21d993607c75777302ddb6f4ec1964a916ad0..2a1cd9478d3079716eda8234c02c8122b9381b38 100644 --- a/crates/git/src/repository.rs +++ b/crates/git/src/repository.rs @@ -14,7 +14,6 @@ use rope::Rope; use schemars::JsonSchema; use serde::Deserialize; use smol::io::{AsyncBufReadExt, AsyncReadExt, BufReader}; -use std::borrow::Cow; use std::ffi::{OsStr, OsString}; use std::process::{ExitStatus, Stdio}; use std::{ 
@@ -848,7 +847,7 @@ impl GitRepository for RealGitRepository { } files.push(CommitFile { - path: rel_path.into(), + path: RepoPath(Arc::from(rel_path)), old_text, new_text, }) @@ -2049,6 +2048,11 @@ fn git_status_args(path_prefixes: &[RepoPath]) -> Vec { OsString::from("--no-renames"), OsString::from("-z"), ]; + args.extend( + path_prefixes + .iter() + .map(|path_prefix| path_prefix.as_std_path().into()), + ); args.extend(path_prefixes.iter().map(|path_prefix| { if path_prefix.is_empty() { Path::new(".").into() @@ -2304,52 +2308,54 @@ async fn run_askpass_command( } } -#[derive(Clone, Debug, Ord, Hash, PartialOrd, Eq, PartialEq)] -pub struct RepoPath(pub Arc); +#[derive(Clone, Ord, Hash, PartialOrd, Eq, PartialEq)] +pub struct RepoPath(Arc); + +impl std::fmt::Debug for RepoPath { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} impl RepoPath { pub fn new + ?Sized>(s: &S) -> Result { let rel_path = RelPath::unix(s.as_ref())?; - Ok(rel_path.into()) - } - - pub fn from_proto(proto: &str) -> Result { - let rel_path = RelPath::from_proto(proto)?; - Ok(rel_path.into()) + Ok(Self::from_rel_path(rel_path)) } pub fn from_std_path(path: &Path, path_style: PathStyle) -> Result { let rel_path = RelPath::new(path, path_style)?; - Ok(Self(rel_path.as_ref().into())) + Ok(Self::from_rel_path(&rel_path)) } -} -#[cfg(any(test, feature = "test-support"))] -pub fn repo_path + ?Sized>(s: &S) -> RepoPath { - RepoPath(RelPath::unix(s.as_ref()).unwrap().into()) -} + pub fn from_proto(proto: &str) -> Result { + let rel_path = RelPath::from_proto(proto)?; + Ok(Self(rel_path)) + } -impl From<&RelPath> for RepoPath { - fn from(value: &RelPath) -> Self { - RepoPath(value.into()) + pub fn from_rel_path(path: &RelPath) -> RepoPath { + Self(Arc::from(path)) } -} -impl<'a> From> for RepoPath { - fn from(value: Cow<'a, RelPath>) -> Self { - value.as_ref().into() + pub fn as_std_path(&self) -> &Path { + // git2 does not like empty paths and our RelPath infra turns `.` into `` + // so undo that here + if self.is_empty() { + Path::new(".") + } else { + self.0.as_std_path() + } } } -impl From> for RepoPath { - fn from(value: Arc) -> Self { - RepoPath(value) - } +#[cfg(any(test, feature = "test-support"))] +pub fn repo_path + ?Sized>(s: &S) -> RepoPath { + RepoPath(RelPath::unix(s.as_ref()).unwrap().into()) } -impl Default for RepoPath { - fn default() -> Self { - RepoPath(RelPath::empty().into()) +impl AsRef> for RepoPath { + fn as_ref(&self) -> &Arc { + &self.0 } } @@ -2361,12 +2367,6 @@ impl std::ops::Deref for RepoPath { } } -// impl AsRef for RepoPath { -// fn as_ref(&self) -> &Path { -// RelPath::as_ref(&self.0) -// } -// } - #[derive(Debug)] pub struct RepoPathDescendants<'a>(pub &'a RepoPath); diff --git a/crates/git/src/status.rs b/crates/git/src/status.rs index a36e24dd3bf0a8c67ee1c70566f4564ba8362616..2cf7cc7c1810620f1cf1aaea831fb337810c83d8 100644 --- a/crates/git/src/status.rs +++ b/crates/git/src/status.rs @@ -454,7 +454,7 @@ impl FromStr for GitStatus { let status = entry.as_bytes()[0..2].try_into().unwrap(); let status = FileStatus::from_bytes(status).log_err()?; // git-status outputs `/`-delimited repo paths, even on Windows. 
- let path = RepoPath(RelPath::unix(path).log_err()?.into()); + let path = RepoPath::from_rel_path(RelPath::unix(path).log_err()?); Some((path, status)) }) .collect::>(); @@ -539,7 +539,7 @@ impl FromStr for TreeDiff { let mut fields = s.split('\0'); let mut parsed = HashMap::default(); while let Some((status, path)) = fields.next().zip(fields.next()) { - let path = RepoPath(RelPath::unix(path)?.into()); + let path = RepoPath::from_rel_path(RelPath::unix(path)?); let mut fields = status.split(" ").skip(2); let old_sha = fields diff --git a/crates/git_ui/src/commit_view.rs b/crates/git_ui/src/commit_view.rs index 4608397c0a6f2462bcc4bc50e06de99d1749af30..765e1f84a4a3a5b7e257e51df9a9542d0abff067 100644 --- a/crates/git_ui/src/commit_view.rs +++ b/crates/git_ui/src/commit_view.rs @@ -266,7 +266,7 @@ impl language::File for GitBlob { } fn path(&self) -> &Arc { - &self.path.0 + self.path.as_ref() } fn full_path(&self, _: &App) -> PathBuf { diff --git a/crates/git_ui/src/git_panel.rs b/crates/git_ui/src/git_panel.rs index aec36e1730b282e94e5dba6847eb55ab464beb00..85cfb3b499f5cc2baefdc23f8e0ffc91f09b620d 100644 --- a/crates/git_ui/src/git_panel.rs +++ b/crates/git_ui/src/git_panel.rs @@ -879,7 +879,7 @@ impl GitPanel { let active_repository = self.active_repository.as_ref()?.downgrade(); cx.spawn(async move |_, cx| { - let file_path_str = repo_path.0.display(PathStyle::Posix); + let file_path_str = repo_path.as_ref().display(PathStyle::Posix); let repo_root = active_repository.read_with(cx, |repository, _| { repository.snapshot().work_directory_abs_path @@ -1074,7 +1074,7 @@ impl GitPanel { } let mut details = entries .iter() - .filter_map(|entry| entry.repo_path.0.file_name()) + .filter_map(|entry| entry.repo_path.as_ref().file_name()) .map(|filename| filename.to_string()) .take(5) .join("\n"); @@ -1129,7 +1129,7 @@ impl GitPanel { .map(|entry| { entry .repo_path - .0 + .as_ref() .file_name() .map(|f| f.to_string()) .unwrap_or_default() @@ -5647,7 +5647,7 @@ mod tests { assert_eq!( entry.status_entry().map(|status| status .repo_path - .0 + .as_ref() .as_std_path() .to_string_lossy() .to_string()), diff --git a/crates/git_ui/src/project_diff.rs b/crates/git_ui/src/project_diff.rs index 305e30d9d7a49d30aad09863e04dc11641f3017e..6f8195c8b718640de4fed421253d5f1bd2f8f14e 100644 --- a/crates/git_ui/src/project_diff.rs +++ b/crates/git_ui/src/project_diff.rs @@ -336,7 +336,7 @@ impl ProjectDiff { }; let repo = git_repo.read(cx); let sort_prefix = sort_prefix(repo, &entry.repo_path, entry.status, cx); - let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0); + let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.as_ref().clone()); self.move_to_path(path_key, window, cx) } @@ -566,7 +566,7 @@ impl ProjectDiff { for entry in buffers_to_load.iter() { let sort_prefix = sort_prefix(&repo, &entry.repo_path, entry.file_status, cx); let path_key = - PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone()); + PathKey::with_sort_prefix(sort_prefix, entry.repo_path.as_ref().clone()); previous_paths.remove(&path_key); path_keys.push(path_key) } diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index b4b353faadcaa359f85d263e63d4e370aaec1e4a..5fcf28aff3554149ece954074f312e0fe37a9208 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -227,7 +227,7 @@ impl sum_tree::Item for StatusEntry { fn summary(&self, _: ::Context<'_>) -> Self::Summary { PathSummary { - max_path: self.repo_path.0.clone(), + max_path: 
self.repo_path.as_ref().clone(), item_summary: self.status.summary(), } } @@ -237,7 +237,7 @@ impl sum_tree::KeyedItem for StatusEntry { type Key = PathKey; fn key(&self) -> Self::Key { - PathKey(self.repo_path.0.clone()) + PathKey(self.repo_path.as_ref().clone()) } } @@ -990,7 +990,7 @@ impl GitStore { RepositoryState::Local { backend, .. } => backend .blame(repo_path.clone(), content) .await - .with_context(|| format!("Failed to blame {:?}", repo_path.0)) + .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref())) .map(Some), RepositoryState::Remote { project_id, client } => { let response = client @@ -2376,7 +2376,7 @@ impl GitStore { .entries .into_iter() .map(|(path, status)| proto::TreeDiffStatus { - path: path.0.to_proto(), + path: path.as_ref().to_proto(), status: match status { TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(), TreeDiffStatus::Modified { .. } => { @@ -3152,13 +3152,13 @@ impl RepositorySnapshot { pub fn status_for_path(&self, path: &RepoPath) -> Option { self.statuses_by_path - .get(&PathKey(path.0.clone()), ()) + .get(&PathKey(path.as_ref().clone()), ()) .cloned() } pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option { self.pending_ops_by_path - .get(&PathKey(path.0.clone()), ()) + .get(&PathKey(path.as_ref().clone()), ()) .cloned() } @@ -4727,7 +4727,9 @@ impl Repository { } }; Some(( - RepoPath(RelPath::from_proto(&entry.path).log_err()?), + RepoPath::from_rel_path( + &RelPath::from_proto(&entry.path).log_err()?, + ), status, )) }) @@ -5289,7 +5291,8 @@ impl Repository { let mut cursor = prev_statuses.cursor::(()); for path in changed_paths.into_iter() { if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) { - changed_path_statuses.push(Edit::Remove(PathKey(path.0))); + changed_path_statuses + .push(Edit::Remove(PathKey(path.as_ref().clone()))); } } changed_path_statuses @@ -5435,10 +5438,8 @@ fn get_permalink_in_rust_registry_src( remote, BuildPermalinkParams::new( &cargo_vcs_info.git.sha1, - &RepoPath( - RelPath::new(&path, PathStyle::local()) - .context("invalid path")? 
- .into_arc(), + &RepoPath::from_rel_path( + &RelPath::new(&path, PathStyle::local()).context("invalid path")?, ), Some(selection), ), @@ -5640,7 +5641,11 @@ async fn compute_snapshot( let mut events = Vec::new(); let branches = backend.branches().await?; let branch = branches.into_iter().find(|branch| branch.is_head); - let statuses = backend.status(&[RelPath::empty().into()]).await?; + let statuses = backend + .status(&[RepoPath::from_rel_path( + &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(), + )]) + .await?; let stash_entries = backend.stash_entries().await?; let statuses_by_path = SumTree::from_iter( statuses diff --git a/crates/project/src/git_store/conflict_set.rs b/crates/project/src/git_store/conflict_set.rs index 7d99571b5b88d7f5d37d56d47ff32a71fd5a29ff..bd80214c2c0b6d5b1a5da3ba497c5670cb26cb93 100644 --- a/crates/project/src/git_store/conflict_set.rs +++ b/crates/project/src/git_store/conflict_set.rs @@ -264,7 +264,7 @@ mod tests { use super::*; use fs::FakeFs; use git::{ - repository::repo_path, + repository::{RepoPath, repo_path}, status::{UnmergedStatus, UnmergedStatusCode}, }; use gpui::{BackgroundExecutor, TestAppContext}; @@ -617,7 +617,7 @@ mod tests { cx.run_until_parked(); fs.with_git_state(path!("/project/.git").as_ref(), true, |state| { state.unmerged_paths.insert( - rel_path("a.txt").into(), + RepoPath::from_rel_path(rel_path("a.txt")), UnmergedStatus { first_head: UnmergedStatusCode::Updated, second_head: UnmergedStatusCode::Updated, diff --git a/crates/project/src/git_store/pending_op.rs b/crates/project/src/git_store/pending_op.rs index fd1b35035a8e334acdd244d8e663212f39bc383e..1991eed407833d47fd35f6f573fbb46c692aed91 100644 --- a/crates/project/src/git_store/pending_op.rs +++ b/crates/project/src/git_store/pending_op.rs @@ -46,7 +46,7 @@ impl Item for PendingOps { fn summary(&self, _cx: ()) -> Self::Summary { PathSummary { - max_path: self.repo_path.0.clone(), + max_path: self.repo_path.as_ref().clone(), item_summary: PendingOpsSummary { staged_count: self.staged() as usize, staging_count: self.staging() as usize, @@ -73,7 +73,7 @@ impl KeyedItem for PendingOps { type Key = PathKey; fn key(&self) -> Self::Key { - PathKey(self.repo_path.0.clone()) + PathKey(self.repo_path.as_ref().clone()) } } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index 1f76b905be2843605b32918e6d3bf1a037ced636..ad2c339d22fdd49d6565ff5be491749cfcac7830 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -7937,7 +7937,7 @@ async fn test_staging_random_hunks( log::info!( "index text:\n{}", - repo.load_index_text(rel_path("file.txt").into()) + repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt"))) .await .unwrap() ); @@ -8523,7 +8523,7 @@ async fn test_repository_pending_ops_staging( assert_eq!( pending_ops_all .lock() - .get(&worktree::PathKey(repo_path("a.txt").0), ()) + .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ()) .unwrap() .ops, vec![ @@ -8644,7 +8644,7 @@ async fn test_repository_pending_ops_long_running_staging( assert_eq!( pending_ops_all .lock() - .get(&worktree::PathKey(repo_path("a.txt").0), ()) + .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ()) .unwrap() .ops, vec![ @@ -8752,7 +8752,7 @@ async fn test_repository_pending_ops_stage_all( assert_eq!( pending_ops_all .lock() - .get(&worktree::PathKey(repo_path("a.txt").0), ()) + .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ()) .unwrap() .ops, vec![ @@ -8771,7 +8771,7 @@ async fn 
test_repository_pending_ops_stage_all( assert_eq!( pending_ops_all .lock() - .get(&worktree::PathKey(repo_path("b.txt").0), ()) + .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ()) .unwrap() .ops, vec![ @@ -9309,11 +9309,9 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) { repository.read_with(cx, |repository, _cx| { assert_eq!( repository - .status_for_path( - &rel_path(renamed_dir_name) - .join(rel_path(RENAMED_FILE)) - .into() - ) + .status_for_path(&RepoPath::from_rel_path( + &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE)) + )) .unwrap() .status, FileStatus::Untracked, @@ -9337,11 +9335,9 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) { repository.read_with(cx, |repository, _cx| { assert_eq!( repository - .status_for_path( - &rel_path(renamed_dir_name) - .join(rel_path(RENAMED_FILE)) - .into() - ) + .status_for_path(&RepoPath::from_rel_path( + &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE)) + )) .unwrap() .status, FileStatus::Untracked, From 8d632958db199ffc6061be0b1a1fa6c1289ac34d Mon Sep 17 00:00:00 2001 From: Caleb Van Dyke Date: Mon, 10 Nov 2025 06:28:12 -0600 Subject: [PATCH 07/74] Add better labels for completions for ty lsp (#42233) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Verified that this works locally. I modeled it after how basedpyright and pyright work. Here is a screenshot of what it looks like (issue has screenshots of the old state): Screenshot 2025-11-07 at 2 40 50 PM Closes #42232 Release Notes: - python/ty: Code completion menu now shows packages that will be imported when a given entry is accepted. --- crates/languages/src/python.rs | 39 ++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/crates/languages/src/python.rs b/crates/languages/src/python.rs index 3f25a5c5ce50d0aade7b1b575443b5d681f67c63..03ce559b87bb5f318758735c5903bfc51b7c1267 100644 --- a/crates/languages/src/python.rs +++ b/crates/languages/src/python.rs @@ -163,6 +163,45 @@ impl LspAdapter for TyLspAdapter { Self::SERVER_NAME } + async fn label_for_completion( + &self, + item: &lsp::CompletionItem, + language: &Arc, + ) -> Option { + let label = &item.label; + let label_len = label.len(); + let grammar = language.grammar()?; + let highlight_id = match item.kind? 
{ + lsp::CompletionItemKind::METHOD => grammar.highlight_id_for_name("function.method"), + lsp::CompletionItemKind::FUNCTION => grammar.highlight_id_for_name("function"), + lsp::CompletionItemKind::CLASS => grammar.highlight_id_for_name("type"), + lsp::CompletionItemKind::CONSTANT => grammar.highlight_id_for_name("constant"), + lsp::CompletionItemKind::VARIABLE => grammar.highlight_id_for_name("variable"), + _ => { + return None; + } + }; + + let mut text = label.clone(); + if let Some(completion_details) = item + .label_details + .as_ref() + .and_then(|details| details.detail.as_ref()) + { + write!(&mut text, " {}", completion_details).ok(); + } + + Some(language::CodeLabel::filtered( + text, + label_len, + item.filter_text.as_deref(), + highlight_id + .map(|id| (0..label_len, id)) + .into_iter() + .collect(), + )) + } + async fn workspace_configuration( self: Arc, delegate: &Arc, From 7aacc7566c6bae7b477441763a03e4f98f9e2271 Mon Sep 17 00:00:00 2001 From: Abdugani Toshmukhamedov <79999469+toshmukhamedov@users.noreply.github.com> Date: Mon, 10 Nov 2025 19:09:37 +0500 Subject: [PATCH 08/74] Add support for closing window tabs with middle mouse click (#41628) This change adds support for closing a system window tabs by pressing the middle mouse button. It improves tab management UX by matching common tab behavior. Release Notes: - Added support for closing system window tabs with middle mouse click. --- crates/title_bar/src/system_window_tabs.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/crates/title_bar/src/system_window_tabs.rs b/crates/title_bar/src/system_window_tabs.rs index ba898da716f042573840f8f9c9f375747ac5cc04..a9bf46cc4f9f33586d1129dec1c64a67f1e42198 100644 --- a/crates/title_bar/src/system_window_tabs.rs +++ b/crates/title_bar/src/system_window_tabs.rs @@ -227,6 +227,15 @@ impl SystemWindowTabs { window.activate_window(); }); }) + .on_mouse_up(MouseButton::Middle, move |_, window, cx| { + if item.handle.window_id() == window.window_handle().window_id() { + window.dispatch_action(Box::new(CloseWindow), cx); + } else { + let _ = item.handle.update(cx, |_, window, cx| { + window.dispatch_action(Box::new(CloseWindow), cx); + }); + } + }) .child(label) .map(|this| match show_close_button { ShowCloseButton::Hidden => this, From 2d84af91bf3e272770c07ddb4056618cb5e5e9a8 Mon Sep 17 00:00:00 2001 From: David <688326+dvcrn@users.noreply.github.com> Date: Mon, 10 Nov 2025 21:11:24 +0700 Subject: [PATCH 09/74] agent: Add ability to set a default_model per profile (#39220) Split off from https://github.com/zed-industries/zed/pull/39175 Requires https://github.com/zed-industries/zed/pull/39219 to be merged first Adds support for `default_model` for profiles: ``` "my-profile": { "name": "Coding Agent", "tools": {}, "enable_all_context_servers": false, "context_servers": {}, "default_model": { "provider": "copilot_chat", "model": "grok-code-fast-1" } } ``` Which will then switch to the default model whenever the profile is activated ![2025-09-30 17 09 06](https://github.com/user-attachments/assets/43f07b7b-85d9-4aff-82ce-25d6f5050d50) Release Notes: - Added `default_model` configuration to agent profile --------- Co-authored-by: Danilo Leal --- crates/agent/src/tests/mod.rs | 12 +- crates/agent/src/thread.rs | 64 ++++++++-- crates/agent_settings/src/agent_profile.rs | 58 ++++++--- crates/agent_ui/src/acp/thread_view.rs | 5 +- .../src/agent_configuration/tool_picker.rs | 1 + crates/agent_ui/src/profile_selector.rs | 114 ++++++++++-------- crates/eval/src/instance.rs | 2 +- 
crates/settings/src/settings_content/agent.rs | 2 + 8 files changed, 177 insertions(+), 81 deletions(-) diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index d80edca35de03578d0d557eb320dc77471a3b8fb..5d4bdce27cc05d1cf46a4b73821f0a97878fd6f4 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -933,7 +933,7 @@ async fn test_profiles(cx: &mut TestAppContext) { // Test that test-1 profile (default) has echo and delay tools thread .update(cx, |thread, cx| { - thread.set_profile(AgentProfileId("test-1".into())); + thread.set_profile(AgentProfileId("test-1".into()), cx); thread.send(UserMessageId::new(), ["test"], cx) }) .unwrap(); @@ -953,7 +953,7 @@ async fn test_profiles(cx: &mut TestAppContext) { // Switch to test-2 profile, and verify that it has only the infinite tool. thread .update(cx, |thread, cx| { - thread.set_profile(AgentProfileId("test-2".into())); + thread.set_profile(AgentProfileId("test-2".into()), cx); thread.send(UserMessageId::new(), ["test2"], cx) }) .unwrap(); @@ -1002,8 +1002,8 @@ async fn test_mcp_tools(cx: &mut TestAppContext) { ) .await; cx.run_until_parked(); - thread.update(cx, |thread, _| { - thread.set_profile(AgentProfileId("test".into())) + thread.update(cx, |thread, cx| { + thread.set_profile(AgentProfileId("test".into()), cx) }); let mut mcp_tool_calls = setup_context_server( @@ -1169,8 +1169,8 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) { .await; cx.run_until_parked(); - thread.update(cx, |thread, _| { - thread.set_profile(AgentProfileId("test".into())); + thread.update(cx, |thread, cx| { + thread.set_profile(AgentProfileId("test".into()), cx); thread.add_tool(EchoTool); thread.add_tool(DelayTool); thread.add_tool(WordListTool); diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index 78f20152b4daf461de40cfa7746216092f82cf41..5cf230629c8e542a23ea7ffc5bdb0fa5a1c73a53 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -30,16 +30,17 @@ use gpui::{ }; use language_model::{ LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt, - LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest, - LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult, - LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse, - LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID, + LanguageModelId, LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, + LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool, + LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat, + LanguageModelToolUse, LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, + ZED_CLOUD_PROVIDER_ID, }; use project::Project; use prompt_store::ProjectContext; use schemars::{JsonSchema, Schema}; use serde::{Deserialize, Serialize}; -use settings::{Settings, update_settings_file}; +use settings::{LanguageModelSelection, Settings, update_settings_file}; use smol::stream::StreamExt; use std::{ collections::BTreeMap, @@ -798,7 +799,8 @@ impl Thread { let profile_id = db_thread .profile .unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone()); - let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + + let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| { db_thread .model .and_then(|model| { @@ -811,6 +813,16 @@ impl 
Thread { .or_else(|| registry.default_model()) .map(|model| model.model) }); + + if model.is_none() { + model = Self::resolve_profile_model(&profile_id, cx); + } + if model.is_none() { + model = LanguageModelRegistry::global(cx).update(cx, |registry, _cx| { + registry.default_model().map(|model| model.model) + }); + } + let (prompt_capabilities_tx, prompt_capabilities_rx) = watch::channel(Self::prompt_capabilities(model.as_deref())); @@ -1007,8 +1019,17 @@ impl Thread { &self.profile_id } - pub fn set_profile(&mut self, profile_id: AgentProfileId) { + pub fn set_profile(&mut self, profile_id: AgentProfileId, cx: &mut Context) { + if self.profile_id == profile_id { + return; + } + self.profile_id = profile_id; + + // Swap to the profile's preferred model when available. + if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) { + self.set_model(model, cx); + } } pub fn cancel(&mut self, cx: &mut Context) { @@ -1065,6 +1086,35 @@ impl Thread { }) } + /// Look up the active profile and resolve its preferred model if one is configured. + fn resolve_profile_model( + profile_id: &AgentProfileId, + cx: &mut Context, + ) -> Option> { + let selection = AgentSettings::get_global(cx) + .profiles + .get(profile_id)? + .default_model + .clone()?; + Self::resolve_model_from_selection(&selection, cx) + } + + /// Translate a stored model selection into the configured model from the registry. + fn resolve_model_from_selection( + selection: &LanguageModelSelection, + cx: &mut Context, + ) -> Option> { + let selected = SelectedModel { + provider: LanguageModelProviderId::from(selection.provider.0.clone()), + model: LanguageModelId::from(selection.model.clone()), + }; + LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry + .select_model(&selected, cx) + .map(|configured| configured.model) + }) + } + pub fn resume( &mut self, cx: &mut Context, diff --git a/crates/agent_settings/src/agent_profile.rs b/crates/agent_settings/src/agent_profile.rs index 999ddc8083a1a4b4c271ea9bde4c1e45307e9542..aff666e01111dc5db539b370cd440fa88438fe8d 100644 --- a/crates/agent_settings/src/agent_profile.rs +++ b/crates/agent_settings/src/agent_profile.rs @@ -6,8 +6,8 @@ use convert_case::{Case, Casing as _}; use fs::Fs; use gpui::{App, SharedString}; use settings::{ - AgentProfileContent, ContextServerPresetContent, Settings as _, SettingsContent, - update_settings_file, + AgentProfileContent, ContextServerPresetContent, LanguageModelSelection, Settings as _, + SettingsContent, update_settings_file, }; use util::ResultExt as _; @@ -53,19 +53,30 @@ impl AgentProfile { let base_profile = base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned()); + // Copy toggles from the base profile so the new profile starts with familiar defaults. + let tools = base_profile + .as_ref() + .map(|profile| profile.tools.clone()) + .unwrap_or_default(); + let enable_all_context_servers = base_profile + .as_ref() + .map(|profile| profile.enable_all_context_servers) + .unwrap_or_default(); + let context_servers = base_profile + .as_ref() + .map(|profile| profile.context_servers.clone()) + .unwrap_or_default(); + // Preserve the base profile's model preference when cloning into a new profile. 
+ let default_model = base_profile + .as_ref() + .and_then(|profile| profile.default_model.clone()); + let profile_settings = AgentProfileSettings { name: name.into(), - tools: base_profile - .as_ref() - .map(|profile| profile.tools.clone()) - .unwrap_or_default(), - enable_all_context_servers: base_profile - .as_ref() - .map(|profile| profile.enable_all_context_servers) - .unwrap_or_default(), - context_servers: base_profile - .map(|profile| profile.context_servers) - .unwrap_or_default(), + tools, + enable_all_context_servers, + context_servers, + default_model, }; update_settings_file(fs, cx, { @@ -96,6 +107,8 @@ pub struct AgentProfileSettings { pub tools: IndexMap, bool>, pub enable_all_context_servers: bool, pub context_servers: IndexMap, ContextServerPreset>, + /// Default language model to apply when this profile becomes active. + pub default_model: Option, } impl AgentProfileSettings { @@ -144,6 +157,7 @@ impl AgentProfileSettings { ) }) .collect(), + default_model: self.default_model.clone(), }, ); @@ -153,15 +167,23 @@ impl AgentProfileSettings { impl From for AgentProfileSettings { fn from(content: AgentProfileContent) -> Self { + let AgentProfileContent { + name, + tools, + enable_all_context_servers, + context_servers, + default_model, + } = content; + Self { - name: content.name.into(), - tools: content.tools, - enable_all_context_servers: content.enable_all_context_servers.unwrap_or_default(), - context_servers: content - .context_servers + name: name.into(), + tools, + enable_all_context_servers: enable_all_context_servers.unwrap_or_default(), + context_servers: context_servers .into_iter() .map(|(server_id, preset)| (server_id, preset.into())) .collect(), + default_model, } } } diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index daf2249909fc9a29df6969dba1ad51cc099c891c..306976473d772f55cfdf1ee9caa65eab4f1d5552 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -125,8 +125,9 @@ impl ProfileProvider for Entity { } fn set_profile(&self, profile_id: AgentProfileId, cx: &mut App) { - self.update(cx, |thread, _cx| { - thread.set_profile(profile_id); + self.update(cx, |thread, cx| { + // Apply the profile and let the thread swap to its default model. 
+ thread.set_profile(profile_id, cx); }); } diff --git a/crates/agent_ui/src/agent_configuration/tool_picker.rs b/crates/agent_ui/src/agent_configuration/tool_picker.rs index 6b84205e1bd6336d70751090d8f0451b1b1925b0..1c99f665ab1c8fc995d47682f92365852bbc9637 100644 --- a/crates/agent_ui/src/agent_configuration/tool_picker.rs +++ b/crates/agent_ui/src/agent_configuration/tool_picker.rs @@ -314,6 +314,7 @@ impl PickerDelegate for ToolPickerDelegate { ) }) .collect(), + default_model: default_profile.default_model.clone(), }); if let Some(server_id) = server_id { diff --git a/crates/agent_ui/src/profile_selector.rs b/crates/agent_ui/src/profile_selector.rs index 2f9fe19eb33667d6ca6bb2f5502fbd1c9f094e9c..c1949d22e268e8744db7834a58d1a3303fa4e236 100644 --- a/crates/agent_ui/src/profile_selector.rs +++ b/crates/agent_ui/src/profile_selector.rs @@ -15,8 +15,8 @@ use std::{ sync::{Arc, atomic::AtomicBool}, }; use ui::{ - DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, LabelSize, - ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*, + DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, KeyBinding, + LabelSize, ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*, }; /// Trait for types that can provide and manage agent profiles @@ -81,6 +81,7 @@ impl ProfileSelector { self.provider.clone(), self.profiles.clone(), cx.background_executor().clone(), + self.focus_handle.clone(), cx, ); @@ -207,6 +208,7 @@ pub(crate) struct ProfilePickerDelegate { selected_index: usize, query: String, cancel: Option>, + focus_handle: FocusHandle, } impl ProfilePickerDelegate { @@ -215,6 +217,7 @@ impl ProfilePickerDelegate { provider: Arc, profiles: AvailableProfiles, background: BackgroundExecutor, + focus_handle: FocusHandle, cx: &mut Context, ) -> Self { let candidates = Self::candidates_from(profiles); @@ -231,6 +234,7 @@ impl ProfilePickerDelegate { selected_index: 0, query: String::new(), cancel: None, + focus_handle, }; this.selected_index = this @@ -594,20 +598,26 @@ impl PickerDelegate for ProfilePickerDelegate { _: &mut Window, cx: &mut Context>, ) -> Option { + let focus_handle = self.focus_handle.clone(); + Some( h_flex() .w_full() .border_t_1() .border_color(cx.theme().colors().border_variant) - .p_1() - .gap_4() - .justify_between() + .p_1p5() .child( Button::new("configure", "Configure") - .icon(IconName::Settings) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .icon_position(IconPosition::Start) + .full_width() + .style(ButtonStyle::Outlined) + .key_binding( + KeyBinding::for_action_in( + &ManageProfiles::default(), + &focus_handle, + cx, + ) + .map(|kb| kb.size(rems_from_px(12.))), + ) .on_click(|_, window, cx| { window.dispatch_action(ManageProfiles::default().boxed_clone(), cx); }), @@ -659,20 +669,25 @@ mod tests { is_builtin: true, }]; - let delegate = ProfilePickerDelegate { - fs: FakeFs::new(cx.executor()), - provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))), - background: cx.executor(), - candidates, - string_candidates: Arc::new(Vec::new()), - filtered_entries: Vec::new(), - selected_index: 0, - query: String::new(), - cancel: None, - }; - - let matches = Vec::new(); // No matches - let _entries = delegate.entries_from_matches(matches); + cx.update(|cx| { + let focus_handle = cx.focus_handle(); + + let delegate = ProfilePickerDelegate { + fs: FakeFs::new(cx.background_executor().clone()), + provider: 
Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))), + background: cx.background_executor().clone(), + candidates, + string_candidates: Arc::new(Vec::new()), + filtered_entries: Vec::new(), + selected_index: 0, + query: String::new(), + cancel: None, + focus_handle, + }; + + let matches = Vec::new(); // No matches + let _entries = delegate.entries_from_matches(matches); + }); } #[gpui::test] @@ -690,30 +705,35 @@ mod tests { }, ]; - let delegate = ProfilePickerDelegate { - fs: FakeFs::new(cx.executor()), - provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))), - background: cx.executor(), - candidates, - string_candidates: Arc::new(Vec::new()), - filtered_entries: vec![ - ProfilePickerEntry::Profile(ProfileMatchEntry { - candidate_index: 0, - positions: Vec::new(), - }), - ProfilePickerEntry::Profile(ProfileMatchEntry { - candidate_index: 1, - positions: Vec::new(), - }), - ], - selected_index: 0, - query: String::new(), - cancel: None, - }; - - // Active profile should be found at index 0 - let active_index = delegate.index_of_profile(&AgentProfileId("write".into())); - assert_eq!(active_index, Some(0)); + cx.update(|cx| { + let focus_handle = cx.focus_handle(); + + let delegate = ProfilePickerDelegate { + fs: FakeFs::new(cx.background_executor().clone()), + provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))), + background: cx.background_executor().clone(), + candidates, + string_candidates: Arc::new(Vec::new()), + filtered_entries: vec![ + ProfilePickerEntry::Profile(ProfileMatchEntry { + candidate_index: 0, + positions: Vec::new(), + }), + ProfilePickerEntry::Profile(ProfileMatchEntry { + candidate_index: 1, + positions: Vec::new(), + }), + ], + selected_index: 0, + query: String::new(), + cancel: None, + focus_handle, + }; + + // Active profile should be found at index 0 + let active_index = delegate.index_of_profile(&AgentProfileId("write".into())); + assert_eq!(active_index, Some(0)); + }); } struct TestProfileProvider { diff --git a/crates/eval/src/instance.rs b/crates/eval/src/instance.rs index 5317f100456748616dfec63819bc0373aaceb4c1..035f1ec0ac8d0c6490dc39637e03e377ee3d194b 100644 --- a/crates/eval/src/instance.rs +++ b/crates/eval/src/instance.rs @@ -322,7 +322,7 @@ impl ExampleInstance { thread.add_default_tools(Rc::new(EvalThreadEnvironment { project: project.clone(), }), cx); - thread.set_profile(meta.profile_id.clone()); + thread.set_profile(meta.profile_id.clone(), cx); thread.set_model( LanguageModelInterceptor::new( LanguageModelRegistry::read_global(cx).default_model().expect("Missing model").model.clone(), diff --git a/crates/settings/src/settings_content/agent.rs b/crates/settings/src/settings_content/agent.rs index c641f280e177669a2af14e91c844f2a5f059b648..425b5f05ff46fa705c073838dceab6c431c74bde 100644 --- a/crates/settings/src/settings_content/agent.rs +++ b/crates/settings/src/settings_content/agent.rs @@ -176,6 +176,8 @@ pub struct AgentProfileContent { pub enable_all_context_servers: Option, #[serde(default)] pub context_servers: IndexMap, ContextServerPresetContent>, + /// The default language model selected when using this profile. 
+ pub default_model: Option, } #[skip_serializing_none] From 42ed032f129fbb65e1e9ddb42ac9dcf9ae71eba6 Mon Sep 17 00:00:00 2001 From: feeiyu <158308373+feeiyu@users.noreply.github.com> Date: Mon, 10 Nov 2025 22:52:03 +0800 Subject: [PATCH 10/74] Fix circular reference issue between EditPredictionButton and PopoverMenuHandle (#42351) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes #ISSUE While working on issue #40906, I discovered that RemoteClient was not being released after the remote project closed. Analysis revealed a circular reference between EditPredictionButton and PopoverMenuHandle. Dependency Chain: RemoteClient → Project → ZetaEditPredictionProvider → EditPredictionButton ↔ PopoverMenuHandle image a) EditPredictionButton hold the reference of PopoverMenuHandle https://github.com/zed-industries/zed/blob/5f8226457ee6e1346a224ae6b0329f014ea883f7/crates/zed/src/zed.rs#L386-L394 b) PopoverMenuHandle hold the reference of Fn which capture `Entity` https://github.com/zed-industries/zed/blob/5fc54986c72f2863645302c5e6a99277f8c38cab/crates/edit_prediction_button/src/edit_prediction_button.rs#L382-L389 https://github.com/zed-industries/zed/blob/a9bc890497f1edaf4f177385cf96785de60e910c/crates/ui/src/components/popover_menu.rs#L376-L384 Release Notes: - N/A --- crates/edit_prediction_button/src/edit_prediction_button.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/edit_prediction_button/src/edit_prediction_button.rs b/crates/edit_prediction_button/src/edit_prediction_button.rs index 70c861ab1112630c2e3293cb54a4e96c6754b3bd..6e9000bc62eea94d5c48dca2416781f46428522c 100644 --- a/crates/edit_prediction_button/src/edit_prediction_button.rs +++ b/crates/edit_prediction_button/src/edit_prediction_button.rs @@ -379,11 +379,12 @@ impl Render for EditPredictionButton { }) }); - let this = cx.entity(); + let this = cx.weak_entity(); let mut popover_menu = PopoverMenu::new("zeta") .menu(move |window, cx| { - Some(this.update(cx, |this, cx| this.build_zeta_context_menu(window, cx))) + this.update(cx, |this, cx| this.build_zeta_context_menu(window, cx)) + .ok() }) .anchor(Corner::BottomRight) .with_handle(self.popover_menu_handle.clone()); From d420dd63ed1e8274691eaf8466d39fc9a4a60997 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Mon, 10 Nov 2025 11:58:42 -0300 Subject: [PATCH 11/74] zeta: Improve unified diff prompt (#42354) Extract some of the improvements from to the unified diff prompt from https://github.com/zed-industries/zed/pull/42171 and adds some other about how context work to improve the reliability of predictions. We also now strip the `<|user_cursor|>` marker if it appears in the output rather than failing. Release Notes: - N/A --------- Co-authored-by: Max Brunsfeld --- .../src/cloud_zeta2_prompt.rs | 105 ++++++++---------- crates/zeta2/src/zeta2.rs | 43 ++++--- crates/zeta_cli/src/evaluate.rs | 2 +- crates/zeta_cli/src/predict.rs | 14 ++- 4 files changed, 86 insertions(+), 78 deletions(-) diff --git a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs index 7fb79906f29f38579feef82bb25e7ed42d1d6c83..6055c39e16ea95b38754bb26fd7371250d1fc525 100644 --- a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs +++ b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs @@ -56,48 +56,48 @@ const LABELED_SECTIONS_INSTRUCTIONS: &str = indoc! {r#" const NUMBERED_LINES_INSTRUCTIONS: &str = indoc! 
{r#" # Instructions - You are a code completion assistant helping a programmer finish their work. Your task is to: + You are an edit prediction agent in a code editor. + Your job is to predict the next edit that the user will make, + based on their last few edits and their current cursor location. - 1. Analyze the edit history to understand what the programmer is trying to achieve - 2. Identify any incomplete refactoring or changes that need to be finished - 3. Make the remaining edits that a human programmer would logically make next - 4. Apply systematic changes consistently across the entire codebase - if you see a pattern starting, complete it everywhere. + ## Output Format - Focus on: - - Understanding the intent behind the changes (e.g., improving error handling, refactoring APIs, fixing bugs) - - Completing any partially-applied changes across the codebase - - Ensuring consistency with the programming style and patterns already established - - Making edits that maintain or improve code quality - - If the programmer started refactoring one instance of a pattern, find and update ALL similar instances - - Don't write a lot of code if you're not sure what to do - - Rules: - - Do not just mechanically apply patterns - reason about what changes make sense given the context and the programmer's apparent goals. - - Do not just fix syntax errors - look for the broader refactoring pattern and apply it systematically throughout the code. - - Write the edits in the unified diff format as shown in the example. - - # Example output: + You must briefly explain your understanding of the user's goal, in one + or two sentences, and then specify their next edit in the form of a + unified diff, like this: ``` --- a/src/myapp/cli.py +++ b/src/myapp/cli.py - @@ -1,3 +1,3 @@ - - - - - -import sys - +import json + @@ ... @@ + import os + import time + import sys + +from constants import LOG_LEVEL_WARNING + @@ ... @@ + config.headless() + config.set_interactive(false) + -config.set_log_level(LOG_L) + +config.set_log_level(LOG_LEVEL_WARNING) + config.set_use_color(True) ``` - # Edit History: + ## Edit History "#}; const UNIFIED_DIFF_REMINDER: &str = indoc! {" --- - Please analyze the edit history and the files, then provide the unified diff for your predicted edits. + Analyze the edit history and the files, then provide the unified diff for your predicted edits. Do not include the cursor marker in your output. - If you're editing multiple files, be sure to reflect filename in the hunk's header. + Your diff should include edited file paths in its file headers (lines beginning with `---` and `+++`). + Do not include line numbers in the hunk headers, use `@@ ... @@`. + Removed lines begin with `-`. + Added lines begin with `+`. + Context lines begin with an extra space. + Context and removed lines are used to match the target edit location, so make sure to include enough of them + to uniquely identify it amongst all excerpts of code provided. 
"}; pub fn build_prompt( @@ -121,8 +121,7 @@ pub fn build_prompt( EDITABLE_REGION_END_MARKER_WITH_NEWLINE, ), ], - PromptFormat::LabeledSections => vec![(request.cursor_point, CURSOR_MARKER)], - PromptFormat::NumLinesUniDiff => { + PromptFormat::LabeledSections | PromptFormat::NumLinesUniDiff => { vec![(request.cursor_point, CURSOR_MARKER)] } PromptFormat::OnlySnippets => vec![], @@ -132,46 +131,31 @@ pub fn build_prompt( PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(), PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(), PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(), - // only intended for use via zeta_cli PromptFormat::OnlySnippets => String::new(), }; if request.events.is_empty() { prompt.push_str("(No edit history)\n\n"); } else { - prompt.push_str( - "The following are the latest edits made by the user, from earlier to later.\n\n", - ); + prompt.push_str("Here are the latest edits made by the user, from earlier to later.\n\n"); push_events(&mut prompt, &request.events); } + prompt.push_str(indoc! {" + # Code Excerpts + + The cursor marker <|user_cursor|> indicates the current user cursor position. + The file is in current state, edits from edit history have been applied. + "}); + if request.prompt_format == PromptFormat::NumLinesUniDiff { - if request.referenced_declarations.is_empty() { - prompt.push_str(indoc! {" - # File under the cursor: - - The cursor marker <|user_cursor|> indicates the current user cursor position. - The file is in current state, edits from edit history have been applied. - We prepend line numbers (e.g., `123|`); they are not part of the file. - - "}); - } else { - // Note: This hasn't been trained on yet - prompt.push_str(indoc! {" - # Code Excerpts: - - The cursor marker <|user_cursor|> indicates the current user cursor position. - Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor. - Context excerpts are not guaranteed to be relevant, so use your own judgement. - Files are in their current state, edits from edit history have been applied. - We prepend line numbers (e.g., `123|`); they are not part of the file. - - "}); - } - } else { - prompt.push_str("\n## Code\n\n"); + prompt.push_str(indoc! {" + We prepend line numbers (e.g., `123|`); they are not part of the file. 
+ "}); } + prompt.push('\n'); + let mut section_labels = Default::default(); if !request.referenced_declarations.is_empty() || !request.signatures.is_empty() { @@ -198,8 +182,11 @@ pub fn build_prompt( } } - if request.prompt_format == PromptFormat::NumLinesUniDiff { - prompt.push_str(UNIFIED_DIFF_REMINDER); + match request.prompt_format { + PromptFormat::NumLinesUniDiff => { + prompt.push_str(UNIFIED_DIFF_REMINDER); + } + _ => {} } Ok((prompt, section_labels)) diff --git a/crates/zeta2/src/zeta2.rs b/crates/zeta2/src/zeta2.rs index 3a51f9975ccbcf3fb325712f7aafadc5187da541..297bfa1c4a940448e7fdb570ea4b808556c3f416 100644 --- a/crates/zeta2/src/zeta2.rs +++ b/crates/zeta2/src/zeta2.rs @@ -1,4 +1,4 @@ -use anyhow::{Context as _, Result, anyhow}; +use anyhow::{Context as _, Result, anyhow, bail}; use chrono::TimeDelta; use client::{Client, EditPredictionUsage, UserStore}; use cloud_llm_client::predict_edits_v3::{self, PromptFormat, Signature}; @@ -6,8 +6,8 @@ use cloud_llm_client::{ AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, MINIMUM_REQUIRED_VERSION_HEADER_NAME, ZED_VERSION_HEADER_NAME, }; -use cloud_zeta2_prompt::DEFAULT_MAX_PROMPT_BYTES; use cloud_zeta2_prompt::retrieval_prompt::{SearchToolInput, SearchToolQuery}; +use cloud_zeta2_prompt::{CURSOR_MARKER, DEFAULT_MAX_PROMPT_BYTES}; use collections::HashMap; use edit_prediction_context::{ DeclarationId, DeclarationStyle, EditPredictionContext, EditPredictionContextOptions, @@ -943,23 +943,34 @@ impl Zeta { let (res, usage) = response?; let request_id = EditPredictionId(res.id.clone().into()); - let Some(output_text) = text_from_response(res) else { + let Some(mut output_text) = text_from_response(res) else { return Ok((None, usage)) }; - let (edited_buffer_snapshot, edits) = - crate::udiff::parse_diff(&output_text, |path| { - included_files - .iter() - .find_map(|(_, buffer, probe_path, ranges)| { - if probe_path.as_ref() == path { - Some((buffer, ranges.as_slice())) - } else { - None - } - }) - }) - .await?; + if output_text.contains(CURSOR_MARKER) { + log::trace!("Stripping out {CURSOR_MARKER} from response"); + output_text = output_text.replace(CURSOR_MARKER, ""); + } + + let (edited_buffer_snapshot, edits) = match options.prompt_format { + PromptFormat::NumLinesUniDiff => { + crate::udiff::parse_diff(&output_text, |path| { + included_files + .iter() + .find_map(|(_, buffer, probe_path, ranges)| { + if probe_path.as_ref() == path { + Some((buffer, ranges.as_slice())) + } else { + None + } + }) + }) + .await? 
+ } + _ => { + bail!("unsupported prompt format {}", options.prompt_format) + } + }; let edited_buffer = included_files .iter() diff --git a/crates/zeta_cli/src/evaluate.rs b/crates/zeta_cli/src/evaluate.rs index f99747e676b777e5d7a086c61db2f9e8d152c20b..c0f513fa38df5fb837be2294845eeae3214074bd 100644 --- a/crates/zeta_cli/src/evaluate.rs +++ b/crates/zeta_cli/src/evaluate.rs @@ -67,7 +67,7 @@ pub async fn run_evaluate_one( ); as_json } else { - zeta2_predict(example.clone(), &app_state, cx) + zeta2_predict(example.clone(), Default::default(), &app_state, cx) .await .unwrap() }; diff --git a/crates/zeta_cli/src/predict.rs b/crates/zeta_cli/src/predict.rs index a593a1b12ceb2b72a316463076657f35ac2c4e9d..f7f503ffebe24d71023ad259ce76adfdea364efc 100644 --- a/crates/zeta_cli/src/predict.rs +++ b/crates/zeta_cli/src/predict.rs @@ -1,9 +1,11 @@ +use crate::PromptFormat; use crate::example::{ActualExcerpt, NamedExample}; use crate::headless::ZetaCliAppState; use crate::paths::LOGS_DIR; use ::serde::Serialize; use anyhow::{Result, anyhow}; use clap::Args; +// use cloud_llm_client::predict_edits_v3::PromptFormat; use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock}; use futures::StreamExt as _; use gpui::{AppContext, AsyncApp}; @@ -19,9 +21,11 @@ use std::time::{Duration, Instant}; #[derive(Debug, Args)] pub struct PredictArguments { - example_path: PathBuf, + #[arg(long, value_enum, default_value_t = PromptFormat::default())] + prompt_format: PromptFormat, #[clap(long, short, value_enum, default_value_t = PredictionsOutputFormat::Md)] format: PredictionsOutputFormat, + example_path: PathBuf, } #[derive(clap::ValueEnum, Debug, Clone)] @@ -36,7 +40,9 @@ pub async fn run_zeta2_predict( cx: &mut AsyncApp, ) { let example = NamedExample::load(args.example_path).unwrap(); - let result = zeta2_predict(example, &app_state, cx).await.unwrap(); + let result = zeta2_predict(example, args.prompt_format, &app_state, cx) + .await + .unwrap(); result.write(args.format, std::io::stdout()).unwrap(); } @@ -46,6 +52,7 @@ thread_local! { pub async fn zeta2_predict( example: NamedExample, + prompt_format: PromptFormat, app_state: &Arc, cx: &mut AsyncApp, ) -> Result { @@ -193,6 +200,9 @@ pub async fn zeta2_predict( }); zeta.update(cx, |zeta, cx| { + let mut options = zeta.options().clone(); + options.prompt_format = prompt_format.into(); + zeta.set_options(options); zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx) })? .await?; From 35ae2f5b2b9cf18554533c2b009e660106051545 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Raz=20Guzm=C3=A1n=20Macedo?= Date: Mon, 10 Nov 2025 09:51:44 -0600 Subject: [PATCH 12/74] typo: Use tips from proselint (#42362) I ran [proselint](https://github.com/amperser/proselint) (recommended by cURL author [Daniel Stenberg](https://daniel.haxx.se/blog/2022/09/22/taking-curl-documentation-quality-up-one-more-notch/)) against all the `.md` files in the codebase to see if I could fix some easy typos. The tool is noisier than I would like and picking up the overrides to the default config in a `.proselintrc.json` was much harder than I expected. There's many other small nits [1] that I believe are best left to your docs czar whenever they want to consider incorporating a tool like this into big releases or CI, but these seemed like small wins for now to open a conversation about a tool like proselint. --- [1]: Such nits include - incosistent 1 or 2 spaces - "color" vs "colour" - ab/use of `very` - awkward or superfluous phrasing. 
Release Notes: - N/A Signed-off-by: mrg --- docs/src/globs.md | 2 +- docs/src/languages/ocaml.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/globs.md b/docs/src/globs.md index 60873e6965493c0c089a329e89fdb6462999739f..2f86fb9158f6ef13b5922a988dc3fa3433c42221 100644 --- a/docs/src/globs.md +++ b/docs/src/globs.md @@ -53,7 +53,7 @@ If instead you wanted to restrict yourself only to [Zed Language-Specific Docume ### Implicit Wildcards -When using the "Include" / "Exclude" filters on a Project Search each glob is wrapped in implicit wildcards. For example to exclude any files with license in the path or filename from your search just type type `license` in the exclude box. Behind the scenes Zed transforms `license` to `**license**`. This means that files named `license.*`, `*.license` or inside a `license` subdirectory will all be filtered out. This enables users to easily filter for `*.ts` without having to remember to type `**/*.ts` every time. +When using the "Include" / "Exclude" filters on a Project Search each glob is wrapped in implicit wildcards. For example to exclude any files with license in the path or filename from your search just type `license` in the exclude box. Behind the scenes Zed transforms `license` to `**license**`. This means that files named `license.*`, `*.license` or inside a `license` subdirectory will all be filtered out. This enables users to easily filter for `*.ts` without having to remember to type `**/*.ts` every time. Alternatively, if in your Zed settings you wanted a [`file_types`](./configuring-zed.md#file-types) override which only applied to a certain directory you must explicitly include the wildcard globs. For example, if you had a directory of template files with the `html` extension that you wanted to recognize as Jinja2 template you could use the following: diff --git a/docs/src/languages/ocaml.md b/docs/src/languages/ocaml.md index cf61defc1a9c1fa5bd6d6eeaf883e53cb4729791..10c3c1ac0932fd9a5f21d5ed459c19bb7c0434fc 100644 --- a/docs/src/languages/ocaml.md +++ b/docs/src/languages/ocaml.md @@ -33,4 +33,4 @@ Once you have the cli, simply from a terminal, navigate to your project and run zed . ``` -Voila! You should have Zed running with OCaml support, no additional setup required. +Voilà! You should have Zed running with OCaml support, no additional setup required. From 3c81ee6ba647f1ae7b172c0a259ce95e7dbd4af3 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 10 Nov 2025 13:12:13 -0300 Subject: [PATCH 13/74] agent_ui: Allow to configure a default model for profiles through modal (#42359) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Follow-up to https://github.com/zed-industries/zed/pull/39220 This PR allows to configure a default model for a given profile through the profile management modal. | Option In Picker | Model Selector | |--------|--------| | Screenshot 2025-11-10 at 12  24
2@2x | Screenshot 2025-11-10 at 12 
24@2x | Release Notes: - N/A --- .../manage_profiles_modal.rs | 161 +++++++++++++++++- crates/agent_ui/src/agent_model_selector.rs | 1 + .../agent_ui/src/language_model_selector.rs | 29 +++- crates/agent_ui/src/text_thread_editor.rs | 1 + 4 files changed, 185 insertions(+), 7 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs index e583bb7d5425ec4c6f233ac0eed67c358ccac98d..210cf5f5dd6612855b32e358a2d3ec38e8259373 100644 --- a/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs +++ b/crates/agent_ui/src/agent_configuration/manage_profiles_modal.rs @@ -7,8 +7,10 @@ use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profil use editor::Editor; use fs::Fs; use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*}; -use language_model::LanguageModel; -use settings::Settings as _; +use language_model::{LanguageModel, LanguageModelRegistry}; +use settings::{ + LanguageModelProviderSetting, LanguageModelSelection, Settings as _, update_settings_file, +}; use ui::{ KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*, }; @@ -16,6 +18,7 @@ use workspace::{ModalView, Workspace}; use crate::agent_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader; use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate}; +use crate::language_model_selector::{LanguageModelSelector, language_model_selector}; use crate::{AgentPanel, ManageProfiles}; enum Mode { @@ -32,6 +35,11 @@ enum Mode { tool_picker: Entity, _subscription: Subscription, }, + ConfigureDefaultModel { + profile_id: AgentProfileId, + model_picker: Entity, + _subscription: Subscription, + }, } impl Mode { @@ -83,6 +91,7 @@ pub struct ChooseProfileMode { pub struct ViewProfileMode { profile_id: AgentProfileId, fork_profile: NavigableEntry, + configure_default_model: NavigableEntry, configure_tools: NavigableEntry, configure_mcps: NavigableEntry, cancel_item: NavigableEntry, @@ -180,6 +189,7 @@ impl ManageProfilesModal { self.mode = Mode::ViewProfile(ViewProfileMode { profile_id, fork_profile: NavigableEntry::focusable(cx), + configure_default_model: NavigableEntry::focusable(cx), configure_tools: NavigableEntry::focusable(cx), configure_mcps: NavigableEntry::focusable(cx), cancel_item: NavigableEntry::focusable(cx), @@ -187,6 +197,83 @@ impl ManageProfilesModal { self.focus_handle(cx).focus(window); } + fn configure_default_model( + &mut self, + profile_id: AgentProfileId, + window: &mut Window, + cx: &mut Context, + ) { + let fs = self.fs.clone(); + let profile_id_for_closure = profile_id.clone(); + + let model_picker = cx.new(|cx| { + let fs = fs.clone(); + let profile_id = profile_id_for_closure.clone(); + + language_model_selector( + { + let profile_id = profile_id.clone(); + move |cx| { + let settings = AgentSettings::get_global(cx); + + settings + .profiles + .get(&profile_id) + .and_then(|profile| profile.default_model.as_ref()) + .and_then(|selection| { + let registry = LanguageModelRegistry::read_global(cx); + let provider_id = language_model::LanguageModelProviderId( + gpui::SharedString::from(selection.provider.0.clone()), + ); + let provider = registry.provider(&provider_id)?; + let model = provider + .provided_models(cx) + .iter() + .find(|m| m.id().0 == selection.model.as_str())? 
+ .clone(); + Some(language_model::ConfiguredModel { provider, model }) + }) + } + }, + move |model, cx| { + let provider = model.provider_id().0.to_string(); + let model_id = model.id().0.to_string(); + let profile_id = profile_id.clone(); + + update_settings_file(fs.clone(), cx, move |settings, _cx| { + let agent_settings = settings.agent.get_or_insert_default(); + if let Some(profiles) = agent_settings.profiles.as_mut() { + if let Some(profile) = profiles.get_mut(profile_id.0.as_ref()) { + profile.default_model = Some(LanguageModelSelection { + provider: LanguageModelProviderSetting(provider.clone()), + model: model_id.clone(), + }); + } + } + }); + }, + false, // Do not use popover styles for the model picker + window, + cx, + ) + .modal(false) + }); + + let dismiss_subscription = cx.subscribe_in(&model_picker, window, { + let profile_id = profile_id.clone(); + move |this, _picker, _: &DismissEvent, window, cx| { + this.view_profile(profile_id.clone(), window, cx); + } + }); + + self.mode = Mode::ConfigureDefaultModel { + profile_id, + model_picker, + _subscription: dismiss_subscription, + }; + self.focus_handle(cx).focus(window); + } + fn configure_mcp_tools( &mut self, profile_id: AgentProfileId, @@ -277,6 +364,7 @@ impl ManageProfilesModal { Mode::ViewProfile(_) => {} Mode::ConfigureTools { .. } => {} Mode::ConfigureMcps { .. } => {} + Mode::ConfigureDefaultModel { .. } => {} } } @@ -299,6 +387,9 @@ impl ManageProfilesModal { Mode::ConfigureMcps { profile_id, .. } => { self.view_profile(profile_id.clone(), window, cx) } + Mode::ConfigureDefaultModel { profile_id, .. } => { + self.view_profile(profile_id.clone(), window, cx) + } } } } @@ -313,6 +404,7 @@ impl Focusable for ManageProfilesModal { Mode::ViewProfile(_) => self.focus_handle.clone(), Mode::ConfigureTools { tool_picker, .. } => tool_picker.focus_handle(cx), Mode::ConfigureMcps { tool_picker, .. } => tool_picker.focus_handle(cx), + Mode::ConfigureDefaultModel { model_picker, .. } => model_picker.focus_handle(cx), } } } @@ -544,6 +636,47 @@ impl ManageProfilesModal { }), ), ) + .child( + div() + .id("configure-default-model") + .track_focus(&mode.configure_default_model.focus_handle) + .on_action({ + let profile_id = mode.profile_id.clone(); + cx.listener(move |this, _: &menu::Confirm, window, cx| { + this.configure_default_model( + profile_id.clone(), + window, + cx, + ); + }) + }) + .child( + ListItem::new("model-item") + .toggle_state( + mode.configure_default_model + .focus_handle + .contains_focused(window, cx), + ) + .inset(true) + .spacing(ListItemSpacing::Sparse) + .start_slot( + Icon::new(IconName::ZedAssistant) + .size(IconSize::Small) + .color(Color::Muted), + ) + .child(Label::new("Configure Default Model")) + .on_click({ + let profile_id = mode.profile_id.clone(); + cx.listener(move |this, _, window, cx| { + this.configure_default_model( + profile_id.clone(), + window, + cx, + ); + }) + }), + ), + ) .child( div() .id("configure-builtin-tools") @@ -668,6 +801,7 @@ impl ManageProfilesModal { .into_any_element(), ) .entry(mode.fork_profile) + .entry(mode.configure_default_model) .entry(mode.configure_tools) .entry(mode.configure_mcps) .entry(mode.cancel_item) @@ -753,6 +887,29 @@ impl Render for ManageProfilesModal { .child(go_back_item) .into_any_element() } + Mode::ConfigureDefaultModel { + profile_id, + model_picker, + .. 
+ } => { + let profile_name = settings + .profiles + .get(profile_id) + .map(|profile| profile.name.clone()) + .unwrap_or_else(|| "Unknown".into()); + + v_flex() + .pb_1() + .child(ProfileModalHeader::new( + format!("{profile_name} — Configure Default Model"), + Some(IconName::Ai), + )) + .child(ListSeparator) + .child(v_flex().w(rems(34.)).child(model_picker.clone())) + .child(ListSeparator) + .child(go_back_item) + .into_any_element() + } Mode::ConfigureMcps { profile_id, tool_picker, diff --git a/crates/agent_ui/src/agent_model_selector.rs b/crates/agent_ui/src/agent_model_selector.rs index df7d166064da20aa4bc958ebd6a9df806164eb7a..900ca0b683670a30b3353655d17c2ef79cd5523b 100644 --- a/crates/agent_ui/src/agent_model_selector.rs +++ b/crates/agent_ui/src/agent_model_selector.rs @@ -47,6 +47,7 @@ impl AgentModelSelector { } } }, + true, // Use popover styles for picker window, cx, ) diff --git a/crates/agent_ui/src/language_model_selector.rs b/crates/agent_ui/src/language_model_selector.rs index c8838fd9bc8b33ccb87f3198126c5c470328c810..0f7b83e3edba6c8d97c2c12a939a65cb71c39dca 100644 --- a/crates/agent_ui/src/language_model_selector.rs +++ b/crates/agent_ui/src/language_model_selector.rs @@ -19,14 +19,26 @@ pub type LanguageModelSelector = Picker; pub fn language_model_selector( get_active_model: impl Fn(&App) -> Option + 'static, on_model_changed: impl Fn(Arc, &mut App) + 'static, + popover_styles: bool, window: &mut Window, cx: &mut Context, ) -> LanguageModelSelector { - let delegate = LanguageModelPickerDelegate::new(get_active_model, on_model_changed, window, cx); - Picker::list(delegate, window, cx) - .show_scrollbar(true) - .width(rems(20.)) - .max_height(Some(rems(20.).into())) + let delegate = LanguageModelPickerDelegate::new( + get_active_model, + on_model_changed, + popover_styles, + window, + cx, + ); + + if popover_styles { + Picker::list(delegate, window, cx) + .show_scrollbar(true) + .width(rems(20.)) + .max_height(Some(rems(20.).into())) + } else { + Picker::list(delegate, window, cx).show_scrollbar(true) + } } fn all_models(cx: &App) -> GroupedModels { @@ -75,12 +87,14 @@ pub struct LanguageModelPickerDelegate { selected_index: usize, _authenticate_all_providers_task: Task<()>, _subscriptions: Vec, + popover_styles: bool, } impl LanguageModelPickerDelegate { fn new( get_active_model: impl Fn(&App) -> Option + 'static, on_model_changed: impl Fn(Arc, &mut App) + 'static, + popover_styles: bool, window: &mut Window, cx: &mut Context>, ) -> Self { @@ -113,6 +127,7 @@ impl LanguageModelPickerDelegate { } }, )], + popover_styles, } } @@ -530,6 +545,10 @@ impl PickerDelegate for LanguageModelPickerDelegate { _window: &mut Window, cx: &mut Context>, ) -> Option { + if !self.popover_styles { + return None; + } + Some( h_flex() .w_full() diff --git a/crates/agent_ui/src/text_thread_editor.rs b/crates/agent_ui/src/text_thread_editor.rs index be77ec05fe3afbecab5373bd97ec23ee77dd614a..19063075f9cf7382270c4dbaf4930596a7592676 100644 --- a/crates/agent_ui/src/text_thread_editor.rs +++ b/crates/agent_ui/src/text_thread_editor.rs @@ -314,6 +314,7 @@ impl TextThreadEditor { ) }); }, + true, // Use popover styles for picker window, cx, ) From a3f04e8b36e3675727e7b25e12aae26249ef1523 Mon Sep 17 00:00:00 2001 From: Abul Hossain Khan <140191921+abulgit@users.noreply.github.com> Date: Mon, 10 Nov 2025 22:04:59 +0530 Subject: [PATCH 14/74] agent_ui: Fix thread history item showing GMT time instead of local time on Windows (#42198) Closes #42178 Now it's consistent with the DateAndTime path which 
already does timezone conversion. - **Future Work** Happy to tackle the TODO in `time_format.rs` about implementing native Windows APIs for proper localized formatting (similar to macOS's `CFDateFormatter`) as a follow-up. Release Notes: - agent: Fixed the thread history item timestamp, which was being shown in GMT instead of in the user's local timezone on Windows. --- crates/agent_ui/src/acp/thread_history.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/agent_ui/src/acp/thread_history.rs b/crates/agent_ui/src/acp/thread_history.rs index 9cfe30278e1e46d95c00b3c881358a4b00786801..f8a2006f8fc9d2ece6d4a5526482dec5df948686 100644 --- a/crates/agent_ui/src/acp/thread_history.rs +++ b/crates/agent_ui/src/acp/thread_history.rs @@ -673,7 +673,7 @@ impl EntryTimeFormat { timezone, time_format::TimestampFormat::EnhancedAbsolute, ), - EntryTimeFormat::TimeOnly => time_format::format_time(timestamp), + EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)), } } } From 6e1d86f311f4a93f13a93fba1f042f69fee1916b Mon Sep 17 00:00:00 2001 From: Piotr Osiewicz <24362066+osiewicz@users.noreply.github.com> Date: Mon, 10 Nov 2025 18:18:40 +0100 Subject: [PATCH 15/74] fs: Handle io::ErrorKind::NotADirectory in fs::metadata (#42370) New error variants were stabilized in 1.83, and this might've led to us mis-handling not-a-directory errors. Co-authored-by: Dino Release Notes: - N/A Co-authored-by: Dino --- crates/fs/src/fs.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index 53af6ba6afc50cb0e568a01e25d1af22c02d9e36..b8714505093f03828e3d8783204ede61bb0989b0 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -719,9 +719,8 @@ impl Fs for RealFs { { Ok(metadata) => metadata, Err(err) => { - return match (err.kind(), err.raw_os_error()) { - (io::ErrorKind::NotFound, _) => Ok(None), - (io::ErrorKind::Other, Some(libc::ENOTDIR)) => Ok(None), + return match err.kind() { + io::ErrorKind::NotFound | io::ErrorKind::NotADirectory => Ok(None), _ => Err(anyhow::Error::new(err)), }; } From ddf5937899087b03893c4f1038033781e4ac96e7 Mon Sep 17 00:00:00 2001 From: Tryanks Date: Tue, 11 Nov 2025 01:19:35 +0800 Subject: [PATCH 16/74] gpui: Move 'app closing on last window closed' behavior to app-side (#41436) This commit is a continuation of #36548. As per [mikayla-maki's Comment](https://github.com/zed-industries/zed/pull/36548#issuecomment-3412140698), I removed the process management behavior located in GPUI and reimplemented it in Zed. 
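For reference, the app-side pattern is roughly the following — a minimal sketch assuming GPUI's `App::on_window_closed`, `windows()`, and `quit()` as they are used in the `zed.rs` change below; the helper name is illustrative, not the exact function added by this patch:

```
use gpui::{App, Subscription};

// Minimal sketch: the application, not the platform backend, decides when to quit.
fn bind_quit_on_last_window_closed(cx: &mut App) -> Subscription {
    cx.on_window_closed(|cx| {
        // Quit only once the last window has actually been closed.
        if cx.windows().is_empty() {
            cx.quit();
        }
    })
}
```
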
Release Notes: - N/A --------- Co-authored-by: Mikayla Maki --- .../gpui/src/platform/linux/wayland/client.rs | 3 -- crates/gpui/src/platform/linux/x11/client.rs | 4 --- crates/gpui/src/platform/windows/platform.rs | 4 +-- crates/zed/src/zed.rs | 29 +++++++++++++------ 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/crates/gpui/src/platform/linux/wayland/client.rs b/crates/gpui/src/platform/linux/wayland/client.rs index ee2590aa4dfb9a34f61aa2d0d112a201093b10cd..fd4d9fb2b31bfa04fe1ecc7d192db11f997d8d59 100644 --- a/crates/gpui/src/platform/linux/wayland/client.rs +++ b/crates/gpui/src/platform/linux/wayland/client.rs @@ -387,9 +387,6 @@ impl WaylandClientStatePtr { { state.keyboard_focused_window = Some(window); } - if state.windows.is_empty() { - state.common.signal.stop(); - } } } diff --git a/crates/gpui/src/platform/linux/x11/client.rs b/crates/gpui/src/platform/linux/x11/client.rs index 98e70a3071e4cc440da0240b6ecb30c884b52b4b..5b0be84b2fc08d220800271a402496e5ba487b15 100644 --- a/crates/gpui/src/platform/linux/x11/client.rs +++ b/crates/gpui/src/platform/linux/x11/client.rs @@ -246,10 +246,6 @@ impl X11ClientStatePtr { state.keyboard_focused_window = None; } state.cursor_styles.remove(&x_window); - - if state.windows.is_empty() { - state.common.signal.stop(); - } } pub fn update_ime_position(&self, bounds: Bounds) { diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index d236068f3e715d88b7b34a2985bc6dab40278f1d..b985cc14b01b1171d4013bf5c41a0c5199565503 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -753,9 +753,7 @@ impl WindowsPlatformInner { } match message { WM_GPUI_CLOSE_ONE_WINDOW => { - if self.close_one_window(HWND(lparam.0 as _)) { - unsafe { PostQuitMessage(0) }; - } + self.close_one_window(HWND(lparam.0 as _)); Some(0) } WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD => self.run_foreground_task(), diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index d83e1eafc197ae394aceac7a205818074fb7173f..7cdaea920f4b90de4393dd08e0c855ecd1cb2f88 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -274,16 +274,27 @@ pub fn init(cx: &mut App) { } fn bind_on_window_closed(cx: &mut App) -> Option { - WorkspaceSettings::get_global(cx) - .on_last_window_closed - .is_quit_app() - .then(|| { - cx.on_window_closed(|cx| { - if cx.windows().is_empty() { - cx.quit(); - } + #[cfg(target_os = "macos")] + { + WorkspaceSettings::get_global(cx) + .on_last_window_closed + .is_quit_app() + .then(|| { + cx.on_window_closed(|cx| { + if cx.windows().is_empty() { + cx.quit(); + } + }) }) - }) + } + #[cfg(not(target_os = "macos"))] + { + Some(cx.on_window_closed(|cx| { + if cx.windows().is_empty() { + cx.quit(); + } + })) + } } pub fn build_window_options(display_uuid: Option, cx: &mut App) -> WindowOptions { From c748b177c4345c33438b43d66546fbd499570d22 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Mon, 10 Nov 2025 14:23:52 -0300 Subject: [PATCH 17/74] zeta2 cli: Cache at LLM request level (#42371) We'll now cache LLM responses at the request level (by hash of URL+contents) for both context and prediction. This way we don't need to worry about mistakenly using the cache when we change the prompt or its components. 
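As a rough sketch of the idea (not the actual zeta_cli implementation — the struct and on-disk layout here are hypothetical), a request-level cache keyed by a hash of the URL plus the serialized request body could look like this; any change to the prompt changes the body and therefore the key:

```
use std::{
    collections::hash_map::DefaultHasher,
    fs,
    hash::{Hash, Hasher},
    path::PathBuf,
};

// Hypothetical file-backed cache: one file per request key.
struct FileResponseCache {
    dir: PathBuf,
}

impl FileResponseCache {
    // Key is derived from both the endpoint URL and the full request body.
    fn key(url: &str, body: &str) -> u64 {
        let mut hasher = DefaultHasher::new();
        url.hash(&mut hasher);
        body.hash(&mut hasher);
        hasher.finish()
    }

    fn read_response(&self, key: u64) -> Option<String> {
        fs::read_to_string(self.dir.join(format!("{key}.json"))).ok()
    }

    fn write_response(&self, key: u64, value: &str) {
        let _ = fs::create_dir_all(&self.dir);
        let _ = fs::write(self.dir.join(format!("{key}.json")), value);
    }
}
```
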
Release Notes: - N/A --------- Co-authored-by: Oleksiy Syvokon --- crates/zeta2/Cargo.toml | 3 ++ crates/zeta2/src/zeta2.rs | 73 +++++++++++++++++++++++++++++---- crates/zeta_cli/Cargo.toml | 2 +- crates/zeta_cli/src/evaluate.rs | 46 ++++++++------------- crates/zeta_cli/src/main.rs | 4 +- crates/zeta_cli/src/paths.rs | 2 +- crates/zeta_cli/src/predict.rs | 58 +++++++++++++++++++++++++- 7 files changed, 147 insertions(+), 41 deletions(-) diff --git a/crates/zeta2/Cargo.toml b/crates/zeta2/Cargo.toml index 3f394cd5ef2ab5d5bce05430a717312c9e3c0f5c..1cb3a866065748f8e39dee7a980b99ea0b6c63fa 100644 --- a/crates/zeta2/Cargo.toml +++ b/crates/zeta2/Cargo.toml @@ -11,6 +11,9 @@ workspace = true [lib] path = "src/zeta2.rs" +[features] +llm-response-cache = [] + [dependencies] anyhow.workspace = true arrayvec.workspace = true diff --git a/crates/zeta2/src/zeta2.rs b/crates/zeta2/src/zeta2.rs index 297bfa1c4a940448e7fdb570ea4b808556c3f416..c77c78b6f517bce085a26b2c60d04318b2f3cdae 100644 --- a/crates/zeta2/src/zeta2.rs +++ b/crates/zeta2/src/zeta2.rs @@ -131,6 +131,15 @@ pub struct Zeta { options: ZetaOptions, update_required: bool, debug_tx: Option>, + #[cfg(feature = "llm-response-cache")] + llm_response_cache: Option>, +} + +#[cfg(feature = "llm-response-cache")] +pub trait LlmResponseCache: Send + Sync { + fn get_key(&self, url: &gpui::http_client::Url, body: &str) -> u64; + fn read_response(&self, key: u64) -> Option; + fn write_response(&self, key: u64, value: &str); } #[derive(Debug, Clone, PartialEq)] @@ -359,9 +368,16 @@ impl Zeta { ), update_required: false, debug_tx: None, + #[cfg(feature = "llm-response-cache")] + llm_response_cache: None, } } + #[cfg(feature = "llm-response-cache")] + pub fn with_llm_response_cache(&mut self, cache: Arc) { + self.llm_response_cache = Some(cache); + } + pub fn debug_info(&mut self) -> mpsc::UnboundedReceiver { let (debug_watch_tx, debug_watch_rx) = mpsc::unbounded(); self.debug_tx = Some(debug_watch_tx); @@ -734,6 +750,9 @@ impl Zeta { }) .collect::>(); + #[cfg(feature = "llm-response-cache")] + let llm_response_cache = self.llm_response_cache.clone(); + let request_task = cx.background_spawn({ let active_buffer = active_buffer.clone(); async move { @@ -923,8 +942,14 @@ impl Zeta { log::trace!("Sending edit prediction request"); let before_request = chrono::Utc::now(); - let response = - Self::send_raw_llm_request(client, llm_token, app_version, request).await; + let response = Self::send_raw_llm_request( + request, + client, + llm_token, + app_version, + #[cfg(feature = "llm-response-cache")] + llm_response_cache + ).await; let request_time = chrono::Utc::now() - before_request; log::trace!("Got edit prediction response"); @@ -1005,10 +1030,13 @@ impl Zeta { } async fn send_raw_llm_request( + request: open_ai::Request, client: Arc, llm_token: LlmApiToken, app_version: SemanticVersion, - request: open_ai::Request, + #[cfg(feature = "llm-response-cache")] llm_response_cache: Option< + Arc, + >, ) -> Result<(open_ai::Response, Option)> { let url = if let Some(predict_edits_url) = PREDICT_EDITS_URL.as_ref() { http_client::Url::parse(&predict_edits_url)? @@ -1018,7 +1046,21 @@ impl Zeta { .build_zed_llm_url("/predict_edits/raw", &[])? 
}; - Self::send_api_request( + #[cfg(feature = "llm-response-cache")] + let cache_key = if let Some(cache) = llm_response_cache { + let request_json = serde_json::to_string(&request)?; + let key = cache.get_key(&url, &request_json); + + if let Some(response_str) = cache.read_response(key) { + return Ok((serde_json::from_str(&response_str)?, None)); + } + + Some((cache, key)) + } else { + None + }; + + let (response, usage) = Self::send_api_request( |builder| { let req = builder .uri(url.as_ref()) @@ -1029,7 +1071,14 @@ impl Zeta { llm_token, app_version, ) - .await + .await?; + + #[cfg(feature = "llm-response-cache")] + if let Some((cache, key)) = cache_key { + cache.write_response(key, &serde_json::to_string(&response)?); + } + + Ok((response, usage)) } fn handle_api_response( @@ -1297,10 +1346,20 @@ impl Zeta { reasoning_effort: None, }; + #[cfg(feature = "llm-response-cache")] + let llm_response_cache = self.llm_response_cache.clone(); + cx.spawn(async move |this, cx| { log::trace!("Sending search planning request"); - let response = - Self::send_raw_llm_request(client, llm_token, app_version, request).await; + let response = Self::send_raw_llm_request( + request, + client, + llm_token, + app_version, + #[cfg(feature = "llm-response-cache")] + llm_response_cache, + ) + .await; let mut response = Self::handle_api_response(&this, response, cx)?; log::trace!("Got search planning response"); diff --git a/crates/zeta_cli/Cargo.toml b/crates/zeta_cli/Cargo.toml index 5bf90910f18f085db42d5f7934d13601e1c691a2..2e62f2a4462e31b7632aa5e825ea76a4b7df5fc8 100644 --- a/crates/zeta_cli/Cargo.toml +++ b/crates/zeta_cli/Cargo.toml @@ -54,7 +54,7 @@ toml.workspace = true util.workspace = true watch.workspace = true zeta.workspace = true -zeta2.workspace = true +zeta2 = { workspace = true, features = ["llm-response-cache"] } zlog.workspace = true [dev-dependencies] diff --git a/crates/zeta_cli/src/evaluate.rs b/crates/zeta_cli/src/evaluate.rs index c0f513fa38df5fb837be2294845eeae3214074bd..6d5b2da13a4301bfb52cb3cda7662843dea7cd12 100644 --- a/crates/zeta_cli/src/evaluate.rs +++ b/crates/zeta_cli/src/evaluate.rs @@ -1,5 +1,4 @@ use std::{ - fs, io::IsTerminal, path::{Path, PathBuf}, sync::Arc, @@ -12,9 +11,9 @@ use gpui::AsyncApp; use zeta2::udiff::DiffLine; use crate::{ + PromptFormat, example::{Example, NamedExample}, headless::ZetaCliAppState, - paths::CACHE_DIR, predict::{PredictionDetails, zeta2_predict}, }; @@ -22,7 +21,9 @@ use crate::{ pub struct EvaluateArguments { example_paths: Vec, #[clap(long)] - re_run: bool, + skip_cache: bool, + #[arg(long, value_enum, default_value_t = PromptFormat::default())] + prompt_format: PromptFormat, } pub async fn run_evaluate( @@ -33,7 +34,16 @@ pub async fn run_evaluate( let example_len = args.example_paths.len(); let all_tasks = args.example_paths.into_iter().map(|path| { let app_state = app_state.clone(); - cx.spawn(async move |cx| run_evaluate_one(&path, args.re_run, app_state.clone(), cx).await) + cx.spawn(async move |cx| { + run_evaluate_one( + &path, + args.skip_cache, + args.prompt_format, + app_state.clone(), + cx, + ) + .await + }) }); let all_results = futures::future::try_join_all(all_tasks).await.unwrap(); @@ -51,35 +61,15 @@ pub async fn run_evaluate( pub async fn run_evaluate_one( example_path: &Path, - re_run: bool, + skip_cache: bool, + prompt_format: PromptFormat, app_state: Arc, cx: &mut AsyncApp, ) -> Result { let example = NamedExample::load(&example_path).unwrap(); - let example_cache_path = 
CACHE_DIR.join(&example_path.file_name().unwrap()); - - let predictions = if !re_run && example_cache_path.exists() { - let file_contents = fs::read_to_string(&example_cache_path)?; - let as_json = serde_json::from_str::(&file_contents)?; - log::debug!( - "Loaded predictions from cache: {}", - example_cache_path.display() - ); - as_json - } else { - zeta2_predict(example.clone(), Default::default(), &app_state, cx) - .await - .unwrap() - }; - - if !example_cache_path.exists() { - fs::create_dir_all(&*CACHE_DIR).unwrap(); - fs::write( - example_cache_path, - serde_json::to_string(&predictions).unwrap(), - ) + let predictions = zeta2_predict(example.clone(), skip_cache, prompt_format, &app_state, cx) + .await .unwrap(); - } let evaluation_result = evaluate(&example.example, &predictions); diff --git a/crates/zeta_cli/src/main.rs b/crates/zeta_cli/src/main.rs index 66b4a6c8bd71ce046b6336ecb671d491128af945..25fb920bab18f374e41b539bc21320faf6c75484 100644 --- a/crates/zeta_cli/src/main.rs +++ b/crates/zeta_cli/src/main.rs @@ -158,13 +158,13 @@ fn syntax_args_to_options( }), max_diagnostic_bytes: zeta2_args.max_diagnostic_bytes, max_prompt_bytes: zeta2_args.max_prompt_bytes, - prompt_format: zeta2_args.prompt_format.clone().into(), + prompt_format: zeta2_args.prompt_format.into(), file_indexing_parallelism: zeta2_args.file_indexing_parallelism, buffer_change_grouping_interval: Duration::ZERO, } } -#[derive(clap::ValueEnum, Default, Debug, Clone)] +#[derive(clap::ValueEnum, Default, Debug, Clone, Copy)] enum PromptFormat { MarkedExcerpt, LabeledSections, diff --git a/crates/zeta_cli/src/paths.rs b/crates/zeta_cli/src/paths.rs index 61987607bf2a5bb99eae68db4863f97bb282b29c..144bf6f5dd97c518d965d7bd23da83ce7f11f66f 100644 --- a/crates/zeta_cli/src/paths.rs +++ b/crates/zeta_cli/src/paths.rs @@ -2,7 +2,7 @@ use std::{env, path::PathBuf, sync::LazyLock}; static TARGET_DIR: LazyLock = LazyLock::new(|| env::current_dir().unwrap().join("target")); pub static CACHE_DIR: LazyLock = - LazyLock::new(|| TARGET_DIR.join("zeta-prediction-cache")); + LazyLock::new(|| TARGET_DIR.join("zeta-llm-response-cache")); pub static REPOS_DIR: LazyLock = LazyLock::new(|| TARGET_DIR.join("zeta-repos")); pub static WORKTREES_DIR: LazyLock = LazyLock::new(|| TARGET_DIR.join("zeta-worktrees")); pub static LOGS_DIR: LazyLock = LazyLock::new(|| TARGET_DIR.join("zeta-logs")); diff --git a/crates/zeta_cli/src/predict.rs b/crates/zeta_cli/src/predict.rs index f7f503ffebe24d71023ad259ce76adfdea364efc..d85f009c9bacc0b6177683c064979740a0709115 100644 --- a/crates/zeta_cli/src/predict.rs +++ b/crates/zeta_cli/src/predict.rs @@ -1,10 +1,11 @@ use crate::PromptFormat; use crate::example::{ActualExcerpt, NamedExample}; use crate::headless::ZetaCliAppState; -use crate::paths::LOGS_DIR; +use crate::paths::{CACHE_DIR, LOGS_DIR}; use ::serde::Serialize; use anyhow::{Result, anyhow}; use clap::Args; +use gpui::http_client::Url; // use cloud_llm_client::predict_edits_v3::PromptFormat; use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock}; use futures::StreamExt as _; @@ -18,6 +19,7 @@ use std::path::PathBuf; use std::sync::Arc; use std::sync::Mutex; use std::time::{Duration, Instant}; +use zeta2::LlmResponseCache; #[derive(Debug, Args)] pub struct PredictArguments { @@ -26,6 +28,8 @@ pub struct PredictArguments { #[clap(long, short, value_enum, default_value_t = PredictionsOutputFormat::Md)] format: PredictionsOutputFormat, example_path: PathBuf, + #[clap(long)] + skip_cache: bool, } #[derive(clap::ValueEnum, Debug, Clone)] @@ -40,7 +44,7 
@@ pub async fn run_zeta2_predict( cx: &mut AsyncApp, ) { let example = NamedExample::load(args.example_path).unwrap(); - let result = zeta2_predict(example, args.prompt_format, &app_state, cx) + let result = zeta2_predict(example, args.skip_cache, args.prompt_format, &app_state, cx) .await .unwrap(); result.write(args.format, std::io::stdout()).unwrap(); @@ -52,6 +56,7 @@ thread_local! { pub async fn zeta2_predict( example: NamedExample, + skip_cache: bool, prompt_format: PromptFormat, app_state: &Arc, cx: &mut AsyncApp, @@ -95,6 +100,10 @@ pub async fn zeta2_predict( let zeta = cx.update(|cx| zeta2::Zeta::global(&app_state.client, &app_state.user_store, cx))?; + zeta.update(cx, |zeta, _cx| { + zeta.with_llm_response_cache(Arc::new(Cache { skip_cache })); + })?; + cx.subscribe(&buffer_store, { let project = project.clone(); move |_, event, cx| match event { @@ -233,6 +242,51 @@ pub async fn zeta2_predict( anyhow::Ok(result) } +struct Cache { + skip_cache: bool, +} + +impl Cache { + fn path(key: u64) -> PathBuf { + CACHE_DIR.join(format!("{key:x}.json")) + } +} + +impl LlmResponseCache for Cache { + fn get_key(&self, url: &Url, body: &str) -> u64 { + use collections::FxHasher; + use std::hash::{Hash, Hasher}; + + let mut hasher = FxHasher::default(); + url.hash(&mut hasher); + body.hash(&mut hasher); + hasher.finish() + } + + fn read_response(&self, key: u64) -> Option { + let path = Cache::path(key); + if path.exists() { + if self.skip_cache { + log::info!("Skipping existing cached LLM response: {}", path.display()); + None + } else { + log::info!("Using LLM response from cache: {}", path.display()); + Some(fs::read_to_string(path).unwrap()) + } + } else { + None + } + } + + fn write_response(&self, key: u64, value: &str) { + fs::create_dir_all(&*CACHE_DIR).unwrap(); + + let path = Cache::path(key); + log::info!("Writing LLM response to cache: {}", path.display()); + fs::write(path, value).unwrap(); + } +} + #[derive(Clone, Debug, Default, Serialize, Deserialize)] pub struct PredictionDetails { pub diff: String, From d896af2f15e153b39e2c39195daf293fca4b41d1 Mon Sep 17 00:00:00 2001 From: Dino Date: Mon, 10 Nov 2025 18:19:08 +0000 Subject: [PATCH 18/74] git: Handle buffer file path changes (#41944) Update `GitStore.on_buffer_store_event` so that, when a `BufferStoreEvent::BufferChangedFilePath` event is received, we check if there's any diff state for the buffer and, if so, update it according to the new file path, in case the file exists in the repository. Closes #40499 Release Notes: - Fixed issue with git diff tracking when updating a buffer's file from an untracked to a tracked file --- crates/project/src/git_store.rs | 37 +++++++++ crates/project/src/project_tests.rs | 114 ++++++++++++++++++++++++++++ 2 files changed, 151 insertions(+) diff --git a/crates/project/src/git_store.rs b/crates/project/src/git_store.rs index 5fcf28aff3554149ece954074f312e0fe37a9208..e75bafa2d2bdc3b8854e71d7e1e7c543c131d2ee 100644 --- a/crates/project/src/git_store.rs +++ b/crates/project/src/git_store.rs @@ -1399,7 +1399,44 @@ impl GitStore { diffs.remove(buffer_id); } } + BufferStoreEvent::BufferChangedFilePath { buffer, .. } => { + // Whenever a buffer's file path changes, it's possible that the + // new path is actually a path that is being tracked by a git + // repository. In that case, we'll want to update the buffer's + // `BufferDiffState`, in case it already has one. 
+ let buffer_id = buffer.read(cx).remote_id(); + let diff_state = self.diffs.get(&buffer_id); + let repo = self.repository_and_path_for_buffer_id(buffer_id, cx); + + if let Some(diff_state) = diff_state + && let Some((repo, repo_path)) = repo + { + let buffer = buffer.clone(); + let diff_state = diff_state.clone(); + + cx.spawn(async move |_git_store, cx| { + async { + let diff_bases_change = repo + .update(cx, |repo, cx| { + repo.load_committed_text(buffer_id, repo_path, cx) + })? + .await?; + diff_state.update(cx, |diff_state, cx| { + let buffer_snapshot = buffer.read(cx).text_snapshot(); + diff_state.diff_bases_changed( + buffer_snapshot, + Some(diff_bases_change), + cx, + ); + }) + } + .await + .log_err(); + }) + .detach(); + } + } _ => {} } } diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index ad2c339d22fdd49d6565ff5be491749cfcac7830..c07ca96cd80a42500768a42a696b871f8c54bf04 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -10045,6 +10045,120 @@ async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) { pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]); } +#[gpui::test] +async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let file_1_committed = String::from(r#"file_1_committed"#); + let file_1_staged = String::from(r#"file_1_staged"#); + let file_2_committed = String::from(r#"file_2_committed"#); + let file_2_staged = String::from(r#"file_2_staged"#); + let buffer_contents = String::from(r#"buffer"#); + + let fs = FakeFs::new(cx.background_executor.clone()); + fs.insert_tree( + path!("/dir"), + json!({ + ".git": {}, + "src": { + "file_1.rs": file_1_committed.clone(), + "file_2.rs": file_2_committed.clone(), + } + }), + ) + .await; + + fs.set_head_for_repo( + path!("/dir/.git").as_ref(), + &[ + ("src/file_1.rs", file_1_committed.clone()), + ("src/file_2.rs", file_2_committed.clone()), + ], + "deadbeef", + ); + fs.set_index_for_repo( + path!("/dir/.git").as_ref(), + &[ + ("src/file_1.rs", file_1_staged.clone()), + ("src/file_2.rs", file_2_staged.clone()), + ], + ); + + let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await; + + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/dir/src/file_1.rs"), cx) + }) + .await + .unwrap(); + + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx); + }); + + let unstaged_diff = project + .update(cx, |project, cx| { + project.open_unstaged_diff(buffer.clone(), cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + unstaged_diff.update(cx, |unstaged_diff, _cx| { + let base_text = unstaged_diff.base_text_string().unwrap(); + assert_eq!(base_text, file_1_staged, "Should start with file_1 staged"); + }); + + // Save the buffer as `file_2.rs`, which should trigger the + // `BufferChangedFilePath` event. + project + .update(cx, |project, cx| { + let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id(); + let path = ProjectPath { + worktree_id, + path: rel_path("src/file_2.rs").into(), + }; + project.save_buffer_as(buffer.clone(), path, cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + // Verify that the diff bases have been updated to file_2's contents due to + // the `BufferChangedFilePath` event being handled. 
+ unstaged_diff.update(cx, |unstaged_diff, cx| { + let snapshot = buffer.read(cx).snapshot(); + let base_text = unstaged_diff.base_text_string().unwrap(); + assert_eq!( + base_text, file_2_staged, + "Diff bases should be automatically updated to file_2 staged content" + ); + + let hunks: Vec<_> = unstaged_diff.hunks(&snapshot, cx).collect(); + assert!(!hunks.is_empty(), "Should have diff hunks for file_2"); + }); + + let uncommitted_diff = project + .update(cx, |project, cx| { + project.open_uncommitted_diff(buffer.clone(), cx) + }) + .await + .unwrap(); + + cx.run_until_parked(); + + uncommitted_diff.update(cx, |uncommitted_diff, _cx| { + let base_text = uncommitted_diff.base_text_string().unwrap(); + assert_eq!( + base_text, file_2_committed, + "Uncommitted diff should compare against file_2 committed content" + ); + }); +} + async fn search( project: &Entity, query: SearchQuery, From aa6270e658754f0e1fc5ec27135c467fdea41b3d Mon Sep 17 00:00:00 2001 From: Tim Vermeulen Date: Mon, 10 Nov 2025 19:24:30 +0100 Subject: [PATCH 19/74] editor: Add sticky scroll (#42242) Closes #5344 https://github.com/user-attachments/assets/37ec58b0-7cf6-4eea-9b34-dccf03d3526b Release Notes: - Added a setting to stick scopes to the top of the editor --------- Co-authored-by: KyleBarton Co-authored-by: Conrad Irwin --- assets/settings/default.json | 4 + crates/editor/src/editor.rs | 50 +- crates/editor/src/editor_settings.rs | 10 + crates/editor/src/editor_tests.rs | 210 ++++++++ crates/editor/src/element.rs | 463 +++++++++++++++++- crates/language/src/language.rs | 3 + .../settings/src/settings_content/editor.rs | 14 + crates/settings/src/vscode_import.rs | 7 + crates/settings_ui/src/page_data.rs | 15 + docs/src/visual-customization.md | 4 + 10 files changed, 773 insertions(+), 7 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 4b7e8b1533001a550acb658b076eacf45aabe2f0..7fb583f95b0d6d39146ffe9e406201e958598905 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -605,6 +605,10 @@ // to both the horizontal and vertical delta values while scrolling. Fast scrolling // happens when a user holds the alt or option key while scrolling. "fast_scroll_sensitivity": 4.0, + "sticky_scroll": { + // Whether to stick scopes to the top of the editor. + "enabled": false + }, "relative_line_numbers": "disabled", // If 'search_wrap' is disabled, search result do not wrap around the end of the file. 
"search_wrap": true, diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index c7c6fee74216cf872be3e201034bb139458afa45..8c015d09c0717e2df52f8c5f85cead07be95bf50 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -117,8 +117,8 @@ use language::{ AutoindentMode, BlockCommentConfig, BracketMatch, BracketPair, Buffer, BufferRow, BufferSnapshot, Capability, CharClassifier, CharKind, CharScopeContext, CodeLabel, CursorShape, DiagnosticEntryRef, DiffOptions, EditPredictionsMode, EditPreview, HighlightedText, IndentKind, - IndentSize, Language, OffsetRangeExt, Point, Runnable, RunnableRange, Selection, SelectionGoal, - TextObject, TransactionId, TreeSitterOptions, WordsQuery, + IndentSize, Language, OffsetRangeExt, OutlineItem, Point, Runnable, RunnableRange, Selection, + SelectionGoal, TextObject, TransactionId, TreeSitterOptions, WordsQuery, language_settings::{ self, LspInsertMode, RewrapBehavior, WordsCompletionMode, all_language_settings, language_settings, @@ -1183,6 +1183,7 @@ pub struct Editor { hide_mouse_mode: HideMouseMode, pub change_list: ChangeList, inline_value_cache: InlineValueCache, + selection_drag_state: SelectionDragState, colors: Option, post_scroll_update: Task<()>, @@ -1764,6 +1765,51 @@ impl Editor { Editor::new_internal(mode, buffer, project, None, window, cx) } + pub fn sticky_headers(&self, cx: &App) -> Option>> { + let multi_buffer = self.buffer().read(cx); + let multi_buffer_snapshot = multi_buffer.snapshot(cx); + let multi_buffer_visible_start = self + .scroll_manager + .anchor() + .anchor + .to_point(&multi_buffer_snapshot); + let max_row = multi_buffer_snapshot.max_point().row; + + let start_row = (multi_buffer_visible_start.row).min(max_row); + let end_row = (multi_buffer_visible_start.row + 10).min(max_row); + + if let Some((excerpt_id, buffer_id, buffer)) = multi_buffer.read(cx).as_singleton() { + let outline_items = buffer + .outline_items_containing( + Point::new(start_row, 0)..Point::new(end_row, 0), + true, + self.style().map(|style| style.syntax.as_ref()), + ) + .into_iter() + .map(|outline_item| OutlineItem { + depth: outline_item.depth, + range: Anchor::range_in_buffer(*excerpt_id, buffer_id, outline_item.range), + source_range_for_text: Anchor::range_in_buffer( + *excerpt_id, + buffer_id, + outline_item.source_range_for_text, + ), + text: outline_item.text, + highlight_ranges: outline_item.highlight_ranges, + name_ranges: outline_item.name_ranges, + body_range: outline_item + .body_range + .map(|range| Anchor::range_in_buffer(*excerpt_id, buffer_id, range)), + annotation_range: outline_item + .annotation_range + .map(|range| Anchor::range_in_buffer(*excerpt_id, buffer_id, range)), + }); + return Some(outline_items.collect()); + } + + None + } + fn new_internal( mode: EditorMode, multi_buffer: Entity, diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index 9e78ca4ef7f829d74907d3fdb33c561d55f9d2dc..de4198493a9ba2722aef58276ee385a117749fa0 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -33,6 +33,7 @@ pub struct EditorSettings { pub horizontal_scroll_margin: f32, pub scroll_sensitivity: f32, pub fast_scroll_sensitivity: f32, + pub sticky_scroll: StickyScroll, pub relative_line_numbers: RelativeLineNumbers, pub seed_search_query_from_cursor: SeedQuerySetting, pub use_smartcase_search: bool, @@ -65,6 +66,11 @@ pub struct Jupyter { pub enabled: bool, } +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct StickyScroll { + 
pub enabled: bool, +} + #[derive(Clone, Debug, PartialEq, Eq)] pub struct Toolbar { pub breadcrumbs: bool, @@ -185,6 +191,7 @@ impl Settings for EditorSettings { let toolbar = editor.toolbar.unwrap(); let search = editor.search.unwrap(); let drag_and_drop_selection = editor.drag_and_drop_selection.unwrap(); + let sticky_scroll = editor.sticky_scroll.unwrap(); Self { cursor_blink: editor.cursor_blink.unwrap(), cursor_shape: editor.cursor_shape.map(Into::into), @@ -235,6 +242,9 @@ impl Settings for EditorSettings { horizontal_scroll_margin: editor.horizontal_scroll_margin.unwrap(), scroll_sensitivity: editor.scroll_sensitivity.unwrap(), fast_scroll_sensitivity: editor.fast_scroll_sensitivity.unwrap(), + sticky_scroll: StickyScroll { + enabled: sticky_scroll.enabled.unwrap(), + }, relative_line_numbers: editor.relative_line_numbers.unwrap(), seed_search_query_from_cursor: editor.seed_search_query_from_cursor.unwrap(), use_smartcase_search: editor.use_smartcase_search.unwrap(), diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index bf11a9661cc327c1187a47b4995fe044fcdeb060..c1fbc9053882d9e6a74e27a8cd7fb788289d1fa7 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -3,6 +3,7 @@ use crate::{ JoinLines, code_context_menus::CodeContextMenu, edit_prediction_tests::FakeEditPredictionProvider, + element::StickyHeader, linked_editing_ranges::LinkedEditingRanges, scroll::scroll_amount::ScrollAmount, test::{ @@ -27003,6 +27004,215 @@ async fn test_end_of_editor_context(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_sticky_scroll(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + let mut cx = EditorTestContext::new(cx).await; + + let buffer = indoc! {" + ˇfn foo() { + let abc = 123; + } + struct Bar; + impl Bar { + fn new() -> Self { + Self + } + } + fn baz() { + } + "}; + cx.set_state(&buffer); + + cx.update_editor(|e, _, cx| { + e.buffer() + .read(cx) + .as_singleton() + .unwrap() + .update(cx, |buffer, cx| { + buffer.set_language(Some(rust_lang()), cx); + }) + }); + + let mut sticky_headers = |offset: ScrollOffset| { + cx.update_editor(|e, window, cx| { + e.scroll(gpui::Point { x: 0., y: offset }, None, window, cx); + EditorElement::sticky_headers(&e, &e.snapshot(window, cx), cx) + .into_iter() + .map( + |StickyHeader { + start_point, + offset, + .. 
+ }| { (start_point, offset) }, + ) + .collect::>() + }) + }; + + let fn_foo = Point { row: 0, column: 0 }; + let impl_bar = Point { row: 4, column: 0 }; + let fn_new = Point { row: 5, column: 4 }; + + assert_eq!(sticky_headers(0.0), vec![]); + assert_eq!(sticky_headers(0.5), vec![(fn_foo, 0.0)]); + assert_eq!(sticky_headers(1.0), vec![(fn_foo, 0.0)]); + assert_eq!(sticky_headers(1.5), vec![(fn_foo, -0.5)]); + assert_eq!(sticky_headers(2.0), vec![]); + assert_eq!(sticky_headers(2.5), vec![]); + assert_eq!(sticky_headers(3.0), vec![]); + assert_eq!(sticky_headers(3.5), vec![]); + assert_eq!(sticky_headers(4.0), vec![]); + assert_eq!(sticky_headers(4.5), vec![(impl_bar, 0.0), (fn_new, 1.0)]); + assert_eq!(sticky_headers(5.0), vec![(impl_bar, 0.0), (fn_new, 1.0)]); + assert_eq!(sticky_headers(5.5), vec![(impl_bar, 0.0), (fn_new, 0.5)]); + assert_eq!(sticky_headers(6.0), vec![(impl_bar, 0.0)]); + assert_eq!(sticky_headers(6.5), vec![(impl_bar, 0.0)]); + assert_eq!(sticky_headers(7.0), vec![(impl_bar, 0.0)]); + assert_eq!(sticky_headers(7.5), vec![(impl_bar, -0.5)]); + assert_eq!(sticky_headers(8.0), vec![]); + assert_eq!(sticky_headers(8.5), vec![]); + assert_eq!(sticky_headers(9.0), vec![]); + assert_eq!(sticky_headers(9.5), vec![]); + assert_eq!(sticky_headers(10.0), vec![]); +} + +#[gpui::test] +async fn test_scroll_by_clicking_sticky_header(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.editor.sticky_scroll = Some(settings::StickyScrollContent { + enabled: Some(true), + }) + }); + }); + }); + let mut cx = EditorTestContext::new(cx).await; + + let line_height = cx.editor(|editor, window, _cx| { + editor + .style() + .unwrap() + .text + .line_height_in_pixels(window.rem_size()) + }); + + let buffer = indoc! {" + ˇfn foo() { + let abc = 123; + } + struct Bar; + impl Bar { + fn new() -> Self { + Self + } + } + fn baz() { + } + "}; + cx.set_state(&buffer); + + cx.update_editor(|e, _, cx| { + e.buffer() + .read(cx) + .as_singleton() + .unwrap() + .update(cx, |buffer, cx| { + buffer.set_language(Some(rust_lang()), cx); + }) + }); + + let fn_foo = || empty_range(0, 0); + let impl_bar = || empty_range(4, 0); + let fn_new = || empty_range(5, 4); + + let mut scroll_and_click = |scroll_offset: ScrollOffset, click_offset: ScrollOffset| { + cx.update_editor(|e, window, cx| { + e.scroll( + gpui::Point { + x: 0., + y: scroll_offset, + }, + None, + window, + cx, + ); + }); + cx.simulate_click( + gpui::Point { + x: px(0.), + y: click_offset as f32 * line_height, + }, + Modifiers::none(), + ); + cx.update_editor(|e, _, cx| (e.scroll_position(cx), display_ranges(e, cx))) + }; + + assert_eq!( + scroll_and_click( + 4.5, // impl Bar is halfway off the screen + 0.0 // click top of screen + ), + // scrolled to impl Bar + (gpui::Point { x: 0., y: 4. }, vec![impl_bar()]) + ); + + assert_eq!( + scroll_and_click( + 4.5, // impl Bar is halfway off the screen + 0.25 // click middle of impl Bar + ), + // scrolled to impl Bar + (gpui::Point { x: 0., y: 4. }, vec![impl_bar()]) + ); + + assert_eq!( + scroll_and_click( + 4.5, // impl Bar is halfway off the screen + 1.5 // click below impl Bar (e.g. fn new()) + ), + // scrolled to fn new() - this is below the impl Bar header which has persisted + (gpui::Point { x: 0., y: 4. 
}, vec![fn_new()]) + ); + + assert_eq!( + scroll_and_click( + 5.5, // fn new is halfway underneath impl Bar + 0.75 // click on the overlap of impl Bar and fn new() + ), + (gpui::Point { x: 0., y: 4. }, vec![impl_bar()]) + ); + + assert_eq!( + scroll_and_click( + 5.5, // fn new is halfway underneath impl Bar + 1.25 // click on the visible part of fn new() + ), + (gpui::Point { x: 0., y: 4. }, vec![fn_new()]) + ); + + assert_eq!( + scroll_and_click( + 1.5, // fn foo is halfway off the screen + 0.0 // click top of screen + ), + (gpui::Point { x: 0., y: 0. }, vec![fn_foo()]) + ); + + assert_eq!( + scroll_and_click( + 1.5, // fn foo is halfway off the screen + 0.75 // click visible part of let abc... + ) + .0, + // no change in scroll + // we don't assert on the visible_range because if we clicked the gutter, our line is fully selected + (gpui::Point { x: 0., y: 1.5 }) + ); +} + #[gpui::test] async fn test_next_prev_reference(cx: &mut TestAppContext) { const CYCLE_POSITIONS: &[&'static str] = &[ diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 99a4743eb9d5e1ef8dfc99fbee7b2a74490c7356..67f6350ce625e96fcbe8734bf690fb557b86046c 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -8,8 +8,8 @@ use crate::{ HandleInput, HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, OpenExcerptsSplit, PageDown, PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt, - SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SizingBehavior, SoftWrap, - StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll, + SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SelectionEffects, + SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll, code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP}, display_map::{ Block, BlockContext, BlockStyle, ChunkRendererId, DisplaySnapshot, EditorMargins, @@ -29,7 +29,7 @@ use crate::{ items::BufferSearchHighlights, mouse_context_menu::{self, MenuPosition}, scroll::{ - ActiveScrollbarState, ScrollOffset, ScrollPixelOffset, ScrollbarThumbState, + ActiveScrollbarState, Autoscroll, ScrollOffset, ScrollPixelOffset, ScrollbarThumbState, scroll_amount::ScrollAmount, }, }; @@ -4555,6 +4555,138 @@ impl EditorElement { header } + fn layout_sticky_headers( + &self, + snapshot: &EditorSnapshot, + editor_width: Pixels, + is_row_soft_wrapped: impl Copy + Fn(usize) -> bool, + line_height: Pixels, + scroll_pixel_position: gpui::Point, + content_origin: gpui::Point, + gutter_dimensions: &GutterDimensions, + gutter_hitbox: &Hitbox, + text_hitbox: &Hitbox, + window: &mut Window, + cx: &mut App, + ) -> Option { + let show_line_numbers = snapshot + .show_line_numbers + .unwrap_or_else(|| EditorSettings::get_global(cx).gutter.line_numbers); + + let rows = Self::sticky_headers(self.editor.read(cx), snapshot, cx); + + let mut lines = Vec::::new(); + + for StickyHeader { + item, + sticky_row, + start_point, + offset, + } in rows.into_iter().rev() + { + let line = layout_line( + sticky_row, + snapshot, + &self.style, + editor_width, + is_row_soft_wrapped, + window, + cx, + ); + + let line_number = show_line_numbers.then(|| { + let number = (start_point.row + 1).to_string(); + let color = cx.theme().colors().editor_line_number; + self.shape_line_number(SharedString::from(number), color, window) + }); + + lines.push(StickyHeaderLine::new( + 
sticky_row, + line_height * offset as f32, + line, + line_number, + item.range.start, + line_height, + scroll_pixel_position, + content_origin, + gutter_hitbox, + text_hitbox, + window, + cx, + )); + } + + lines.reverse(); + if lines.is_empty() { + return None; + } + + Some(StickyHeaders { + lines, + gutter_background: cx.theme().colors().editor_gutter_background, + content_background: self.style.background, + gutter_right_padding: gutter_dimensions.right_padding, + }) + } + + pub(crate) fn sticky_headers( + editor: &Editor, + snapshot: &EditorSnapshot, + cx: &App, + ) -> Vec { + let scroll_top = snapshot.scroll_position().y; + + let mut end_rows = Vec::::new(); + let mut rows = Vec::::new(); + + let items = editor.sticky_headers(cx).unwrap_or_default(); + + for item in items { + let start_point = item.range.start.to_point(snapshot.buffer_snapshot()); + let end_point = item.range.end.to_point(snapshot.buffer_snapshot()); + + let sticky_row = snapshot + .display_snapshot + .point_to_display_point(start_point, Bias::Left) + .row(); + let end_row = snapshot + .display_snapshot + .point_to_display_point(end_point, Bias::Left) + .row(); + let max_sticky_row = end_row.previous_row(); + if max_sticky_row <= sticky_row { + continue; + } + + while end_rows + .last() + .is_some_and(|&last_end| last_end < sticky_row) + { + end_rows.pop(); + } + let depth = end_rows.len(); + let adjusted_scroll_top = scroll_top + depth as f64; + + if sticky_row.as_f64() >= adjusted_scroll_top || end_row.as_f64() <= adjusted_scroll_top + { + continue; + } + + let max_scroll_offset = max_sticky_row.as_f64() - scroll_top; + let offset = (depth as f64).min(max_scroll_offset); + + end_rows.push(end_row); + rows.push(StickyHeader { + item, + sticky_row, + start_point, + offset, + }); + } + + rows + } + fn layout_cursor_popovers( &self, line_height: Pixels, @@ -6407,6 +6539,89 @@ impl EditorElement { } } + fn paint_sticky_headers( + &mut self, + layout: &mut EditorLayout, + window: &mut Window, + cx: &mut App, + ) { + let Some(mut sticky_headers) = layout.sticky_headers.take() else { + return; + }; + + if sticky_headers.lines.is_empty() { + layout.sticky_headers = Some(sticky_headers); + return; + } + + let whitespace_setting = self + .editor + .read(cx) + .buffer + .read(cx) + .language_settings(cx) + .show_whitespaces; + sticky_headers.paint(layout, whitespace_setting, window, cx); + + let sticky_header_hitboxes: Vec = sticky_headers + .lines + .iter() + .map(|line| line.hitbox.clone()) + .collect(); + let hovered_hitbox = sticky_header_hitboxes + .iter() + .find_map(|hitbox| hitbox.is_hovered(window).then_some(hitbox.id)); + + window.on_mouse_event(move |_: &MouseMoveEvent, phase, window, _cx| { + if !phase.bubble() { + return; + } + + let current_hover = sticky_header_hitboxes + .iter() + .find_map(|hitbox| hitbox.is_hovered(window).then_some(hitbox.id)); + if hovered_hitbox != current_hover { + window.refresh(); + } + }); + + for (line_index, line) in sticky_headers.lines.iter().enumerate() { + let editor = self.editor.clone(); + let hitbox = line.hitbox.clone(); + let target_anchor = line.target_anchor; + window.on_mouse_event(move |event: &MouseDownEvent, phase, window, cx| { + if !phase.bubble() { + return; + } + + if event.button == MouseButton::Left && hitbox.is_hovered(window) { + editor.update(cx, |editor, cx| { + editor.change_selections( + SelectionEffects::scroll(Autoscroll::top_relative(line_index)), + window, + cx, + |selections| selections.select_ranges([target_anchor..target_anchor]), + ); + 
cx.stop_propagation(); + }); + } + }); + } + + let text_bounds = layout.position_map.text_hitbox.bounds; + let border_top = text_bounds.top() + + sticky_headers.lines.last().unwrap().offset + + layout.position_map.line_height; + let separator_height = px(1.); + let border_bounds = Bounds::from_corners( + point(layout.gutter_hitbox.bounds.left(), border_top), + point(text_bounds.right(), border_top + separator_height), + ); + window.paint_quad(fill(border_bounds, cx.theme().colors().border_variant)); + + layout.sticky_headers = Some(sticky_headers); + } + fn paint_lines_background( &mut self, layout: &mut EditorLayout, @@ -8107,6 +8322,27 @@ impl LineWithInvisibles { cx: &mut App, ) { let line_y = f32::from(line_height) * Pixels::from(row.as_f64() - scroll_position.y); + self.prepaint_with_custom_offset( + line_height, + scroll_pixel_position, + content_origin, + line_y, + line_elements, + window, + cx, + ); + } + + fn prepaint_with_custom_offset( + &mut self, + line_height: Pixels, + scroll_pixel_position: gpui::Point, + content_origin: gpui::Point, + line_y: Pixels, + line_elements: &mut SmallVec<[AnyElement; 1]>, + window: &mut Window, + cx: &mut App, + ) { let mut fragment_origin = content_origin + gpui::point(Pixels::from(-scroll_pixel_position.x), line_y); for fragment in &mut self.fragments { @@ -8141,9 +8377,31 @@ impl LineWithInvisibles { window: &mut Window, cx: &mut App, ) { - let line_height = layout.position_map.line_height; - let line_y = line_height * (row.as_f64() - layout.position_map.scroll_position.y) as f32; + self.draw_with_custom_offset( + layout, + row, + content_origin, + layout.position_map.line_height + * (row.as_f64() - layout.position_map.scroll_position.y) as f32, + whitespace_setting, + selection_ranges, + window, + cx, + ); + } + fn draw_with_custom_offset( + &self, + layout: &EditorLayout, + row: DisplayRow, + content_origin: gpui::Point, + line_y: Pixels, + whitespace_setting: ShowWhitespaceSetting, + selection_ranges: &[Range], + window: &mut Window, + cx: &mut App, + ) { + let line_height = layout.position_map.line_height; let mut fragment_origin = content_origin + gpui::point( Pixels::from(-layout.position_map.scroll_pixel_position.x), @@ -8582,6 +8840,7 @@ impl Element for EditorElement { }; let is_minimap = self.editor.read(cx).mode.is_minimap(); + let is_singleton = self.editor.read(cx).buffer_kind(cx) == ItemBufferKind::Singleton; if !is_minimap { let focus_handle = self.editor.focus_handle(cx); @@ -9228,6 +9487,26 @@ impl Element for EditorElement { scroll_position.x * f64::from(em_advance), scroll_position.y * f64::from(line_height), ); + let sticky_headers = if !is_minimap + && is_singleton + && EditorSettings::get_global(cx).sticky_scroll.enabled + { + self.layout_sticky_headers( + &snapshot, + editor_width, + is_row_soft_wrapped, + line_height, + scroll_pixel_position, + content_origin, + &gutter_dimensions, + &gutter_hitbox, + &text_hitbox, + window, + cx, + ) + } else { + None + }; let indent_guides = self.layout_indent_guides( content_origin, text_hitbox.origin, @@ -9697,6 +9976,7 @@ impl Element for EditorElement { tab_invisible, space_invisible, sticky_buffer_header, + sticky_headers, expand_toggles, } }) @@ -9767,6 +10047,7 @@ impl Element for EditorElement { } }); + self.paint_sticky_headers(layout, window, cx); self.paint_minimap(layout, window, cx); self.paint_scrollbars(layout, window, cx); self.paint_edit_prediction_popover(layout, window, cx); @@ -9875,15 +10156,180 @@ pub struct EditorLayout { tab_invisible: ShapedLine, 
space_invisible: ShapedLine, sticky_buffer_header: Option, + sticky_headers: Option, document_colors: Option<(DocumentColorsRenderMode, Vec<(Range, Hsla)>)>, } +struct StickyHeaders { + lines: Vec, + gutter_background: Hsla, + content_background: Hsla, + gutter_right_padding: Pixels, +} + +struct StickyHeaderLine { + row: DisplayRow, + offset: Pixels, + line: LineWithInvisibles, + line_number: Option, + elements: SmallVec<[AnyElement; 1]>, + available_text_width: Pixels, + target_anchor: Anchor, + hitbox: Hitbox, +} + impl EditorLayout { fn line_end_overshoot(&self) -> Pixels { 0.15 * self.position_map.line_height } } +impl StickyHeaders { + fn paint( + &mut self, + layout: &mut EditorLayout, + whitespace_setting: ShowWhitespaceSetting, + window: &mut Window, + cx: &mut App, + ) { + let line_height = layout.position_map.line_height; + + for line in self.lines.iter_mut().rev() { + window.paint_layer( + Bounds::new( + layout.gutter_hitbox.origin + point(Pixels::ZERO, line.offset), + size(line.hitbox.size.width, line_height), + ), + |window| { + let gutter_bounds = Bounds::new( + layout.gutter_hitbox.origin + point(Pixels::ZERO, line.offset), + size(layout.gutter_hitbox.size.width, line_height), + ); + window.paint_quad(fill(gutter_bounds, self.gutter_background)); + + let text_bounds = Bounds::new( + layout.position_map.text_hitbox.origin + point(Pixels::ZERO, line.offset), + size(line.available_text_width, line_height), + ); + window.paint_quad(fill(text_bounds, self.content_background)); + + if line.hitbox.is_hovered(window) { + let hover_overlay = cx.theme().colors().panel_overlay_hover; + window.paint_quad(fill(gutter_bounds, hover_overlay)); + window.paint_quad(fill(text_bounds, hover_overlay)); + } + + line.paint( + layout, + self.gutter_right_padding, + line.available_text_width, + layout.content_origin, + line_height, + whitespace_setting, + window, + cx, + ); + }, + ); + + window.set_cursor_style(CursorStyle::PointingHand, &line.hitbox); + } + } +} + +impl StickyHeaderLine { + fn new( + row: DisplayRow, + offset: Pixels, + mut line: LineWithInvisibles, + line_number: Option, + target_anchor: Anchor, + line_height: Pixels, + scroll_pixel_position: gpui::Point, + content_origin: gpui::Point, + gutter_hitbox: &Hitbox, + text_hitbox: &Hitbox, + window: &mut Window, + cx: &mut App, + ) -> Self { + let mut elements = SmallVec::<[AnyElement; 1]>::new(); + line.prepaint_with_custom_offset( + line_height, + scroll_pixel_position, + content_origin, + offset, + &mut elements, + window, + cx, + ); + + let hitbox_bounds = Bounds::new( + gutter_hitbox.origin + point(Pixels::ZERO, offset), + size(text_hitbox.right() - gutter_hitbox.left(), line_height), + ); + let available_text_width = + (hitbox_bounds.size.width - gutter_hitbox.size.width).max(Pixels::ZERO); + + Self { + row, + offset, + line, + line_number, + elements, + available_text_width, + target_anchor, + hitbox: window.insert_hitbox(hitbox_bounds, HitboxBehavior::BlockMouseExceptScroll), + } + } + + fn paint( + &mut self, + layout: &EditorLayout, + gutter_right_padding: Pixels, + available_text_width: Pixels, + content_origin: gpui::Point, + line_height: Pixels, + whitespace_setting: ShowWhitespaceSetting, + window: &mut Window, + cx: &mut App, + ) { + window.with_content_mask( + Some(ContentMask { + bounds: Bounds::new( + layout.position_map.text_hitbox.bounds.origin + + point(Pixels::ZERO, self.offset), + size(available_text_width, line_height), + ), + }), + |window| { + self.line.draw_with_custom_offset( + layout, + self.row, + 
content_origin, + self.offset, + whitespace_setting, + &[], + window, + cx, + ); + for element in &mut self.elements { + element.paint(window, cx); + } + }, + ); + + if let Some(line_number) = &self.line_number { + let gutter_origin = layout.gutter_hitbox.origin + point(Pixels::ZERO, self.offset); + let gutter_width = layout.gutter_hitbox.size.width; + let origin = point( + gutter_origin.x + gutter_width - gutter_right_padding - line_number.width, + gutter_origin.y, + ); + line_number.paint(origin, line_height, window, cx).log_err(); + } + } +} + #[derive(Debug)] struct LineNumberSegment { shaped_line: ShapedLine, @@ -10730,6 +11176,13 @@ impl HighlightedRange { } } +pub(crate) struct StickyHeader { + pub item: language::OutlineItem, + pub sticky_row: DisplayRow, + pub start_point: Point, + pub offset: ScrollOffset, +} + enum CursorPopoverType { CodeContextMenu, EditPrediction, diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index 75f53524efc40e2cfaf06c5bbe893b7c5af5883c..2a2f870d6b55abc57a14e623375f77b9fb2d5dbc 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -2613,6 +2613,9 @@ pub fn rust_lang() -> Arc { Some(tree_sitter_rust::LANGUAGE.into()), ) .with_queries(LanguageQueries { + outline: Some(Cow::from(include_str!( + "../../languages/src/rust/outline.scm" + ))), indents: Some(Cow::from( r#" [ diff --git a/crates/settings/src/settings_content/editor.rs b/crates/settings/src/settings_content/editor.rs index 5c33dbc2af48a55e176a5f093afcc83437054932..2dc3c6c0fdc78bf470e78e0577cc886d1471e8b2 100644 --- a/crates/settings/src/settings_content/editor.rs +++ b/crates/settings/src/settings_content/editor.rs @@ -96,6 +96,10 @@ pub struct EditorSettingsContent { /// Default: 4.0 #[serde(serialize_with = "crate::serialize_optional_f32_with_two_decimal_places")] pub fast_scroll_sensitivity: Option, + /// Settings for sticking scopes to the top of the editor. + /// + /// Default: sticky scroll is disabled + pub sticky_scroll: Option, /// Whether the line numbers on editors gutter are relative or not. /// When "enabled" shows relative number of buffer lines, when "wrapped" shows /// relative number of display lines. @@ -312,6 +316,16 @@ pub struct ScrollbarContent { pub axes: Option, } +/// Sticky scroll related settings +#[skip_serializing_none] +#[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)] +pub struct StickyScrollContent { + /// Whether sticky scroll is enabled. 
+ /// + /// Default: false + pub enabled: Option, +} + /// Minimap related settings #[skip_serializing_none] #[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema, MergeFrom, PartialEq)] diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index a8fd15c32acc130a9cde4948cc2aa66f898708d0..36bd84e1a145a9a64eadbaec9411f904b9a881c9 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -255,6 +255,7 @@ impl VsCodeSettings { excerpt_context_lines: None, expand_excerpt_lines: None, fast_scroll_sensitivity: self.read_f32("editor.fastScrollSensitivity"), + sticky_scroll: self.sticky_scroll_content(), go_to_definition_fallback: None, gutter: self.gutter_content(), hide_mouse: None, @@ -303,6 +304,12 @@ impl VsCodeSettings { } } + fn sticky_scroll_content(&self) -> Option { + skip_default(StickyScrollContent { + enabled: self.read_bool("editor.stickyScroll.enabled"), + }) + } + fn gutter_content(&self) -> Option { skip_default(GutterContent { line_numbers: self.read_enum("editor.lineNumbers", |s| match s { diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 98db1a7efee6b333d258f3db29142532c514aca3..392796c091d379429f2ae787b14a75841bef12fa 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -1352,6 +1352,21 @@ pub(crate) fn settings_data(cx: &App) -> Vec { metadata: None, files: USER, }), + SettingsPageItem::SettingItem(SettingItem { + title: "Sticky Scroll", + description: "Whether to stick scopes to the top of the editor", + field: Box::new(SettingField { + json_path: Some("sticky_scroll.enabled"), + pick: |settings_content| { + settings_content.editor.sticky_scroll.as_ref().and_then(|sticky_scroll| sticky_scroll.enabled.as_ref()) + }, + write: |settings_content, value| { + settings_content.editor.sticky_scroll.get_or_insert_default().enabled = value; + }, + }), + metadata: None, + files: USER, + }), SettingsPageItem::SectionHeader("Signature Help"), SettingsPageItem::SettingItem(SettingItem { title: "Auto Signature Help", diff --git a/docs/src/visual-customization.md b/docs/src/visual-customization.md index 77d3ac2d87fe95f9b9f6836a2a14ae58f6ef4c9a..98b07797a2f7904acd10fe54b04ab39fe0854667 100644 --- a/docs/src/visual-customization.md +++ b/docs/src/visual-customization.md @@ -218,6 +218,10 @@ TBD: Centered layout related settings "active_line_width": 1, // Width of active guide in pixels [1-10] "coloring": "fixed", // disabled, fixed, indent_aware "background_coloring": "disabled" // disabled, indent_aware + }, + + "sticky_scroll": { + "enabled": false // Whether to stick scopes to the top of the editor. Disabled by default. } ``` From ed61a79cc5a28f36f65f77b16ae6369822845d73 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 10 Nov 2025 15:29:55 -0300 Subject: [PATCH 20/74] agent_ui: Fix history view losing focus when empty (#42374) Closes https://github.com/zed-industries/zed/issues/42356 This PR fixes the history view losing focus by simply always displaying the search editor. I don't think it's too weird to not have it when it's empty, and it also ends up matching how regular pickers work. Release Notes: - agent: Fixed a bug where navigating the agent panel with the keyboard wouldn't work if you visited the history view and it was empty/had no entries. 
--- crates/agent_ui/src/acp/thread_history.rs | 56 ++++++++++------------- 1 file changed, 25 insertions(+), 31 deletions(-) diff --git a/crates/agent_ui/src/acp/thread_history.rs b/crates/agent_ui/src/acp/thread_history.rs index f8a2006f8fc9d2ece6d4a5526482dec5df948686..11718c63475212fbe8b996b2f6edae8b4295c91a 100644 --- a/crates/agent_ui/src/acp/thread_history.rs +++ b/crates/agent_ui/src/acp/thread_history.rs @@ -457,25 +457,23 @@ impl Render for AcpThreadHistory { .on_action(cx.listener(Self::select_last)) .on_action(cx.listener(Self::confirm)) .on_action(cx.listener(Self::remove_selected_thread)) - .when(!self.history_store.read(cx).is_empty(cx), |parent| { - parent.child( - h_flex() - .h(px(41.)) // Match the toolbar perfectly - .w_full() - .py_1() - .px_2() - .gap_2() - .justify_between() - .border_b_1() - .border_color(cx.theme().colors().border) - .child( - Icon::new(IconName::MagnifyingGlass) - .color(Color::Muted) - .size(IconSize::Small), - ) - .child(self.search_editor.clone()), - ) - }) + .child( + h_flex() + .h(px(41.)) // Match the toolbar perfectly + .w_full() + .py_1() + .px_2() + .gap_2() + .justify_between() + .border_b_1() + .border_color(cx.theme().colors().border) + .child( + Icon::new(IconName::MagnifyingGlass) + .color(Color::Muted) + .size(IconSize::Small), + ) + .child(self.search_editor.clone()), + ) .child({ let view = v_flex() .id("list-container") @@ -484,19 +482,15 @@ impl Render for AcpThreadHistory { .flex_grow(); if self.history_store.read(cx).is_empty(cx) { - view.justify_center() - .child( - h_flex().w_full().justify_center().child( - Label::new("You don't have any past threads yet.") - .size(LabelSize::Small), - ), - ) - } else if self.search_produced_no_matches() { - view.justify_center().child( - h_flex().w_full().justify_center().child( - Label::new("No threads match your search.").size(LabelSize::Small), - ), + view.justify_center().items_center().child( + Label::new("You don't have any past threads yet.") + .size(LabelSize::Small) + .color(Color::Muted), ) + } else if self.search_produced_no_matches() { + view.justify_center() + .items_center() + .child(Label::new("No threads match your search.").size(LabelSize::Small)) } else { view.child( uniform_list( From 2b369d7532ccd5129e6555c57dad3675000475fa Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Mon, 10 Nov 2025 19:41:14 +0100 Subject: [PATCH 21/74] rust: Explicitly capture lifetime identifier (#42372) Closes #42030 This matches what VSCode and basically also this capture does. However, the identifier capture was overridden by other captures, hence the need to be explicit here. | Before | After | | - | - | | Bildschirmfoto 2025-11-10 um 17 56
28 | Bildschirmfoto 2025-11-10 um 17
54 35 | Release Notes: - Improved lifetime highlighting in Rust using the `lifetime` capture. --- crates/languages/src/rust/highlights.scm | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/languages/src/rust/highlights.scm b/crates/languages/src/rust/highlights.scm index c541b5121784e3edb86f6d2e97b0666204d9f475..ea3935257648b31f666ea64c7c302644ab3eb24e 100644 --- a/crates/languages/src/rust/highlights.scm +++ b/crates/languages/src/rust/highlights.scm @@ -195,7 +195,9 @@ (unary_expression "!" @operator) operator: "/" @operator -(lifetime) @lifetime +(lifetime + "'" @lifetime + (identifier) @lifetime) (parameter (identifier) @variable.parameter) From 3fbfea491d81664b40e3b73abcf0e8bba491ba82 Mon Sep 17 00:00:00 2001 From: Andrew Farkas <6060305+HactarCE@users.noreply.github.com> Date: Mon, 10 Nov 2025 14:33:00 -0500 Subject: [PATCH 22/74] Support relative paths in LSP & DAP binaries (#42135) Closes #41214 Release Notes: - Added support for relative paths in LSP and DAP binaries --------- Co-authored-by: Cole Miller Co-authored-by: Julia Ryan --- crates/project/src/debugger/dap_store.rs | 5 +- crates/project/src/lsp_store.rs | 7 +-- crates/project/src/project_tests.rs | 67 ++++++++++++++++++++++++ crates/worktree/src/worktree.rs | 4 +- 4 files changed, 77 insertions(+), 6 deletions(-) diff --git a/crates/project/src/debugger/dap_store.rs b/crates/project/src/debugger/dap_store.rs index 0b733aac29843090361cd5868799f6cb1db630f6..04901a5fef60cfc1692f712f3cdd4a3ec1071632 100644 --- a/crates/project/src/debugger/dap_store.rs +++ b/crates/project/src/debugger/dap_store.rs @@ -261,7 +261,10 @@ impl DapStore { .get(&adapter.name()); let user_installed_path = dap_settings.and_then(|s| match &s.binary { DapBinary::Default => None, - DapBinary::Custom(binary) => Some(PathBuf::from(binary)), + DapBinary::Custom(binary) => { + // if `binary` is absolute, `.join()` will keep it unmodified + Some(worktree.read(cx).abs_path().join(PathBuf::from(binary))) + } }); let user_args = dap_settings.map(|s| s.args.clone()); let user_env = dap_settings.map(|s| s.env.clone()); diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 50f8c6695c188b065e89b4694e004470aa997abc..ecfe169b47b7daa1b1c8c0794d9cdde8f0b06ad4 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -563,8 +563,8 @@ impl LocalLspStore { allow_binary_download: bool, cx: &mut App, ) -> Task> { - if let Some(settings) = settings.binary.as_ref() - && settings.path.is_some() + if let Some(settings) = &settings.binary + && let Some(path) = settings.path.as_ref().map(PathBuf::from) { let settings = settings.clone(); @@ -573,7 +573,8 @@ impl LocalLspStore { env.extend(settings.env.unwrap_or_default()); Ok(LanguageServerBinary { - path: PathBuf::from(&settings.path.unwrap()), + // if `path` is absolute, `.join()` will keep it unmodified + path: delegate.worktree_root_path().join(path), env: Some(env), arguments: settings .arguments diff --git a/crates/project/src/project_tests.rs b/crates/project/src/project_tests.rs index c07ca96cd80a42500768a42a696b871f8c54bf04..332fdb3e0ffd158cfb0d4df199752b3ccddfb743 100644 --- a/crates/project/src/project_tests.rs +++ b/crates/project/src/project_tests.rs @@ -1208,6 +1208,73 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) { ); } +#[gpui::test] +async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) { + init_test(cx); + + let settings_json_contents = json!({ + "languages": { + "Rust": { + 
"language_servers": ["my_fake_lsp"] + } + }, + "lsp": { + "my_fake_lsp": { + "binary": { + "path": path!("relative_path/to/my_fake_lsp_binary.exe").to_string(), + } + } + }, + }); + + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + path!("/the-root"), + json!({ + ".zed": { + "settings.json": settings_json_contents.to_string(), + }, + "relative_path": { + "to": { + "my_fake_lsp.exe": "", + }, + }, + "src": { + "main.rs": "", + } + }), + ) + .await; + + let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await; + let language_registry = project.read_with(cx, |project, _| project.languages().clone()); + language_registry.add(rust_lang()); + + let mut fake_rust_servers = language_registry.register_fake_lsp( + "Rust", + FakeLspAdapter { + name: "my_fake_lsp", + ..Default::default() + }, + ); + + cx.run_until_parked(); + + // Start the language server by opening a buffer with a compatible file extension. + project + .update(cx, |project, cx| { + project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx) + }) + .await + .unwrap(); + + let lsp_path = fake_rust_servers.next().await.unwrap().binary.path; + assert_eq!( + lsp_path.to_string_lossy(), + path!("/the-root/relative_path/to/my_fake_lsp_binary.exe"), + ); +} + #[gpui::test] async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) { init_test(cx); diff --git a/crates/worktree/src/worktree.rs b/crates/worktree/src/worktree.rs index 8f6a1d23b82a272452ed90e635c3936f169d1404..69fee07583a33106689c463732fe6defbdcfbb40 100644 --- a/crates/worktree/src/worktree.rs +++ b/crates/worktree/src/worktree.rs @@ -2352,8 +2352,8 @@ impl Snapshot { self.entries_by_path.first() } - /// TODO: what's the difference between `root_dir` and `abs_path`? - /// is there any? if so, document it. + /// Returns `None` for a single file worktree, or `Some(self.abs_path())` if + /// it is a directory. pub fn root_dir(&self) -> Option> { self.root_entry() .filter(|entry| entry.is_dir()) From c24f9e47b4eac032e589784704b0ce35ea804dad Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 10 Nov 2025 12:50:43 -0700 Subject: [PATCH 23/74] Try to download wasi-sdk ahead of time (#42377) This hopefully resolves the lingering test failures on linux, but also adds some logging just in case this isn't the problem... 
Release Notes: - N/A --------- Co-authored-by: Ben Kunkle --- .github/workflows/compare_perf.yml | 3 + .github/workflows/release.yml | 17 ++++-- .github/workflows/release_nightly.yml | 6 ++ .github/workflows/run_agent_evals.yml | 3 + .github/workflows/run_bundling.yml | 6 ++ .github/workflows/run_tests.yml | 28 ++++++--- .github/workflows/run_unit_evals.yml | 3 + .../src/extension_store_test.rs | 8 ++- crates/zlog/src/zlog.rs | 13 ++-- script/download-wasi-sdk | 60 +++++++++++++++++++ .../xtask/src/tasks/workflows/run_tests.rs | 8 +-- tooling/xtask/src/tasks/workflows/steps.rs | 8 ++- 12 files changed, 138 insertions(+), 25 deletions(-) create mode 100755 script/download-wasi-sdk diff --git a/.github/workflows/compare_perf.yml b/.github/workflows/compare_perf.yml index 40dc2fb2129dfa355c18e0ecfd0aa3ae6afba9ea..5bcb733f3f21c95e530d7c221df080997dfc24eb 100644 --- a/.github/workflows/compare_perf.yml +++ b/.github/workflows/compare_perf.yml @@ -35,6 +35,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: compare_perf::run_perf::install_hyperfine run: cargo install hyperfine shell: bash -euxo pipefail {0} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c25bb13e7545f5acdcea469fcb82b023f3690dd6..25f8b00910f5d64e9319eb40943ae1b5b89d8f28 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -57,16 +57,19 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust - name: steps::setup_linux run: ./script/linux shell: bash -euxo pipefail {0} - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} - - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 - with: - cache: rust + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: steps::setup_node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: @@ -202,6 +205,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: ./script/bundle-linux run: ./script/bundle-linux shell: bash -euxo pipefail {0} @@ -242,6 +248,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: ./script/bundle-linux run: ./script/bundle-linux shell: bash -euxo pipefail {0} diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index f3efe70a498e5718740adca572358c8b7bb81609..431308bd1cfdf6f4385a8f462edcab8c5769ba5f 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -93,6 +93,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: ./script/bundle-linux run: ./script/bundle-linux shell: bash -euxo pipefail {0} @@ -140,6 +143,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: 
steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: ./script/bundle-linux run: ./script/bundle-linux shell: bash -euxo pipefail {0} diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml index fa686148590785f1ba93501ecd873d19af6bcb2b..e13bae4031174f057e555db7f2d779208d55456e 100644 --- a/.github/workflows/run_agent_evals.yml +++ b/.github/workflows/run_agent_evals.yml @@ -40,6 +40,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: steps::setup_cargo_config run: | mkdir -p ./../.cargo diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index 5cf10d11f3f9444c9b57f594897dbfa0e435f0b4..ddedd38ebedb647f07e162286365f4e6b95f45a2 100644 --- a/.github/workflows/run_bundling.yml +++ b/.github/workflows/run_bundling.yml @@ -34,6 +34,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: ./script/bundle-linux run: ./script/bundle-linux shell: bash -euxo pipefail {0} @@ -74,6 +77,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: ./script/bundle-linux run: ./script/bundle-linux shell: bash -euxo pipefail {0} diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index db11382d8ba3b5115653ecfc88c4ed5027511d00..a7d0a145b6d26d964020f48c321556032ae567ed 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -143,16 +143,19 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust - name: steps::setup_linux run: ./script/linux shell: bash -euxo pipefail {0} - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} - - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 - with: - cache: rust + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: steps::setup_node uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 with: @@ -232,6 +235,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: steps::setup_cargo_config run: | mkdir -p ./../.cargo @@ -263,16 +269,19 @@ jobs: mkdir -p ./../.cargo cp ./.cargo/ci-config.toml ./../.cargo/config.toml shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust - name: steps::setup_linux run: ./script/linux shell: bash -euxo pipefail {0} - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} - - name: steps::cache_rust_dependencies_namespace - uses: namespacelabs/nscloud-cache-action@v1 - with: - cache: rust + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: cargo build -p collab run: cargo build -p collab shell: 
bash -euxo pipefail {0} @@ -348,6 +357,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: run_tests::check_docs::install_mdbook uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08 with: diff --git a/.github/workflows/run_unit_evals.yml b/.github/workflows/run_unit_evals.yml index e4a22c3f164b78699e36ea55854731f4657c3c79..a41b4fb6d7058a97dcd5a98894a0d2c4687ceed4 100644 --- a/.github/workflows/run_unit_evals.yml +++ b/.github/workflows/run_unit_evals.yml @@ -33,6 +33,9 @@ jobs: - name: steps::install_mold run: ./script/install-mold shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} - name: steps::cargo_install_nextest run: cargo install cargo-nextest --locked shell: bash -euxo pipefail {0} diff --git a/crates/extension_host/src/extension_store_test.rs b/crates/extension_host/src/extension_store_test.rs index 4104fdff1aa7b1d105c5fc5eae54ebe94c160f5a..7ba368c667ef5bdd37a09b53b697a062a2a0fc8b 100644 --- a/crates/extension_host/src/extension_store_test.rs +++ b/crates/extension_host/src/extension_store_test.rs @@ -31,8 +31,7 @@ use util::test::TempTree; #[cfg(test)] #[ctor::ctor] fn init_logger() { - // show info logs while we debug the extension_store tests hanging. - zlog::init_test_with("info"); + zlog::init_test(); } #[gpui::test] @@ -532,6 +531,7 @@ async fn test_extension_store(cx: &mut TestAppContext) { #[gpui::test] async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { + log::info!("Initializing test"); init_test(cx); cx.executor().allow_parking(); @@ -556,6 +556,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { let extensions_dir = extensions_tree.path().canonicalize().unwrap(); let project_dir = project_dir.path().canonicalize().unwrap(); + log::info!("Setting up test"); + let project = Project::test(fs.clone(), [project_dir.as_path()], cx).await; let proxy = Arc::new(ExtensionHostProxy::new()); @@ -674,6 +676,8 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) { ) }); + log::info!("Flushing events"); + // Ensure that debounces fire. let mut events = cx.events(&extension_store); let executor = cx.executor(); diff --git a/crates/zlog/src/zlog.rs b/crates/zlog/src/zlog.rs index 29eff31195657c715d2b0539c1544775fbd5b16a..04f58e91025b9598038a075c470eb4750d92e4cf 100644 --- a/crates/zlog/src/zlog.rs +++ b/crates/zlog/src/zlog.rs @@ -30,16 +30,17 @@ pub fn init_test() { } } -pub fn init_test_with(filter: &str) { - if try_init(Some(filter.to_owned())).is_ok() { - init_output_stdout(); - } -} - fn get_env_config() -> Option { std::env::var("ZED_LOG") .or_else(|_| std::env::var("RUST_LOG")) .ok() + .or_else(|| { + if std::env::var("CI").is_ok() { + Some("info".to_owned()) + } else { + None + } + }) } pub fn process_env(filter: Option) { diff --git a/script/download-wasi-sdk b/script/download-wasi-sdk new file mode 100755 index 0000000000000000000000000000000000000000..8cf36ffda187223a4648f1fc763b84919d60470e --- /dev/null +++ b/script/download-wasi-sdk @@ -0,0 +1,60 @@ +#!/bin/bash + +# Check if ./target/wasi-sdk exists +if [ ! -d "./target/wasi-sdk" ]; then + echo "WASI SDK not found, downloading v25..." 
+ + # Determine OS and architecture + OS=$(uname -s | tr '[:upper:]' '[:lower:]') + ARCH=$(uname -m) + + # Map architecture names to WASI SDK format + case $ARCH in + x86_64) + ARCH="x86_64" + ;; + arm64|aarch64) + ARCH="arm64" + ;; + *) + echo "Unsupported architecture: $ARCH" + exit 1 + ;; + esac + + # Map OS names to WASI SDK format + case $OS in + darwin) + OS="macos" + ;; + linux) + OS="linux" + ;; + mingw*|msys*|cygwin*) + OS="mingw" + ;; + *) + echo "Unsupported OS: $OS" + exit 1 + ;; + esac + + # Construct download URL + WASI_SDK_VERSION="25" + WASI_SDK_URL="https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-${ARCH}-${OS}.tar.gz" + + echo "Downloading from: $WASI_SDK_URL" + + # Create target directory if it doesn't exist + mkdir -p ./target + + # Download and extract + curl -L "$WASI_SDK_URL" | tar -xz -C ./target + + # Rename the extracted directory to wasi-sdk + mv "./target/wasi-sdk-${WASI_SDK_VERSION}.0-${ARCH}-${OS}" "./target/wasi-sdk" + + echo "WASI SDK v25 installed successfully" +else + echo "WASI SDK already exists at ./target/wasi-sdk" +fi diff --git a/tooling/xtask/src/tasks/workflows/run_tests.rs b/tooling/xtask/src/tasks/workflows/run_tests.rs index e2f1303c15a5e9e6c471b60350bb3834d8afd1d8..2c2cd306cd26ea3695f6f94db3e571cffe427b0c 100644 --- a/tooling/xtask/src/tasks/workflows/run_tests.rs +++ b/tooling/xtask/src/tasks/workflows/run_tests.rs @@ -292,8 +292,8 @@ fn check_workspace_binaries() -> NamedJob { .runs_on(runners::LINUX_LARGE) .add_step(steps::checkout_repo()) .add_step(steps::setup_cargo_config(Platform::Linux)) - .map(steps::install_linux_dependencies) .add_step(steps::cache_rust_dependencies_namespace()) + .map(steps::install_linux_dependencies) .add_step(steps::script("cargo build -p collab")) .add_step(steps::script("cargo build --workspace --bins --examples")) .add_step(steps::cleanup_cargo_config(Platform::Linux)), @@ -312,13 +312,13 @@ pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob { .runs_on(runner) .add_step(steps::checkout_repo()) .add_step(steps::setup_cargo_config(platform)) + .when(platform == Platform::Linux, |this| { + this.add_step(steps::cache_rust_dependencies_namespace()) + }) .when( platform == Platform::Linux, steps::install_linux_dependencies, ) - .when(platform == Platform::Linux, |this| { - this.add_step(steps::cache_rust_dependencies_namespace()) - }) .add_step(steps::setup_node()) .add_step(steps::clippy(platform)) .add_step(steps::cargo_install_nextest(platform)) diff --git a/tooling/xtask/src/tasks/workflows/steps.rs b/tooling/xtask/src/tasks/workflows/steps.rs index 6e7d6130abb3fbdb04389c9d226f067397dcf41d..3ca3610a5eca612d98154495e9ad6daf03d09997 100644 --- a/tooling/xtask/src/tasks/workflows/steps.rs +++ b/tooling/xtask/src/tasks/workflows/steps.rs @@ -113,8 +113,14 @@ fn install_mold() -> Step { named::bash("./script/install-mold") } +fn download_wasi_sdk() -> Step { + named::bash("./script/download-wasi-sdk") +} + pub(crate) fn install_linux_dependencies(job: Job) -> Job { - job.add_step(setup_linux()).add_step(install_mold()) + job.add_step(setup_linux()) + .add_step(install_mold()) + .add_step(download_wasi_sdk()) } pub fn script(name: &str) -> Step { From 2c375e2e0a310266002abc483945f71f776be7a0 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Mon, 10 Nov 2025 16:50:52 -0300 Subject: [PATCH 24/74] agent_ui: Ensure message editor placeholder text is accurate (#42375) This PR creates a dedicated 
function for the agent panel message editor's placeholder text so that we can wait for the agent initialization to capture whether they support slash commands or not. On the one (nice) hand, this allow us to stop matching agents by name and make this a bit more generic. On the other (bad) hand, the "/ for commands" bit should take a little second to show up because we can only know whether an agent supports it after it is initialized. This is particularly relevant now that we have agents coming from extensions and for them, we would obviously not be able to match by name. Release Notes: - agent: Fixed agent panel message editor's placeholder text by making it more accurate as to whether agents support slash commands, particularly those coming from extensions. --- crates/agent_ui/src/acp/message_editor.rs | 11 ++++++++ crates/agent_ui/src/acp/thread_view.rs | 34 ++++++++++++++--------- 2 files changed, 32 insertions(+), 13 deletions(-) diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index 7789564d3b8b0c03ebb207e634d718a359befafe..4f919a6c0425e48575d09380339730d7ddb26172 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -1195,6 +1195,17 @@ impl MessageEditor { self.editor.read(cx).text(cx) } + pub fn set_placeholder_text( + &mut self, + placeholder: &str, + window: &mut Window, + cx: &mut Context, + ) { + self.editor.update(cx, |editor, cx| { + editor.set_placeholder_text(placeholder, window, cx); + }); + } + #[cfg(test)] pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context) { self.editor.update(cx, |editor, cx| { diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 306976473d772f55cfdf1ee9caa65eab4f1d5552..17daf5a18e97829d5e4d64d30d266b5d5d271e7b 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -337,19 +337,7 @@ impl AcpThreadView { let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default())); let available_commands = Rc::new(RefCell::new(vec![])); - let placeholder = if agent.name() == "Zed Agent" { - format!("Message the {} — @ to include context", agent.name()) - } else if agent.name() == "Claude Code" - || agent.name() == "Codex" - || !available_commands.borrow().is_empty() - { - format!( - "Message {} — @ to include context, / for commands", - agent.name() - ) - } else { - format!("Message {} — @ to include context", agent.name()) - }; + let placeholder = placeholder_text(agent.name().as_ref(), false); let message_editor = cx.new(|cx| { let mut editor = MessageEditor::new( @@ -1456,7 +1444,14 @@ impl AcpThreadView { }); } + let has_commands = !available_commands.is_empty(); self.available_commands.replace(available_commands); + + let new_placeholder = placeholder_text(self.agent.name().as_ref(), has_commands); + + self.message_editor.update(cx, |editor, cx| { + editor.set_placeholder_text(&new_placeholder, window, cx); + }); } AcpThreadEvent::ModeUpdated(_mode) => { // The connection keeps track of the mode @@ -5708,6 +5703,19 @@ fn loading_contents_spinner(size: IconSize) -> AnyElement { .into_any_element() } +fn placeholder_text(agent_name: &str, has_commands: bool) -> String { + if agent_name == "Zed Agent" { + format!("Message the {} — @ to include context", agent_name) + } else if has_commands { + format!( + "Message {} — @ to include context, / for commands", + agent_name + ) + } else { + format!("Message {} — @ to include context", 
agent_name) + } +} + impl Focusable for AcpThreadView { fn focus_handle(&self, cx: &App) -> FocusHandle { match self.thread_state { From 87d0401e647e1cf9a93d57ae5a993d3b1accb5c6 Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Mon, 10 Nov 2025 21:00:50 +0100 Subject: [PATCH 25/74] editor: Show relative line numbers for deleted rows (#42378) Closes #42191 This PR adds support for relative line numbers in deleted hunks. Note that this only applies in cases where there is a form of relative numbering. It also adds some tests for this functionality as well as missing tests for other cases in line layouting that was previously untested. Release Notes: - Line numbers will now be shown in deleted git hunks if relative line numbering is enabled --- crates/editor/src/element.rs | 119 +++++++++++++++++++++++++++++++---- 1 file changed, 107 insertions(+), 12 deletions(-) diff --git a/crates/editor/src/element.rs b/crates/editor/src/element.rs index 67f6350ce625e96fcbe8734bf690fb557b86046c..7442ccc7442a11ab2f845cc637e5ad416085af02 100644 --- a/crates/editor/src/element.rs +++ b/crates/editor/src/element.rs @@ -3255,11 +3255,9 @@ impl EditorElement { (newest_selection_head, relative) }); - let relative_to = if relative.enabled() { - Some(newest_selection_head.row()) - } else { - None - }; + let relative_line_numbers_enabled = relative.enabled(); + let relative_to = relative_line_numbers_enabled.then(|| newest_selection_head.row()); + let relative_rows = self.calculate_relative_line_numbers(snapshot, &rows, relative_to, relative.wrapped()); let mut line_number = String::new(); @@ -3271,17 +3269,18 @@ impl EditorElement { } else { row_info.buffer_row? + 1 }; - let number = relative_rows - .get(&display_row) - .unwrap_or(&non_relative_number); - write!(&mut line_number, "{number}").unwrap(); - if row_info - .diff_status - .is_some_and(|status| status.is_deleted()) + let relative_number = relative_rows.get(&display_row); + if !(relative_line_numbers_enabled && relative_number.is_some()) + && row_info + .diff_status + .is_some_and(|status| status.is_deleted()) { return None; } + let number = relative_number.unwrap_or(&non_relative_number); + write!(&mut line_number, "{number}").unwrap(); + let color = active_rows .get(&display_row) .map(|spec| { @@ -11455,6 +11454,46 @@ mod tests { assert_eq!(relative_rows[&DisplayRow(0)], 5); assert_eq!(relative_rows[&DisplayRow(1)], 4); assert_eq!(relative_rows[&DisplayRow(2)], 3); + + const DELETED_LINE: u32 = 3; + let layouts = cx + .update_window(*window, |_, window, cx| { + element.layout_line_numbers( + None, + GutterDimensions { + left_padding: Pixels::ZERO, + right_padding: Pixels::ZERO, + width: px(30.0), + margin: Pixels::ZERO, + git_blame_entries_width: None, + }, + line_height, + gpui::Point::default(), + DisplayRow(0)..DisplayRow(6), + &(0..6) + .map(|row| RowInfo { + buffer_row: Some(row), + diff_status: (row == DELETED_LINE).then(|| { + DiffHunkStatus::deleted( + buffer_diff::DiffHunkSecondaryStatus::NoSecondaryHunk, + ) + }), + ..Default::default() + }) + .collect::>(), + &BTreeMap::default(), + Some(DisplayPoint::new(DisplayRow(0), 0)), + &snapshot, + window, + cx, + ) + }) + .unwrap(); + assert_eq!(layouts.len(), 5,); + assert!( + layouts.get(&MultiBufferRow(DELETED_LINE)).is_none(), + "Deleted line should not have a line number" + ); } #[gpui::test] @@ -11530,6 +11569,62 @@ mod tests { // current line has no relative number assert_eq!(relative_rows[&DisplayRow(4)], 1); assert_eq!(relative_rows[&DisplayRow(5)], 2); + + let layouts = cx + 
.update_window(*window, |_, window, cx| { + element.layout_line_numbers( + None, + GutterDimensions { + left_padding: Pixels::ZERO, + right_padding: Pixels::ZERO, + width: px(30.0), + margin: Pixels::ZERO, + git_blame_entries_width: None, + }, + line_height, + gpui::Point::default(), + DisplayRow(0)..DisplayRow(6), + &(0..6) + .map(|row| RowInfo { + buffer_row: Some(row), + diff_status: Some(DiffHunkStatus::deleted( + buffer_diff::DiffHunkSecondaryStatus::NoSecondaryHunk, + )), + ..Default::default() + }) + .collect::>(), + &BTreeMap::from_iter([(DisplayRow(0), LineHighlightSpec::default())]), + Some(DisplayPoint::new(DisplayRow(0), 0)), + &snapshot, + window, + cx, + ) + }) + .unwrap(); + assert!( + layouts.is_empty(), + "Deleted lines should have no line number" + ); + + let relative_rows = window + .update(cx, |editor, window, cx| { + let snapshot = editor.snapshot(window, cx); + element.calculate_relative_line_numbers( + &snapshot, + &(DisplayRow(0)..DisplayRow(6)), + Some(DisplayRow(3)), + true, + ) + }) + .unwrap(); + + // Deleted lines should still have relative numbers + assert_eq!(relative_rows[&DisplayRow(0)], 3); + assert_eq!(relative_rows[&DisplayRow(1)], 2); + assert_eq!(relative_rows[&DisplayRow(2)], 1); + // current line, even if deleted, has no relative number + assert_eq!(relative_rows[&DisplayRow(4)], 1); + assert_eq!(relative_rows[&DisplayRow(5)], 2); } #[gpui::test] From 62e3a4921206f0b09d85815559c1f19c560ccf3d Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Mon, 10 Nov 2025 21:08:48 +0100 Subject: [PATCH 26/74] editor: Fix rare panic in wrap map (#39379) Closes ZED-1SV Closes ZED-TG Closes ZED-22G Closes ZED-22J This seems to fix the reported error there, but ultimately, this might benefit from a test to reproduce. Hence, marking as draft for now. Release Notes: - Fixed a rare panic whilst wrapping lines. --- crates/editor/src/display_map/block_map.rs | 20 ++++++++------------ crates/editor/src/display_map/wrap_map.rs | 2 +- 2 files changed, 9 insertions(+), 13 deletions(-) diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 54c008ef60cdaf3a6c7b9231bfbb90d2caf0b3ae..639d2a06579ca16eb938f3d23908e48b702254ef 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -19,7 +19,7 @@ use std::{ cell::RefCell, cmp::{self, Ordering}, fmt::Debug, - ops::{Deref, DerefMut, Range, RangeBounds, RangeInclusive}, + ops::{Deref, DerefMut, Not, Range, RangeBounds, RangeInclusive}, sync::{ Arc, atomic::{AtomicUsize, Ordering::SeqCst}, @@ -1879,18 +1879,14 @@ impl Iterator for BlockRows<'_> { } let transform = self.transforms.item()?; - if let Some(block) = transform.block.as_ref() { - if block.is_replacement() && self.transforms.start().0 == self.output_row { - if matches!(block, Block::FoldedBuffer { .. }) { - Some(RowInfo::default()) - } else { - Some(self.input_rows.next().unwrap()) - } - } else { - Some(RowInfo::default()) - } + if transform.block.as_ref().is_none_or(|block| { + block.is_replacement() + && self.transforms.start().0 == self.output_row + && matches!(block, Block::FoldedBuffer { .. 
}).not() + }) { + self.input_rows.next() } else { - Some(self.input_rows.next().unwrap()) + Some(RowInfo::default()) } } } diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index ffdae5913229de33faeab195dd5211229c641f73..6d9704b5f93c0ce48d413babdd59997b02f093e6 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -965,7 +965,7 @@ impl<'a> Iterator for WrapChunks<'a> { } if self.input_chunk.text.is_empty() { - self.input_chunk = self.input_chunks.next().unwrap(); + self.input_chunk = self.input_chunks.next()?; } let mut input_len = 0; From aaf2f9d309195e207845a7b2b486a8edf69570a8 Mon Sep 17 00:00:00 2001 From: John Tur Date: Mon, 10 Nov 2025 15:11:18 -0500 Subject: [PATCH 27/74] Ignore "Option as Meta" setting outside of macOS (#42367) The "Option" key only exists on a Mac. On other operating systems, it is always expected that the Alt key generates escaped characters. Fixes https://github.com/zed-industries/zed/issues/40583 Release Notes: - N/A --- crates/terminal/src/mappings/keys.rs | 4 ++-- crates/terminal/src/terminal.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/terminal/src/mappings/keys.rs b/crates/terminal/src/mappings/keys.rs index b003bf82ad368cd9938788b26a037895677f2caa..8073961fc5451728c23582f5eee21495ee002e63 100644 --- a/crates/terminal/src/mappings/keys.rs +++ b/crates/terminal/src/mappings/keys.rs @@ -46,7 +46,7 @@ impl AlacModifiers { pub fn to_esc_str( keystroke: &Keystroke, mode: &TermMode, - alt_is_meta: bool, + option_as_meta: bool, ) -> Option> { let modifiers = AlacModifiers::new(keystroke); @@ -218,7 +218,7 @@ pub fn to_esc_str( } } - if alt_is_meta { + if !cfg!(target_os = "macos") || option_as_meta { let is_alt_lowercase_ascii = modifiers == AlacModifiers::Alt && keystroke.key.is_ascii(); let is_alt_uppercase_ascii = keystroke.modifiers.alt && keystroke.modifiers.shift && keystroke.key.is_ascii(); diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 8283f5fad77ff3b6a4020db06439714d38bf119b..4f6f0e75ee6f38615a2d82d7a4ad3ee0c06c2323 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -1490,14 +1490,14 @@ impl Terminal { } } - pub fn try_keystroke(&mut self, keystroke: &Keystroke, alt_is_meta: bool) -> bool { + pub fn try_keystroke(&mut self, keystroke: &Keystroke, option_as_meta: bool) -> bool { if self.vi_mode_enabled { self.vi_motion(keystroke); return true; } // Keep default terminal behavior - let esc = to_esc_str(keystroke, &self.last_content.mode, alt_is_meta); + let esc = to_esc_str(keystroke, &self.last_content.mode, option_as_meta); if let Some(esc) = esc { match esc { Cow::Borrowed(string) => self.input(string.as_bytes()), From efcd7f7d10a1e236010d01acdb37867b7ff9e333 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ole=20J=C3=B8rgen=20Br=C3=B8nner?= Date: Mon, 10 Nov 2025 22:08:40 +0100 Subject: [PATCH 28/74] Slightly improve completion in settings.json (for lsp..) (#42263) Document "any-typed" (`serde_json::Value`) "lsp" keys to include them in json-language-server completions. The vscode-json-languageserver seems to skip generically typed keys when offering completion. For this schema ``` "LspSettings": { "type": "object", "properties": { ... "initialization_options": true, ... } } ``` "initialization_options" is not offered in the completion. 
The effect is easy to verify by triggering completion inside:

```
"lsp": {
  "basedpyright": {
     COMPLETE HERE
```

By adding a documentation string, the keys are offered even if they are generically typed.

---

Note: I did some cursory research into whether it's possible to make vscode-json-languageserver change this behavior, without success. IMO, not offering completions here is a bug (or at a minimum should be configurable).

---

Release Notes:

- N/A

---------

Co-authored-by: Kirill Bulatov
---
 crates/settings/src/settings_content/project.rs | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/crates/settings/src/settings_content/project.rs b/crates/settings/src/settings_content/project.rs
index 56c3ff1c78ead6b113799c1c11552e0732b62345..e7a3798ebfa827cd287255f464f9e35bddd619f4 100644
--- a/crates/settings/src/settings_content/project.rs
+++ b/crates/settings/src/settings_content/project.rs
@@ -26,6 +26,7 @@ pub struct ProjectSettingsContent {
     /// The following settings can be overridden for specific language servers:
     /// - initialization_options
     ///
+    ///
     /// To override settings for a language, add an entry for that language server's
     /// name to the lsp value.
     /// Default: null
@@ -108,7 +109,19 @@ pub struct WorktreeSettingsContent {
 #[serde(rename_all = "snake_case")]
 pub struct LspSettings {
     pub binary: Option,
+    /// Options passed to the language server at startup.
+    ///
+    /// Ref: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize
+    ///
+    /// Consult the documentation for the specific language server to see what settings
+    /// are supported.
     pub initialization_options: Option,
+    /// Language server settings.
+    ///
+    /// Ref: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_configuration
+    ///
+    /// Consult the documentation for the specific LSP to see what settings
+    /// are supported.
     pub settings: Option,
     /// If the server supports sending tasks over LSP extensions,
     /// this setting can be used to enable or disable them in Zed.

From a44fc9a1de24f30c6805d78ce3f2f21c12034603 Mon Sep 17 00:00:00 2001
From: Connor Tsui <87130162+connortsui20@users.noreply.github.com>
Date: Mon, 10 Nov 2025 16:26:01 -0500
Subject: [PATCH 29/74] Rename `ThemeMode` to `ThemeAppearanceMode` (#42279)

There was a TODO in `crates/settings/src/settings_content/theme.rs` to make this rename.

This PR is just splitting off this change from https://github.com/zed-industries/zed/pull/40035 to make reviewing that one a bit easier since that PR is a bit more involved than expected.
Release Notes: - N/A Signed-off-by: Connor Tsui --- crates/onboarding/src/basics_page.rs | 37 ++++++++++++------- crates/settings/src/settings_content/theme.rs | 7 ++-- crates/settings_ui/src/page_data.rs | 16 ++++---- crates/settings_ui/src/settings_ui.rs | 2 +- crates/theme/src/settings.rs | 36 +++++++++--------- 5 files changed, 53 insertions(+), 45 deletions(-) diff --git a/crates/onboarding/src/basics_page.rs b/crates/onboarding/src/basics_page.rs index eaf9c41a53dc6c4b0d8ef9a93a9ed8423ddf2db6..198871c9eb55b41c44a5f0db162eb446c1760ba9 100644 --- a/crates/onboarding/src/basics_page.rs +++ b/crates/onboarding/src/basics_page.rs @@ -5,7 +5,7 @@ use fs::Fs; use gpui::{Action, App, IntoElement}; use settings::{BaseKeymap, Settings, update_settings_file}; use theme::{ - Appearance, SystemAppearance, ThemeMode, ThemeName, ThemeRegistry, ThemeSelection, + Appearance, SystemAppearance, ThemeAppearanceMode, ThemeName, ThemeRegistry, ThemeSelection, ThemeSettings, }; use ui::{ @@ -44,8 +44,8 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement let theme_mode = theme_selection .mode() .unwrap_or_else(|| match *system_appearance { - Appearance::Light => ThemeMode::Light, - Appearance::Dark => ThemeMode::Dark, + Appearance::Light => ThemeAppearanceMode::Light, + Appearance::Dark => ThemeAppearanceMode::Dark, }); return v_flex() @@ -54,7 +54,12 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement h_flex().justify_between().child(Label::new("Theme")).child( ToggleButtonGroup::single_row( "theme-selector-onboarding-dark-light", - [ThemeMode::Light, ThemeMode::Dark, ThemeMode::System].map(|mode| { + [ + ThemeAppearanceMode::Light, + ThemeAppearanceMode::Dark, + ThemeAppearanceMode::System, + ] + .map(|mode| { const MODE_NAMES: [SharedString; 3] = [ SharedString::new_static("Light"), SharedString::new_static("Dark"), @@ -100,13 +105,13 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement let theme_mode = theme_selection .mode() .unwrap_or_else(|| match *system_appearance { - Appearance::Light => ThemeMode::Light, - Appearance::Dark => ThemeMode::Dark, + Appearance::Light => ThemeAppearanceMode::Light, + Appearance::Dark => ThemeAppearanceMode::Dark, }); let appearance = match theme_mode { - ThemeMode::Light => Appearance::Light, - ThemeMode::Dark => Appearance::Dark, - ThemeMode::System => *system_appearance, + ThemeAppearanceMode::Light => Appearance::Light, + ThemeAppearanceMode::Dark => Appearance::Dark, + ThemeAppearanceMode::System => *system_appearance, }; let current_theme_name: SharedString = theme_selection.name(appearance).0.into(); @@ -164,7 +169,7 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement } }) .map(|this| { - if theme_mode == ThemeMode::System { + if theme_mode == ThemeAppearanceMode::System { let (light, dark) = ( theme_registry.get(LIGHT_THEMES[index]).unwrap(), theme_registry.get(DARK_THEMES[index]).unwrap(), @@ -189,23 +194,27 @@ fn render_theme_section(tab_index: &mut isize, cx: &mut App) -> impl IntoElement }) } - fn write_mode_change(mode: ThemeMode, cx: &mut App) { + fn write_mode_change(mode: ThemeAppearanceMode, cx: &mut App) { let fs = ::global(cx); update_settings_file(fs, cx, move |settings, _cx| { theme::set_mode(settings, mode); }); } - fn write_theme_change(theme: impl Into>, theme_mode: ThemeMode, cx: &mut App) { + fn write_theme_change( + theme: impl Into>, + theme_mode: ThemeAppearanceMode, + cx: &mut App, + ) { let fs = ::global(cx); let 
theme = theme.into(); update_settings_file(fs, cx, move |settings, cx| { - if theme_mode == ThemeMode::System { + if theme_mode == ThemeAppearanceMode::System { let (light_theme, dark_theme) = get_theme_family_themes(&theme).unwrap_or((theme.as_ref(), theme.as_ref())); settings.theme.theme = Some(settings::ThemeSelection::Dynamic { - mode: ThemeMode::System, + mode: ThemeAppearanceMode::System, light: ThemeName(light_theme.into()), dark: ThemeName(dark_theme.into()), }); diff --git a/crates/settings/src/settings_content/theme.rs b/crates/settings/src/settings_content/theme.rs index 80b543a20aa389ca05b13371f235ebc0dda9c82e..8b87cc15196b7a562a794eb4a1effeb5cb102ef6 100644 --- a/crates/settings/src/settings_content/theme.rs +++ b/crates/settings/src/settings_content/theme.rs @@ -157,7 +157,7 @@ pub enum ThemeSelection { Dynamic { /// The mode used to determine which theme to use. #[serde(default)] - mode: ThemeMode, + mode: ThemeAppearanceMode, /// The theme to use for light mode. light: ThemeName, /// The theme to use for dark mode. @@ -186,7 +186,7 @@ pub enum IconThemeSelection { Dynamic { /// The mode used to determine which theme to use. #[serde(default)] - mode: ThemeMode, + mode: ThemeAppearanceMode, /// The icon theme to use for light mode. light: IconThemeName, /// The icon theme to use for dark mode. @@ -194,7 +194,6 @@ pub enum IconThemeSelection { }, } -// TODO: Rename ThemeMode -> ThemeAppearanceMode /// The mode use to select a theme. /// /// `Light` and `Dark` will select their respective themes. @@ -215,7 +214,7 @@ pub enum IconThemeSelection { strum::VariantNames, )] #[serde(rename_all = "snake_case")] -pub enum ThemeMode { +pub enum ThemeAppearanceMode { /// Use the specified `light` theme. Light, diff --git a/crates/settings_ui/src/page_data.rs b/crates/settings_ui/src/page_data.rs index 392796c091d379429f2ae787b14a75841bef12fa..e3165fbc79850484950e90bdcdbb81338df9974d 100644 --- a/crates/settings_ui/src/page_data.rs +++ b/crates/settings_ui/src/page_data.rs @@ -300,9 +300,9 @@ pub(crate) fn settings_data(cx: &App) -> Vec { settings::ThemeSelection::Static(_) => return, settings::ThemeSelection::Dynamic { mode, light, dark } => { match mode { - theme::ThemeMode::Light => light.clone(), - theme::ThemeMode::Dark => dark.clone(), - theme::ThemeMode::System => dark.clone(), // no cx, can't determine correct choice + theme::ThemeAppearanceMode::Light => light.clone(), + theme::ThemeAppearanceMode::Dark => dark.clone(), + theme::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice } }, }; @@ -315,7 +315,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }; settings::ThemeSelection::Dynamic { - mode: settings::ThemeMode::System, + mode: settings::ThemeAppearanceMode::System, light: static_name.clone(), dark: static_name, } @@ -470,9 +470,9 @@ pub(crate) fn settings_data(cx: &App) -> Vec { settings::IconThemeSelection::Static(_) => return, settings::IconThemeSelection::Dynamic { mode, light, dark } => { match mode { - theme::ThemeMode::Light => light.clone(), - theme::ThemeMode::Dark => dark.clone(), - theme::ThemeMode::System => dark.clone(), // no cx, can't determine correct choice + theme::ThemeAppearanceMode::Light => light.clone(), + theme::ThemeAppearanceMode::Dark => dark.clone(), + theme::ThemeAppearanceMode::System => dark.clone(), // no cx, can't determine correct choice } }, }; @@ -485,7 +485,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec { }; settings::IconThemeSelection::Dynamic { - mode: settings::ThemeMode::System, + mode: 
settings::ThemeAppearanceMode::System, light: static_name.clone(), dark: static_name, } diff --git a/crates/settings_ui/src/settings_ui.rs b/crates/settings_ui/src/settings_ui.rs index ea0e8760116c254f7777ecdd2fab1e956be6fe6a..6a561086f2d614eb8fc06c5be146b5e02dc05b3d 100644 --- a/crates/settings_ui/src/settings_ui.rs +++ b/crates/settings_ui/src/settings_ui.rs @@ -486,7 +486,7 @@ fn init_renderers(cx: &mut App) { .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_dropdown) - .add_basic_renderer::(render_dropdown) + .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_theme_picker) .add_basic_renderer::(render_dropdown) .add_basic_renderer::(render_icon_theme_picker) diff --git a/crates/theme/src/settings.rs b/crates/theme/src/settings.rs index 024eca7a5c7d82e9306468e30b53fa897f470adf..a753859d52677974902a36a5d67ea86611e47006 100644 --- a/crates/theme/src/settings.rs +++ b/crates/theme/src/settings.rs @@ -11,7 +11,7 @@ use gpui::{ use refineable::Refineable; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; -pub use settings::{FontFamilyName, IconThemeName, ThemeMode, ThemeName}; +pub use settings::{FontFamilyName, IconThemeName, ThemeAppearanceMode, ThemeName}; use settings::{RegisterSetting, Settings, SettingsContent}; use std::sync::Arc; @@ -208,7 +208,7 @@ pub enum ThemeSelection { Dynamic { /// The mode used to determine which theme to use. #[serde(default)] - mode: ThemeMode, + mode: ThemeAppearanceMode, /// The theme to use for light mode. light: ThemeName, /// The theme to use for dark mode. @@ -233,9 +233,9 @@ impl ThemeSelection { match self { Self::Static(theme) => theme.clone(), Self::Dynamic { mode, light, dark } => match mode { - ThemeMode::Light => light.clone(), - ThemeMode::Dark => dark.clone(), - ThemeMode::System => match system_appearance { + ThemeAppearanceMode::Light => light.clone(), + ThemeAppearanceMode::Dark => dark.clone(), + ThemeAppearanceMode::System => match system_appearance { Appearance::Light => light.clone(), Appearance::Dark => dark.clone(), }, @@ -244,7 +244,7 @@ impl ThemeSelection { } /// Returns the [ThemeMode] for the [ThemeSelection]. - pub fn mode(&self) -> Option { + pub fn mode(&self) -> Option { match self { ThemeSelection::Static(_) => None, ThemeSelection::Dynamic { mode, .. } => Some(*mode), @@ -260,7 +260,7 @@ pub enum IconThemeSelection { /// A dynamic icon theme selection, which can change based on the [`ThemeMode`]. Dynamic { /// The mode used to determine which theme to use. - mode: ThemeMode, + mode: ThemeAppearanceMode, /// The icon theme to use for light mode. light: IconThemeName, /// The icon theme to use for dark mode. @@ -285,9 +285,9 @@ impl IconThemeSelection { match self { Self::Static(theme) => theme.clone(), Self::Dynamic { mode, light, dark } => match mode { - ThemeMode::Light => light.clone(), - ThemeMode::Dark => dark.clone(), - ThemeMode::System => match system_appearance { + ThemeAppearanceMode::Light => light.clone(), + ThemeAppearanceMode::Dark => dark.clone(), + ThemeAppearanceMode::System => match system_appearance { Appearance::Light => light.clone(), Appearance::Dark => dark.clone(), }, @@ -296,7 +296,7 @@ impl IconThemeSelection { } /// Returns the [`ThemeMode`] for the [`IconThemeSelection`]. - pub fn mode(&self) -> Option { + pub fn mode(&self) -> Option { match self { IconThemeSelection::Static(_) => None, IconThemeSelection::Dynamic { mode, .. 
} => Some(*mode), @@ -315,9 +315,9 @@ pub fn set_theme( let theme_to_update = match selection { settings::ThemeSelection::Static(theme) => theme, settings::ThemeSelection::Dynamic { mode, light, dark } => match mode { - ThemeMode::Light => light, - ThemeMode::Dark => dark, - ThemeMode::System => match appearance { + ThemeAppearanceMode::Light => light, + ThemeAppearanceMode::Dark => dark, + ThemeAppearanceMode::System => match appearance { Appearance::Light => light, Appearance::Dark => dark, }, @@ -342,9 +342,9 @@ pub fn set_icon_theme( let icon_theme_to_update = match selection { settings::IconThemeSelection::Static(theme) => theme, settings::IconThemeSelection::Dynamic { mode, light, dark } => match mode { - ThemeMode::Light => light, - ThemeMode::Dark => dark, - ThemeMode::System => match appearance { + ThemeAppearanceMode::Light => light, + ThemeAppearanceMode::Dark => dark, + ThemeAppearanceMode::System => match appearance { Appearance::Light => light, Appearance::Dark => dark, }, @@ -358,7 +358,7 @@ pub fn set_icon_theme( } /// Sets the mode for the theme. -pub fn set_mode(content: &mut SettingsContent, mode: ThemeMode) { +pub fn set_mode(content: &mut SettingsContent, mode: ThemeAppearanceMode) { let theme = content.theme.as_mut(); if let Some(selection) = theme.theme.as_mut() { From 32ec1037e15d6b9b37dc31b06b1e9911a15f6be8 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 10 Nov 2025 16:39:44 -0500 Subject: [PATCH 30/74] collab: Remove unused models left over from chat (#42390) This PR removes some database models that were left over from the chat feature. Release Notes: - N/A --- crates/collab/src/db/tables.rs | 3 -- .../collab/src/db/tables/channel_message.rs | 47 ------------------- .../src/db/tables/channel_message_mention.rs | 43 ----------------- .../db/tables/observed_channel_messages.rs | 41 ---------------- 4 files changed, 134 deletions(-) delete mode 100644 crates/collab/src/db/tables/channel_message.rs delete mode 100644 crates/collab/src/db/tables/channel_message_mention.rs delete mode 100644 crates/collab/src/db/tables/observed_channel_messages.rs diff --git a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index 32c4570af5893b503f0fcfdaa1759616cf9be387..e619acaaf2bc237caac67dedcb5c738114d260d5 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -6,8 +6,6 @@ pub mod channel; pub mod channel_buffer_collaborator; pub mod channel_chat_participant; pub mod channel_member; -pub mod channel_message; -pub mod channel_message_mention; pub mod contact; pub mod contributor; pub mod embedding; @@ -18,7 +16,6 @@ pub mod language_server; pub mod notification; pub mod notification_kind; pub mod observed_buffer_edits; -pub mod observed_channel_messages; pub mod project; pub mod project_collaborator; pub mod project_repository; diff --git a/crates/collab/src/db/tables/channel_message.rs b/crates/collab/src/db/tables/channel_message.rs deleted file mode 100644 index 2ec776f189c938d4c65a45a75ebe6a24482a314e..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/channel_message.rs +++ /dev/null @@ -1,47 +0,0 @@ -use crate::db::{ChannelId, MessageId, UserId}; -use sea_orm::entity::prelude::*; -use time::PrimitiveDateTime; - -#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "channel_messages")] -pub struct Model { - #[sea_orm(primary_key)] - pub id: MessageId, - pub channel_id: ChannelId, - pub sender_id: UserId, - pub body: String, - pub sent_at: PrimitiveDateTime, - pub 
edited_at: Option, - pub nonce: Uuid, - pub reply_to_message_id: Option, -} - -impl ActiveModelBehavior for ActiveModel {} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::channel::Entity", - from = "Column::ChannelId", - to = "super::channel::Column::Id" - )] - Channel, - #[sea_orm( - belongs_to = "super::user::Entity", - from = "Column::SenderId", - to = "super::user::Column::Id" - )] - Sender, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Channel.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Sender.def() - } -} diff --git a/crates/collab/src/db/tables/channel_message_mention.rs b/crates/collab/src/db/tables/channel_message_mention.rs deleted file mode 100644 index 6155b057f0cf8862cb26f6efff30669d59592eb8..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/channel_message_mention.rs +++ /dev/null @@ -1,43 +0,0 @@ -use crate::db::{MessageId, UserId}; -use sea_orm::entity::prelude::*; - -#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "channel_message_mentions")] -pub struct Model { - #[sea_orm(primary_key)] - pub message_id: MessageId, - #[sea_orm(primary_key)] - pub start_offset: i32, - pub end_offset: i32, - pub user_id: UserId, -} - -impl ActiveModelBehavior for ActiveModel {} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::channel_message::Entity", - from = "Column::MessageId", - to = "super::channel_message::Column::Id" - )] - Message, - #[sea_orm( - belongs_to = "super::user::Entity", - from = "Column::UserId", - to = "super::user::Column::Id" - )] - MentionedUser, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Message.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::MentionedUser.def() - } -} diff --git a/crates/collab/src/db/tables/observed_channel_messages.rs b/crates/collab/src/db/tables/observed_channel_messages.rs deleted file mode 100644 index 18259f844274750ebcb463c7d69d619457055d89..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/observed_channel_messages.rs +++ /dev/null @@ -1,41 +0,0 @@ -use crate::db::{ChannelId, MessageId, UserId}; -use sea_orm::entity::prelude::*; - -#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)] -#[sea_orm(table_name = "observed_channel_messages")] -pub struct Model { - #[sea_orm(primary_key)] - pub user_id: UserId, - pub channel_id: ChannelId, - pub channel_message_id: MessageId, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation { - #[sea_orm( - belongs_to = "super::channel::Entity", - from = "Column::ChannelId", - to = "super::channel::Column::Id" - )] - Channel, - #[sea_orm( - belongs_to = "super::user::Entity", - from = "Column::UserId", - to = "super::user::Column::Id" - )] - User, -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::Channel.def() - } -} - -impl Related for Entity { - fn to() -> RelationDef { - Relation::User.def() - } -} - -impl ActiveModelBehavior for ActiveModel {} From 112b5c16b74b27ff9efee98416402932c80847c5 Mon Sep 17 00:00:00 2001 From: John Tur Date: Mon, 10 Nov 2025 16:45:43 -0500 Subject: [PATCH 31/74] Add QuitMode policy to GPUI (#42391) Applications can select a policy for when the app quits using the new function `Application::with_quit_mode`: - Only on explicit calls to `App::quit` - When the last window is closed - Platform 
default (former on macOS, latter everywhere else) Release Notes: - N/A --- crates/gpui/src/app.rs | 37 ++++++++++++++++++++++++++++++ crates/zed/src/main.rs | 52 ++++++++++++++++++++++-------------------- 2 files changed, 64 insertions(+), 25 deletions(-) diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index d4bd7798187a5b7a358106965d9e41fd85efeffe..864968b9e7a9ad862d9b67a19cc8897524dffb9e 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -169,6 +169,13 @@ impl Application { self } + /// Configures when the application should automatically quit. + /// By default, [`QuitMode::Default`] is used. + pub fn with_quit_mode(self, mode: QuitMode) -> Self { + self.0.borrow_mut().quit_mode = mode; + self + } + /// Start the application. The provided callback will be called once the /// app is fully launched. pub fn run(self, on_finish_launching: F) @@ -238,6 +245,18 @@ type WindowClosedHandler = Box; type ReleaseListener = Box; type NewEntityListener = Box, &mut App) + 'static>; +/// Defines when the application should automatically quit. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] +pub enum QuitMode { + /// Use [`QuitMode::Explicit`] on macOS and [`QuitMode::LastWindowClosed`] on other platforms. + #[default] + Default, + /// Quit automatically when the last window is closed. + LastWindowClosed, + /// Quit only when requested via [`App::quit`]. + Explicit, +} + #[doc(hidden)] #[derive(Clone, PartialEq, Eq)] pub struct SystemWindowTab { @@ -588,6 +607,7 @@ pub struct App { pub(crate) inspector_element_registry: InspectorElementRegistry, #[cfg(any(test, feature = "test-support", debug_assertions))] pub(crate) name: Option<&'static str>, + quit_mode: QuitMode, quitting: bool, } @@ -659,6 +679,7 @@ impl App { inspector_renderer: None, #[cfg(any(feature = "inspector", debug_assertions))] inspector_element_registry: InspectorElementRegistry::default(), + quit_mode: QuitMode::default(), quitting: false, #[cfg(any(test, feature = "test-support", debug_assertions))] @@ -1172,6 +1193,12 @@ impl App { self.http_client = new_client; } + /// Configures when the application should automatically quit. + /// By default, [`QuitMode::Default`] is used. + pub fn set_quit_mode(&mut self, mode: QuitMode) { + self.quit_mode = mode; + } + /// Returns the SVG renderer used by the application. 
pub fn svg_renderer(&self) -> SvgRenderer { self.svg_renderer.clone() @@ -1379,6 +1406,16 @@ impl App { callback(cx); true }); + + let quit_on_empty = match cx.quit_mode { + QuitMode::Explicit => false, + QuitMode::LastWindowClosed => true, + QuitMode::Default => !cfg!(macos), + }; + + if quit_on_empty && cx.windows.is_empty() { + cx.quit(); + } } else { cx.windows.get_mut(id)?.replace(window); } diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 14308450e2adda064de4ded30a7649441b4d2d25..180e0f1f04d1c7b1eddb0156659f697f423967ea 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -15,7 +15,7 @@ use extension::ExtensionHostProxy; use fs::{Fs, RealFs}; use futures::{StreamExt, channel::oneshot, future}; use git::GitHostingProviderRegistry; -use gpui::{App, AppContext, Application, AsyncApp, Focusable as _, UpdateGlobal as _}; +use gpui::{App, AppContext, Application, AsyncApp, Focusable as _, QuitMode, UpdateGlobal as _}; use gpui_tokio::Tokio; use language::LanguageRegistry; @@ -87,31 +87,33 @@ fn files_not_created_on_launch(errors: HashMap>) { .collect::>().join("\n\n"); eprintln!("{message}: {error_details}"); - Application::new().run(move |cx| { - if let Ok(window) = cx.open_window(gpui::WindowOptions::default(), |_, cx| { - cx.new(|_| gpui::Empty) - }) { - window - .update(cx, |_, window, cx| { - let response = window.prompt( - gpui::PromptLevel::Critical, - message, - Some(&error_details), - &["Exit"], - cx, - ); - - cx.spawn_in(window, async move |_, cx| { - response.await?; - cx.update(|_, cx| cx.quit()) + Application::new() + .with_quit_mode(QuitMode::Explicit) + .run(move |cx| { + if let Ok(window) = cx.open_window(gpui::WindowOptions::default(), |_, cx| { + cx.new(|_| gpui::Empty) + }) { + window + .update(cx, |_, window, cx| { + let response = window.prompt( + gpui::PromptLevel::Critical, + message, + Some(&error_details), + &["Exit"], + cx, + ); + + cx.spawn_in(window, async move |_, cx| { + response.await?; + cx.update(|_, cx| cx.quit()) + }) + .detach_and_log_err(cx); }) - .detach_and_log_err(cx); - }) - .log_err(); - } else { - fail_to_open_window(anyhow::anyhow!("{message}: {error_details}"), cx) - } - }) + .log_err(); + } else { + fail_to_open_window(anyhow::anyhow!("{message}: {error_details}"), cx) + } + }) } fn fail_to_open_window_async(e: anyhow::Error, cx: &mut AsyncApp) { From 11b38db3e32f5e4b8e94fbdc1d019d91812ac097 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 10 Nov 2025 16:59:05 -0500 Subject: [PATCH 32/74] collab: Drop `channel_messages` table and its dependents (#42392) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR drops the `channel_messages` table and its dependents—`channel_message_mentions` and `observed_channel_messages`—as they are no longer used. 
Release Notes: - N/A --- .../20221109000000_test_schema.sql | 32 ------------------- .../20251110214057_drop_channel_messages.sql | 3 ++ 2 files changed, 3 insertions(+), 32 deletions(-) create mode 100644 crates/collab/migrations/20251110214057_drop_channel_messages.sql diff --git a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql index f2cbf419f0a64004a2210af216faba2baffca8b4..a736ddfd1fe3334b1b847e820bd1816cb625ddca 100644 --- a/crates/collab/migrations.sqlite/20221109000000_test_schema.sql +++ b/crates/collab/migrations.sqlite/20221109000000_test_schema.sql @@ -291,29 +291,6 @@ CREATE TABLE IF NOT EXISTS "channel_chat_participants" ( CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id"); -CREATE TABLE IF NOT EXISTS "channel_messages" ( - "id" INTEGER PRIMARY KEY AUTOINCREMENT, - "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, - "sender_id" INTEGER NOT NULL REFERENCES users (id), - "body" TEXT NOT NULL, - "sent_at" TIMESTAMP, - "edited_at" TIMESTAMP, - "nonce" BLOB NOT NULL, - "reply_to_message_id" INTEGER DEFAULT NULL -); - -CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id"); - -CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce"); - -CREATE TABLE "channel_message_mentions" ( - "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE, - "start_offset" INTEGER NOT NULL, - "end_offset" INTEGER NOT NULL, - "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, - PRIMARY KEY (message_id, start_offset) -); - CREATE TABLE "channel_members" ( "id" INTEGER PRIMARY KEY AUTOINCREMENT, "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, @@ -408,15 +385,6 @@ CREATE TABLE "observed_buffer_edits" ( CREATE UNIQUE INDEX "index_observed_buffers_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id"); -CREATE TABLE IF NOT EXISTS "observed_channel_messages" ( - "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE, - "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE, - "channel_message_id" INTEGER NOT NULL, - PRIMARY KEY (user_id, channel_id) -); - -CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id"); - CREATE TABLE "notification_kinds" ( "id" INTEGER PRIMARY KEY AUTOINCREMENT, "name" VARCHAR NOT NULL diff --git a/crates/collab/migrations/20251110214057_drop_channel_messages.sql b/crates/collab/migrations/20251110214057_drop_channel_messages.sql new file mode 100644 index 0000000000000000000000000000000000000000..468534542fbb7cee04aee985bfe2143f30d219ad --- /dev/null +++ b/crates/collab/migrations/20251110214057_drop_channel_messages.sql @@ -0,0 +1,3 @@ +drop table observed_channel_messages; +drop table channel_message_mentions; +drop table channel_messages; From a8b04369aebe0882733bf3d152288fd02b2612bc Mon Sep 17 00:00:00 2001 From: Andrew Farkas <6060305+HactarCE@users.noreply.github.com> Date: Mon, 10 Nov 2025 17:00:59 -0500 Subject: [PATCH 33/74] Refactor completions (#42122) This is progress toward multi-word snippets (including snippets with prefixes containing symbols) Release Notes: - Removed `trigger` argument in `ShowCompletions` command --------- Co-authored-by: Conrad Irwin --- crates/agent_ui/src/text_thread_editor.rs | 2 +- crates/editor/src/actions.rs | 11 
+- crates/editor/src/code_context_menus.rs | 9 + crates/editor/src/editor.rs | 261 +++++++++++----------- crates/editor/src/editor_tests.rs | 65 ++++-- 5 files changed, 192 insertions(+), 156 deletions(-) diff --git a/crates/agent_ui/src/text_thread_editor.rs b/crates/agent_ui/src/text_thread_editor.rs index 19063075f9cf7382270c4dbaf4930596a7592676..e7f16b8886c719cf60763f651fe9abb9fe33d828 100644 --- a/crates/agent_ui/src/text_thread_editor.rs +++ b/crates/agent_ui/src/text_thread_editor.rs @@ -478,7 +478,7 @@ impl TextThreadEditor { editor.insert(&format!("/{name}"), window, cx); if command.accepts_arguments() { editor.insert(" ", window, cx); - editor.show_completions(&ShowCompletions::default(), window, cx); + editor.show_completions(&ShowCompletions, window, cx); } }); }); diff --git a/crates/editor/src/actions.rs b/crates/editor/src/actions.rs index 276f20a7aacc9315f27a929876984342edc8d394..e823b06910fba67a38754ece6ad746f5f632e613 100644 --- a/crates/editor/src/actions.rs +++ b/crates/editor/src/actions.rs @@ -213,15 +213,6 @@ pub struct ExpandExcerptsDown { pub(super) lines: u32, } -/// Shows code completion suggestions at the cursor position. -#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)] -#[action(namespace = editor)] -#[serde(deny_unknown_fields)] -pub struct ShowCompletions { - #[serde(default)] - pub(super) trigger: Option, -} - /// Handles text input in the editor. #[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)] #[action(namespace = editor)] @@ -736,6 +727,8 @@ actions!( SelectToStartOfParagraph, /// Extends selection up. SelectUp, + /// Shows code completion suggestions at the cursor position. + ShowCompletions, /// Shows the system character palette. ShowCharacterPalette, /// Shows edit prediction at cursor. 
diff --git a/crates/editor/src/code_context_menus.rs b/crates/editor/src/code_context_menus.rs index b7f3d57870a9504b7e6f9f736a0951b9b4b733e5..9e29cd955a80c7025ef2ff1ee5aaf38c665bed1a 100644 --- a/crates/editor/src/code_context_menus.rs +++ b/crates/editor/src/code_context_menus.rs @@ -252,8 +252,17 @@ enum MarkdownCacheKey { #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum CompletionsMenuSource { + /// Show all completions (words, snippets, LSP) Normal, + /// Show only snippets (not words or LSP) + /// + /// Used after typing a non-word character + SnippetsOnly, + /// Tab stops within a snippet that have a predefined finite set of choices SnippetChoices, + /// Show only words (not snippets or LSP) + /// + /// Used when word completions are explicitly triggered Words { ignore_threshold: bool }, } diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 8c015d09c0717e2df52f8c5f85cead07be95bf50..8c165a6d7ce0a5410000cb21d9616e4c508a6fb3 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3262,7 +3262,7 @@ impl Editor { }; if continue_showing { - self.show_completions(&ShowCompletions { trigger: None }, window, cx); + self.open_or_update_completions_menu(None, None, false, window, cx); } else { self.hide_context_menu(window, cx); } @@ -5097,57 +5097,18 @@ impl Editor { ignore_threshold: false, }), None, - window, - cx, - ); - } - Some(CompletionsMenuSource::Normal) - | Some(CompletionsMenuSource::SnippetChoices) - | None - if self.is_completion_trigger( - text, trigger_in_words, - completions_source.is_some(), - cx, - ) => - { - self.show_completions( - &ShowCompletions { - trigger: Some(text.to_owned()).filter(|x| !x.is_empty()), - }, window, cx, - ) - } - _ => { - self.hide_context_menu(window, cx); + ); } - } - } - - fn is_completion_trigger( - &self, - text: &str, - trigger_in_words: bool, - menu_is_open: bool, - cx: &mut Context, - ) -> bool { - let position = self.selections.newest_anchor().head(); - let Some(buffer) = self.buffer.read(cx).buffer_for_anchor(position, cx) else { - return false; - }; - - if let Some(completion_provider) = &self.completion_provider { - completion_provider.is_completion_trigger( - &buffer, - position.text_anchor, - text, - trigger_in_words, - menu_is_open, + _ => self.open_or_update_completions_menu( + None, + Some(text.to_owned()).filter(|x| !x.is_empty()), + true, + window, cx, - ) - } else { - false + ), } } @@ -5425,6 +5386,7 @@ impl Editor { ignore_threshold: true, }), None, + false, window, cx, ); @@ -5432,17 +5394,18 @@ impl Editor { pub fn show_completions( &mut self, - options: &ShowCompletions, + _: &ShowCompletions, window: &mut Window, cx: &mut Context, ) { - self.open_or_update_completions_menu(None, options.trigger.as_deref(), window, cx); + self.open_or_update_completions_menu(None, None, false, window, cx); } fn open_or_update_completions_menu( &mut self, requested_source: Option, - trigger: Option<&str>, + trigger: Option, + trigger_in_words: bool, window: &mut Window, cx: &mut Context, ) { @@ -5450,6 +5413,15 @@ impl Editor { return; } + let completions_source = self + .context_menu + .borrow() + .as_ref() + .and_then(|menu| match menu { + CodeContextMenu::Completions(completions_menu) => Some(completions_menu.source), + CodeContextMenu::CodeActions(_) => None, + }); + let multibuffer_snapshot = self.buffer.read(cx).read(cx); // Typically `start` == `end`, but with snippet tabstop choices the default choice is @@ -5497,7 +5469,8 @@ impl Editor { ignore_word_threshold = ignore_threshold; 
None } - Some(CompletionsMenuSource::SnippetChoices) => { + Some(CompletionsMenuSource::SnippetChoices) + | Some(CompletionsMenuSource::SnippetsOnly) => { log::error!("bug: SnippetChoices requested_source is not handled"); None } @@ -5511,13 +5484,19 @@ impl Editor { .as_ref() .is_none_or(|provider| provider.filter_completions()); + let was_snippets_only = matches!( + completions_source, + Some(CompletionsMenuSource::SnippetsOnly) + ); + if let Some(CodeContextMenu::Completions(menu)) = self.context_menu.borrow_mut().as_mut() { if filter_completions { menu.filter(query.clone(), provider.clone(), window, cx); } // When `is_incomplete` is false, no need to re-query completions when the current query // is a suffix of the initial query. - if !menu.is_incomplete { + let was_complete = !menu.is_incomplete; + if was_complete && !was_snippets_only { // If the new query is a suffix of the old query (typing more characters) and // the previous result was complete, the existing completions can be filtered. // @@ -5541,23 +5520,6 @@ impl Editor { } }; - let trigger_kind = match trigger { - Some(trigger) if buffer.read(cx).completion_triggers().contains(trigger) => { - CompletionTriggerKind::TRIGGER_CHARACTER - } - _ => CompletionTriggerKind::INVOKED, - }; - let completion_context = CompletionContext { - trigger_character: trigger.and_then(|trigger| { - if trigger_kind == CompletionTriggerKind::TRIGGER_CHARACTER { - Some(String::from(trigger)) - } else { - None - } - }), - trigger_kind, - }; - let Anchor { excerpt_id: buffer_excerpt_id, text_anchor: buffer_position, @@ -5610,54 +5572,88 @@ impl Editor { .as_ref() .is_none_or(|query| !query.chars().any(|c| c.is_digit(10))); - let omit_word_completions = !self.word_completions_enabled - || (!ignore_word_threshold - && match &query { - Some(query) => query.chars().count() < completion_settings.words_min_length, - None => completion_settings.words_min_length != 0, - }); - - let (mut words, provider_responses) = match &provider { - Some(provider) => { - let provider_responses = provider.completions( - buffer_excerpt_id, + let load_provider_completions = provider.as_ref().is_some_and(|provider| { + trigger.as_ref().is_none_or(|trigger| { + provider.is_completion_trigger( &buffer, - buffer_position, - completion_context, - window, + position.text_anchor, + trigger, + trigger_in_words, + completions_source.is_some(), cx, - ); + ) + }) + }); - let words = match (omit_word_completions, completion_settings.words) { - (true, _) | (_, WordsCompletionMode::Disabled) => { - Task::ready(BTreeMap::default()) - } - (false, WordsCompletionMode::Enabled | WordsCompletionMode::Fallback) => cx - .background_spawn(async move { - buffer_snapshot.words_in_range(WordsQuery { - fuzzy_contents: None, - range: word_search_range, - skip_digits, - }) - }), - }; + let provider_responses = if let Some(provider) = &provider + && load_provider_completions + { + let trigger_character = + trigger.filter(|trigger| buffer.read(cx).completion_triggers().contains(trigger)); + let completion_context = CompletionContext { + trigger_kind: match &trigger_character { + Some(_) => CompletionTriggerKind::TRIGGER_CHARACTER, + None => CompletionTriggerKind::INVOKED, + }, + trigger_character, + }; - (words, provider_responses) - } - None => { - let words = if omit_word_completions { - Task::ready(BTreeMap::default()) - } else { - cx.background_spawn(async move { - buffer_snapshot.words_in_range(WordsQuery { - fuzzy_contents: None, - range: word_search_range, - skip_digits, - }) - }) - }; - (words, 
Task::ready(Ok(Vec::new()))) - } + provider.completions( + buffer_excerpt_id, + &buffer, + buffer_position, + completion_context, + window, + cx, + ) + } else { + Task::ready(Ok(Vec::new())) + }; + + let load_word_completions = if !self.word_completions_enabled { + false + } else if requested_source + == Some(CompletionsMenuSource::Words { + ignore_threshold: true, + }) + { + true + } else { + load_provider_completions + && completion_settings.words != WordsCompletionMode::Disabled + && (ignore_word_threshold || { + let words_min_length = completion_settings.words_min_length; + // check whether word has at least `words_min_length` characters + let query_chars = query.iter().flat_map(|q| q.chars()); + query_chars.take(words_min_length).count() == words_min_length + }) + }; + + let mut words = if load_word_completions { + cx.background_spawn(async move { + buffer_snapshot.words_in_range(WordsQuery { + fuzzy_contents: None, + range: word_search_range, + skip_digits, + }) + }) + } else { + Task::ready(BTreeMap::default()) + }; + + let snippets = if let Some(provider) = &provider + && provider.show_snippets() + && let Some(project) = self.project() + { + project.update(cx, |project, cx| { + snippet_completions(project, &buffer, buffer_position, cx) + }) + } else { + Task::ready(Ok(CompletionResponse { + completions: Vec::new(), + display_options: Default::default(), + is_incomplete: false, + })) }; let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order; @@ -5715,6 +5711,13 @@ impl Editor { confirm: None, })); + completions.extend( + snippets + .await + .into_iter() + .flat_map(|response| response.completions), + ); + let menu = if completions.is_empty() { None } else { @@ -5726,7 +5729,11 @@ impl Editor { .map(|workspace| workspace.read(cx).app_state().languages.clone()); let menu = CompletionsMenu::new( id, - requested_source.unwrap_or(CompletionsMenuSource::Normal), + requested_source.unwrap_or(if load_provider_completions { + CompletionsMenuSource::Normal + } else { + CompletionsMenuSource::SnippetsOnly + }), sort_completions, show_completion_documentation, position, @@ -6056,7 +6063,7 @@ impl Editor { .as_ref() .is_some_and(|confirm| confirm(intent, window, cx)); if show_new_completions_on_confirm { - self.show_completions(&ShowCompletions { trigger: None }, window, cx); + self.open_or_update_completions_menu(None, None, false, window, cx); } let provider = self.completion_provider.as_ref()?; @@ -12852,6 +12859,10 @@ impl Editor { }); } + // 🤔 | .. | show_in_menu | + // | .. 
| true true + // | had_edit_prediction | false true + let trigger_in_words = this.show_edit_predictions_in_menu() || !had_active_edit_prediction; @@ -23059,6 +23070,10 @@ pub trait CompletionProvider { fn filter_completions(&self) -> bool { true } + + fn show_snippets(&self) -> bool { + false + } } pub trait CodeActionProvider { @@ -23319,16 +23334,8 @@ impl CompletionProvider for Entity { cx: &mut Context, ) -> Task>> { self.update(cx, |project, cx| { - let snippets = snippet_completions(project, buffer, buffer_position, cx); - let project_completions = project.completions(buffer, buffer_position, options, cx); - cx.background_spawn(async move { - let mut responses = project_completions.await?; - let snippets = snippets.await?; - if !snippets.completions.is_empty() { - responses.push(snippets); - } - Ok(responses) - }) + let task = project.completions(buffer, buffer_position, options, cx); + cx.background_spawn(task) }) } @@ -23400,6 +23407,10 @@ impl CompletionProvider for Entity { buffer.completion_triggers().contains(text) } + + fn show_snippets(&self) -> bool { + true + } } impl SemanticsProvider for Entity { diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index c1fbc9053882d9e6a74e27a8cd7fb788289d1fa7..ce97cf9a1cc68ed4ff06d57ac02e0dbb9fdd8788 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -13827,7 +13827,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) { cx.set_state(&run.initial_state); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); let counter = Arc::new(AtomicUsize::new(0)); @@ -13887,7 +13887,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext) cx.set_state(initial_state); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); let counter = Arc::new(AtomicUsize::new(0)); @@ -13923,7 +13923,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext) cx.set_state(initial_state); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); handle_completion_request_with_insert_and_replace( &mut cx, @@ -14010,7 +14010,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T "}; cx.set_state(initial_state); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); handle_completion_request_with_insert_and_replace( &mut cx, @@ -14064,7 +14064,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T "}; cx.set_state(initial_state); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); handle_completion_request_with_insert_and_replace( &mut cx, @@ -14113,7 +14113,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T "}; cx.set_state(initial_state); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); 
handle_completion_request_with_insert_and_replace( &mut cx, @@ -14264,7 +14264,7 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte }); editor.update_in(cx, |editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); fake_server @@ -14503,7 +14503,7 @@ async fn test_completion(cx: &mut TestAppContext) { cx.assert_editor_state("editor.cloˇ"); assert!(cx.editor(|e, _, _| e.context_menu.borrow_mut().is_none())); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); handle_completion_request( "editor.", @@ -14902,7 +14902,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) { 4.5f32 "}); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions::default(), window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); cx.executor().run_until_parked(); cx.condition(|editor, _| editor.context_menu_visible()) @@ -14928,7 +14928,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) { 33.35f32 "}); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions::default(), window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); cx.executor().run_until_parked(); cx.condition(|editor, _| editor.context_menu_visible()) @@ -15056,6 +15056,35 @@ async fn test_word_completions_disabled(cx: &mut TestAppContext) { }); } +#[gpui::test] +async fn test_word_completions_disabled_with_no_provider(cx: &mut TestAppContext) { + init_test(cx, |language_settings| { + language_settings.defaults.completions = Some(CompletionSettingsContent { + words: Some(WordsCompletionMode::Disabled), + words_min_length: Some(0), + lsp_insert_mode: Some(LspInsertMode::Insert), + ..Default::default() + }); + }); + + let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await; + cx.update_editor(|editor, _, _| { + editor.set_completion_provider(None); + }); + cx.set_state(indoc! 
{"ˇ + wow + wowen + wowser + "}); + cx.simulate_keystroke("w"); + cx.executor().run_until_parked(); + cx.update_editor(|editor, _, _| { + if editor.context_menu.borrow_mut().is_some() { + panic!("expected completion menu to be hidden, as disabled in settings"); + } + }); +} + fn gen_text_edit(params: &CompletionParams, text: &str) -> Option { let position = || lsp::Position { line: params.text_document_position.position.line, @@ -15352,13 +15381,7 @@ async fn test_as_is_completions(cx: &mut TestAppContext) { cx.set_state("fn a() {}\n nˇ"); cx.executor().run_until_parked(); cx.update_editor(|editor, window, cx| { - editor.show_completions( - &ShowCompletions { - trigger: Some("\n".into()), - }, - window, - cx, - ); + editor.trigger_completion_on_input("n", true, window, cx) }); cx.executor().run_until_parked(); @@ -15456,7 +15479,7 @@ int fn_branch(bool do_branch1, bool do_branch2); }))) }); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); cx.executor().run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -15505,7 +15528,7 @@ int fn_branch(bool do_branch1, bool do_branch2); }))) }); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); cx.executor().run_until_parked(); cx.update_editor(|editor, window, cx| { @@ -17995,7 +18018,7 @@ async fn test_context_menus_hide_hover_popover(cx: &mut gpui::TestAppContext) { } }); cx.update_editor(|editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); completion_requests.next().await; cx.condition(|editor, _| editor.context_menu_visible()) @@ -24391,7 +24414,7 @@ async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) { ]))) }); editor.update_in(cx, |editor, window, cx| { - editor.show_completions(&ShowCompletions { trigger: None }, window, cx); + editor.show_completions(&ShowCompletions, window, cx); }); cx.run_until_parked(); completion_handle.next().await.unwrap(); From e5fce424b3cb22b2bf89feae885afc85284634d3 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 10 Nov 2025 17:05:46 -0500 Subject: [PATCH 34/74] Update CI badge in README (#42394) This PR updates the CI badge in the README, after the CI workflow reorganization. Release Notes: - N/A --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index adc152b7af163b3c90c73a23e0f45bab1120bddc..d1e2a75beccc9b115bd3b2e09bcc812aebc98329 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Zed [![Zed](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/zed-industries/zed/main/assets/badge/v0.json)](https://zed.dev) -[![CI](https://github.com/zed-industries/zed/actions/workflows/ci.yml/badge.svg)](https://github.com/zed-industries/zed/actions/workflows/ci.yml) +[![CI](https://github.com/zed-industries/zed/actions/workflows/run_tests.yml/badge.svg)](https://github.com/zed-industries/zed/actions/workflows/run_tests.yml) Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter). 
From b607077c08c562bb42023c7316ef57b84371f5b6 Mon Sep 17 00:00:00 2001 From: Max Brunsfeld Date: Mon, 10 Nov 2025 14:44:54 -0800 Subject: [PATCH 35/74] Add old_text/new_text as a zeta2 prompt format (#42171) Release Notes: - N/A --------- Co-authored-by: Agus Zubiaga Co-authored-by: Oleksiy Syvokon Co-authored-by: Ben Kunkle Co-authored-by: Michael Sloan --- .../cloud_llm_client/src/predict_edits_v3.rs | 2 + .../src/cloud_zeta2_prompt.rs | 57 ++++- crates/zeta2/src/xml_edits.rs | 197 ++++++++++++++++++ crates/zeta2/src/zeta2.rs | 64 ++++-- crates/zeta_cli/src/evaluate.rs | 17 +- crates/zeta_cli/src/main.rs | 2 + crates/zeta_cli/src/predict.rs | 134 +++++++++--- 7 files changed, 418 insertions(+), 55 deletions(-) create mode 100644 crates/zeta2/src/xml_edits.rs diff --git a/crates/cloud_llm_client/src/predict_edits_v3.rs b/crates/cloud_llm_client/src/predict_edits_v3.rs index 2e884ae9fcb27530e5579b83767bde95b5df414c..98ca0748934d663d204c64544af8a3e83fcd704d 100644 --- a/crates/cloud_llm_client/src/predict_edits_v3.rs +++ b/crates/cloud_llm_client/src/predict_edits_v3.rs @@ -73,6 +73,7 @@ pub enum PromptFormat { MarkedExcerpt, LabeledSections, NumLinesUniDiff, + OldTextNewText, /// Prompt format intended for use via zeta_cli OnlySnippets, } @@ -100,6 +101,7 @@ impl std::fmt::Display for PromptFormat { PromptFormat::LabeledSections => write!(f, "Labeled Sections"), PromptFormat::OnlySnippets => write!(f, "Only Snippets"), PromptFormat::NumLinesUniDiff => write!(f, "Numbered Lines / Unified Diff"), + PromptFormat::OldTextNewText => write!(f, "Old Text / New Text"), } } } diff --git a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs index 6055c39e16ea95b38754bb26fd7371250d1fc525..3f0bd476c50b9e6f92a9f457af15899fcb33b8ed 100644 --- a/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs +++ b/crates/cloud_zeta2_prompt/src/cloud_zeta2_prompt.rs @@ -100,6 +100,54 @@ const UNIFIED_DIFF_REMINDER: &str = indoc! {" to uniquely identify it amongst all excerpts of code provided. "}; +const XML_TAGS_INSTRUCTIONS: &str = indoc! {r#" + # Instructions + + You are an edit prediction agent in a code editor. + Your job is to predict the next edit that the user will make, + based on their last few edits and their current cursor location. + + # Output Format + + You must briefly explain your understanding of the user's goal, in one + or two sentences, and then specify their next edit, using the following + XML format: + + + + OLD TEXT 1 HERE + + + NEW TEXT 1 HERE + + + + OLD TEXT 1 HERE + + + NEW TEXT 1 HERE + + + + - Specify the file to edit using the `path` attribute. + - Use `` and `` tags to replace content + - `` must exactly match existing file content, including indentation + - `` cannot be empty + - Do not escape quotes, newlines, or other characters within tags + - Always close all tags properly + - Don't include the <|user_cursor|> marker in your output. + + # Edit History: + +"#}; + +const OLD_TEXT_NEW_TEXT_REMINDER: &str = indoc! {r#" + --- + + Remember that the edits in the edit history have already been deployed. + The files are currently as shown in the Code Excerpts section. 
+"#}; + pub fn build_prompt( request: &predict_edits_v3::PredictEditsRequest, ) -> Result<(String, SectionLabels)> { @@ -121,7 +169,9 @@ pub fn build_prompt( EDITABLE_REGION_END_MARKER_WITH_NEWLINE, ), ], - PromptFormat::LabeledSections | PromptFormat::NumLinesUniDiff => { + PromptFormat::LabeledSections + | PromptFormat::NumLinesUniDiff + | PromptFormat::OldTextNewText => { vec![(request.cursor_point, CURSOR_MARKER)] } PromptFormat::OnlySnippets => vec![], @@ -131,6 +181,7 @@ pub fn build_prompt( PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(), PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(), PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(), + PromptFormat::OldTextNewText => XML_TAGS_INSTRUCTIONS.to_string(), PromptFormat::OnlySnippets => String::new(), }; @@ -186,6 +237,9 @@ pub fn build_prompt( PromptFormat::NumLinesUniDiff => { prompt.push_str(UNIFIED_DIFF_REMINDER); } + PromptFormat::OldTextNewText => { + prompt.push_str(OLD_TEXT_NEW_TEXT_REMINDER); + } _ => {} } @@ -611,6 +665,7 @@ impl<'a> SyntaxBasedPrompt<'a> { match self.request.prompt_format { PromptFormat::MarkedExcerpt | PromptFormat::OnlySnippets + | PromptFormat::OldTextNewText | PromptFormat::NumLinesUniDiff => { if range.start.0 > 0 && !skipped_last_snippet { output.push_str("…\n"); diff --git a/crates/zeta2/src/xml_edits.rs b/crates/zeta2/src/xml_edits.rs new file mode 100644 index 0000000000000000000000000000000000000000..e8bcc4b1ba7eb2d00cd73b0b2e8d1638a5b00e32 --- /dev/null +++ b/crates/zeta2/src/xml_edits.rs @@ -0,0 +1,197 @@ +use anyhow::{Context as _, Result, anyhow}; +use language::{Anchor, BufferSnapshot, OffsetRangeExt as _, TextBufferSnapshot}; +use std::ops::Range; +use std::path::Path; +use std::sync::Arc; + +pub async fn parse_xml_edits<'a>( + mut input: &'a str, + get_buffer: impl Fn(&Path) -> Option<(&'a BufferSnapshot, &'a [Range])> + Send, +) -> Result<(&'a BufferSnapshot, Vec<(Range, Arc)>)> { + let edits_tag = parse_tag(&mut input, "edits")?.context("No edits tag")?; + + input = edits_tag.body; + + let file_path = edits_tag + .attributes + .trim_start() + .strip_prefix("path") + .context("no file attribute on edits tag")? + .trim_end() + .strip_prefix('=') + .context("no value for path attribute")? + .trim() + .trim_start_matches('"') + .trim_end_matches('"'); + + let (buffer, context_ranges) = get_buffer(file_path.as_ref()) + .with_context(|| format!("no buffer for file {file_path}"))?; + + let mut edits = vec![]; + while let Some(old_text_tag) = parse_tag(&mut input, "old_text")? 
{ + let new_text_tag = + parse_tag(&mut input, "new_text")?.context("no new_text tag following old_text")?; + edits.extend(resolve_new_text_old_text_in_buffer( + new_text_tag.body, + old_text_tag.body, + buffer, + context_ranges, + )?); + } + + Ok((buffer, edits)) +} + +fn resolve_new_text_old_text_in_buffer( + new_text: &str, + old_text: &str, + buffer: &TextBufferSnapshot, + ranges: &[Range], +) -> Result, Arc)>, anyhow::Error> { + let context_offset = if old_text.is_empty() { + Ok(0) + } else { + let mut offset = None; + for range in ranges { + let range = range.to_offset(buffer); + let text = buffer.text_for_range(range.clone()).collect::(); + for (match_offset, _) in text.match_indices(old_text) { + if offset.is_some() { + anyhow::bail!("old_text is not unique enough:\n{}", old_text); + } + offset = Some(range.start + match_offset); + } + } + offset.ok_or_else(|| anyhow!("Failed to match old_text:\n{}", old_text)) + }?; + + let edits_within_hunk = language::text_diff(&old_text, &new_text); + Ok(edits_within_hunk + .into_iter() + .map(move |(inner_range, inner_text)| { + ( + buffer.anchor_after(context_offset + inner_range.start) + ..buffer.anchor_before(context_offset + inner_range.end), + inner_text, + ) + })) +} + +struct ParsedTag<'a> { + attributes: &'a str, + body: &'a str, +} + +fn parse_tag<'a>(input: &mut &'a str, tag: &str) -> Result>> { + let open_tag = format!("<{}", tag); + let close_tag = format!("", tag); + let Some(start_ix) = input.find(&open_tag) else { + return Ok(None); + }; + let start_ix = start_ix + open_tag.len(); + let closing_bracket_ix = start_ix + + input[start_ix..] + .find('>') + .with_context(|| format!("missing > after {tag}"))?; + let attributes = &input[start_ix..closing_bracket_ix].trim(); + let end_ix = closing_bracket_ix + + input[closing_bracket_ix..] + .find(&close_tag) + .with_context(|| format!("no `{close_tag}` tag"))?; + let body = &input[closing_bracket_ix + '>'.len_utf8()..end_ix]; + let body = body.strip_prefix('\n').unwrap_or(body); + *input = &input[end_ix + close_tag.len()..]; + Ok(Some(ParsedTag { attributes, body })) +} + +#[cfg(test)] +mod tests { + use super::*; + use gpui::TestAppContext; + use indoc::indoc; + use language::Point; + use project::{FakeFs, Project}; + use serde_json::json; + use settings::SettingsStore; + use util::path; + + #[test] + fn test_parse_tags() { + let mut input = indoc! {r#" + Prelude + + tag value + + "# }; + let parsed = parse_tag(&mut input, "tag").unwrap().unwrap(); + assert_eq!(parsed.attributes, "attr=\"foo\""); + assert_eq!(parsed.body, "tag value\n"); + assert_eq!(input, "\n"); + } + + #[gpui::test] + async fn test_parse_xml_edits(cx: &mut TestAppContext) { + let fs = init_test(cx); + + let buffer_1_text = indoc! {r#" + one two three four + five six seven eight + nine ten eleven twelve + "# }; + + fs.insert_tree( + path!("/root"), + json!({ + "file1": buffer_1_text, + }), + ) + .await; + + let project = Project::test(fs, [path!("/root").as_ref()], cx).await; + let buffer = project + .update(cx, |project, cx| { + project.open_local_buffer(path!("/root/file1"), cx) + }) + .await + .unwrap(); + let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot()); + + let edits = indoc! {r#" + + + five six seven eight + + + five SIX seven eight! 
+ + + "#}; + + let (buffer, edits) = parse_xml_edits(edits, |_path| { + Some((&buffer_snapshot, &[(Anchor::MIN..Anchor::MAX)] as &[_])) + }) + .await + .unwrap(); + + let edits = edits + .into_iter() + .map(|(range, text)| (range.to_point(&buffer), text)) + .collect::>(); + assert_eq!( + edits, + &[ + (Point::new(1, 5)..Point::new(1, 8), "SIX".into()), + (Point::new(1, 20)..Point::new(1, 20), "!".into()) + ] + ); + } + + fn init_test(cx: &mut TestAppContext) -> Arc { + cx.update(|cx| { + let settings_store = SettingsStore::test(cx); + cx.set_global(settings_store); + }); + + FakeFs::new(cx.background_executor.clone()) + } +} diff --git a/crates/zeta2/src/zeta2.rs b/crates/zeta2/src/zeta2.rs index c77c78b6f517bce085a26b2c60d04318b2f3cdae..6139c9c75e16f8805e6529dc1700eef1beacd713 100644 --- a/crates/zeta2/src/zeta2.rs +++ b/crates/zeta2/src/zeta2.rs @@ -47,6 +47,7 @@ mod prediction; mod provider; pub mod retrieval_search; pub mod udiff; +mod xml_edits; use crate::merge_excerpts::merge_excerpts; use crate::prediction::EditPrediction; @@ -948,8 +949,9 @@ impl Zeta { llm_token, app_version, #[cfg(feature = "llm-response-cache")] - llm_response_cache - ).await; + llm_response_cache, + ) + .await; let request_time = chrono::Utc::now() - before_request; log::trace!("Got edit prediction response"); @@ -969,7 +971,7 @@ impl Zeta { let (res, usage) = response?; let request_id = EditPredictionId(res.id.clone().into()); let Some(mut output_text) = text_from_response(res) else { - return Ok((None, usage)) + return Ok((None, usage)); }; if output_text.contains(CURSOR_MARKER) { @@ -977,20 +979,25 @@ impl Zeta { output_text = output_text.replace(CURSOR_MARKER, ""); } + let get_buffer_from_context = |path: &Path| { + included_files + .iter() + .find_map(|(_, buffer, probe_path, ranges)| { + if probe_path.as_ref() == path { + Some((buffer, ranges.as_slice())) + } else { + None + } + }) + }; + let (edited_buffer_snapshot, edits) = match options.prompt_format { PromptFormat::NumLinesUniDiff => { - crate::udiff::parse_diff(&output_text, |path| { - included_files - .iter() - .find_map(|(_, buffer, probe_path, ranges)| { - if probe_path.as_ref() == path { - Some((buffer, ranges.as_slice())) - } else { - None - } - }) - }) - .await? + crate::udiff::parse_diff(&output_text, get_buffer_from_context).await? + } + PromptFormat::OldTextNewText => { + crate::xml_edits::parse_xml_edits(&output_text, get_buffer_from_context) + .await? 
} _ => { bail!("unsupported prompt format {}", options.prompt_format) @@ -1006,9 +1013,17 @@ impl Zeta { None } }) - .context("Failed to find buffer in included_buffers, even though we just found the snapshot")?; - - anyhow::Ok((Some((request_id, edited_buffer, edited_buffer_snapshot.clone(), edits)), usage)) + .context("Failed to find buffer in included_buffers")?; + + anyhow::Ok(( + Some(( + request_id, + edited_buffer, + edited_buffer_snapshot.clone(), + edits, + )), + usage, + )) } }); @@ -1387,7 +1402,8 @@ impl Zeta { continue; } - let input: SearchToolInput = serde_json::from_str(&function.arguments)?; + let input: SearchToolInput = serde_json::from_str(&function.arguments) + .with_context(|| format!("invalid search json {}", &function.arguments))?; queries.extend(input.queries); } @@ -1447,6 +1463,16 @@ impl Zeta { }) } + pub fn set_context( + &mut self, + project: Entity, + context: HashMap, Vec>>, + ) { + if let Some(zeta_project) = self.projects.get_mut(&project.entity_id()) { + zeta_project.context = Some(context); + } + } + fn gather_nearby_diagnostics( cursor_offset: usize, diagnostic_sets: &[(LanguageServerId, DiagnosticSet)], diff --git a/crates/zeta_cli/src/evaluate.rs b/crates/zeta_cli/src/evaluate.rs index 6d5b2da13a4301bfb52cb3cda7662843dea7cd12..b5c23af24845a90d153943f6ee2ccd29bbfaf6a7 100644 --- a/crates/zeta_cli/src/evaluate.rs +++ b/crates/zeta_cli/src/evaluate.rs @@ -24,6 +24,8 @@ pub struct EvaluateArguments { skip_cache: bool, #[arg(long, value_enum, default_value_t = PromptFormat::default())] prompt_format: PromptFormat, + #[arg(long)] + use_expected_context: bool, } pub async fn run_evaluate( @@ -39,6 +41,7 @@ pub async fn run_evaluate( &path, args.skip_cache, args.prompt_format, + args.use_expected_context, app_state.clone(), cx, ) @@ -63,13 +66,21 @@ pub async fn run_evaluate_one( example_path: &Path, skip_cache: bool, prompt_format: PromptFormat, + use_expected_context: bool, app_state: Arc, cx: &mut AsyncApp, ) -> Result { let example = NamedExample::load(&example_path).unwrap(); - let predictions = zeta2_predict(example.clone(), skip_cache, prompt_format, &app_state, cx) - .await - .unwrap(); + let predictions = zeta2_predict( + example.clone(), + skip_cache, + prompt_format, + use_expected_context, + &app_state, + cx, + ) + .await + .unwrap(); let evaluation_result = evaluate(&example.example, &predictions); diff --git a/crates/zeta_cli/src/main.rs b/crates/zeta_cli/src/main.rs index 25fb920bab18f374e41b539bc21320faf6c75484..82760d6061d9b96a2da74bf5cb24e43d9ecdba60 100644 --- a/crates/zeta_cli/src/main.rs +++ b/crates/zeta_cli/src/main.rs @@ -171,6 +171,7 @@ enum PromptFormat { OnlySnippets, #[default] NumberedLines, + OldTextNewText, } impl Into for PromptFormat { @@ -180,6 +181,7 @@ impl Into for PromptFormat { Self::LabeledSections => predict_edits_v3::PromptFormat::LabeledSections, Self::OnlySnippets => predict_edits_v3::PromptFormat::OnlySnippets, Self::NumberedLines => predict_edits_v3::PromptFormat::NumLinesUniDiff, + Self::OldTextNewText => predict_edits_v3::PromptFormat::OldTextNewText, } } } diff --git a/crates/zeta_cli/src/predict.rs b/crates/zeta_cli/src/predict.rs index d85f009c9bacc0b6177683c064979740a0709115..4efc82fa8a7c5d5cf6773a7f771d12dd89b4e1ed 100644 --- a/crates/zeta_cli/src/predict.rs +++ b/crates/zeta_cli/src/predict.rs @@ -1,20 +1,23 @@ use crate::PromptFormat; -use crate::example::{ActualExcerpt, NamedExample}; +use crate::example::{ActualExcerpt, ExpectedExcerpt, NamedExample}; use crate::headless::ZetaCliAppState; use 
crate::paths::{CACHE_DIR, LOGS_DIR}; use ::serde::Serialize; use anyhow::{Result, anyhow}; use clap::Args; +use collections::HashMap; use gpui::http_client::Url; +use language::{Anchor, Buffer, Point}; // use cloud_llm_client::predict_edits_v3::PromptFormat; use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock}; use futures::StreamExt as _; -use gpui::{AppContext, AsyncApp}; +use gpui::{AppContext, AsyncApp, Entity}; use project::Project; use serde::Deserialize; use std::cell::Cell; use std::fs; use std::io::Write; +use std::ops::Range; use std::path::PathBuf; use std::sync::Arc; use std::sync::Mutex; @@ -25,6 +28,8 @@ use zeta2::LlmResponseCache; pub struct PredictArguments { #[arg(long, value_enum, default_value_t = PromptFormat::default())] prompt_format: PromptFormat, + #[arg(long)] + use_expected_context: bool, #[clap(long, short, value_enum, default_value_t = PredictionsOutputFormat::Md)] format: PredictionsOutputFormat, example_path: PathBuf, @@ -38,15 +43,23 @@ pub enum PredictionsOutputFormat { Md, Diff, } + pub async fn run_zeta2_predict( args: PredictArguments, app_state: &Arc, cx: &mut AsyncApp, ) { let example = NamedExample::load(args.example_path).unwrap(); - let result = zeta2_predict(example, args.skip_cache, args.prompt_format, &app_state, cx) - .await - .unwrap(); + let result = zeta2_predict( + example, + args.skip_cache, + args.prompt_format, + args.use_expected_context, + &app_state, + cx, + ) + .await + .unwrap(); result.write(args.format, std::io::stdout()).unwrap(); } @@ -58,6 +71,7 @@ pub async fn zeta2_predict( example: NamedExample, skip_cache: bool, prompt_format: PromptFormat, + use_expected_context: bool, app_state: &Arc, cx: &mut AsyncApp, ) -> Result { @@ -126,14 +140,13 @@ pub async fn zeta2_predict( let debug_task = cx.background_spawn({ let result = result.clone(); async move { - let mut context_retrieval_started_at = None; - let mut context_retrieval_finished_at = None; + let mut start_time = None; let mut search_queries_generated_at = None; let mut search_queries_executed_at = None; while let Some(event) = debug_rx.next().await { match event { zeta2::ZetaDebugInfo::ContextRetrievalStarted(info) => { - context_retrieval_started_at = Some(info.timestamp); + start_time = Some(info.timestamp); fs::write(LOGS_DIR.join("search_prompt.md"), &info.search_prompt)?; } zeta2::ZetaDebugInfo::SearchQueriesGenerated(info) => { @@ -146,11 +159,10 @@ pub async fn zeta2_predict( zeta2::ZetaDebugInfo::SearchQueriesExecuted(info) => { search_queries_executed_at = Some(info.timestamp); } - zeta2::ZetaDebugInfo::ContextRetrievalFinished(info) => { - context_retrieval_finished_at = Some(info.timestamp); - } + zeta2::ZetaDebugInfo::ContextRetrievalFinished(_info) => {} zeta2::ZetaDebugInfo::EditPredictionRequested(request) => { let prediction_started_at = Instant::now(); + start_time.get_or_insert(prediction_started_at); fs::write( LOGS_DIR.join("prediction_prompt.md"), &request.local_prompt.unwrap_or_default(), @@ -190,15 +202,16 @@ pub async fn zeta2_predict( let mut result = result.lock().unwrap(); - result.planning_search_time = search_queries_generated_at.unwrap() - - context_retrieval_started_at.unwrap(); - result.running_search_time = search_queries_executed_at.unwrap() - - search_queries_generated_at.unwrap(); - result.filtering_search_time = context_retrieval_finished_at.unwrap() - - search_queries_executed_at.unwrap(); + if !use_expected_context { + result.planning_search_time = + Some(search_queries_generated_at.unwrap() - start_time.unwrap()); + 
result.running_search_time = Some( + search_queries_executed_at.unwrap() + - search_queries_generated_at.unwrap(), + ); + } result.prediction_time = prediction_finished_at - prediction_started_at; - result.total_time = - prediction_finished_at - context_retrieval_started_at.unwrap(); + result.total_time = prediction_finished_at - start_time.unwrap(); break; } @@ -208,13 +221,42 @@ pub async fn zeta2_predict( } }); - zeta.update(cx, |zeta, cx| { + zeta.update(cx, |zeta, _cx| { let mut options = zeta.options().clone(); options.prompt_format = prompt_format.into(); zeta.set_options(options); - zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx) - })? - .await?; + })?; + + if use_expected_context { + let context_excerpts_tasks = example + .example + .expected_context + .iter() + .flat_map(|section| { + section.alternatives[0].excerpts.iter().map(|excerpt| { + resolve_context_entry(project.clone(), excerpt.clone(), cx.clone()) + }) + }) + .collect::>(); + let context_excerpts_vec = futures::future::try_join_all(context_excerpts_tasks).await?; + + let mut context_excerpts = HashMap::default(); + for (buffer, mut excerpts) in context_excerpts_vec { + context_excerpts + .entry(buffer) + .or_insert(Vec::new()) + .append(&mut excerpts); + } + + zeta.update(cx, |zeta, _cx| { + zeta.set_context(project.clone(), context_excerpts) + })?; + } else { + zeta.update(cx, |zeta, cx| { + zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx) + })? + .await?; + } let prediction = zeta .update(cx, |zeta, cx| { @@ -242,6 +284,38 @@ pub async fn zeta2_predict( anyhow::Ok(result) } +async fn resolve_context_entry( + project: Entity, + excerpt: ExpectedExcerpt, + mut cx: AsyncApp, +) -> Result<(Entity, Vec>)> { + let buffer = project + .update(&mut cx, |project, cx| { + let project_path = project.find_project_path(&excerpt.path, cx).unwrap(); + project.open_buffer(project_path, cx) + })? + .await?; + + let ranges = buffer.read_with(&mut cx, |buffer, _| { + let full_text = buffer.text(); + let offset = full_text + .find(&excerpt.text) + .expect("Expected context not found"); + let point = buffer.offset_to_point(offset); + excerpt + .required_lines + .iter() + .map(|line| { + let row = point.row + line.0; + let range = Point::new(row, 0)..Point::new(row + 1, 0); + buffer.anchor_after(range.start)..buffer.anchor_before(range.end) + }) + .collect() + })?; + + Ok((buffer, ranges)) +} + struct Cache { skip_cache: bool, } @@ -292,9 +366,8 @@ pub struct PredictionDetails { pub diff: String, pub excerpts: Vec, pub excerpts_text: String, // TODO: contains the worktree root path. 
Drop this field and compute it on the fly - pub planning_search_time: Duration, - pub filtering_search_time: Duration, - pub running_search_time: Duration, + pub planning_search_time: Option, + pub running_search_time: Option, pub prediction_time: Duration, pub total_time: Duration, } @@ -311,8 +384,7 @@ impl PredictionDetails { } pub fn to_markdown(&self) -> String { - let inference_time = - self.planning_search_time + self.filtering_search_time + self.prediction_time; + let inference_time = self.planning_search_time.unwrap_or_default() + self.prediction_time; format!( "## Excerpts\n\n\ @@ -322,16 +394,14 @@ impl PredictionDetails { ## Time\n\n\ Planning searches: {}ms\n\ Running searches: {}ms\n\ - Filtering context results: {}ms\n\ Making Prediction: {}ms\n\n\ -------------------\n\n\ Total: {}ms\n\ Inference: {}ms ({:.2}%)\n", self.excerpts_text, self.diff, - self.planning_search_time.as_millis(), - self.running_search_time.as_millis(), - self.filtering_search_time.as_millis(), + self.planning_search_time.unwrap_or_default().as_millis(), + self.running_search_time.unwrap_or_default().as_millis(), self.prediction_time.as_millis(), self.total_time.as_millis(), inference_time.as_millis(), From 359521e91d2a42d8c863c03a26f99529047a81cb Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 10 Nov 2025 16:00:52 -0700 Subject: [PATCH 36/74] Allow passing model_name to evals (#42395) Release Notes: - N/A --- .github/workflows/run_agent_evals.yml | 24 ++++++-------- .../src/tasks/workflows/run_agent_evals.rs | 32 +++++++------------ 2 files changed, 21 insertions(+), 35 deletions(-) diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml index e13bae4031174f057e555db7f2d779208d55456e..1a875aa2c463d264002f14264993b9c99ae1f49c 100644 --- a/.github/workflows/run_agent_evals.yml +++ b/.github/workflows/run_agent_evals.yml @@ -8,22 +8,16 @@ env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_EVAL_TELEMETRY: '1' + MODEL_NAME: ${{ inputs.model_name }} on: - pull_request: - types: - - synchronize - - reopened - - labeled - branches: - - '**' - schedule: - - cron: 0 0 * * * - workflow_dispatch: {} + workflow_dispatch: + inputs: + model_name: + description: model_name + required: true + type: string jobs: agent_evals: - if: | - github.repository_owner == 'zed-industries' && - (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval')) runs-on: namespace-profile-16x32-ubuntu-2204 steps: - name: steps::checkout_repo @@ -52,14 +46,14 @@ jobs: run: cargo build --package=eval shell: bash -euxo pipefail {0} - name: run_agent_evals::agent_evals::run_eval - run: cargo run --package=eval -- --repetitions=8 --concurrency=1 + run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}" shell: bash -euxo pipefail {0} - name: steps::cleanup_cargo_config if: always() run: | rm -rf ./../.cargo shell: bash -euxo pipefail {0} - timeout-minutes: 60 + timeout-minutes: 600 concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} cancel-in-progress: true diff --git a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs index 1af09f6ca8fa0bc24c99eda7a18904b1b8886bb3..4601d5a5bf6a60435a87edab9cd6d62b77ef52a8 100644 --- a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs +++ 
b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs @@ -1,26 +1,19 @@ -use gh_workflow::{ - Event, Expression, Job, PullRequest, PullRequestType, Run, Schedule, Step, Use, Workflow, - WorkflowDispatch, -}; +use gh_workflow::{Event, Expression, Job, Run, Schedule, Step, Use, Workflow, WorkflowDispatch}; use crate::tasks::workflows::{ runners::{self, Platform}, steps::{self, FluentBuilder as _, NamedJob, named, setup_cargo_config}, - vars, + vars::{self, Input}, }; pub(crate) fn run_agent_evals() -> Workflow { let agent_evals = agent_evals(); + let model_name = Input::string("model_name", None); named::workflow() - .on(Event::default() - .schedule([Schedule::default().cron("0 0 * * *")]) - .pull_request(PullRequest::default().add_branch("**").types([ - PullRequestType::Synchronize, - PullRequestType::Reopened, - PullRequestType::Labeled, - ])) - .workflow_dispatch(WorkflowDispatch::default())) + .on(Event::default().workflow_dispatch( + WorkflowDispatch::default().add_input(model_name.name, model_name.input()), + )) .concurrency(vars::one_workflow_per_non_main_branch()) .add_env(("CARGO_TERM_COLOR", "always")) .add_env(("CARGO_INCREMENTAL", 0)) @@ -28,29 +21,28 @@ pub(crate) fn run_agent_evals() -> Workflow { .add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY)) .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) .add_env(("ZED_EVAL_TELEMETRY", 1)) + .add_env(("MODEL_NAME", model_name.to_string())) .add_job(agent_evals.name, agent_evals.job) } fn agent_evals() -> NamedJob { fn run_eval() -> Step { - named::bash("cargo run --package=eval -- --repetitions=8 --concurrency=1") + named::bash( + "cargo run --package=eval -- --repetitions=8 --concurrency=1 --model \"${MODEL_NAME}\"", + ) } named::job( Job::default() - .cond(Expression::new(indoc::indoc!{r#" - github.repository_owner == 'zed-industries' && - (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval')) - "#})) .runs_on(runners::LINUX_DEFAULT) - .timeout_minutes(60_u32) + .timeout_minutes(60_u32 * 10) .add_step(steps::checkout_repo()) .add_step(steps::cache_rust_dependencies_namespace()) .map(steps::install_linux_dependencies) .add_step(setup_cargo_config(Platform::Linux)) .add_step(steps::script("cargo build --package=eval")) .add_step(run_eval()) - .add_step(steps::cleanup_cargo_config(Platform::Linux)) + .add_step(steps::cleanup_cargo_config(Platform::Linux)), ) } From f52549c1c4cc75efc9c69c97364d9791d7a0f1f7 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 11 Nov 2025 01:16:28 +0200 Subject: [PATCH 37/74] Small documentation fixes (#42397) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Release Notes: - N/A Co-authored-by: Ole Jørgen Brønner --- crates/settings/src/settings_content/project.rs | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/crates/settings/src/settings_content/project.rs b/crates/settings/src/settings_content/project.rs index e7a3798ebfa827cd287255f464f9e35bddd619f4..b6bebd76e28a316f19c400db2877219aeb2c7cc8 100644 --- a/crates/settings/src/settings_content/project.rs +++ b/crates/settings/src/settings_content/project.rs @@ -26,7 +26,6 @@ pub struct ProjectSettingsContent { /// The following settings can be overridden for specific language servers: /// - initialization_options /// - /// /// To override settings for a language, add an entry for that language server's /// name to the lsp value. 
/// Default: null @@ -113,15 +112,13 @@ pub struct LspSettings { /// /// Ref: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initialize /// - /// Consult the documentation for the specific language server to see what settings - /// are supported. + /// Consult the documentation for the specific language server to see which settings are supported. pub initialization_options: Option, /// Language server settings. /// /// Ref: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#workspace_configuration /// - /// Consult the documentation for the specific LSP to see what settings - /// are supported. + /// Consult the documentation for the specific language server to see which settings are supported. pub settings: Option, /// If the server supports sending tasks over LSP extensions, /// this setting can be used to enable or disable them in Zed. From e488b6cd0b405b0c796ee3a26910f2bd9aaa8abb Mon Sep 17 00:00:00 2001 From: Finn Evers Date: Tue, 11 Nov 2025 00:25:27 +0100 Subject: [PATCH 38/74] agent_ui: Fix issue where MCP extension could not be uninstalled (#42384) Closes https://github.com/zed-industries/zed/issues/42312 The issue here was that we assumed that context servers provided by extensions would always need a config in the settings to be present when actually the opposite was the case - context servers provided by extensions are the only context servers that do not need a config to be in place in order to be available in the UI. Release Notes: - Fixed an issue where context servers provided by extensions could not be uninstalled if they were previously unconfigured. --- crates/agent_ui/src/agent_configuration.rs | 34 ++++++---------------- 1 file changed, 9 insertions(+), 25 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 3cbc6e7145d2e3611cdb229447a8795ffb0301ca..8ace684234e90c5203528cae360a28b30798bea3 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -638,15 +638,13 @@ impl AgentConfiguration { let is_running = matches!(server_status, ContextServerStatus::Running); let item_id = SharedString::from(context_server_id.0.clone()); - let is_from_extension = server_configuration - .as_ref() - .map(|config| { - matches!( - config.as_ref(), - ContextServerConfiguration::Extension { .. } - ) - }) - .unwrap_or(false); + // Servers without a configuration can only be provided by extensions. + let provided_by_extension = server_configuration.is_none_or(|config| { + matches!( + config.as_ref(), + ContextServerConfiguration::Extension { .. 
} + ) + }); let error = if let ContextServerStatus::Error(error) = server_status.clone() { Some(error) @@ -660,7 +658,7 @@ impl AgentConfiguration { .tools_for_server(&context_server_id) .count(); - let (source_icon, source_tooltip) = if is_from_extension { + let (source_icon, source_tooltip) = if provided_by_extension { ( IconName::ZedSrcExtension, "This MCP server was installed from an extension.", @@ -710,7 +708,6 @@ impl AgentConfiguration { let fs = self.fs.clone(); let context_server_id = context_server_id.clone(); let language_registry = self.language_registry.clone(); - let context_server_store = self.context_server_store.clone(); let workspace = self.workspace.clone(); let context_server_registry = self.context_server_registry.clone(); @@ -752,23 +749,10 @@ impl AgentConfiguration { .entry("Uninstall", None, { let fs = fs.clone(); let context_server_id = context_server_id.clone(); - let context_server_store = context_server_store.clone(); let workspace = workspace.clone(); move |_, cx| { - let is_provided_by_extension = context_server_store - .read(cx) - .configuration_for_server(&context_server_id) - .as_ref() - .map(|config| { - matches!( - config.as_ref(), - ContextServerConfiguration::Extension { .. } - ) - }) - .unwrap_or(false); - let uninstall_extension_task = match ( - is_provided_by_extension, + provided_by_extension, resolve_extension_for_context_server(&context_server_id, cx), ) { (true, Some((id, manifest))) => { From 83e7c21b2c99bdfc1a022f3237858a30066ffa8f Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Mon, 10 Nov 2025 18:47:39 -0500 Subject: [PATCH 39/74] collab: Remove unused user queries (#42400) This PR removes queries on users that were no longer being used. Release Notes: - N/A --- crates/collab/src/db/queries/users.rs | 77 ------------------- crates/collab/src/db/tests.rs | 1 - crates/collab/src/db/tests/db_tests.rs | 49 +----------- crates/collab/src/db/tests/user_tests.rs | 96 ------------------------ 4 files changed, 1 insertion(+), 222 deletions(-) delete mode 100644 crates/collab/src/db/tests/user_tests.rs diff --git a/crates/collab/src/db/queries/users.rs b/crates/collab/src/db/queries/users.rs index 89211130b88c69d4bf524bba25ae116790321d3e..79b5d227c0c6f69476e5968e080d268dd20e879a 100644 --- a/crates/collab/src/db/queries/users.rs +++ b/crates/collab/src/db/queries/users.rs @@ -66,40 +66,6 @@ impl Database { .await } - /// Returns all users flagged as staff. - pub async fn get_staff_users(&self) -> Result> { - self.transaction(|tx| async { - let tx = tx; - Ok(user::Entity::find() - .filter(user::Column::Admin.eq(true)) - .all(&*tx) - .await?) - }) - .await - } - - /// Returns a user by email address. There are no access checks here, so this should only be used internally. - pub async fn get_user_by_email(&self, email: &str) -> Result> { - self.transaction(|tx| async move { - Ok(user::Entity::find() - .filter(user::Column::EmailAddress.eq(email)) - .one(&*tx) - .await?) - }) - .await - } - - /// Returns a user by GitHub user ID. There are no access checks here, so this should only be used internally. - pub async fn get_user_by_github_user_id(&self, github_user_id: i32) -> Result> { - self.transaction(|tx| async move { - Ok(user::Entity::find() - .filter(user::Column::GithubUserId.eq(github_user_id)) - .one(&*tx) - .await?) - }) - .await - } - /// Returns a user by GitHub login. There are no access checks here, so this should only be used internally. 
pub async fn get_user_by_github_login(&self, github_login: &str) -> Result> { self.transaction(|tx| async move { @@ -270,39 +236,6 @@ impl Database { .await } - /// Sets "accepted_tos_at" on the user to the given timestamp. - pub async fn set_user_accepted_tos_at( - &self, - id: UserId, - accepted_tos_at: Option, - ) -> Result<()> { - self.transaction(|tx| async move { - user::Entity::update_many() - .filter(user::Column::Id.eq(id)) - .set(user::ActiveModel { - accepted_tos_at: ActiveValue::set(accepted_tos_at), - ..Default::default() - }) - .exec(&*tx) - .await?; - Ok(()) - }) - .await - } - - /// hard delete the user. - pub async fn destroy_user(&self, id: UserId) -> Result<()> { - self.transaction(|tx| async move { - access_token::Entity::delete_many() - .filter(access_token::Column::UserId.eq(id)) - .exec(&*tx) - .await?; - user::Entity::delete_by_id(id).exec(&*tx).await?; - Ok(()) - }) - .await - } - /// Find users where github_login ILIKE name_query. pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result> { self.transaction(|tx| async { @@ -341,14 +274,4 @@ impl Database { result.push('%'); result } - - pub async fn get_users_missing_github_user_created_at(&self) -> Result> { - self.transaction(|tx| async move { - Ok(user::Entity::find() - .filter(user::Column::GithubUserCreatedAt.is_null()) - .all(&*tx) - .await?) - }) - .await - } } diff --git a/crates/collab/src/db/tests.rs b/crates/collab/src/db/tests.rs index 318fbc4e76d907f4798c069400ba92574c06e789..67c36576aac0f938bbc040202d7fa83e35af2d3b 100644 --- a/crates/collab/src/db/tests.rs +++ b/crates/collab/src/db/tests.rs @@ -6,7 +6,6 @@ mod db_tests; #[cfg(target_os = "macos")] mod embedding_tests; mod extension_tests; -mod user_tests; use crate::migrations::run_database_migrations; diff --git a/crates/collab/src/db/tests/db_tests.rs b/crates/collab/src/db/tests/db_tests.rs index def0769c373605021653b07a97cbff2ec807d34d..2f0bda1cc6e5b69c115d217a46a4061c328a784f 100644 --- a/crates/collab/src/db/tests/db_tests.rs +++ b/crates/collab/src/db/tests/db_tests.rs @@ -1,7 +1,7 @@ use super::*; use crate::test_both_dbs; use chrono::Utc; -use pretty_assertions::{assert_eq, assert_ne}; +use pretty_assertions::assert_eq; use std::sync::Arc; test_both_dbs!( @@ -457,53 +457,6 @@ async fn test_add_contacts(db: &Arc) { ); } -test_both_dbs!( - test_metrics_id, - test_metrics_id_postgres, - test_metrics_id_sqlite -); - -async fn test_metrics_id(db: &Arc) { - let NewUserResult { - user_id: user1, - metrics_id: metrics_id1, - .. - } = db - .create_user( - "person1@example.com", - None, - false, - NewUserParams { - github_login: "person1".into(), - github_user_id: 101, - }, - ) - .await - .unwrap(); - let NewUserResult { - user_id: user2, - metrics_id: metrics_id2, - .. 
- } = db - .create_user( - "person2@example.com", - None, - false, - NewUserParams { - github_login: "person2".into(), - github_user_id: 102, - }, - ) - .await - .unwrap(); - - assert_eq!(db.get_user_metrics_id(user1).await.unwrap(), metrics_id1); - assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id2); - assert_eq!(metrics_id1.len(), 36); - assert_eq!(metrics_id2.len(), 36); - assert_ne!(metrics_id1, metrics_id2); -} - test_both_dbs!( test_project_count, test_project_count_postgres, diff --git a/crates/collab/src/db/tests/user_tests.rs b/crates/collab/src/db/tests/user_tests.rs deleted file mode 100644 index dd61da55ca001a0424aaeafb0411f8a7de343795..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tests/user_tests.rs +++ /dev/null @@ -1,96 +0,0 @@ -use chrono::Utc; - -use crate::{ - db::{Database, NewUserParams}, - test_both_dbs, -}; -use std::sync::Arc; - -test_both_dbs!( - test_accepted_tos, - test_accepted_tos_postgres, - test_accepted_tos_sqlite -); - -async fn test_accepted_tos(db: &Arc) { - let user_id = db - .create_user( - "user1@example.com", - None, - false, - NewUserParams { - github_login: "user1".to_string(), - github_user_id: 1, - }, - ) - .await - .unwrap() - .user_id; - - let user = db.get_user_by_id(user_id).await.unwrap().unwrap(); - assert!(user.accepted_tos_at.is_none()); - - let accepted_tos_at = Utc::now().naive_utc(); - db.set_user_accepted_tos_at(user_id, Some(accepted_tos_at)) - .await - .unwrap(); - - let user = db.get_user_by_id(user_id).await.unwrap().unwrap(); - assert!(user.accepted_tos_at.is_some()); - assert_eq!(user.accepted_tos_at, Some(accepted_tos_at)); - - db.set_user_accepted_tos_at(user_id, None).await.unwrap(); - - let user = db.get_user_by_id(user_id).await.unwrap().unwrap(); - assert!(user.accepted_tos_at.is_none()); -} - -test_both_dbs!( - test_destroy_user_cascade_deletes_access_tokens, - test_destroy_user_cascade_deletes_access_tokens_postgres, - test_destroy_user_cascade_deletes_access_tokens_sqlite -); - -async fn test_destroy_user_cascade_deletes_access_tokens(db: &Arc) { - let user_id = db - .create_user( - "user1@example.com", - Some("user1"), - false, - NewUserParams { - github_login: "user1".to_string(), - github_user_id: 12345, - }, - ) - .await - .unwrap() - .user_id; - - let user = db.get_user_by_id(user_id).await.unwrap(); - assert!(user.is_some()); - - let token_1_id = db - .create_access_token(user_id, None, "token-1", 10) - .await - .unwrap(); - - let token_2_id = db - .create_access_token(user_id, None, "token-2", 10) - .await - .unwrap(); - - let token_1 = db.get_access_token(token_1_id).await; - let token_2 = db.get_access_token(token_2_id).await; - assert!(token_1.is_ok()); - assert!(token_2.is_ok()); - - db.destroy_user(user_id).await.unwrap(); - - let user = db.get_user_by_id(user_id).await.unwrap(); - assert!(user.is_none()); - - let token_1 = db.get_access_token(token_1_id).await; - let token_2 = db.get_access_token(token_2_id).await; - assert!(token_1.is_err()); - assert!(token_2.is_err()); -} From 378b30eba5da7b9131b4a1d5bcee5bf09ad567ef Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 10 Nov 2025 16:55:19 -0700 Subject: [PATCH 40/74] Use cloud.zed.dev for install.sh (#42399) Similar to #42246, we'd like to avoid having Vercel on the critical path. https://zed.dev/install.sh is served from Cloudflare by intercepting a route on that page, so this makes the shell-based install flow vercel independent. 
Release Notes: - `./script/install.sh` will now fetch assets via `https://cloud.zed.dev/` instead of `https://zed.dev`. As before it will redirect to GitHub releases to complete the download. --- docs/src/linux.md | 7 ++++--- script/get-released-version | 2 +- script/install.sh | 4 ++-- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/docs/src/linux.md b/docs/src/linux.md index 433891a3e461f6c20d4281c72f7b9ae10a459c03..715b3a1bab4b6d580886207b50f54b741f72e5c2 100644 --- a/docs/src/linux.md +++ b/docs/src/linux.md @@ -72,9 +72,10 @@ If you'd prefer, you can install Zed by downloading our pre-built .tar.gz. This Download the `.tar.gz` file: -- [zed-linux-x86_64.tar.gz](https://zed.dev/api/releases/stable/latest/zed-linux-x86_64.tar.gz) ([preview](https://zed.dev/api/releases/preview/latest/zed-linux-x86_64.tar.gz)) -- [zed-linux-aarch64.tar.gz](https://zed.dev/api/releases/stable/latest/zed-linux-aarch64.tar.gz) - ([preview](https://zed.dev/api/releases/preview/latest/zed-linux-aarch64.tar.gz)) +- [zed-linux-x86_64.tar.gz](https://cloud.zed.dev/releases/stable/latest/download?asset=zed&arch=x86_64&os=linux&source=docs) + ([preview](https://cloud.zed.dev/releases/preview/latest/download?asset=zed&arch=x86_64&os=linux&source=docs)) +- [zed-linux-aarch64.tar.gz](https://cloud.zed.dev/releases/stable/latest/download?asset=zed&arch=aarch64&os=linux&source=docs) + ([preview](https://cloud.zed.dev/releases/preview/latest/download?asset=zed&arch=aarch64&os=linux&source=docs)) Then ensure that the `zed` binary in the tarball is on your path. The easiest way is to unpack the tarball and create a symlink: diff --git a/script/get-released-version b/script/get-released-version index 547026d003e9ec896254fa0ab02830735fc1c61e..0fbb2e1757ab5034f94f97980f9618aa6f5a50e0 100755 --- a/script/get-released-version +++ b/script/get-released-version @@ -18,4 +18,4 @@ case $channel in ;; esac -curl -s "https://zed.dev/api/releases/latest?asset=zed&os=macos&arch=aarch64$query" | jq -r .version +curl -s "https://cloud.zed.dev/releases/$channel/latest/asset?asset=zed&os=macos&arch=aarch64" | jq -r .version diff --git a/script/install.sh b/script/install.sh index feb140c9843c680502fb07ced3f8be7fc8559136..0c2cfa1b74e9818a53cda785b3f431c46a0a0437 100755 --- a/script/install.sh +++ b/script/install.sh @@ -82,7 +82,7 @@ linux() { cp "$ZED_BUNDLE_PATH" "$temp/zed-linux-$arch.tar.gz" else echo "Downloading Zed" - curl "https://zed.dev/api/releases/$channel/latest/zed-linux-$arch.tar.gz" > "$temp/zed-linux-$arch.tar.gz" + curl "https://cloud.zed.dev/releases/$channel/latest/download?asset=zed&arch=$arch&os=linux&source=install.sh" > "$temp/zed-linux-$arch.tar.gz" fi suffix="" @@ -135,7 +135,7 @@ linux() { macos() { echo "Downloading Zed" - curl "https://zed.dev/api/releases/$channel/latest/Zed-$arch.dmg" > "$temp/Zed-$arch.dmg" + curl "https://cloud.zed.dev/releases/$channel/latest/download?asset=zed&os=macos&arch=$arch&source=install.sh" > "$temp/Zed-$arch.dmg" hdiutil attach -quiet "$temp/Zed-$arch.dmg" -mountpoint "$temp/mount" app="$(cd "$temp/mount/"; echo *.app)" echo "Installing $app" From 70bcf93355028596a37ab44cf1f9339e89e49ce1 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 10 Nov 2025 21:32:09 -0700 Subject: [PATCH 41/74] Add an `event_source` to events (#42125) Release Notes: - N/A --- crates/client/src/telemetry.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/crates/client/src/telemetry.rs b/crates/client/src/telemetry.rs index 
d2ec4726400df16fa10730b4d3516167640637fc..50bd4ace80341cd2616d5bc88d5ab2475e094b8e 100644 --- a/crates/client/src/telemetry.rs +++ b/crates/client/src/telemetry.rs @@ -435,7 +435,7 @@ impl Telemetry { Some(project_types) } - fn report_event(self: &Arc, event: Event) { + fn report_event(self: &Arc, mut event: Event) { let mut state = self.state.lock(); // RUST_LOG=telemetry=trace to debug telemetry events log::trace!(target: "telemetry", "{:?}", event); @@ -444,6 +444,12 @@ impl Telemetry { return; } + match &mut event { + Event::Flexible(event) => event + .event_properties + .insert("event_source".into(), "zed".into()), + }; + if state.flush_events_task.is_none() { let this = self.clone(); state.flush_events_task = Some(self.executor.spawn(async move { From 823844ef18e478992ccfc93235a64d7423b58c5a Mon Sep 17 00:00:00 2001 From: CnsMaple <92523839+CnsMaple@users.noreply.github.com> Date: Tue, 11 Nov 2025 12:48:27 +0800 Subject: [PATCH 42/74] vim: Fix increment order (#42256) before: https://github.com/user-attachments/assets/d490573c-4c2b-4645-a685-d683f06c611f after: https://github.com/user-attachments/assets/a69067a1-6e68-4f05-ba56-18eadb1c54df Release Notes: - Fix vim increment order --- crates/vim/src/normal/increment.rs | 284 +++++++++++++---------------- 1 file changed, 122 insertions(+), 162 deletions(-) diff --git a/crates/vim/src/normal/increment.rs b/crates/vim/src/normal/increment.rs index 4b27b4dfaf911c72458c9f412d5d0d2ba4cd70b8..888d9ff25b63fad2e7fc0cf6cf534bfb1a7aaf76 100644 --- a/crates/vim/src/normal/increment.rs +++ b/crates/vim/src/normal/increment.rs @@ -76,17 +76,18 @@ impl Vim { Point::new(row, snapshot.line_len(multi_buffer::MultiBufferRow(row))) }; - let number_result = if !selection.is_empty() { - find_number_in_range(&snapshot, start, end) + let find_result = if !selection.is_empty() { + find_target(&snapshot, start, end, true) } else { - find_number(&snapshot, start) + find_target(&snapshot, start, end, false) }; - if let Some((range, num, radix)) = number_result { + if let Some((range, target, radix)) = find_result { let replace = match radix { - 10 => increment_decimal_string(&num, delta), - 16 => increment_hex_string(&num, delta), - 2 => increment_binary_string(&num, delta), + 10 => increment_decimal_string(&target, delta), + 16 => increment_hex_string(&target, delta), + 2 => increment_binary_string(&target, delta), + 0 => increment_toggle_string(&target), _ => unreachable!(), }; delta += step as i64; @@ -94,13 +95,6 @@ impl Vim { if selection.is_empty() { new_anchors.push((false, snapshot.anchor_after(range.end))) } - } else if let Some((range, boolean)) = find_boolean(&snapshot, start) { - let replace = toggle_boolean(&boolean); - delta += step as i64; - edits.push((range.clone(), replace)); - if selection.is_empty() { - new_anchors.push((false, snapshot.anchor_after(range.end))) - } } else if selection.is_empty() { new_anchors.push((true, snapshot.anchor_after(start))) } @@ -200,83 +194,127 @@ fn increment_binary_string(num: &str, delta: i64) -> String { format!("{:0width$b}", result, width = num.len()) } -fn find_number_in_range( +fn find_target( snapshot: &MultiBufferSnapshot, start: Point, end: Point, + need_range: bool, ) -> Option<(Range, String, u32)> { let start_offset = start.to_offset(snapshot); let end_offset = end.to_offset(snapshot); let mut offset = start_offset; + let mut first_char_is_num = snapshot + .chars_at(offset) + .next() + .map_or(false, |ch| ch.is_ascii_hexdigit()); + let mut pre_char = String::new(); // Backward scan to find the start 
of the number, but stop at start_offset - for ch in snapshot.reversed_chars_at(offset) { - if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' { - if offset == 0 { - break; - } - offset -= ch.len_utf8(); - if offset < start_offset { - offset = start_offset; + for ch in snapshot.reversed_chars_at(offset + 1) { + // Search boundaries + if offset == 0 || ch.is_whitespace() || (need_range && offset <= start_offset) { + break; + } + + // Avoid the influence of hexadecimal letters + if first_char_is_num + && !ch.is_ascii_hexdigit() + && (ch != 'b' && ch != 'B') + && (ch != 'x' && ch != 'X') + && ch != '-' + { + // Used to determine if the initial character is a number. + if is_numeric_string(&pre_char) { break; + } else { + first_char_is_num = false; } - } else { - break; } + + pre_char.insert(0, ch); + offset -= ch.len_utf8(); } let mut begin = None; - let mut end_num = None; - let mut num = String::new(); + let mut end = None; + let mut target = String::new(); let mut radix = 10; + let mut is_num = false; let mut chars = snapshot.chars_at(offset).peekable(); while let Some(ch) = chars.next() { - if offset >= end_offset { + if need_range && offset >= end_offset { break; // stop at end of selection } - if num == "0" && ch == 'b' && chars.peek().is_some() && chars.peek().unwrap().is_digit(2) { + if target == "0" + && (ch == 'b' || ch == 'B') + && chars.peek().is_some() + && chars.peek().unwrap().is_digit(2) + { radix = 2; begin = None; - num = String::new(); - } else if num == "0" - && ch == 'x' + target = String::new(); + } else if target == "0" + && (ch == 'x' || ch == 'X') && chars.peek().is_some() && chars.peek().unwrap().is_ascii_hexdigit() { radix = 16; begin = None; - num = String::new(); - } - - if ch.is_digit(radix) - || (begin.is_none() + target = String::new(); + } else if ch == '.' 
{ + is_num = false; + begin = None; + target = String::new(); + } else if ch.is_digit(radix) + || ((begin.is_none() || !is_num) && ch == '-' && chars.peek().is_some() && chars.peek().unwrap().is_digit(radix)) { + if !is_num { + is_num = true; + begin = Some(offset); + target = String::new(); + } else if begin.is_none() { + begin = Some(offset); + } + target.push(ch); + } else if ch.is_ascii_alphabetic() && !is_num { if begin.is_none() { begin = Some(offset); } - num.push(ch); - } else if begin.is_some() { - end_num = Some(offset); + target.push(ch); + } else if begin.is_some() && (is_num || !is_num && is_toggle_word(&target)) { + // End of matching + end = Some(offset); break; } else if ch == '\n' { break; + } else { + // To match the next word + is_num = false; + begin = None; + target = String::new(); } offset += ch.len_utf8(); } - if let Some(begin) = begin { - let end_num = end_num.unwrap_or(offset); + if let Some(begin) = begin + && (is_num || !is_num && is_toggle_word(&target)) + { + if !is_num { + radix = 0; + } + + let end = end.unwrap_or(offset); Some(( - begin.to_point(snapshot)..end_num.to_point(snapshot), - num, + begin.to_point(snapshot)..end.to_point(snapshot), + target, radix, )) } else { @@ -284,133 +322,38 @@ fn find_number_in_range( } } -fn find_number( - snapshot: &MultiBufferSnapshot, - start: Point, -) -> Option<(Range, String, u32)> { - let mut offset = start.to_offset(snapshot); - - let ch0 = snapshot.chars_at(offset).next(); - if ch0.as_ref().is_some_and(char::is_ascii_hexdigit) || matches!(ch0, Some('-' | 'b' | 'x')) { - // go backwards to the start of any number the selection is within - for ch in snapshot.reversed_chars_at(offset) { - if ch.is_ascii_hexdigit() || ch == '-' || ch == 'b' || ch == 'x' { - offset -= ch.len_utf8(); - continue; - } - break; - } +fn is_numeric_string(s: &str) -> bool { + if s.is_empty() { + return false; } - let mut begin = None; - let mut end = None; - let mut num = String::new(); - let mut radix = 10; - - let mut chars = snapshot.chars_at(offset).peekable(); - // find the next number on the line (may start after the original cursor position) - while let Some(ch) = chars.next() { - if num == "0" && ch == 'b' && chars.peek().is_some() && chars.peek().unwrap().is_digit(2) { - radix = 2; - begin = None; - num = String::new(); - } - if num == "0" - && ch == 'x' - && chars.peek().is_some() - && chars.peek().unwrap().is_ascii_hexdigit() - { - radix = 16; - begin = None; - num = String::new(); - } + let (_, rest) = if let Some(r) = s.strip_prefix('-') { + (true, r) + } else { + (false, s) + }; - if ch.is_digit(radix) - || (begin.is_none() - && ch == '-' - && chars.peek().is_some() - && chars.peek().unwrap().is_digit(radix)) - { - if begin.is_none() { - begin = Some(offset); - } - num.push(ch); - } else if begin.is_some() { - end = Some(offset); - break; - } else if ch == '\n' { - break; - } - offset += ch.len_utf8(); + if rest.is_empty() { + return false; } - if let Some(begin) = begin { - let end = end.unwrap_or(offset); - Some((begin.to_point(snapshot)..end.to_point(snapshot), num, radix)) + + if let Some(digits) = rest.strip_prefix("0b").or_else(|| rest.strip_prefix("0B")) { + digits.is_empty() || digits.chars().all(|c| c == '0' || c == '1') + } else if let Some(digits) = rest.strip_prefix("0x").or_else(|| rest.strip_prefix("0X")) { + digits.is_empty() || digits.chars().all(|c| c.is_ascii_hexdigit()) } else { - None + !rest.is_empty() && rest.chars().all(|c| c.is_ascii_digit()) } } -fn find_boolean(snapshot: &MultiBufferSnapshot, start: 
Point) -> Option<(Range, String)> { - let mut offset = start.to_offset(snapshot); - - let ch0 = snapshot.chars_at(offset).next(); - if ch0.as_ref().is_some_and(|c| c.is_ascii_alphabetic()) { - for ch in snapshot.reversed_chars_at(offset) { - if ch.is_ascii_alphabetic() { - offset -= ch.len_utf8(); - continue; - } - break; - } - } - - let mut begin = None; - let mut end = None; - let mut word = String::new(); - - let chars = snapshot.chars_at(offset); - - for ch in chars { - if ch.is_ascii_alphabetic() { - if begin.is_none() { - begin = Some(offset); - } - word.push(ch); - } else if begin.is_some() { - end = Some(offset); - let word_lower = word.to_lowercase(); - if BOOLEAN_PAIRS - .iter() - .any(|(a, b)| word_lower == *a || word_lower == *b) - { - return Some(( - begin.unwrap().to_point(snapshot)..end.unwrap().to_point(snapshot), - word, - )); - } - begin = None; - end = None; - word = String::new(); - } else if ch == '\n' { - break; - } - offset += ch.len_utf8(); - } - if let Some(begin) = begin { - let end = end.unwrap_or(offset); - let word_lower = word.to_lowercase(); - if BOOLEAN_PAIRS - .iter() - .any(|(a, b)| word_lower == *a || word_lower == *b) - { - return Some((begin.to_point(snapshot)..end.to_point(snapshot), word)); - } - } - None +fn is_toggle_word(word: &str) -> bool { + let lower = word.to_lowercase(); + BOOLEAN_PAIRS + .iter() + .any(|(a, b)| lower == *a || lower == *b) } -fn toggle_boolean(boolean: &str) -> String { +fn increment_toggle_string(boolean: &str) -> String { let lower = boolean.to_lowercase(); let target = BOOLEAN_PAIRS @@ -802,7 +745,7 @@ mod test { } #[gpui::test] - async fn test_toggle_boolean(cx: &mut gpui::TestAppContext) { + async fn test_increment_toggle(cx: &mut gpui::TestAppContext) { let mut cx = VimTestContext::new(cx, true).await; cx.set_state("let enabled = trˇue;", Mode::Normal); @@ -860,6 +803,23 @@ mod test { cx.assert_state("let enabled = ˇOff;", Mode::Normal); } + #[gpui::test] + async fn test_increment_order(cx: &mut gpui::TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state("aaˇa false 1 2 3", Mode::Normal); + cx.simulate_keystrokes("ctrl-a"); + cx.assert_state("aaa truˇe 1 2 3", Mode::Normal); + + cx.set_state("aaˇa 1 false 2 3", Mode::Normal); + cx.simulate_keystrokes("ctrl-a"); + cx.assert_state("aaa ˇ2 false 2 3", Mode::Normal); + + cx.set_state("trueˇ 1 2 3", Mode::Normal); + cx.simulate_keystrokes("ctrl-a"); + cx.assert_state("true ˇ2 2 3", Mode::Normal); + } + #[gpui::test] async fn test_increment_visual_partial_number(cx: &mut gpui::TestAppContext) { let mut cx = NeovimBackedTestContext::new(cx).await; From 9e717c771168f6368b7a63acee5b0adf025677c3 Mon Sep 17 00:00:00 2001 From: Conrad Irwin Date: Mon, 10 Nov 2025 23:00:55 -0700 Subject: [PATCH 43/74] Use cloud for auto-update (#42246) We've had several outages with a proximate cause of "vercel is complicated", and auto-update is considered a critical feature; so lets not use vercel for that. Release Notes: - Auto Updates (and remote server binaries) are now downloaded via https://cloud.zed.dev instead of https://zed.dev. As before, these URLs redirect to the GitHub release for actual downloads. 
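A rough, standalone sketch of the new request/response shape, mirroring the `AssetQuery` and `ReleaseAsset` types introduced below (in Zed itself the URL is built through `HttpClientWithUrl::build_zed_cloud_url_with_query` and the telemetry plumbing is more involved; this example only assumes `serde`, `serde_json`, and `serde_urlencoded`, all of which this patch already depends on, and the JSON body is made up for illustration):

```rust
use serde::{Deserialize, Serialize};

// Mirrors the query/response types added in this patch.
#[derive(Serialize)]
struct AssetQuery<'a> {
    asset: &'a str,
    os: &'a str,
    arch: &'a str,
    metrics_id: Option<&'a str>,
    system_id: Option<&'a str>,
    is_staff: Option<bool>,
}

#[derive(Deserialize, Debug)]
struct ReleaseAsset {
    version: String,
    url: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let query = AssetQuery {
        asset: "zed",
        os: "macos",
        arch: "aarch64",
        metrics_id: None,
        system_id: None,
        is_staff: None,
    };
    // serde_urlencoded skips the `None` fields, so clients with telemetry
    // disabled send no identifiers at all.
    let url = format!(
        "https://cloud.zed.dev/releases/stable/latest/asset?{}",
        serde_urlencoded::to_string(&query)?
    );
    println!("GET {url}");

    // Illustrative response body: the endpoint answers with the resolved
    // version and a download URL that redirects to the GitHub release.
    let body = r#"{"version":"0.162.0","url":"https://cloud.zed.dev/releases/stable/0.162.0/download?asset=zed&os=macos&arch=aarch64"}"#;
    let release: ReleaseAsset = serde_json::from_str(body)?;
    println!("latest = {}, download from {}", release.version, release.url);
    Ok(())
}
```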
--- Cargo.lock | 6 + crates/auto_update/Cargo.toml | 5 + crates/auto_update/src/auto_update.rs | 438 +++++++++++------- crates/client/src/client.rs | 2 +- .../cloud_api_client/src/cloud_api_client.rs | 8 +- crates/gpui/src/app/test_context.rs | 11 +- crates/gpui/src/platform/test/platform.rs | 8 +- crates/http_client/Cargo.toml | 1 + crates/http_client/src/http_client.rs | 21 +- .../recent_projects/src/remote_connections.rs | 12 +- crates/release_channel/src/lib.rs | 6 + crates/remote/src/remote_client.rs | 8 +- crates/remote/src/transport/ssh.rs | 22 +- crates/zed/src/main.rs | 2 +- script/bundle-mac | 22 +- 15 files changed, 350 insertions(+), 222 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index faae1259d9d5c08559ec6ba02463367e84b3aa4d..bee290f2f17ffba973d432272c91344b8caa99f3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1330,10 +1330,14 @@ version = "0.1.0" dependencies = [ "anyhow", "client", + "clock", + "ctor", "db", + "futures 0.3.31", "gpui", "http_client", "log", + "parking_lot", "paths", "release_channel", "serde", @@ -1344,6 +1348,7 @@ dependencies = [ "util", "which 6.0.3", "workspace", + "zlog", ] [[package]] @@ -7799,6 +7804,7 @@ dependencies = [ "parking_lot", "serde", "serde_json", + "serde_urlencoded", "sha2", "tempfile", "url", diff --git a/crates/auto_update/Cargo.toml b/crates/auto_update/Cargo.toml index 630be043dca120ca76b2552f0a729a03a684f934..ae7c869493d8ca33528800f91c446e9546c952d0 100644 --- a/crates/auto_update/Cargo.toml +++ b/crates/auto_update/Cargo.toml @@ -33,4 +33,9 @@ workspace.workspace = true which.workspace = true [dev-dependencies] +ctor.workspace = true +clock= { workspace = true, "features" = ["test-support"] } +futures.workspace = true gpui = { workspace = true, "features" = ["test-support"] } +parking_lot.workspace = true +zlog.workspace = true diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index 1997beaf11cb2b1d29cc759c5e5f8a6ad6f51eb8..bd44eb714c08f9a5c698e92570a9edb518c5c806 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -1,12 +1,11 @@ use anyhow::{Context as _, Result}; -use client::{Client, TelemetrySettings}; -use db::RELEASE_CHANNEL; +use client::Client; use db::kvp::KEY_VALUE_STORE; use gpui::{ App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, SemanticVersion, Task, Window, actions, }; -use http_client::{AsyncBody, HttpClient, HttpClientWithUrl}; +use http_client::{HttpClient, HttpClientWithUrl}; use paths::remote_servers_dir; use release_channel::{AppCommitSha, ReleaseChannel}; use serde::{Deserialize, Serialize}; @@ -41,22 +40,23 @@ actions!( ] ); -#[derive(Serialize)] -struct UpdateRequestBody { - installation_id: Option>, - release_channel: Option<&'static str>, - telemetry: bool, - is_staff: Option, - destination: &'static str, -} - #[derive(Clone, Debug, PartialEq, Eq)] pub enum VersionCheckType { Sha(AppCommitSha), Semantic(SemanticVersion), } -#[derive(Clone)] +#[derive(Serialize, Debug)] +pub struct AssetQuery<'a> { + asset: &'a str, + os: &'a str, + arch: &'a str, + metrics_id: Option<&'a str>, + system_id: Option<&'a str>, + is_staff: Option, +} + +#[derive(Clone, Debug)] pub enum AutoUpdateStatus { Idle, Checking, @@ -66,6 +66,31 @@ pub enum AutoUpdateStatus { Errored { error: Arc }, } +impl PartialEq for AutoUpdateStatus { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (AutoUpdateStatus::Idle, AutoUpdateStatus::Idle) => true, + (AutoUpdateStatus::Checking, AutoUpdateStatus::Checking) 
=> true, + ( + AutoUpdateStatus::Downloading { version: v1 }, + AutoUpdateStatus::Downloading { version: v2 }, + ) => v1 == v2, + ( + AutoUpdateStatus::Installing { version: v1 }, + AutoUpdateStatus::Installing { version: v2 }, + ) => v1 == v2, + ( + AutoUpdateStatus::Updated { version: v1 }, + AutoUpdateStatus::Updated { version: v2 }, + ) => v1 == v2, + (AutoUpdateStatus::Errored { error: e1 }, AutoUpdateStatus::Errored { error: e2 }) => { + e1.to_string() == e2.to_string() + } + _ => false, + } + } +} + impl AutoUpdateStatus { pub fn is_updated(&self) -> bool { matches!(self, Self::Updated { .. }) @@ -75,13 +100,13 @@ impl AutoUpdateStatus { pub struct AutoUpdater { status: AutoUpdateStatus, current_version: SemanticVersion, - http_client: Arc, + client: Arc, pending_poll: Option>>, quit_subscription: Option, } -#[derive(Deserialize, Clone, Debug)] -pub struct JsonRelease { +#[derive(Deserialize, Serialize, Clone, Debug)] +pub struct ReleaseAsset { pub version: String, pub url: String, } @@ -137,7 +162,7 @@ struct GlobalAutoUpdate(Option>); impl Global for GlobalAutoUpdate {} -pub fn init(http_client: Arc, cx: &mut App) { +pub fn init(client: Arc, cx: &mut App) { cx.observe_new(|workspace: &mut Workspace, _window, _cx| { workspace.register_action(|_, action, window, cx| check(action, window, cx)); @@ -149,7 +174,7 @@ pub fn init(http_client: Arc, cx: &mut App) { let version = release_channel::AppVersion::global(cx); let auto_updater = cx.new(|cx| { - let updater = AutoUpdater::new(version, http_client, cx); + let updater = AutoUpdater::new(version, client, cx); let poll_for_updates = ReleaseChannel::try_global(cx) .map(|channel| channel.poll_for_updates()) @@ -233,7 +258,7 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut App) -> Option<()> { let current_version = auto_updater.current_version; let release_channel = release_channel.dev_name(); let path = format!("/releases/{release_channel}/{current_version}"); - let url = &auto_updater.http_client.build_url(&path); + let url = &auto_updater.client.http_client().build_url(&path); cx.open_url(url); } ReleaseChannel::Nightly => { @@ -296,11 +321,7 @@ impl AutoUpdater { cx.default_global::().0.clone() } - fn new( - current_version: SemanticVersion, - http_client: Arc, - cx: &mut Context, - ) -> Self { + fn new(current_version: SemanticVersion, client: Arc, cx: &mut Context) -> Self { // On windows, executable files cannot be overwritten while they are // running, so we must wait to overwrite the application until quitting // or restarting. When quitting the app, we spawn the auto update helper @@ -321,7 +342,7 @@ impl AutoUpdater { Self { status: AutoUpdateStatus::Idle, current_version, - http_client, + client, pending_poll: None, quit_subscription, } @@ -354,7 +375,7 @@ impl AutoUpdater { cx.notify(); self.pending_poll = Some(cx.spawn(async move |this, cx| { - let result = Self::update(this.upgrade()?, cx.clone()).await; + let result = Self::update(this.upgrade()?, cx).await; this.update(cx, |this, cx| { this.pending_poll = None; if let Err(error) = result { @@ -400,10 +421,10 @@ impl AutoUpdater { // you can override this function. You should also update get_remote_server_release_url to return // Ok(None). 
pub async fn download_remote_server_release( - os: &str, - arch: &str, release_channel: ReleaseChannel, version: Option, + os: &str, + arch: &str, set_status: impl Fn(&str, &mut AsyncApp) + Send + 'static, cx: &mut AsyncApp, ) -> Result { @@ -415,13 +436,13 @@ impl AutoUpdater { })??; set_status("Fetching remote server release", cx); - let release = Self::get_release( + let release = Self::get_release_asset( &this, + release_channel, + version, "zed-remote-server", os, arch, - version, - Some(release_channel), cx, ) .await?; @@ -432,7 +453,7 @@ impl AutoUpdater { let version_path = platform_dir.join(format!("{}.gz", release.version)); smol::fs::create_dir_all(&platform_dir).await.ok(); - let client = this.read_with(cx, |this, _| this.http_client.clone())?; + let client = this.read_with(cx, |this, _| this.client.http_client())?; if smol::fs::metadata(&version_path).await.is_err() { log::info!( @@ -440,19 +461,19 @@ impl AutoUpdater { release.version ); set_status("Downloading remote server", cx); - download_remote_server_binary(&version_path, release, client, cx).await?; + download_remote_server_binary(&version_path, release, client).await?; } Ok(version_path) } pub async fn get_remote_server_release_url( + channel: ReleaseChannel, + version: Option, os: &str, arch: &str, - release_channel: ReleaseChannel, - version: Option, cx: &mut AsyncApp, - ) -> Result> { + ) -> Result> { let this = cx.update(|cx| { cx.default_global::() .0 @@ -460,108 +481,99 @@ impl AutoUpdater { .context("auto-update not initialized") })??; - let release = Self::get_release( - &this, - "zed-remote-server", - os, - arch, - version, - Some(release_channel), - cx, - ) - .await?; - - let update_request_body = build_remote_server_update_request_body(cx)?; - let body = serde_json::to_string(&update_request_body)?; + let release = + Self::get_release_asset(&this, channel, version, "zed-remote-server", os, arch, cx) + .await?; - Ok(Some((release.url, body))) + Ok(Some(release.url)) } - async fn get_release( + async fn get_release_asset( this: &Entity, + release_channel: ReleaseChannel, + version: Option, asset: &str, os: &str, arch: &str, - version: Option, - release_channel: Option, cx: &mut AsyncApp, - ) -> Result { - let client = this.read_with(cx, |this, _| this.http_client.clone())?; - - if let Some(version) = version { - let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable"); - - let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",); - - Ok(JsonRelease { - version: version.to_string(), - url: client.build_url(&url), - }) + ) -> Result { + let client = this.read_with(cx, |this, _| this.client.clone())?; + + let (system_id, metrics_id, is_staff) = if client.telemetry().metrics_enabled() { + ( + client.telemetry().system_id(), + client.telemetry().metrics_id(), + client.telemetry().is_staff(), + ) } else { - let mut url_string = client.build_url(&format!( - "/api/releases/latest?asset={}&os={}&arch={}", - asset, os, arch - )); - if let Some(param) = release_channel.and_then(|c| c.release_query_param()) { - url_string += "&"; - url_string += param; - } + (None, None, None) + }; - let mut response = client.get(&url_string, Default::default(), true).await?; - let mut body = Vec::new(); - response.body_mut().read_to_end(&mut body).await?; + let version = if let Some(version) = version { + version.to_string() + } else { + "latest".to_string() + }; + let http_client = client.http_client(); + + let path = format!("/releases/{}/{}/asset", release_channel.dev_name(), version,); + 
let url = http_client.build_zed_cloud_url_with_query( + &path, + AssetQuery { + os, + arch, + asset, + metrics_id: metrics_id.as_deref(), + system_id: system_id.as_deref(), + is_staff: is_staff, + }, + )?; - anyhow::ensure!( - response.status().is_success(), - "failed to fetch release: {:?}", - String::from_utf8_lossy(&body), - ); + let mut response = http_client + .get(url.as_str(), Default::default(), true) + .await?; + let mut body = Vec::new(); + response.body_mut().read_to_end(&mut body).await?; - serde_json::from_slice(body.as_slice()).with_context(|| { - format!( - "error deserializing release {:?}", - String::from_utf8_lossy(&body), - ) - }) - } - } + anyhow::ensure!( + response.status().is_success(), + "failed to fetch release: {:?}", + String::from_utf8_lossy(&body), + ); - async fn get_latest_release( - this: &Entity, - asset: &str, - os: &str, - arch: &str, - release_channel: Option, - cx: &mut AsyncApp, - ) -> Result { - Self::get_release(this, asset, os, arch, None, release_channel, cx).await + serde_json::from_slice(body.as_slice()).with_context(|| { + format!( + "error deserializing release {:?}", + String::from_utf8_lossy(&body), + ) + }) } - async fn update(this: Entity, mut cx: AsyncApp) -> Result<()> { + async fn update(this: Entity, cx: &mut AsyncApp) -> Result<()> { let (client, installed_version, previous_status, release_channel) = - this.read_with(&cx, |this, cx| { + this.read_with(cx, |this, cx| { ( - this.http_client.clone(), + this.client.http_client(), this.current_version, this.status.clone(), - ReleaseChannel::try_global(cx), + ReleaseChannel::try_global(cx).unwrap_or(ReleaseChannel::Stable), ) })?; Self::check_dependencies()?; - this.update(&mut cx, |this, cx| { + this.update(cx, |this, cx| { this.status = AutoUpdateStatus::Checking; log::info!("Auto Update: checking for updates"); cx.notify(); })?; let fetched_release_data = - Self::get_latest_release(&this, "zed", OS, ARCH, release_channel, &mut cx).await?; + Self::get_release_asset(&this, release_channel, None, "zed", OS, ARCH, cx).await?; let fetched_version = fetched_release_data.clone().version; let app_commit_sha = cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.full())); let newer_version = Self::check_if_fetched_version_is_newer( - *RELEASE_CHANNEL, + release_channel, app_commit_sha, installed_version, fetched_version, @@ -569,7 +581,7 @@ impl AutoUpdater { )?; let Some(newer_version) = newer_version else { - return this.update(&mut cx, |this, cx| { + return this.update(cx, |this, cx| { let status = match previous_status { AutoUpdateStatus::Updated { .. 
} => previous_status, _ => AutoUpdateStatus::Idle, @@ -579,7 +591,7 @@ impl AutoUpdater { }); }; - this.update(&mut cx, |this, cx| { + this.update(cx, |this, cx| { this.status = AutoUpdateStatus::Downloading { version: newer_version.clone(), }; @@ -588,21 +600,21 @@ impl AutoUpdater { let installer_dir = InstallerDir::new().await?; let target_path = Self::target_path(&installer_dir).await?; - download_release(&target_path, fetched_release_data, client, &cx).await?; + download_release(&target_path, fetched_release_data, client).await?; - this.update(&mut cx, |this, cx| { + this.update(cx, |this, cx| { this.status = AutoUpdateStatus::Installing { version: newer_version.clone(), }; cx.notify(); })?; - let new_binary_path = Self::install_release(installer_dir, target_path, &cx).await?; + let new_binary_path = Self::install_release(installer_dir, target_path, cx).await?; if let Some(new_binary_path) = new_binary_path { cx.update(|cx| cx.set_restart_path(new_binary_path))?; } - this.update(&mut cx, |this, cx| { + this.update(cx, |this, cx| { this.set_should_show_update_notification(true, cx) .detach_and_log_err(cx); this.status = AutoUpdateStatus::Updated { @@ -681,6 +693,12 @@ impl AutoUpdater { target_path: PathBuf, cx: &AsyncApp, ) -> Result> { + #[cfg(test)] + if let Some(test_install) = + cx.try_read_global::(|g, _| g.0.clone()) + { + return test_install(target_path, cx); + } match OS { "macos" => install_release_macos(&installer_dir, target_path, cx).await, "linux" => install_release_linux(&installer_dir, target_path, cx).await, @@ -731,16 +749,13 @@ impl AutoUpdater { async fn download_remote_server_binary( target_path: &PathBuf, - release: JsonRelease, + release: ReleaseAsset, client: Arc, - cx: &AsyncApp, ) -> Result<()> { let temp = tempfile::Builder::new().tempfile_in(remote_servers_dir())?; let mut temp_file = File::create(&temp).await?; - let update_request_body = build_remote_server_update_request_body(cx)?; - let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?); - let mut response = client.get(&release.url, request_body, true).await?; + let mut response = client.get(&release.url, Default::default(), true).await?; anyhow::ensure!( response.status().is_success(), "failed to download remote server release: {:?}", @@ -752,65 +767,19 @@ async fn download_remote_server_binary( Ok(()) } -fn build_remote_server_update_request_body(cx: &AsyncApp) -> Result { - let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| { - let telemetry = Client::global(cx).telemetry().clone(); - let is_staff = telemetry.is_staff(); - let installation_id = telemetry.installation_id(); - let release_channel = - ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name()); - let telemetry_enabled = TelemetrySettings::get_global(cx).metrics; - - ( - installation_id, - release_channel, - telemetry_enabled, - is_staff, - ) - })?; - - Ok(UpdateRequestBody { - installation_id, - release_channel, - telemetry: telemetry_enabled, - is_staff, - destination: "remote", - }) -} - async fn download_release( target_path: &Path, - release: JsonRelease, + release: ReleaseAsset, client: Arc, - cx: &AsyncApp, ) -> Result<()> { let mut target_file = File::create(&target_path).await?; - let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| { - let telemetry = Client::global(cx).telemetry().clone(); - let is_staff = telemetry.is_staff(); - let installation_id = telemetry.installation_id(); - let release_channel = - 
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name()); - let telemetry_enabled = TelemetrySettings::get_global(cx).metrics; - - ( - installation_id, - release_channel, - telemetry_enabled, - is_staff, - ) - })?; - - let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody { - installation_id, - release_channel, - telemetry: telemetry_enabled, - is_staff, - destination: "local", - })?); - - let mut response = client.get(&release.url, request_body, true).await?; + let mut response = client.get(&release.url, Default::default(), true).await?; + anyhow::ensure!( + response.status().is_success(), + "failed to download update: {:?}", + response.status() + ); smol::io::copy(response.body_mut(), &mut target_file).await?; log::info!("downloaded update. path:{:?}", target_path); @@ -1010,11 +979,33 @@ pub async fn finalize_auto_update_on_quit() { #[cfg(test)] mod tests { + use client::Client; + use clock::FakeSystemClock; + use futures::channel::oneshot; use gpui::TestAppContext; + use http_client::{FakeHttpClient, Response}; use settings::default_settings; + use std::{ + rc::Rc, + sync::{ + Arc, + atomic::{self, AtomicBool}, + }, + }; + use tempfile::tempdir; + + #[ctor::ctor] + fn init_logger() { + zlog::init_test(); + } use super::*; + pub(super) struct InstallOverride( + pub Rc Result>>, + ); + impl Global for InstallOverride {} + #[gpui::test] fn test_auto_update_defaults_to_true(cx: &mut TestAppContext) { cx.update(|cx| { @@ -1030,6 +1021,115 @@ mod tests { }); } + #[gpui::test] + async fn test_auto_update_downloads(cx: &mut TestAppContext) { + cx.background_executor.allow_parking(); + zlog::init_test(); + let release_available = Arc::new(AtomicBool::new(false)); + + let (dmg_tx, dmg_rx) = oneshot::channel::(); + + cx.update(|cx| { + settings::init(cx); + + let current_version = SemanticVersion::new(0, 100, 0); + release_channel::init_test(current_version, ReleaseChannel::Stable, cx); + + let clock = Arc::new(FakeSystemClock::new()); + let release_available = Arc::clone(&release_available); + let dmg_rx = Arc::new(parking_lot::Mutex::new(Some(dmg_rx))); + let fake_client_http = FakeHttpClient::create(move |req| { + let release_available = release_available.load(atomic::Ordering::Relaxed); + let dmg_rx = dmg_rx.clone(); + async move { + if req.uri().path() == "/releases/stable/latest/asset" { + if release_available { + return Ok(Response::builder().status(200).body( + r#"{"version":"0.100.1","url":"https://test.example/new-download"}"#.into() + ).unwrap()); + } else { + return Ok(Response::builder().status(200).body( + r#"{"version":"0.100.0","url":"https://test.example/old-download"}"#.into() + ).unwrap()); + } + } else if req.uri().path() == "/new-download" { + return Ok(Response::builder().status(200).body({ + let dmg_rx = dmg_rx.lock().take().unwrap(); + dmg_rx.await.unwrap().into() + }).unwrap()); + } + Ok(Response::builder().status(404).body("".into()).unwrap()) + } + }); + let client = Client::new(clock, fake_client_http, cx); + crate::init(client, cx); + }); + + let auto_updater = cx.update(|cx| AutoUpdater::get(cx).expect("auto updater should exist")); + + cx.background_executor.run_until_parked(); + + auto_updater.read_with(cx, |updater, _| { + assert_eq!(updater.status(), AutoUpdateStatus::Idle); + assert_eq!(updater.current_version(), SemanticVersion::new(0, 100, 0)); + }); + + release_available.store(true, atomic::Ordering::SeqCst); + cx.background_executor.advance_clock(POLL_INTERVAL); + cx.background_executor.run_until_parked(); + 
+ loop { + cx.background_executor.timer(Duration::from_millis(0)).await; + cx.run_until_parked(); + let status = auto_updater.read_with(cx, |updater, _| updater.status()); + if !matches!(status, AutoUpdateStatus::Idle) { + break; + } + } + let status = auto_updater.read_with(cx, |updater, _| updater.status()); + assert_eq!( + status, + AutoUpdateStatus::Downloading { + version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1)) + } + ); + + dmg_tx.send("".to_owned()).unwrap(); + + let tmp_dir = Arc::new(tempdir().unwrap()); + + cx.update(|cx| { + let tmp_dir = tmp_dir.clone(); + cx.set_global(InstallOverride(Rc::new(move |target_path, _cx| { + let tmp_dir = tmp_dir.clone(); + let dest_path = tmp_dir.path().join("zed"); + std::fs::copy(&target_path, &dest_path)?; + Ok(Some(dest_path)) + }))); + }); + + loop { + cx.background_executor.timer(Duration::from_millis(0)).await; + cx.run_until_parked(); + let status = auto_updater.read_with(cx, |updater, _| updater.status()); + if !matches!(status, AutoUpdateStatus::Downloading { .. }) { + break; + } + } + let status = auto_updater.read_with(cx, |updater, _| updater.status()); + assert_eq!( + status, + AutoUpdateStatus::Updated { + version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1)) + } + ); + let will_restart = cx.expect_restart(); + cx.update(|cx| cx.restart()); + let path = will_restart.await.unwrap().unwrap(); + assert_eq!(path, tmp_dir.path().join("zed")); + assert_eq!(std::fs::read_to_string(path).unwrap(), ""); + } + #[test] fn test_stable_does_not_update_when_fetched_version_is_not_higher() { let release_channel = ReleaseChannel::Stable; diff --git a/crates/client/src/client.rs b/crates/client/src/client.rs index 3e220b0275270c04098ebb8cf3f5564cc3ca0342..96b15dc9fb13deea3cdc706f1927c4d6f016b57a 100644 --- a/crates/client/src/client.rs +++ b/crates/client/src/client.rs @@ -1487,7 +1487,7 @@ impl Client { let url = self .http - .build_zed_cloud_url("/internal/users/impersonate", &[])?; + .build_zed_cloud_url("/internal/users/impersonate")?; let request = Request::post(url.as_str()) .header("Content-Type", "application/json") .header("Authorization", format!("Bearer {api_token}")) diff --git a/crates/cloud_api_client/src/cloud_api_client.rs b/crates/cloud_api_client/src/cloud_api_client.rs index 53b2b16a6a7c9447face6daa199bc4b2125445b9..9206e5e7efe51e99e4d57b708f09c682283612ed 100644 --- a/crates/cloud_api_client/src/cloud_api_client.rs +++ b/crates/cloud_api_client/src/cloud_api_client.rs @@ -62,7 +62,7 @@ impl CloudApiClient { let request = self.build_request( Request::builder().method(Method::GET).uri( self.http_client - .build_zed_cloud_url("/client/users/me", &[])? + .build_zed_cloud_url("/client/users/me")? .as_ref(), ), AsyncBody::default(), @@ -89,7 +89,7 @@ impl CloudApiClient { pub fn connect(&self, cx: &App) -> Result>> { let mut connect_url = self .http_client - .build_zed_cloud_url("/client/users/connect", &[])?; + .build_zed_cloud_url("/client/users/connect")?; connect_url .set_scheme(match connect_url.scheme() { "https" => "wss", @@ -123,7 +123,7 @@ impl CloudApiClient { .method(Method::POST) .uri( self.http_client - .build_zed_cloud_url("/client/llm_tokens", &[])? + .build_zed_cloud_url("/client/llm_tokens")? .as_ref(), ) .when_some(system_id, |builder, system_id| { @@ -154,7 +154,7 @@ impl CloudApiClient { let request = build_request( Request::builder().method(Method::GET).uri( self.http_client - .build_zed_cloud_url("/client/users/me", &[])? + .build_zed_cloud_url("/client/users/me")? 
.as_ref(), ), AsyncBody::default(), diff --git a/crates/gpui/src/app/test_context.rs b/crates/gpui/src/app/test_context.rs index d974823396d9f0d546a6b035f47b569145eb021b..4a7b73c359ed3dd55b136b22e9487dee1735e42e 100644 --- a/crates/gpui/src/app/test_context.rs +++ b/crates/gpui/src/app/test_context.rs @@ -10,7 +10,9 @@ use crate::{ use anyhow::{anyhow, bail}; use futures::{Stream, StreamExt, channel::oneshot}; use rand::{SeedableRng, rngs::StdRng}; -use std::{cell::RefCell, future::Future, ops::Deref, rc::Rc, sync::Arc, time::Duration}; +use std::{ + cell::RefCell, future::Future, ops::Deref, path::PathBuf, rc::Rc, sync::Arc, time::Duration, +}; /// A TestAppContext is provided to tests created with `#[gpui::test]`, it provides /// an implementation of `Context` with additional methods that are useful in tests. @@ -331,6 +333,13 @@ impl TestAppContext { self.test_window(window_handle).simulate_resize(size); } + /// Returns true if there's an alert dialog open. + pub fn expect_restart(&self) -> oneshot::Receiver> { + let (tx, rx) = futures::channel::oneshot::channel(); + self.test_platform.expect_restart.borrow_mut().replace(tx); + rx + } + /// Causes the given sources to be returned if the application queries for screen /// capture sources. pub fn set_screen_capture_sources(&self, sources: Vec) { diff --git a/crates/gpui/src/platform/test/platform.rs b/crates/gpui/src/platform/test/platform.rs index 15b909199fbd53b974e6a140f3223641dc0ac6ae..dfada364667989792325e02f8530e6c91bdf4716 100644 --- a/crates/gpui/src/platform/test/platform.rs +++ b/crates/gpui/src/platform/test/platform.rs @@ -36,6 +36,7 @@ pub(crate) struct TestPlatform { screen_capture_sources: RefCell>, pub opened_url: RefCell>, pub text_system: Arc, + pub expect_restart: RefCell>>>, #[cfg(target_os = "windows")] bitmap_factory: std::mem::ManuallyDrop, weak: Weak, @@ -112,6 +113,7 @@ impl TestPlatform { active_cursor: Default::default(), active_display: Rc::new(TestDisplay::new()), active_window: Default::default(), + expect_restart: Default::default(), current_clipboard_item: Mutex::new(None), #[cfg(any(target_os = "linux", target_os = "freebsd"))] current_primary_item: Mutex::new(None), @@ -250,8 +252,10 @@ impl Platform for TestPlatform { fn quit(&self) {} - fn restart(&self, _: Option) { - // + fn restart(&self, path: Option) { + if let Some(tx) = self.expect_restart.take() { + tx.send(path).unwrap(); + } } fn activate(&self, _ignoring_other_apps: bool) { diff --git a/crates/http_client/Cargo.toml b/crates/http_client/Cargo.toml index f4ce028b1c650ba3c85081d7737c99e9d1434e44..16600627a77f6a73fa913340f29f5a2da0875de9 100644 --- a/crates/http_client/Cargo.toml +++ b/crates/http_client/Cargo.toml @@ -31,6 +31,7 @@ parking_lot.workspace = true reqwest.workspace = true serde.workspace = true serde_json.workspace = true +serde_urlencoded.workspace = true sha2.workspace = true tempfile.workspace = true url.workspace = true diff --git a/crates/http_client/src/http_client.rs b/crates/http_client/src/http_client.rs index 6050d75c3edc8aefb1122df6e6af3bf078673217..a75df61646f31c9dc997bea83acc9d669bf1e29e 100644 --- a/crates/http_client/src/http_client.rs +++ b/crates/http_client/src/http_client.rs @@ -13,6 +13,7 @@ use futures::{ future::{self, BoxFuture}, }; use parking_lot::Mutex; +use serde::Serialize; #[cfg(feature = "test-support")] use std::fmt; use std::{any::type_name, sync::Arc}; @@ -255,7 +256,7 @@ impl HttpClientWithUrl { } /// Builds a Zed Cloud URL using the given path. 
- pub fn build_zed_cloud_url(&self, path: &str, query: &[(&str, &str)]) -> Result { + pub fn build_zed_cloud_url(&self, path: &str) -> Result { let base_url = self.base_url(); let base_api_url = match base_url.as_ref() { "https://zed.dev" => "https://cloud.zed.dev", @@ -264,10 +265,20 @@ impl HttpClientWithUrl { other => other, }; - Ok(Url::parse_with_params( - &format!("{}{}", base_api_url, path), - query, - )?) + Ok(Url::parse(&format!("{}{}", base_api_url, path))?) + } + + /// Builds a Zed Cloud URL using the given path and query params. + pub fn build_zed_cloud_url_with_query(&self, path: &str, query: impl Serialize) -> Result { + let base_url = self.base_url(); + let base_api_url = match base_url.as_ref() { + "https://zed.dev" => "https://cloud.zed.dev", + "https://staging.zed.dev" => "https://cloud.zed.dev", + "http://localhost:3000" => "http://localhost:8787", + other => other, + }; + let query = serde_urlencoded::to_string(&query)?; + Ok(Url::parse(&format!("{}{}?{}", base_api_url, path, query))?) } /// Builds a Zed LLM URL using the given path. diff --git a/crates/recent_projects/src/remote_connections.rs b/crates/recent_projects/src/remote_connections.rs index ef6ce2e8deda150f352a88a466822a44ed02b55b..50d7912b80d0842854c36810378c5f8abbf7a2f7 100644 --- a/crates/recent_projects/src/remote_connections.rs +++ b/crates/recent_projects/src/remote_connections.rs @@ -486,10 +486,10 @@ impl remote::RemoteClientDelegate for RemoteClientDelegate { let this = self.clone(); cx.spawn(async move |cx| { AutoUpdater::download_remote_server_release( - platform.os, - platform.arch, release_channel, version, + platform.os, + platform.arch, move |status, cx| this.set_status(Some(status), cx), cx, ) @@ -507,19 +507,19 @@ impl remote::RemoteClientDelegate for RemoteClientDelegate { }) } - fn get_download_params( + fn get_download_url( &self, platform: RemotePlatform, release_channel: ReleaseChannel, version: Option, cx: &mut AsyncApp, - ) -> Task>> { + ) -> Task>> { cx.spawn(async move |cx| { AutoUpdater::get_remote_server_release_url( - platform.os, - platform.arch, release_channel, version, + platform.os, + platform.arch, cx, ) .await diff --git a/crates/release_channel/src/lib.rs b/crates/release_channel/src/lib.rs index ba8d2e767503b00ed7f39921780a262b3e6c3624..c0ceafc760a5949d636dc2df3e93dc8926111417 100644 --- a/crates/release_channel/src/lib.rs +++ b/crates/release_channel/src/lib.rs @@ -126,6 +126,12 @@ pub fn init(app_version: SemanticVersion, cx: &mut App) { cx.set_global(GlobalReleaseChannel(*RELEASE_CHANNEL)) } +/// Initializes the release channel for tests that rely on fake release channel. +pub fn init_test(app_version: SemanticVersion, release_channel: ReleaseChannel, cx: &mut App) { + cx.set_global(GlobalAppVersion(app_version)); + cx.set_global(GlobalReleaseChannel(release_channel)) +} + impl ReleaseChannel { /// Returns the global [`ReleaseChannel`]. 
pub fn global(cx: &App) -> Self { diff --git a/crates/remote/src/remote_client.rs b/crates/remote/src/remote_client.rs index 54ec6644b9abef23446aaf0f8ddd21c0da6bdf05..1c14a0e244c3f09bb8b02e4aa99bd6b282435db5 100644 --- a/crates/remote/src/remote_client.rs +++ b/crates/remote/src/remote_client.rs @@ -67,13 +67,13 @@ pub trait RemoteClientDelegate: Send + Sync { tx: oneshot::Sender, cx: &mut AsyncApp, ); - fn get_download_params( + fn get_download_url( &self, platform: RemotePlatform, release_channel: ReleaseChannel, version: Option, cx: &mut AsyncApp, - ) -> Task>>; + ) -> Task>>; fn download_server_binary_locally( &self, platform: RemotePlatform, @@ -1669,13 +1669,13 @@ mod fake { unreachable!() } - fn get_download_params( + fn get_download_url( &self, _platform: RemotePlatform, _release_channel: ReleaseChannel, _version: Option, _cx: &mut AsyncApp, - ) -> Task>> { + ) -> Task>> { unreachable!() } diff --git a/crates/remote/src/transport/ssh.rs b/crates/remote/src/transport/ssh.rs index 433c4b017aac81b73b15d388518e6349632435f6..ec020cba0b321ea3cb5929a3fa17cb6c425b1ef7 100644 --- a/crates/remote/src/transport/ssh.rs +++ b/crates/remote/src/transport/ssh.rs @@ -606,12 +606,12 @@ impl SshRemoteConnection { .unwrap(), ); if !self.socket.connection_options.upload_binary_over_ssh - && let Some((url, body)) = delegate - .get_download_params(self.ssh_platform, release_channel, wanted_version, cx) + && let Some(url) = delegate + .get_download_url(self.ssh_platform, release_channel, wanted_version, cx) .await? { match self - .download_binary_on_server(&url, &body, &tmp_path_gz, delegate, cx) + .download_binary_on_server(&url, &tmp_path_gz, delegate, cx) .await { Ok(_) => { @@ -644,7 +644,6 @@ impl SshRemoteConnection { async fn download_binary_on_server( &self, url: &str, - body: &str, tmp_path_gz: &RelPath, delegate: &Arc, cx: &mut AsyncApp, @@ -670,12 +669,6 @@ impl SshRemoteConnection { &[ "-f", "-L", - "-X", - "GET", - "-H", - "Content-Type: application/json", - "-d", - body, url, "-o", &tmp_path_gz.display(self.path_style()), @@ -700,14 +693,7 @@ impl SshRemoteConnection { .run_command( self.ssh_shell_kind, "wget", - &[ - "--header=Content-Type: application/json", - "--body-data", - body, - url, - "-O", - &tmp_path_gz.display(self.path_style()), - ], + &[url, "-O", &tmp_path_gz.display(self.path_style())], true, ) .await diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 180e0f1f04d1c7b1eddb0156659f697f423967ea..14e718ec2457b7d0f49c60cbc923cc7f215f9a15 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -539,7 +539,7 @@ pub fn main() { }); AppState::set_global(Arc::downgrade(&app_state), cx); - auto_update::init(client.http_client(), cx); + auto_update::init(client.clone(), cx); dap_adapters::init(cx); auto_update_ui::init(cx); reliability::init( diff --git a/script/bundle-mac b/script/bundle-mac index c647424d7ee657f6ca3e94c3cb94957fcf50ad98..248cb10203a16299e33b1d997aeee8cfca46250e 100755 --- a/script/bundle-mac +++ b/script/bundle-mac @@ -22,7 +22,7 @@ Build the application bundle for macOS. Options: -d Compile in debug mode -o Open dir with the resulting DMG or launch the app itself in local mode. - -i Install the resulting DMG into /Applications in local mode. Noop without -l. + -i Install the resulting DMG into /Applications. -h Display this help and exit. 
" } @@ -209,16 +209,6 @@ function sign_app_binaries() { codesign --force --deep --entitlements "${app_path}/Contents/Resources/zed.entitlements" --sign ${MACOS_SIGNING_KEY:- -} "${app_path}" -v fi - if [[ "$target_dir" = "debug" ]]; then - if [ "$open_result" = true ]; then - open "$app_path" - else - echo "Created application bundle:" - echo "$app_path" - fi - exit 0 - fi - bundle_name=$(basename "$app_path") if [ "$local_install" = true ]; then @@ -229,6 +219,16 @@ function sign_app_binaries() { echo "Opening /Applications/$bundle_name" open "/Applications/$bundle_name" fi + elif [ "$open_result" = true ]; then + open "$app_path" + fi + + if [[ "$target_dir" = "debug" ]]; then + echo "Debug build detected - skipping DMG creation and signing" + if [ "$local_install" = false ]; then + echo "Created application bundle:" + echo "$app_path" + fi else dmg_target_directory="target/${target_triple}/${target_dir}" dmg_source_directory="${dmg_target_directory}/dmg" From f90d0789fb74753abb8067ee1c88e736f4261f14 Mon Sep 17 00:00:00 2001 From: Alvaro Parker <64918109+AlvaroParker@users.noreply.github.com> Date: Tue, 11 Nov 2025 03:02:13 -0300 Subject: [PATCH 44/74] git: Add notification to git clone (#41712) Adds a simple notification when cloning a repo using the integrated git clone on Zed. Before this, the user had no feedback after starting the cloning action. Demo: https://github.com/user-attachments/assets/72fcdf1b-fc99-4fe5-8db2-7c30b170f12f Not sure about that icon I'm using for the animation, but that can be easily changed. Release Notes: - Added notification when cloning a repo from zed --- Cargo.lock | 1 + crates/activity_indicator/Cargo.toml | 1 + .../src/activity_indicator.rs | 42 ++++++++- crates/fs/src/fs.rs | 90 ++++++++++++++++++- 4 files changed, 130 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bee290f2f17ffba973d432272c91344b8caa99f3..a3300a818c12f39406cc39848cae86eeb26a0a56 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -96,6 +96,7 @@ dependencies = [ "auto_update", "editor", "extension_host", + "fs", "futures 0.3.31", "gpui", "language", diff --git a/crates/activity_indicator/Cargo.toml b/crates/activity_indicator/Cargo.toml index 4e604b452122c5a8e38b2d02b54f4ee639817ab4..99ae5b5b077a14c0909737d64935220698a007c7 100644 --- a/crates/activity_indicator/Cargo.toml +++ b/crates/activity_indicator/Cargo.toml @@ -17,6 +17,7 @@ anyhow.workspace = true auto_update.workspace = true editor.workspace = true extension_host.workspace = true +fs.workspace = true futures.workspace = true gpui.workspace = true language.workspace = true diff --git a/crates/activity_indicator/src/activity_indicator.rs b/crates/activity_indicator/src/activity_indicator.rs index 09cc2fb9568ca01748435c73fd8834efdbb50839..5cb4e1c6153154782bf10447c13c3a9017cbcce7 100644 --- a/crates/activity_indicator/src/activity_indicator.rs +++ b/crates/activity_indicator/src/activity_indicator.rs @@ -51,6 +51,7 @@ pub struct ActivityIndicator { project: Entity, auto_updater: Option>, context_menu_handle: PopoverMenuHandle, + fs_jobs: Vec, } #[derive(Debug)] @@ -99,6 +100,27 @@ impl ActivityIndicator { }) .detach(); + let fs = project.read(cx).fs().clone(); + let mut job_events = fs.subscribe_to_jobs(); + cx.spawn(async move |this, cx| { + while let Some(job_event) = job_events.next().await { + this.update(cx, |this: &mut ActivityIndicator, cx| { + match job_event { + fs::JobEvent::Started { info } => { + this.fs_jobs.retain(|j| j.id != info.id); + this.fs_jobs.push(info); + } + fs::JobEvent::Completed { id 
} => { + this.fs_jobs.retain(|j| j.id != id); + } + } + cx.notify(); + })?; + } + anyhow::Ok(()) + }) + .detach(); + cx.subscribe( &project.read(cx).lsp_store(), |activity_indicator, _, event, cx| { @@ -201,7 +223,8 @@ impl ActivityIndicator { statuses: Vec::new(), project: project.clone(), auto_updater, - context_menu_handle: Default::default(), + context_menu_handle: PopoverMenuHandle::default(), + fs_jobs: Vec::new(), } }); @@ -432,6 +455,23 @@ impl ActivityIndicator { }); } + // Show any long-running fs command + for fs_job in &self.fs_jobs { + if Instant::now().duration_since(fs_job.start) >= GIT_OPERATION_DELAY { + return Some(Content { + icon: Some( + Icon::new(IconName::ArrowCircle) + .size(IconSize::Small) + .with_rotate_animation(2) + .into_any_element(), + ), + message: fs_job.message.clone().into(), + on_click: None, + tooltip_message: None, + }); + } + } + // Show any language server installation info. let mut downloading = SmallVec::<[_; 3]>::new(); let mut checking_for_update = SmallVec::<[_; 3]>::new(); diff --git a/crates/fs/src/fs.rs b/crates/fs/src/fs.rs index b8714505093f03828e3d8783204ede61bb0989b0..33cc83a7886349a537a87d4b6c8bb3f5211608fc 100644 --- a/crates/fs/src/fs.rs +++ b/crates/fs/src/fs.rs @@ -4,6 +4,10 @@ mod mac_watcher; #[cfg(not(target_os = "macos"))] pub mod fs_watcher; +use parking_lot::Mutex; +use std::sync::atomic::{AtomicUsize, Ordering}; +use std::time::Instant; + use anyhow::{Context as _, Result, anyhow}; #[cfg(any(target_os = "linux", target_os = "freebsd"))] use ashpd::desktop::trash; @@ -12,6 +16,7 @@ use gpui::App; use gpui::BackgroundExecutor; use gpui::Global; use gpui::ReadGlobal as _; +use gpui::SharedString; use std::borrow::Cow; use util::command::new_smol_command; @@ -51,8 +56,7 @@ use git::{ repository::{RepoPath, repo_path}, status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus}, }; -#[cfg(any(test, feature = "test-support"))] -use parking_lot::Mutex; + #[cfg(any(test, feature = "test-support"))] use smol::io::AsyncReadExt; #[cfg(any(test, feature = "test-support"))] @@ -148,6 +152,7 @@ pub trait Fs: Send + Sync { async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()>; fn is_fake(&self) -> bool; async fn is_case_sensitive(&self) -> Result; + fn subscribe_to_jobs(&self) -> JobEventReceiver; #[cfg(any(test, feature = "test-support"))] fn as_fake(&self) -> Arc { @@ -215,6 +220,55 @@ pub struct Metadata { #[serde(transparent)] pub struct MTime(SystemTime); +pub type JobId = usize; + +#[derive(Clone, Debug)] +pub struct JobInfo { + pub start: Instant, + pub message: SharedString, + pub id: JobId, +} + +#[derive(Debug, Clone)] +pub enum JobEvent { + Started { info: JobInfo }, + Completed { id: JobId }, +} + +pub type JobEventSender = futures::channel::mpsc::UnboundedSender; +pub type JobEventReceiver = futures::channel::mpsc::UnboundedReceiver; + +struct JobTracker { + id: JobId, + subscribers: Arc>>, +} + +impl JobTracker { + fn new(info: JobInfo, subscribers: Arc>>) -> Self { + let id = info.id; + { + let mut subs = subscribers.lock(); + subs.retain(|sender| { + sender + .unbounded_send(JobEvent::Started { info: info.clone() }) + .is_ok() + }); + } + Self { id, subscribers } + } +} + +impl Drop for JobTracker { + fn drop(&mut self) { + let mut subs = self.subscribers.lock(); + subs.retain(|sender| { + sender + .unbounded_send(JobEvent::Completed { id: self.id }) + .is_ok() + }); + } +} + impl MTime { /// Conversion intended for persistence and testing. 
pub fn from_seconds_and_nanos(secs: u64, nanos: u32) -> Self { @@ -257,6 +311,8 @@ impl From for proto::Timestamp { pub struct RealFs { bundled_git_binary_path: Option, executor: BackgroundExecutor, + next_job_id: Arc, + job_event_subscribers: Arc>>, } pub trait FileHandle: Send + Sync + std::fmt::Debug { @@ -361,6 +417,8 @@ impl RealFs { Self { bundled_git_binary_path: git_binary_path, executor, + next_job_id: Arc::new(AtomicUsize::new(0)), + job_event_subscribers: Arc::new(Mutex::new(Vec::new())), } } } @@ -862,7 +920,6 @@ impl Fs for RealFs { Pin>>>, Arc, ) { - use parking_lot::Mutex; use util::{ResultExt as _, paths::SanitizedPath}; let (tx, rx) = smol::channel::unbounded(); @@ -959,6 +1016,15 @@ impl Fs for RealFs { } async fn git_clone(&self, repo_url: &str, abs_work_directory: &Path) -> Result<()> { + let job_id = self.next_job_id.fetch_add(1, Ordering::SeqCst); + let job_info = JobInfo { + id: job_id, + start: Instant::now(), + message: SharedString::from(format!("Cloning {}", repo_url)), + }; + + let _job_tracker = JobTracker::new(job_info, self.job_event_subscribers.clone()); + let output = new_smol_command("git") .current_dir(abs_work_directory) .args(&["clone", repo_url]) @@ -979,6 +1045,12 @@ impl Fs for RealFs { false } + fn subscribe_to_jobs(&self) -> JobEventReceiver { + let (sender, receiver) = futures::channel::mpsc::unbounded(); + self.job_event_subscribers.lock().push(sender); + receiver + } + /// Checks whether the file system is case sensitive by attempting to create two files /// that have the same name except for the casing. /// @@ -1049,6 +1121,7 @@ struct FakeFsState { read_dir_call_count: usize, path_write_counts: std::collections::HashMap, moves: std::collections::HashMap, + job_event_subscribers: Arc>>, } #[cfg(any(test, feature = "test-support"))] @@ -1333,6 +1406,7 @@ impl FakeFs { metadata_call_count: 0, path_write_counts: Default::default(), moves: Default::default(), + job_event_subscribers: Arc::new(Mutex::new(Vec::new())), })), }); @@ -2587,6 +2661,12 @@ impl Fs for FakeFs { Ok(true) } + fn subscribe_to_jobs(&self) -> JobEventReceiver { + let (sender, receiver) = futures::channel::mpsc::unbounded(); + self.state.lock().job_event_subscribers.lock().push(sender); + receiver + } + #[cfg(any(test, feature = "test-support"))] fn as_fake(&self) -> Arc { self.this.upgrade().unwrap() @@ -3201,6 +3281,8 @@ mod tests { let fs = RealFs { bundled_git_binary_path: None, executor, + next_job_id: Arc::new(AtomicUsize::new(0)), + job_event_subscribers: Arc::new(Mutex::new(Vec::new())), }; let temp_dir = TempDir::new().unwrap(); let file_to_be_replaced = temp_dir.path().join("file.txt"); @@ -3219,6 +3301,8 @@ mod tests { let fs = RealFs { bundled_git_binary_path: None, executor, + next_job_id: Arc::new(AtomicUsize::new(0)), + job_event_subscribers: Arc::new(Mutex::new(Vec::new())), }; let temp_dir = TempDir::new().unwrap(); let file_to_be_replaced = temp_dir.path().join("file.txt"); From 1a807a7a6a6568385402cf2cca1b43fce66a94fa Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 11 Nov 2025 08:57:30 +0100 Subject: [PATCH 45/74] terminal: Spawn terminal process on main thread on macos again (#42411) Closes https://github.com/zed-industries/zed/issues/42365, follow up to https://github.com/zed-industries/zed/pull/42234 Release Notes: - N/A *or* Added/Fixed/Improved ... 
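For context on why the previous check never took the main-thread path: `unix` is a target *family*, not a `target_os` value, so `cfg!(target_os = "unix")` is false even on macOS and Linux, and the terminal task always landed on the background executor. A tiny standalone check of the cfg predicates involved (recent rustc also flags the first one with an `unexpected_cfgs` warning):

```rust
fn main() {
    // Never true on any platform; "unix" is not a valid target_os value.
    println!("cfg!(target_os = \"unix\")         -> {}", cfg!(target_os = "unix"));
    // True on macOS and Linux: unix is a target family, not a target OS.
    println!("cfg!(target_family = \"unix\")     -> {}", cfg!(target_family = "unix"));
    // What the fix checks instead.
    println!("cfg!(not(target_os = \"windows\")) -> {}", cfg!(not(target_os = "windows")));
}
```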
--- crates/terminal/src/terminal.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index 4f6f0e75ee6f38615a2d82d7a4ad3ee0c06c2323..c635cb9b417f422ad0ddd91611233cb04a771679 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -383,7 +383,7 @@ impl TerminalBuilder { selection_phase: SelectionPhase::Ended, hyperlink_regex_searches: RegexSearches::new(), vi_mode_enabled: false, - is_ssh_terminal: false, + is_remote_terminal: false, last_mouse_move_time: Instant::now(), last_hyperlink_search_position: None, #[cfg(windows)] @@ -415,7 +415,7 @@ impl TerminalBuilder { cursor_shape: CursorShape, alternate_scroll: AlternateScroll, max_scroll_history_lines: Option, - is_ssh_terminal: bool, + is_remote_terminal: bool, window_id: u64, completion_tx: Option>>, cx: &App, @@ -601,7 +601,7 @@ impl TerminalBuilder { selection_phase: SelectionPhase::Ended, hyperlink_regex_searches: RegexSearches::new(), vi_mode_enabled: false, - is_ssh_terminal, + is_remote_terminal, last_mouse_move_time: Instant::now(), last_hyperlink_search_position: None, #[cfg(windows)] @@ -646,7 +646,7 @@ impl TerminalBuilder { }) }; // the thread we spawn things on has an effect on signal handling - if cfg!(target_os = "unix") { + if !cfg!(target_os = "windows") { cx.spawn(async move |_| fut.await) } else { cx.background_spawn(fut) @@ -828,7 +828,7 @@ pub struct Terminal { hyperlink_regex_searches: RegexSearches, task: Option, vi_mode_enabled: bool, - is_ssh_terminal: bool, + is_remote_terminal: bool, last_mouse_move_time: Instant, last_hyperlink_search_position: Option>, #[cfg(windows)] @@ -1959,7 +1959,7 @@ impl Terminal { } pub fn working_directory(&self) -> Option { - if self.is_ssh_terminal { + if self.is_remote_terminal { // We can't yet reliably detect the working directory of a shell on the // SSH host. Until we can do that, it doesn't make sense to display // the working directory on the client and persist that. @@ -2158,7 +2158,7 @@ impl Terminal { self.template.cursor_shape, self.template.alternate_scroll, self.template.max_scroll_history_lines, - self.is_ssh_terminal, + self.is_remote_terminal, self.template.window_id, None, cx, From 46db753f7972e041611f89c2a22023b90fcde7f6 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 11 Nov 2025 09:57:11 +0100 Subject: [PATCH 46/74] diagnostics: Fix panic due non-sorted diagnostics excerpt ranges (#42416) Fixes ZED-356 Release Notes: - N/A *or* Added/Fixed/Improved ... 
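The invariant behind this fix, sketched with plain `Range<usize>` values as stand-ins for the excerpt anchors: `binary_search_by` only yields a sensible insertion point when the slice was sorted with the same comparator, which is why the change below factors the comparison into a single `cmp_excerpts` closure used for both the initial sort and the search.

```rust
use std::cmp::Ordering;
use std::ops::Range;

// Simplified stand-in for `cmp_excerpts`: order by start, then by end.
fn cmp_ranges(a: &Range<usize>, b: &Range<usize>) -> Ordering {
    a.start.cmp(&b.start).then_with(|| a.end.cmp(&b.end))
}

fn main() {
    let mut ranges = vec![5..9, 0..3, 2..4];

    // Sort with the comparator first; binary-searching the unsorted Vec could
    // return an arbitrary index, producing the out-of-order excerpt ranges
    // that led to the panic.
    ranges.sort_by(cmp_ranges);

    let probe = 1..2;
    let i = ranges
        .binary_search_by(|r| cmp_ranges(r, &probe))
        .unwrap_or_else(|i| i);
    ranges.insert(i, probe);

    assert_eq!(ranges, vec![0..3, 1..2, 2..4, 5..9]);
}
```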
Co-authored-by: Smit Barmase --- crates/diagnostics/src/diagnostics.rs | 52 +++++++++++---------------- 1 file changed, 21 insertions(+), 31 deletions(-) diff --git a/crates/diagnostics/src/diagnostics.rs b/crates/diagnostics/src/diagnostics.rs index c2caefe3f388e12fe91931060fe8980908157e48..2e729cbdf420264d96db6e6fec8317d250ec642c 100644 --- a/crates/diagnostics/src/diagnostics.rs +++ b/crates/diagnostics/src/diagnostics.rs @@ -564,6 +564,20 @@ impl ProjectDiagnosticsEditor { blocks.extend(more); } + let cmp_excerpts = |buffer_snapshot: &BufferSnapshot, + a: &ExcerptRange, + b: &ExcerptRange| { + let context_start = || a.context.start.cmp(&b.context.start, buffer_snapshot); + let context_end = || a.context.end.cmp(&b.context.end, buffer_snapshot); + let primary_start = || a.primary.start.cmp(&b.primary.start, buffer_snapshot); + let primary_end = || a.primary.end.cmp(&b.primary.end, buffer_snapshot); + context_start() + .then_with(context_end) + .then_with(primary_start) + .then_with(primary_end) + .then(cmp::Ordering::Greater) + }; + let mut excerpt_ranges: Vec> = this.update(cx, |this, cx| { this.multibuffer.update(cx, |multi_buffer, cx| { let is_dirty = multi_buffer @@ -575,10 +589,12 @@ impl ProjectDiagnosticsEditor { .excerpts_for_buffer(buffer_id, cx) .into_iter() .map(|(_, range)| range) + .sorted_by(|a, b| cmp_excerpts(&buffer_snapshot, a, b)) .collect(), } }) })?; + let mut result_blocks = vec![None; excerpt_ranges.len()]; let context_lines = cx.update(|_, cx| multibuffer_context_lines(cx))?; for b in blocks { @@ -592,40 +608,14 @@ impl ProjectDiagnosticsEditor { buffer_snapshot = cx.update(|_, cx| buffer.read(cx).snapshot())?; let initial_range = buffer_snapshot.anchor_after(b.initial_range.start) ..buffer_snapshot.anchor_before(b.initial_range.end); - - let bin_search = |probe: &ExcerptRange| { - let context_start = || { - probe - .context - .start - .cmp(&excerpt_range.start, &buffer_snapshot) - }; - let context_end = - || probe.context.end.cmp(&excerpt_range.end, &buffer_snapshot); - let primary_start = || { - probe - .primary - .start - .cmp(&initial_range.start, &buffer_snapshot) - }; - let primary_end = - || probe.primary.end.cmp(&initial_range.end, &buffer_snapshot); - context_start() - .then_with(context_end) - .then_with(primary_start) - .then_with(primary_end) - .then(cmp::Ordering::Greater) + let excerpt_range = ExcerptRange { + context: excerpt_range, + primary: initial_range, }; let i = excerpt_ranges - .binary_search_by(bin_search) + .binary_search_by(|probe| cmp_excerpts(&buffer_snapshot, probe, &excerpt_range)) .unwrap_or_else(|i| i); - excerpt_ranges.insert( - i, - ExcerptRange { - context: excerpt_range, - primary: initial_range, - }, - ); + excerpt_ranges.insert(i, excerpt_range); result_blocks.insert(i, Some(b)); } From dcf56144b5378c422070fadb5f9bd9cf99b01238 Mon Sep 17 00:00:00 2001 From: Dino Date: Tue, 11 Nov 2025 10:04:30 +0000 Subject: [PATCH 47/74] vim: Sort whole buffer when no range is specified (#42376) - Introduce a `default_range` field to `VimCommand`, to be optionally used when no range is specified for the command - Update `VimCommand.parse` to take into consideration the `default_range` - Introduce `CommandRange::buffer` to obtain the `CommandRange` which corresponds to the whole buffer - Update the `VimCommand` definitions for both `sort` and `sort i` to default to the whole buffer when no range is specified Closes #41750 Release Notes: - Improved vim's `:sort` command to sort the buffer's content when no selection is used --- 
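To make the new fallback concrete: `VimCommand::parse` now falls back to the command's `default_range` only when the user supplies no explicit range, and for `:sort` that default covers the whole buffer. Below is a minimal standalone sketch of that precedence; the types are simplified stand-ins, not the actual `vim` crate API:

```rust
// Illustrative only: an explicit range always wins, otherwise the command's
// default range (for `:sort`, the whole buffer) is used.
#[derive(Clone, Debug, PartialEq)]
struct CommandRange {
    start: u32,
    end: u32,
}

fn effective_range(
    explicit: Option<&CommandRange>,
    default_range: Option<&CommandRange>,
) -> Option<CommandRange> {
    // Mirrors `range.as_ref().or(self.default_range.as_ref())` in the patch.
    explicit.or(default_range).cloned()
}

fn main() {
    let whole_buffer = CommandRange { start: 1, end: u32::MAX };
    // `:sort` with no range now sorts the entire buffer...
    assert_eq!(
        effective_range(None, Some(&whole_buffer)),
        Some(whole_buffer.clone())
    );
    // ...while a visual selection still takes precedence.
    let visual = CommandRange { start: 3, end: 7 };
    assert_eq!(
        effective_range(Some(&visual), Some(&whole_buffer)),
        Some(visual)
    );
}
```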
crates/vim/src/command.rs | 133 +++++++++++++++++++++++++++++++++++++- 1 file changed, 131 insertions(+), 2 deletions(-) diff --git a/crates/vim/src/command.rs b/crates/vim/src/command.rs index b44ec8907399a7d744f91d9e90b397a174d5f85a..cba8351e8d36e784c77c20b15ac0dead41f84a13 100644 --- a/crates/vim/src/command.rs +++ b/crates/vim/src/command.rs @@ -725,6 +725,8 @@ struct VimCommand { args: Option< Box, String) -> Option> + Send + Sync + 'static>, >, + /// Optional range Range to use if no range is specified. + default_range: Option, range: Option< Box< dyn Fn(Box, &CommandRange) -> Option> @@ -793,6 +795,11 @@ impl VimCommand { self } + fn default_range(mut self, range: CommandRange) -> Self { + self.default_range = Some(range); + self + } + fn count(mut self) -> Self { self.has_count = true; self @@ -923,6 +930,7 @@ impl VimCommand { self.args.as_ref()?(action, args)? }; + let range = range.as_ref().or(self.default_range.as_ref()); if let Some(range) = range { self.range.as_ref().and_then(|f| f(action, range)) } else { @@ -1121,6 +1129,7 @@ impl CommandRange { self.end.as_ref().unwrap_or(&self.start) } + /// Convert the `CommandRange` into a `Range`. pub(crate) fn buffer_range( &self, vim: &Vim, @@ -1152,6 +1161,14 @@ impl CommandRange { None } } + + /// The `CommandRange` representing the entire buffer. + fn buffer() -> Self { + Self { + start: Position::Line { row: 1, offset: 0 }, + end: Some(Position::LastLine { offset: 0 }), + } + } } fn generate_commands(_: &App) -> Vec { @@ -1421,8 +1438,12 @@ fn generate_commands(_: &App) -> Vec { VimCommand::new(("delm", "arks"), ArgumentRequired) .bang(DeleteMarks::AllLocal) .args(|_, args| Some(DeleteMarks::Marks(args).boxed_clone())), - VimCommand::new(("sor", "t"), SortLinesCaseSensitive).range(select_range), - VimCommand::new(("sort i", ""), SortLinesCaseInsensitive).range(select_range), + VimCommand::new(("sor", "t"), SortLinesCaseSensitive) + .range(select_range) + .default_range(CommandRange::buffer()), + VimCommand::new(("sort i", ""), SortLinesCaseInsensitive) + .range(select_range) + .default_range(CommandRange::buffer()), VimCommand::str(("E", "xplore"), "project_panel::ToggleFocus"), VimCommand::str(("H", "explore"), "project_panel::ToggleFocus"), VimCommand::str(("L", "explore"), "project_panel::ToggleFocus"), @@ -2898,4 +2919,112 @@ mod test { ); }); } + + #[gpui::test] + async fn test_sort_commands(cx: &mut TestAppContext) { + let mut cx = VimTestContext::new(cx, true).await; + + cx.set_state( + indoc! {" + «hornet + quirrel + elderbug + cornifer + idaˇ» + "}, + Mode::Visual, + ); + + cx.simulate_keystrokes(": sort"); + cx.simulate_keystrokes("enter"); + + cx.assert_state( + indoc! {" + ˇcornifer + elderbug + hornet + ida + quirrel + "}, + Mode::Normal, + ); + + // Assert that, by default, `:sort` takes case into consideration. + cx.set_state( + indoc! {" + «hornet + quirrel + Elderbug + cornifer + idaˇ» + "}, + Mode::Visual, + ); + + cx.simulate_keystrokes(": sort"); + cx.simulate_keystrokes("enter"); + + cx.assert_state( + indoc! {" + ˇElderbug + cornifer + hornet + ida + quirrel + "}, + Mode::Normal, + ); + + // Assert that, if the `i` option is passed, `:sort` ignores case. + cx.set_state( + indoc! {" + «hornet + quirrel + Elderbug + cornifer + idaˇ» + "}, + Mode::Visual, + ); + + cx.simulate_keystrokes(": sort space i"); + cx.simulate_keystrokes("enter"); + + cx.assert_state( + indoc! {" + ˇcornifer + Elderbug + hornet + ida + quirrel + "}, + Mode::Normal, + ); + + // When no range is provided, sorts the whole buffer. 
+ cx.set_state( + indoc! {" + ˇhornet + quirrel + elderbug + cornifer + ida + "}, + Mode::Normal, + ); + + cx.simulate_keystrokes(": sort"); + cx.simulate_keystrokes("enter"); + + cx.assert_state( + indoc! {" + ˇcornifer + elderbug + hornet + ida + quirrel + "}, + Mode::Normal, + ); + } } From 97100ce52f507695dd21902285a1b9279a455cad Mon Sep 17 00:00:00 2001 From: Dino Date: Tue, 11 Nov 2025 10:26:40 +0000 Subject: [PATCH 48/74] editor: Respect search case sensitivity when selecting occurrences (#42121) Update how the editor's `select_*` methods work in order to respect the `search.case_sensitive` setting, or to be overriden by the `BufferSearchBar` search options. - Update both the `SearchableItem` and `SearchableItemHandle` traits with a new `set_search_is_case_sensitive` method that allows callers to set the case sensitivity of the search - Update the `BufferSearchBar` to leverage `SearchableItemHandle.set_search_is_case_sensitive` in order to sync its case sensitivity options with the searchable item - Update the implementation of the `SearchableItem` trait for `Editor` so as to store the argument provided to the `set_search_is_case_sensitive` method - Update the way search queries are built by `Editor` so as to rely on `SearchableItem.set_search_is_case_sensitive` argument, if not `None`, or default to the editor's `search.case_sensitive` settings Closes #41070 Release Notes: - Improved the "Select Next Occurrence", "Select Previous Occurrence" and "Select All Occurrences" actions in order to respect the case sensitivity search settings --------- Co-authored-by: Conrad Irwin --- assets/settings/default.json | 4 + crates/editor/src/editor.rs | 32 +++++- crates/editor/src/editor_settings.rs | 5 + crates/editor/src/editor_tests.rs | 108 +++++++++++++++++- crates/editor/src/items.rs | 8 ++ crates/search/src/buffer_search.rs | 92 +++++++++++++-- .../settings/src/settings_content/editor.rs | 4 + crates/workspace/src/searchable.rs | 8 ++ docs/src/configuring-zed.md | 42 ++++++- 9 files changed, 285 insertions(+), 18 deletions(-) diff --git a/assets/settings/default.json b/assets/settings/default.json index 7fb583f95b0d6d39146ffe9e406201e958598905..d8c800081246dcf937f7380399d726dd3d349679 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -616,9 +616,13 @@ "search": { // Whether to show the project search button in the status bar. "button": true, + // Whether to only match on whole words. "whole_word": false, + // Whether to match case sensitively. "case_sensitive": false, + // Whether to include gitignored files in search results. "include_ignored": false, + // Whether to interpret the search query as a regular expression. "regex": false, // Whether to center the cursor on each search match when navigating. 
"center_on_match": false diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 8c165a6d7ce0a5410000cb21d9616e4c508a6fb3..17eb051e35ad6e2ef0c2358cd0664cdba93af013 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -74,7 +74,7 @@ use ::git::{ blame::{BlameEntry, ParsedCommitMessage}, status::FileStatus, }; -use aho_corasick::AhoCorasick; +use aho_corasick::{AhoCorasick, AhoCorasickBuilder, BuildError}; use anyhow::{Context as _, Result, anyhow}; use blink_manager::BlinkManager; use buffer_diff::DiffHunkStatus; @@ -1190,6 +1190,7 @@ pub struct Editor { refresh_colors_task: Task<()>, inlay_hints: Option, folding_newlines: Task<()>, + select_next_is_case_sensitive: Option, pub lookup_key: Option>, } @@ -2333,6 +2334,7 @@ impl Editor { selection_drag_state: SelectionDragState::None, folding_newlines: Task::ready(()), lookup_key: None, + select_next_is_case_sensitive: None, }; if is_minimap { @@ -14645,7 +14647,7 @@ impl Editor { .collect::(); let is_empty = query.is_empty(); let select_state = SelectNextState { - query: AhoCorasick::new(&[query])?, + query: self.build_query(&[query], cx)?, wordwise: true, done: is_empty, }; @@ -14655,7 +14657,7 @@ impl Editor { } } else if let Some(selected_text) = selected_text { self.select_next_state = Some(SelectNextState { - query: AhoCorasick::new(&[selected_text])?, + query: self.build_query(&[selected_text], cx)?, wordwise: false, done: false, }); @@ -14863,7 +14865,7 @@ impl Editor { .collect::(); let is_empty = query.is_empty(); let select_state = SelectNextState { - query: AhoCorasick::new(&[query.chars().rev().collect::()])?, + query: self.build_query(&[query.chars().rev().collect::()], cx)?, wordwise: true, done: is_empty, }; @@ -14873,7 +14875,8 @@ impl Editor { } } else if let Some(selected_text) = selected_text { self.select_prev_state = Some(SelectNextState { - query: AhoCorasick::new(&[selected_text.chars().rev().collect::()])?, + query: self + .build_query(&[selected_text.chars().rev().collect::()], cx)?, wordwise: false, done: false, }); @@ -14883,6 +14886,25 @@ impl Editor { Ok(()) } + /// Builds an `AhoCorasick` automaton from the provided patterns, while + /// setting the case sensitivity based on the global + /// `SelectNextCaseSensitive` setting, if set, otherwise based on the + /// editor's settings. + fn build_query(&self, patterns: I, cx: &Context) -> Result + where + I: IntoIterator, + P: AsRef<[u8]>, + { + let case_sensitive = self.select_next_is_case_sensitive.map_or_else( + || EditorSettings::get_global(cx).search.case_sensitive, + |value| value, + ); + + let mut builder = AhoCorasickBuilder::new(); + builder.ascii_case_insensitive(!case_sensitive); + builder.build(patterns) + } + pub fn find_next_match( &mut self, _: &FindNextMatch, diff --git a/crates/editor/src/editor_settings.rs b/crates/editor/src/editor_settings.rs index de4198493a9ba2722aef58276ee385a117749fa0..e1984311d4eb0ba9d989f77a707b22698b00c750 100644 --- a/crates/editor/src/editor_settings.rs +++ b/crates/editor/src/editor_settings.rs @@ -162,10 +162,15 @@ pub struct DragAndDropSelection { pub struct SearchSettings { /// Whether to show the project search button in the status bar. pub button: bool, + /// Whether to only match on whole words. pub whole_word: bool, + /// Whether to match case sensitively. pub case_sensitive: bool, + /// Whether to include gitignored files in search results. pub include_ignored: bool, + /// Whether to interpret the search query as a regular expression. 
pub regex: bool, + /// Whether to center the cursor on each search match when navigating. pub center_on_match: bool, } diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index ce97cf9a1cc68ed4ff06d57ac02e0dbb9fdd8788..598d1383726a9610bb5a2c851cd1d56a709546ec 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -44,8 +44,8 @@ use project::{ }; use serde_json::{self, json}; use settings::{ - AllLanguageSettingsContent, IndentGuideBackgroundColoring, IndentGuideColoring, - ProjectSettingsContent, + AllLanguageSettingsContent, EditorSettingsContent, IndentGuideBackgroundColoring, + IndentGuideColoring, ProjectSettingsContent, SearchSettingsContent, }; use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant}; use std::{ @@ -8314,8 +8314,15 @@ async fn test_add_selection_above_below_multi_cursor_existing_state(cx: &mut Tes #[gpui::test] async fn test_select_next(cx: &mut TestAppContext) { init_test(cx, |_| {}); - let mut cx = EditorTestContext::new(cx).await; + + // Enable case sensitive search. + update_test_editor_settings(&mut cx, |settings| { + let mut search_settings = SearchSettingsContent::default(); + search_settings.case_sensitive = Some(true); + settings.search = Some(search_settings); + }); + cx.set_state("abc\nˇabc abc\ndefabc\nabc"); cx.update_editor(|e, window, cx| e.select_next(&SelectNext::default(), window, cx)) @@ -8346,14 +8353,41 @@ async fn test_select_next(cx: &mut TestAppContext) { cx.update_editor(|e, window, cx| e.select_next(&SelectNext::default(), window, cx)) .unwrap(); cx.assert_editor_state("abc\n«ˇabc» «ˇabc»\ndefabc\nabc"); + + // Test case sensitivity + cx.set_state("«ˇfoo»\nFOO\nFoo\nfoo"); + cx.update_editor(|e, window, cx| { + e.select_next(&SelectNext::default(), window, cx).unwrap(); + }); + cx.assert_editor_state("«ˇfoo»\nFOO\nFoo\n«ˇfoo»"); + + // Disable case sensitive search. + update_test_editor_settings(&mut cx, |settings| { + let mut search_settings = SearchSettingsContent::default(); + search_settings.case_sensitive = Some(false); + settings.search = Some(search_settings); + }); + + cx.set_state("«ˇfoo»\nFOO\nFoo"); + cx.update_editor(|e, window, cx| { + e.select_next(&SelectNext::default(), window, cx).unwrap(); + e.select_next(&SelectNext::default(), window, cx).unwrap(); + }); + cx.assert_editor_state("«ˇfoo»\n«ˇFOO»\n«ˇFoo»"); } #[gpui::test] async fn test_select_all_matches(cx: &mut TestAppContext) { init_test(cx, |_| {}); - let mut cx = EditorTestContext::new(cx).await; + // Enable case sensitive search. + update_test_editor_settings(&mut cx, |settings| { + let mut search_settings = SearchSettingsContent::default(); + search_settings.case_sensitive = Some(true); + settings.search = Some(search_settings); + }); + // Test caret-only selections cx.set_state("abc\nˇabc abc\ndefabc\nabc"); cx.update_editor(|e, window, cx| e.select_all_matches(&SelectAllMatches, window, cx)) @@ -8398,6 +8432,26 @@ async fn test_select_all_matches(cx: &mut TestAppContext) { e.set_clip_at_line_ends(false, cx); }); cx.assert_editor_state("«abcˇ»"); + + // Test case sensitivity + cx.set_state("fˇoo\nFOO\nFoo"); + cx.update_editor(|e, window, cx| { + e.select_all_matches(&SelectAllMatches, window, cx).unwrap(); + }); + cx.assert_editor_state("«fooˇ»\nFOO\nFoo"); + + // Disable case sensitive search. 
+ update_test_editor_settings(&mut cx, |settings| { + let mut search_settings = SearchSettingsContent::default(); + search_settings.case_sensitive = Some(false); + settings.search = Some(search_settings); + }); + + cx.set_state("fˇoo\nFOO\nFoo"); + cx.update_editor(|e, window, cx| { + e.select_all_matches(&SelectAllMatches, window, cx).unwrap(); + }); + cx.assert_editor_state("«fooˇ»\n«FOOˇ»\n«Fooˇ»"); } #[gpui::test] @@ -8769,8 +8823,15 @@ let foo = «2ˇ»;"#, #[gpui::test] async fn test_select_previous_with_single_selection(cx: &mut TestAppContext) { init_test(cx, |_| {}); - let mut cx = EditorTestContext::new(cx).await; + + // Enable case sensitive search. + update_test_editor_settings(&mut cx, |settings| { + let mut search_settings = SearchSettingsContent::default(); + search_settings.case_sensitive = Some(true); + settings.search = Some(search_settings); + }); + cx.set_state("abc\n«ˇabc» abc\ndefabc\nabc"); cx.update_editor(|e, window, cx| e.select_previous(&SelectPrevious::default(), window, cx)) @@ -8795,6 +8856,32 @@ async fn test_select_previous_with_single_selection(cx: &mut TestAppContext) { cx.update_editor(|e, window, cx| e.select_previous(&SelectPrevious::default(), window, cx)) .unwrap(); cx.assert_editor_state("«ˇabc»\n«ˇabc» «ˇabc»\ndef«ˇabc»\n«ˇabc»"); + + // Test case sensitivity + cx.set_state("foo\nFOO\nFoo\n«ˇfoo»"); + cx.update_editor(|e, window, cx| { + e.select_previous(&SelectPrevious::default(), window, cx) + .unwrap(); + e.select_previous(&SelectPrevious::default(), window, cx) + .unwrap(); + }); + cx.assert_editor_state("«ˇfoo»\nFOO\nFoo\n«ˇfoo»"); + + // Disable case sensitive search. + update_test_editor_settings(&mut cx, |settings| { + let mut search_settings = SearchSettingsContent::default(); + search_settings.case_sensitive = Some(false); + settings.search = Some(search_settings); + }); + + cx.set_state("foo\nFOO\n«ˇFoo»"); + cx.update_editor(|e, window, cx| { + e.select_previous(&SelectPrevious::default(), window, cx) + .unwrap(); + e.select_previous(&SelectPrevious::default(), window, cx) + .unwrap(); + }); + cx.assert_editor_state("«ˇfoo»\n«ˇFOO»\n«ˇFoo»"); } #[gpui::test] @@ -25717,6 +25804,17 @@ pub(crate) fn update_test_project_settings( }); } +pub(crate) fn update_test_editor_settings( + cx: &mut TestAppContext, + f: impl Fn(&mut EditorSettingsContent), +) { + cx.update(|cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| f(&mut settings.editor)); + }) + }) +} + pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) { cx.update(|cx| { assets::Assets.load_test_fonts(cx); diff --git a/crates/editor/src/items.rs b/crates/editor/src/items.rs index f82a4e7a30f47798e2db00a17b082a88fb6c7239..12590e4b3f95648dd653d408252ced460e2e834e 100644 --- a/crates/editor/src/items.rs +++ b/crates/editor/src/items.rs @@ -1796,6 +1796,14 @@ impl SearchableItem for Editor { fn search_bar_visibility_changed(&mut self, _: bool, _: &mut Window, _: &mut Context) { self.expect_bounds_change = self.last_bounds; } + + fn set_search_is_case_sensitive( + &mut self, + case_sensitive: Option, + _cx: &mut Context, + ) { + self.select_next_is_case_sensitive = case_sensitive; + } } pub fn active_match_index( diff --git a/crates/search/src/buffer_search.rs b/crates/search/src/buffer_search.rs index d4b8e0b3eb9edb3612ba04dcd33deb61ed883755..764d0a81f7ac8c7fd03fe63c478aea14b3e2e31b 100644 --- a/crates/search/src/buffer_search.rs +++ b/crates/search/src/buffer_search.rs @@ -127,12 +127,6 @@ pub struct 
BufferSearchBar { regex_language: Option>, } -impl BufferSearchBar { - pub fn query_editor_focused(&self) -> bool { - self.query_editor_focused - } -} - impl EventEmitter for BufferSearchBar {} impl EventEmitter for BufferSearchBar {} impl Render for BufferSearchBar { @@ -521,6 +515,10 @@ impl ToolbarItemView for BufferSearchBar { } impl BufferSearchBar { + pub fn query_editor_focused(&self) -> bool { + self.query_editor_focused + } + pub fn register(registrar: &mut impl SearchActionsRegistrar) { registrar.register_handler(ForDeployed(|this, _: &FocusSearch, window, cx| { this.query_editor.focus_handle(cx).focus(window); @@ -696,6 +694,8 @@ impl BufferSearchBar { pub fn dismiss(&mut self, _: &Dismiss, window: &mut Window, cx: &mut Context) { self.dismissed = true; self.query_error = None; + self.sync_select_next_case_sensitivity(cx); + for searchable_item in self.searchable_items_with_matches.keys() { if let Some(searchable_item) = WeakSearchableItemHandle::upgrade(searchable_item.as_ref(), cx) @@ -711,6 +711,7 @@ impl BufferSearchBar { let handle = active_editor.item_focus_handle(cx); self.focus(&handle, window); } + cx.emit(Event::UpdateLocation); cx.emit(ToolbarItemEvent::ChangeLocation( ToolbarItemLocation::Hidden, @@ -730,6 +731,7 @@ impl BufferSearchBar { } self.search_suggested(window, cx); self.smartcase(window, cx); + self.sync_select_next_case_sensitivity(cx); self.replace_enabled = deploy.replace_enabled; self.selection_search_enabled = if deploy.selection_search_enabled { Some(FilteredSearchRange::Default) @@ -919,6 +921,7 @@ impl BufferSearchBar { self.default_options = self.search_options; drop(self.update_matches(false, false, window, cx)); self.adjust_query_regex_language(cx); + self.sync_select_next_case_sensitivity(cx); cx.notify(); } @@ -953,6 +956,7 @@ impl BufferSearchBar { pub fn set_search_options(&mut self, search_options: SearchOptions, cx: &mut Context) { self.search_options = search_options; self.adjust_query_regex_language(cx); + self.sync_select_next_case_sensitivity(cx); cx.notify(); } @@ -1507,6 +1511,7 @@ impl BufferSearchBar { .read(cx) .as_singleton() .expect("query editor should be backed by a singleton buffer"); + if enable { if let Some(regex_language) = self.regex_language.clone() { query_buffer.update(cx, |query_buffer, cx| { @@ -1519,6 +1524,24 @@ impl BufferSearchBar { }) } } + + /// Updates the searchable item's case sensitivity option to match the + /// search bar's current case sensitivity setting. This ensures that + /// editor's `select_next`/ `select_previous` operations respect the buffer + /// search bar's search options. + /// + /// Clears the case sensitivity when the search bar is dismissed so that + /// only the editor's settings are respected. 
+ fn sync_select_next_case_sensitivity(&self, cx: &mut Context) { + let case_sensitive = match self.dismissed { + true => None, + false => Some(self.search_options.contains(SearchOptions::CASE_SENSITIVE)), + }; + + if let Some(active_searchable_item) = self.active_searchable_item.as_ref() { + active_searchable_item.set_search_is_case_sensitive(case_sensitive, cx); + } + } } #[cfg(test)] @@ -1528,7 +1551,7 @@ mod tests { use super::*; use editor::{ DisplayPoint, Editor, MultiBuffer, SearchSettings, SelectionEffects, - display_map::DisplayRow, + display_map::DisplayRow, test::editor_test_context::EditorTestContext, }; use gpui::{Hsla, TestAppContext, UpdateGlobal, VisualTestContext}; use language::{Buffer, Point}; @@ -2963,6 +2986,61 @@ mod tests { }); } + #[gpui::test] + async fn test_select_occurrence_case_sensitivity(cx: &mut TestAppContext) { + let (editor, search_bar, cx) = init_test(cx); + let mut editor_cx = EditorTestContext::for_editor_in(editor, cx).await; + + // Start with case sensitive search settings. + let mut search_settings = SearchSettings::default(); + search_settings.case_sensitive = true; + update_search_settings(search_settings, cx); + search_bar.update(cx, |search_bar, cx| { + let mut search_options = search_bar.search_options; + search_options.insert(SearchOptions::CASE_SENSITIVE); + search_bar.set_search_options(search_options, cx); + }); + + editor_cx.set_state("«ˇfoo»\nFOO\nFoo\nfoo"); + editor_cx.update_editor(|e, window, cx| { + e.select_next(&Default::default(), window, cx).unwrap(); + }); + editor_cx.assert_editor_state("«ˇfoo»\nFOO\nFoo\n«ˇfoo»"); + + // Update the search bar's case sensitivite toggle, so we can later + // confirm that `select_next` will now be case-insensitive. + editor_cx.set_state("«ˇfoo»\nFOO\nFoo\nfoo"); + search_bar.update_in(cx, |search_bar, window, cx| { + search_bar.toggle_case_sensitive(&Default::default(), window, cx); + }); + editor_cx.update_editor(|e, window, cx| { + e.select_next(&Default::default(), window, cx).unwrap(); + }); + editor_cx.assert_editor_state("«ˇfoo»\n«ˇFOO»\nFoo\nfoo"); + + // Confirm that, after dismissing the search bar, only the editor's + // search settings actually affect the behavior of `select_next`. + search_bar.update_in(cx, |search_bar, window, cx| { + search_bar.dismiss(&Default::default(), window, cx); + }); + editor_cx.set_state("«ˇfoo»\nFOO\nFoo\nfoo"); + editor_cx.update_editor(|e, window, cx| { + e.select_next(&Default::default(), window, cx).unwrap(); + }); + editor_cx.assert_editor_state("«ˇfoo»\nFOO\nFoo\n«ˇfoo»"); + + // Update the editor's search settings, disabling case sensitivity, to + // check that the value is respected. 
+ let mut search_settings = SearchSettings::default(); + search_settings.case_sensitive = false; + update_search_settings(search_settings, cx); + editor_cx.set_state("«ˇfoo»\nFOO\nFoo\nfoo"); + editor_cx.update_editor(|e, window, cx| { + e.select_next(&Default::default(), window, cx).unwrap(); + }); + editor_cx.assert_editor_state("«ˇfoo»\n«ˇFOO»\nFoo\nfoo"); + } + fn update_search_settings(search_settings: SearchSettings, cx: &mut TestAppContext) { cx.update(|cx| { SettingsStore::update_global(cx, |store, cx| { diff --git a/crates/settings/src/settings_content/editor.rs b/crates/settings/src/settings_content/editor.rs index 2dc3c6c0fdc78bf470e78e0577cc886d1471e8b2..4ef5f3e427b8ca8a2658c7bb35012ecc9618e377 100644 --- a/crates/settings/src/settings_content/editor.rs +++ b/crates/settings/src/settings_content/editor.rs @@ -759,9 +759,13 @@ pub enum SnippetSortOrder { pub struct SearchSettingsContent { /// Whether to show the project search button in the status bar. pub button: Option, + /// Whether to only match on whole words. pub whole_word: Option, + /// Whether to match case sensitively. pub case_sensitive: Option, + /// Whether to include gitignored files in search results. pub include_ignored: Option, + /// Whether to interpret the search query as a regular expression. pub regex: Option, /// Whether to center the cursor on each search match when navigating. pub center_on_match: Option, diff --git a/crates/workspace/src/searchable.rs b/crates/workspace/src/searchable.rs index 18da3f16f2e7a1e57dd42287059c0041d9309a78..9907df3be3eb8594f6cc8f63f05e2e93befd416c 100644 --- a/crates/workspace/src/searchable.rs +++ b/crates/workspace/src/searchable.rs @@ -166,6 +166,7 @@ pub trait SearchableItem: Item + EventEmitter { window: &mut Window, cx: &mut Context, ) -> Option; + fn set_search_is_case_sensitive(&mut self, _: Option, _: &mut Context) {} } pub trait SearchableItemHandle: ItemHandle { @@ -234,6 +235,8 @@ pub trait SearchableItemHandle: ItemHandle { window: &mut Window, cx: &mut App, ); + + fn set_search_is_case_sensitive(&self, is_case_sensitive: Option, cx: &mut App); } impl SearchableItemHandle for Entity { @@ -390,6 +393,11 @@ impl SearchableItemHandle for Entity { this.toggle_filtered_search_ranges(enabled, window, cx) }); } + fn set_search_is_case_sensitive(&self, enabled: Option, cx: &mut App) { + self.update(cx, |this, cx| { + this.set_search_is_case_sensitive(enabled, cx) + }); + } } impl From> for AnyView { diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index ac72abd7c2635f1873ea2ee23770ba58babbaf6d..145620c3962984407db73bf7ac4c0a3bbfa75324 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -3184,13 +3184,53 @@ Non-negative `integer` values ```json [settings] "search": { + "button": true, "whole_word": false, "case_sensitive": false, "include_ignored": false, - "regex": false + "regex": false, + "center_on_match": false }, ``` +### Button + +- Description: Whether to show the project search button in the status bar. +- Setting: `button` +- Default: `true` + +### Whole Word + +- Description: Whether to only match on whole words. +- Setting: `whole_word` +- Default: `false` + +### Case Sensitive + +- Description: Whether to match case sensitively. This setting affects both + searches and editor actions like "Select Next Occurrence", "Select Previous + Occurrence", and "Select All Occurrences". 
+- Setting: `case_sensitive` +- Default: `false` + +### Include Ignore + +- Description: Whether to include gitignored files in search results. +- Setting: `include_ignored` +- Default: `false` + +### Regex + +- Description: Whether to interpret the search query as a regular expression. +- Setting: `regex` +- Default: `false` + +### Center On Match + +- Description: Whether to center the cursor on each search match when navigating. +- Setting: `center_on_match` +- Default: `false` + ## Search Wrap - Description: If `search_wrap` is disabled, search result do not wrap around the end of the file From 1c4bb60209950770debdb23a7a63c899d069d3ef Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 11 Nov 2025 11:55:19 +0100 Subject: [PATCH 49/74] gpui: Fix invalid unwrap in windows window creation (#42426) Fixes ZED-34M Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/gpui/src/platform/windows/window.rs | 3 ++- crates/project/src/lsp_store.rs | 5 ++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index a9217a85e3e38697fbb06aea3067901d222e3986..0050fa4bc0e96b8702314f33637db67998b5941d 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -453,8 +453,9 @@ impl WindowsWindow { // Failure to create a `WindowsWindowState` can cause window creation to fail, // so check the inner result first. - let this = context.inner.take().unwrap()?; + let this = context.inner.take().transpose()?; let hwnd = creation_result?; + let this = this.unwrap(); register_drag_drop(&this)?; configure_dwm_dark_mode(hwnd, appearance); diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index ecfe169b47b7daa1b1c8c0794d9cdde8f0b06ad4..90675e364b4d962b5c67cafb941b2b6cb9e1df9b 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7651,7 +7651,10 @@ impl LspStore { let buffer = buffer.read(cx); let file = File::from_dyn(buffer.file())?; let abs_path = file.as_local()?.abs_path(cx); - let uri = lsp::Uri::from_file_path(abs_path).unwrap(); + let uri = lsp::Uri::from_file_path(&abs_path) + .ok() + .with_context(|| format!("Failed to convert path to URI: {}", abs_path.display())) + .unwrap(); let next_snapshot = buffer.text_snapshot(); for language_server in language_servers { let language_server = language_server.clone(); From dc372e8a847f78ed253b7e409a8699f570eae1a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexandre=20An=C3=ADcio?= <58861037+anicioalexandre@users.noreply.github.com> Date: Tue, 11 Nov 2025 08:29:44 -0300 Subject: [PATCH 50/74] editor: Unfold buffers with selections on edit + Remove selections on buffer fold (#37953) Closes #36376 Problem: Multi-cursor edits/selections in multi-buffers view were jumping to incorrect locations after toggling buffer folds. When users created multiple selections across different buffers in a multi-buffer view (like project search results) and then folded one of the buffers, subsequent text insertion would either: 1. Insert text at wrong locations (like at the top of the first unfolded buffer) 2. Replace the entire content in some buffers instead of inserting at the intended cursor positions 3. Create orphaned selections that caused corruption in the editing experience The issue seems to happen because when a buffer gets folded in a multi-buffer view, the existing selections associated with that buffer become invalid anchor points. Solution: 1. 
Selection Cleanup on Buffer Folding - Added `remove_selections_from_buffer()` method that filters out all selections from a buffer when it gets folded - This prevents invalid selections from corrupting subsequent editing operations - Includes edge case handling: if all selections are removed (all buffers folded), it creates a default selection at the start of the first buffer to prevent panics 2. Unfolding buffers before editing - Added `unfold_buffers_with_selections()` call in `handle_input()` ensures buffers with active selections are automatically unfolded before editing - This helps in fixing an edge case (covered in the tests) where, if you fold all buffers in a multi-buffer view, and try to insert text in a selection, it gets unfolded before the edit happens. Without this, the inserted text would override the entire buffer content. - If we don't care about this edge case, we could remove this method. I find it ok to add since we already trigger buffer unfolding after edits with `Event::ExcerptsEdited`. Release Notes: - Fixed multi-cursor edits jumping to incorrect locations after toggling buffer folds in multi-buffer views (e.g, project search) - Multi-cursor selections now properly handle buffer folding/unfolding operations - Text insertion no longer occurs at the wrong positions when buffers are folded during multi-cursor editing - Eliminated content replacement bugs where entire buffer contents were incorrectly overwritten - Added safe fallback behavior when all buffers in a multi-buffer view are folded --------- Co-authored-by: Smit Barmase --- crates/editor/src/editor.rs | 24 +++ crates/editor/src/editor_tests.rs | 214 ++++++++++++++++++++- crates/editor/src/selections_collection.rs | 37 ++++ crates/outline_panel/src/outline_panel.rs | 10 +- 4 files changed, 277 insertions(+), 8 deletions(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 17eb051e35ad6e2ef0c2358cd0664cdba93af013..223dbb776550e949d0ce86dca6f68aff6482433d 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -3449,6 +3449,21 @@ impl Editor { Subscription::join(other_subscription, this_subscription) } + fn unfold_buffers_with_selections(&mut self, cx: &mut Context) { + if self.buffer().read(cx).is_singleton() { + return; + } + let snapshot = self.buffer.read(cx).snapshot(cx); + let buffer_ids: HashSet = self + .selections + .disjoint_anchor_ranges() + .flat_map(|range| snapshot.buffer_ids_for_range(range)) + .collect(); + for buffer_id in buffer_ids { + self.unfold_buffer(buffer_id, cx); + } + } + /// Changes selections using the provided mutation function. Changes to `self.selections` occur /// immediately, but when run within `transact` or `with_selection_effects_deferred` other /// effects of selection change occur at the end of the transaction. 
@@ -4190,6 +4205,8 @@ impl Editor { self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx); + self.unfold_buffers_with_selections(cx); + let selections = self.selections.all_adjusted(&self.display_snapshot(cx)); let mut bracket_inserted = false; let mut edits = Vec::new(); @@ -18879,10 +18896,17 @@ impl Editor { if self.buffer().read(cx).is_singleton() || self.is_buffer_folded(buffer_id, cx) { return; } + let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx); self.display_map.update(cx, |display_map, cx| { display_map.fold_buffers([buffer_id], cx) }); + + let snapshot = self.display_snapshot(cx); + self.selections.change_with(&snapshot, |selections| { + selections.remove_selections_from_buffer(buffer_id); + }); + cx.emit(EditorEvent::BufferFoldToggled { ids: folded_excerpts.iter().map(|&(id, _)| id).collect(), folded: true, diff --git a/crates/editor/src/editor_tests.rs b/crates/editor/src/editor_tests.rs index 598d1383726a9610bb5a2c851cd1d56a709546ec..4510e61b74c9bd9ca8ace634f7554f63c4981dd7 100644 --- a/crates/editor/src/editor_tests.rs +++ b/crates/editor/src/editor_tests.rs @@ -22378,7 +22378,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { assert_eq!( multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)), - "\n\nB\n\n\n\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n", + "\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n", "After unfolding the first buffer, its and 2nd buffer's text should be displayed" ); @@ -22387,7 +22387,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) { }); assert_eq!( multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)), - "\n\nB\n\n\n\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555", + "\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555", "After unfolding the all buffers, all original text should be displayed" ); } @@ -27453,3 +27453,213 @@ async fn test_next_prev_reference(cx: &mut TestAppContext) { _move(Direction::Prev, 2, &mut cx).await; cx.assert_editor_state(CYCLE_POSITIONS[1]); } + +#[gpui::test] +async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + + let (editor, cx) = cx.add_window_view(|window, cx| { + let multi_buffer = MultiBuffer::build_multi( + [ + ("1\n2\n3\n", vec![Point::row_range(0..3)]), + ("1\n2\n3\n", vec![Point::row_range(0..3)]), + ], + cx, + ); + Editor::new(EditorMode::full(), multi_buffer, None, window, cx) + }); + + let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await; + let buffer_ids = cx.multibuffer(|mb, _| mb.excerpt_buffer_ids()); + + cx.assert_excerpts_with_selections(indoc! {" + [EXCERPT] + ˇ1 + 2 + 3 + [EXCERPT] + 1 + 2 + 3 + "}); + + // Scenario 1: Unfolded buffers, position cursor on "2", select all matches, then insert + cx.update_editor(|editor, window, cx| { + editor.change_selections(None.into(), window, cx, |s| { + s.select_ranges([2..3]); + }); + }); + cx.assert_excerpts_with_selections(indoc! {" + [EXCERPT] + 1 + 2ˇ + 3 + [EXCERPT] + 1 + 2 + 3 + "}); + + cx.update_editor(|editor, window, cx| { + editor + .select_all_matches(&SelectAllMatches, window, cx) + .unwrap(); + }); + cx.assert_excerpts_with_selections(indoc! 
{" + [EXCERPT] + 1 + 2ˇ + 3 + [EXCERPT] + 1 + 2ˇ + 3 + "}); + + cx.update_editor(|editor, window, cx| { + editor.handle_input("X", window, cx); + }); + cx.assert_excerpts_with_selections(indoc! {" + [EXCERPT] + 1 + Xˇ + 3 + [EXCERPT] + 1 + Xˇ + 3 + "}); + + // Scenario 2: Select "2", then fold second buffer before insertion + cx.update_multibuffer(|mb, cx| { + for buffer_id in buffer_ids.iter() { + let buffer = mb.buffer(*buffer_id).unwrap(); + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..buffer.len(), "1\n2\n3\n")], None, cx); + }); + } + }); + + // Select "2" and select all matches + cx.update_editor(|editor, window, cx| { + editor.change_selections(None.into(), window, cx, |s| { + s.select_ranges([2..3]); + }); + editor + .select_all_matches(&SelectAllMatches, window, cx) + .unwrap(); + }); + + // Fold second buffer - should remove selections from folded buffer + cx.update_editor(|editor, _, cx| { + editor.fold_buffer(buffer_ids[1], cx); + }); + cx.assert_excerpts_with_selections(indoc! {" + [EXCERPT] + 1 + 2ˇ + 3 + [EXCERPT] + [FOLDED] + "}); + + // Insert text - should only affect first buffer + cx.update_editor(|editor, window, cx| { + editor.handle_input("Y", window, cx); + }); + cx.update_editor(|editor, _, cx| { + editor.unfold_buffer(buffer_ids[1], cx); + }); + cx.assert_excerpts_with_selections(indoc! {" + [EXCERPT] + 1 + Yˇ + 3 + [EXCERPT] + 1 + 2 + 3 + "}); + + // Scenario 3: Select "2", then fold first buffer before insertion + cx.update_multibuffer(|mb, cx| { + for buffer_id in buffer_ids.iter() { + let buffer = mb.buffer(*buffer_id).unwrap(); + buffer.update(cx, |buffer, cx| { + buffer.edit([(0..buffer.len(), "1\n2\n3\n")], None, cx); + }); + } + }); + + // Select "2" and select all matches + cx.update_editor(|editor, window, cx| { + editor.change_selections(None.into(), window, cx, |s| { + s.select_ranges([2..3]); + }); + editor + .select_all_matches(&SelectAllMatches, window, cx) + .unwrap(); + }); + + // Fold first buffer - should remove selections from folded buffer + cx.update_editor(|editor, _, cx| { + editor.fold_buffer(buffer_ids[0], cx); + }); + cx.assert_excerpts_with_selections(indoc! {" + [EXCERPT] + [FOLDED] + [EXCERPT] + 1 + 2ˇ + 3 + "}); + + // Insert text - should only affect second buffer + cx.update_editor(|editor, window, cx| { + editor.handle_input("Z", window, cx); + }); + cx.update_editor(|editor, _, cx| { + editor.unfold_buffer(buffer_ids[0], cx); + }); + cx.assert_excerpts_with_selections(indoc! {" + [EXCERPT] + 1 + 2 + 3 + [EXCERPT] + 1 + Zˇ + 3 + "}); + + // Edge case scenario: fold all buffers, then try to insert + cx.update_editor(|editor, _, cx| { + editor.fold_buffer(buffer_ids[0], cx); + editor.fold_buffer(buffer_ids[1], cx); + }); + cx.assert_excerpts_with_selections(indoc! {" + [EXCERPT] + ˇ[FOLDED] + [EXCERPT] + [FOLDED] + "}); + + // Insert should work via default selection + cx.update_editor(|editor, window, cx| { + editor.handle_input("W", window, cx); + }); + cx.update_editor(|editor, _, cx| { + editor.unfold_buffer(buffer_ids[0], cx); + editor.unfold_buffer(buffer_ids[1], cx); + }); + cx.assert_excerpts_with_selections(indoc! 
{" + [EXCERPT] + Wˇ1 + 2 + 3 + [EXCERPT] + 1 + Z + 3 + "}); +} diff --git a/crates/editor/src/selections_collection.rs b/crates/editor/src/selections_collection.rs index eeecfffa70a30705174b64f698d2965f9540fe0b..75fffdc7fea17fe35f9942125499ba15c9a77422 100644 --- a/crates/editor/src/selections_collection.rs +++ b/crates/editor/src/selections_collection.rs @@ -487,6 +487,43 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> { self.selections_changed |= changed; } + pub fn remove_selections_from_buffer(&mut self, buffer_id: language::BufferId) { + let mut changed = false; + + let filtered_selections: Arc<[Selection]> = { + self.disjoint + .iter() + .filter(|selection| { + if let Some(selection_buffer_id) = + self.snapshot.buffer_id_for_anchor(selection.start) + { + let should_remove = selection_buffer_id == buffer_id; + changed |= should_remove; + !should_remove + } else { + true + } + }) + .cloned() + .collect() + }; + + if filtered_selections.is_empty() { + let default_anchor = self.snapshot.anchor_before(0); + self.collection.disjoint = Arc::from([Selection { + id: post_inc(&mut self.collection.next_selection_id), + start: default_anchor, + end: default_anchor, + reversed: false, + goal: SelectionGoal::None, + }]); + } else { + self.collection.disjoint = filtered_selections; + } + + self.selections_changed |= changed; + } + pub fn clear_pending(&mut self) { if self.collection.pending.is_some() { self.collection.pending = None; diff --git a/crates/outline_panel/src/outline_panel.rs b/crates/outline_panel/src/outline_panel.rs index ee271c9ad92cde8fe8f7da54fb1cc1ae74d20ea9..a7fe9ea679d565b2a8a2a26bf86306b93dd62e78 100644 --- a/crates/outline_panel/src/outline_panel.rs +++ b/crates/outline_panel/src/outline_panel.rs @@ -6619,13 +6619,11 @@ outline: struct OutlineEntryExcerpt format!( r#"frontend-project/ public/lottie/ - syntax-tree.json - search: {{ "something": "«static»" }} + syntax-tree.json <==== selected src/ app/(site)/ components/ - ErrorBoundary.tsx <==== selected - search: «static»"# + ErrorBoundary.tsx"# ) ); }); @@ -6667,7 +6665,7 @@ outline: struct OutlineEntryExcerpt format!( r#"frontend-project/ public/lottie/ - syntax-tree.json + syntax-tree.json <==== selected search: {{ "something": "«static»" }} src/ app/(site)/ @@ -6678,7 +6676,7 @@ outline: struct OutlineEntryExcerpt page.tsx search: «static» components/ - ErrorBoundary.tsx <==== selected + ErrorBoundary.tsx search: «static»"# ) ); From 25489c2b7a5a5dbcb53a33da19662a9b9c9810d8 Mon Sep 17 00:00:00 2001 From: dDostalker <146743193+dDostalker@users.noreply.github.com> Date: Tue, 11 Nov 2025 20:22:32 +0800 Subject: [PATCH 51/74] Fix adding a Python virtual environment, may duplicate the "open this dictionary" string when modifying content. (#41840) Release Notes: - Fixed an issue when adding a Python virtual environment that may cause duplicate "open this dictionary" entries - Trigger condition: Type `C:\`, delete `\`, then repeatedly add `\`. 
-Video bug: https://github.com/user-attachments/assets/f68008bb-9138-4451-a842-25b58574493b fix: https://github.com/user-attachments/assets/2913b8c2-adee-4275-af7e-e055fd78915f --- crates/file_finder/src/open_path_prompt.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/file_finder/src/open_path_prompt.rs b/crates/file_finder/src/open_path_prompt.rs index f29c0e6cd20f423dd9073abced0182f272b588c9..53bad3b34880d69aba169df965db71f69b2296eb 100644 --- a/crates/file_finder/src/open_path_prompt.rs +++ b/crates/file_finder/src/open_path_prompt.rs @@ -399,7 +399,12 @@ impl PickerDelegate for OpenPathDelegate { } }) .unwrap_or(false); - if should_prepend_with_current_dir { + + let current_dir_in_new_entries = new_entries + .iter() + .any(|entry| &entry.path.string == current_dir); + + if should_prepend_with_current_dir && !current_dir_in_new_entries { new_entries.insert( 0, CandidateInfo { From b3dd51560bb00cd90a2a56dd29f79e9c7b3c1cda Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miguel=20Raz=20Guzm=C3=A1n=20Macedo?= Date: Tue, 11 Nov 2025 06:55:02 -0600 Subject: [PATCH 52/74] docs: Fix broken links in docs with lychee (#42404) Lychee is a [Rust based](https://lychee.cli.rs) async parallel link checker. I ran it against the codebase to suss out stale links and fixed those up. There's currently 2 remaining cases that I don't know how to resolve: 1. https://flathub.org/apps/dev.zed.Zed - nginx is giving a 502 bad gateway 2. https://github.com/zed-industries/zed/actions/workflows/ci.yml/badge.svg - I don't want to mess with the CI pipeline in this PR. Once again, I'll punt to the Docs Czar to see if this gets incorporated into CI later. --- ## Running `lychee` locally: ``` cargo binstall -y lychee lychee . ``` --- Release Notes: - N/A Signed-off-by: mrg --- docs/src/ai/llm-providers.md | 2 +- docs/src/development/release-notes.md | 2 +- docs/src/uninstall.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/ai/llm-providers.md b/docs/src/ai/llm-providers.md index b737be53eac87cd630303556c1d0f8fcd8d406a1..3e40d7ae0283b3dbd1c50ba1bef5ae410d969305 100644 --- a/docs/src/ai/llm-providers.md +++ b/docs/src/ai/llm-providers.md @@ -587,7 +587,7 @@ These routing controls let you fine‑tune cost, capability, and reliability tra ### Vercel v0 {#vercel-v0} -[Vercel v0](https://vercel.com/docs/v0/api) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel. +[Vercel v0](https://v0.app/docs/api/model) is an expert model for generating full-stack apps, with framework-aware completions optimized for modern stacks like Next.js and Vercel. It supports text and image inputs and provides fast streaming responses. The v0 models are [OpenAI-compatible models](/#openai-api-compatible), but Vercel is listed as first-class provider in the panel's settings view. diff --git a/docs/src/development/release-notes.md b/docs/src/development/release-notes.md index 5005fc32d36bafb57754e45423b45fc8b7bf64d9..90e1ad21b102de291f65894748f0abf11519a59f 100644 --- a/docs/src/development/release-notes.md +++ b/docs/src/development/release-notes.md @@ -10,7 +10,7 @@ Release Notes: - N/A _or_ Added/Fixed/Improved ... 
``` -On Wednesdays, we run a [`get-preview-channel-changes`](https://github.com/zed-industries/zed/blob/main/script/get-preview-channel-changes) script that scrapes `Release Notes` lines from pull requests landing in preview, as documented in our [Release](https://zed.dev/docs/development/releases) docs. +On Wednesdays, we run a [`get-preview-channel-changes`](https://github.com/zed-industries/zed/blob/main/script/get-preview-channel-changes) script that scrapes `Release Notes` lines from pull requests landing in preview, as documented in our [Release](https://zed.dev/docs/development/release-notes) docs. The script outputs everything below the `Release Notes` line, including additional data such as the pull request author (if not a Zed team member) and a link to the pull request. If you use `N/A`, the script skips your pull request entirely. diff --git a/docs/src/uninstall.md b/docs/src/uninstall.md index f2d7da93e78b71c607e79b0bdd5d017f88d55f4d..c1f71a6609ff1b73e15171802441f9aebc8f09cb 100644 --- a/docs/src/uninstall.md +++ b/docs/src/uninstall.md @@ -110,4 +110,4 @@ If you encounter issues during uninstallation: - **Linux**: If the uninstall script fails, check the error message and consider manual removal of the directories listed above. - **All platforms**: If you want to start fresh while keeping Zed installed, you can delete the configuration directories instead of uninstalling the application entirely. -For additional help, see our [Linux-specific documentation](./linux.md) or visit the [Zed community](https://zed.dev/community). +For additional help, see our [Linux-specific documentation](./linux.md) or visit the [Zed community](https://zed.dev/community-links). From 777b46533f6324fb0ee31150d079830fcfb54b5b Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 11 Nov 2025 13:55:19 +0100 Subject: [PATCH 53/74] auto_update: Ignore dir removal errors on windows (#42435) The auto update helper already removes these directories when successful, so these removals will always fail in the common case. Additionally, this replaces a mutable const with a static, since otherwise the job list would be rebuilt on every access. Release Notes: - N/A --- crates/auto_update/src/auto_update.rs | 17 +++-------------- crates/auto_update_helper/src/updater.rs | 14 +++++--------- 2 files changed, 8 insertions(+), 23 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index bd44eb714c08f9a5c698e92570a9edb518c5c806..facb55e0df951633b082f23477e35ce2b55f6f84 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -905,26 +905,15 @@ async fn install_release_macos( #[cfg(target_os = "windows")] async fn cleanup_windows() -> Result<()> { - use util::ResultExt; - let parent = std::env::current_exe()? .parent() .context("No parent dir for Zed.exe")?
.to_owned(); // keep in sync with crates/auto_update_helper/src/updater.rs - smol::fs::remove_dir(parent.join("updates")) - .await - .context("failed to remove updates dir") - .log_err(); - smol::fs::remove_dir(parent.join("install")) - .await - .context("failed to remove install dir") - .log_err(); - smol::fs::remove_dir(parent.join("old")) - .await - .context("failed to remove old version dir") - .log_err(); + _ = smol::fs::remove_dir(parent.join("updates")).await; + _ = smol::fs::remove_dir(parent.join("install")).await; + _ = smol::fs::remove_dir(parent.join("old")).await; Ok(()) } diff --git a/crates/auto_update_helper/src/updater.rs b/crates/auto_update_helper/src/updater.rs index f146583d3bc69b167b61339278a475827bf28d0b..076e11fb4eef1e5c53e2bdc290be7117330c3e61 100644 --- a/crates/auto_update_helper/src/updater.rs +++ b/crates/auto_update_helper/src/updater.rs @@ -1,6 +1,6 @@ use std::{ - cell::LazyCell, path::Path, + sync::LazyLock, time::{Duration, Instant}, }; @@ -13,8 +13,8 @@ use windows::Win32::{ use crate::windows_impl::WM_JOB_UPDATED; pub(crate) struct Job { - pub apply: Box Result<()>>, - pub rollback: Box Result<()>>, + pub apply: Box Result<()> + Send + Sync>, + pub rollback: Box Result<()> + Send + Sync>, } impl Job { @@ -154,10 +154,8 @@ impl Job { } } -// app is single threaded #[cfg(not(test))] -#[allow(clippy::declare_interior_mutable_const)] -pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| { +pub(crate) static JOBS: LazyLock<[Job; 22]> = LazyLock::new(|| { fn p(value: &str) -> &Path { Path::new(value) } @@ -206,10 +204,8 @@ pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| { ] }); -// app is single threaded #[cfg(test)] -#[allow(clippy::declare_interior_mutable_const)] -pub(crate) const JOBS: LazyCell<[Job; 9]> = LazyCell::new(|| { +pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| { fn p(value: &str) -> &Path { Path::new(value) } From f2ad0d716f132e6e05545f3dd4a73f8765dc0fb3 Mon Sep 17 00:00:00 2001 From: Agus Zubiaga Date: Tue, 11 Nov 2025 09:56:20 -0300 Subject: [PATCH 54/74] zeta cli: Print log paths when running predict (#42396) Release Notes: - N/A Co-authored-by: Michael Sloan Co-authored-by: Ben Kunkle --- crates/zeta_cli/src/paths.rs | 8 ++++++++ crates/zeta_cli/src/predict.rs | 22 +++++++++++++++++----- 2 files changed, 25 insertions(+), 5 deletions(-) diff --git a/crates/zeta_cli/src/paths.rs b/crates/zeta_cli/src/paths.rs index 144bf6f5dd97c518d965d7bd23da83ce7f11f66f..fc7f8b3afc3dbcd724649749a58b76dbab275750 100644 --- a/crates/zeta_cli/src/paths.rs +++ b/crates/zeta_cli/src/paths.rs @@ -6,3 +6,11 @@ pub static CACHE_DIR: LazyLock = pub static REPOS_DIR: LazyLock = LazyLock::new(|| TARGET_DIR.join("zeta-repos")); pub static WORKTREES_DIR: LazyLock = LazyLock::new(|| TARGET_DIR.join("zeta-worktrees")); pub static LOGS_DIR: LazyLock = LazyLock::new(|| TARGET_DIR.join("zeta-logs")); +pub static LOGS_SEARCH_PROMPT: LazyLock = + LazyLock::new(|| LOGS_DIR.join("search_prompt.md")); +pub static LOGS_SEARCH_QUERIES: LazyLock = + LazyLock::new(|| LOGS_DIR.join("search_queries.json")); +pub static LOGS_PREDICTION_PROMPT: LazyLock = + LazyLock::new(|| LOGS_DIR.join("prediction_prompt.md")); +pub static LOGS_PREDICTION_RESPONSE: LazyLock = + LazyLock::new(|| LOGS_DIR.join("prediction_response.md")); diff --git a/crates/zeta_cli/src/predict.rs b/crates/zeta_cli/src/predict.rs index 4efc82fa8a7c5d5cf6773a7f771d12dd89b4e1ed..32f2f564fc53df987579bf2946eb5765519157c6 100644 --- a/crates/zeta_cli/src/predict.rs +++ 
b/crates/zeta_cli/src/predict.rs @@ -1,7 +1,10 @@ use crate::PromptFormat; use crate::example::{ActualExcerpt, ExpectedExcerpt, NamedExample}; use crate::headless::ZetaCliAppState; -use crate::paths::{CACHE_DIR, LOGS_DIR}; +use crate::paths::{ + CACHE_DIR, LOGS_DIR, LOGS_PREDICTION_PROMPT, LOGS_PREDICTION_RESPONSE, LOGS_SEARCH_PROMPT, + LOGS_SEARCH_QUERIES, +}; use ::serde::Serialize; use anyhow::{Result, anyhow}; use clap::Args; @@ -61,6 +64,15 @@ pub async fn run_zeta2_predict( .await .unwrap(); result.write(args.format, std::io::stdout()).unwrap(); + + println!("## Logs\n"); + println!("Search prompt: {}", LOGS_SEARCH_PROMPT.display()); + println!("Search queries: {}", LOGS_SEARCH_QUERIES.display()); + println!("Prediction prompt: {}", LOGS_PREDICTION_PROMPT.display()); + println!( + "Prediction response: {}", + LOGS_PREDICTION_RESPONSE.display() + ); } thread_local! { @@ -147,12 +159,12 @@ pub async fn zeta2_predict( match event { zeta2::ZetaDebugInfo::ContextRetrievalStarted(info) => { start_time = Some(info.timestamp); - fs::write(LOGS_DIR.join("search_prompt.md"), &info.search_prompt)?; + fs::write(&*LOGS_SEARCH_PROMPT, &info.search_prompt)?; } zeta2::ZetaDebugInfo::SearchQueriesGenerated(info) => { search_queries_generated_at = Some(info.timestamp); fs::write( - LOGS_DIR.join("search_queries.json"), + &*LOGS_SEARCH_QUERIES, serde_json::to_string_pretty(&info.search_queries).unwrap(), )?; } @@ -164,7 +176,7 @@ pub async fn zeta2_predict( let prediction_started_at = Instant::now(); start_time.get_or_insert(prediction_started_at); fs::write( - LOGS_DIR.join("prediction_prompt.md"), + &*LOGS_PREDICTION_PROMPT, &request.local_prompt.unwrap_or_default(), )?; @@ -198,7 +210,7 @@ pub async fn zeta2_predict( let response = request.response_rx.await?.0.map_err(|err| anyhow!(err))?; let response = zeta2::text_from_response(response).unwrap_or_default(); let prediction_finished_at = Instant::now(); - fs::write(LOGS_DIR.join("prediction_response.md"), &response)?; + fs::write(&*LOGS_PREDICTION_RESPONSE, &response)?; let mut result = result.lock().unwrap(); From 58db38722b57f8531fab877be548c44a9a785229 Mon Sep 17 00:00:00 2001 From: Kirill Bulatov Date: Tue, 11 Nov 2025 15:38:28 +0200 Subject: [PATCH 55/74] Find proper applicable chunks for visible ranges (#42422) Release Notes: - Fixed inlay hints not being queried for certain long-ranged jumps Co-authored-by: Smit Barmase Co-authored-by: Lukas Wirth --- crates/project/src/lsp_store/inlay_hint_cache.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/project/src/lsp_store/inlay_hint_cache.rs b/crates/project/src/lsp_store/inlay_hint_cache.rs index 51189d8fdae788c7c12546f2c9ac1735930c3095..cca9d66e8c330f1a4c723a84c4fb418b976f7c03 100644 --- a/crates/project/src/lsp_store/inlay_hint_cache.rs +++ b/crates/project/src/lsp_store/inlay_hint_cache.rs @@ -6,6 +6,7 @@ use gpui::{App, Entity, Task}; use language::{Buffer, BufferRow, BufferSnapshot}; use lsp::LanguageServerId; use text::OffsetRangeExt; +use util::RangeExt as _; use crate::{InlayHint, InlayId}; @@ -123,18 +124,17 @@ impl BufferInlayHints { let row_ranges = ranges .iter() .map(|range| range.to_point(&self.snapshot)) - .map(|point_range| point_range.start.row..=point_range.end.row) + // Be lenient and yield multiple chunks if they "touch" the exclusive part of the range. + // This will result in LSP hints [re-]queried for more ranges, but also more hints already visible when scrolling around. 
+ .map(|point_range| point_range.start.row..point_range.end.row + 1) .collect::>(); self.buffer_chunks .iter() - .filter(move |chunk| -> bool { - // Be lenient and yield multiple chunks if they "touch" the exclusive part of the range. - // This will result in LSP hints [re-]queried for more ranges, but also more hints already visible when scrolling around. + .filter(move |chunk| { let chunk_range = chunk.start..=chunk.end; - row_ranges.iter().any(|row_range| { - chunk_range.contains(&row_range.start()) - || chunk_range.contains(&row_range.end()) - }) + row_ranges + .iter() + .any(|row_range| chunk_range.overlaps(&row_range)) }) .copied() } From 9be5e31aca85c9d9965c7c28d3e18241afa4c964 Mon Sep 17 00:00:00 2001 From: Libon Date: Tue, 11 Nov 2025 21:42:00 +0800 Subject: [PATCH 56/74] Add clear recent files history command (#42176) ![2025-11-07 181619](https://github.com/user-attachments/assets/a9bef7a6-dc0b-4db2-85e5-2e1df7b21cfa) Release Notes: - Added "workspace: clear navigation history" command --- crates/file_finder/src/file_finder_tests.rs | 96 +++++++++++++++++++++ crates/workspace/src/pane.rs | 19 ++++ crates/workspace/src/workspace.rs | 13 +++ 3 files changed, 128 insertions(+) diff --git a/crates/file_finder/src/file_finder_tests.rs b/crates/file_finder/src/file_finder_tests.rs index 690265562e1c36e685574ec590819d8f513c128a..d6971da15fde8406ac4d00fb613906c91e25d8d4 100644 --- a/crates/file_finder/src/file_finder_tests.rs +++ b/crates/file_finder/src/file_finder_tests.rs @@ -3452,3 +3452,99 @@ async fn test_paths_with_starting_slash(cx: &mut TestAppContext) { assert_eq!(active_editor.read(cx).title(cx), "file1.txt"); }); } + +#[gpui::test] +async fn test_clear_navigation_history(cx: &mut TestAppContext) { + let app_state = init_test(cx); + app_state + .fs + .as_fake() + .insert_tree( + path!("/src"), + json!({ + "test": { + "first.rs": "// First file", + "second.rs": "// Second file", + "third.rs": "// Third file", + } + }), + ) + .await; + + let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await; + let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx)); + + workspace.update_in(cx, |_workspace, window, cx| window.focused(cx)); + + // Open some files to generate navigation history + open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await; + open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await; + let history_before_clear = + open_close_queried_buffer("thi", 1, "third.rs", &workspace, cx).await; + + assert_eq!( + history_before_clear.len(), + 2, + "Should have history items before clearing" + ); + + // Verify that file finder shows history items + let picker = open_file_picker(&workspace, cx); + cx.simulate_input("fir"); + picker.update(cx, |finder, _| { + let matches = collect_search_matches(finder); + assert!( + !matches.history.is_empty(), + "File finder should show history items before clearing" + ); + }); + workspace.update_in(cx, |_, window, cx| { + window.dispatch_action(menu::Cancel.boxed_clone(), cx); + }); + + // Verify navigation state before clear + workspace.update(cx, |workspace, cx| { + let pane = workspace.active_pane(); + pane.read(cx).can_navigate_backward() + }); + + // Clear navigation history + cx.dispatch_action(workspace::ClearNavigationHistory); + + // Verify that navigation is disabled immediately after clear + workspace.update(cx, |workspace, cx| { + let pane = workspace.active_pane(); + assert!( + !pane.read(cx).can_navigate_backward(), + "Should not be able 
to navigate backward after clearing history" + ); + assert!( + !pane.read(cx).can_navigate_forward(), + "Should not be able to navigate forward after clearing history" + ); + }); + + // Verify that file finder no longer shows history items + let picker = open_file_picker(&workspace, cx); + cx.simulate_input("fir"); + picker.update(cx, |finder, _| { + let matches = collect_search_matches(finder); + assert!( + matches.history.is_empty(), + "File finder should not show history items after clearing" + ); + }); + workspace.update_in(cx, |_, window, cx| { + window.dispatch_action(menu::Cancel.boxed_clone(), cx); + }); + + // Verify history is empty by opening a new file + // (this should not show any previous history) + let history_after_clear = + open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await; + assert_eq!( + history_after_clear.len(), + 0, + "Should have no history items after clearing" + ); +} diff --git a/crates/workspace/src/pane.rs b/crates/workspace/src/pane.rs index 24f4254232b33975d77f227a6fa2af57d49c25fd..d85662733d52390db820957818901fa2e2cfd2a2 100644 --- a/crates/workspace/src/pane.rs +++ b/crates/workspace/src/pane.rs @@ -4041,6 +4041,25 @@ impl NavHistory { self.0.lock().mode = NavigationMode::Normal; } + pub fn clear(&mut self, cx: &mut App) { + let mut state = self.0.lock(); + + if state.backward_stack.is_empty() + && state.forward_stack.is_empty() + && state.closed_stack.is_empty() + && state.paths_by_item.is_empty() + { + return; + } + + state.mode = NavigationMode::Normal; + state.backward_stack.clear(); + state.forward_stack.clear(); + state.closed_stack.clear(); + state.paths_by_item.clear(); + state.did_update(cx); + } + pub fn pop(&mut self, mode: NavigationMode, cx: &mut App) -> Option { let mut state = self.0.lock(); let entry = match mode { diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index 7e35510652b1118e9dc8ffa18491d3c2a7904c75..316969812ac34e84f4019a191fda225e255700f0 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -199,6 +199,8 @@ actions!( AddFolderToProject, /// Clears all notifications. ClearAllNotifications, + /// Clears all navigation history, including forward/backward navigation, recently opened files, and recently closed tabs. **This action is irreversible**. + ClearNavigationHistory, /// Closes the active dock. CloseActiveDock, /// Closes all docks. @@ -1917,6 +1919,12 @@ impl Workspace { .collect() } + pub fn clear_navigation_history(&mut self, _window: &mut Window, cx: &mut Context) { + for pane in &self.panes { + pane.update(cx, |pane, cx| pane.nav_history_mut().clear(cx)); + } + } + fn navigate_history( &mut self, pane: WeakEntity, @@ -5858,6 +5866,11 @@ impl Workspace { workspace.clear_all_notifications(cx); }, )) + .on_action(cx.listener( + |workspace: &mut Workspace, _: &ClearNavigationHistory, window, cx| { + workspace.clear_navigation_history(window, cx); + }, + )) .on_action(cx.listener( |workspace: &mut Workspace, _: &SuppressNotification, _, cx| { if let Some((notification_id, _)) = workspace.notifications.pop() { From b8fcd3ea044ebc3780588f37d1ba1ab5133a7134 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 11 Nov 2025 14:43:06 +0100 Subject: [PATCH 57/74] gpui: Fix `RefCell already borrowed` in `WindowsPlatform::run` (#42440) Fixes ZED-1VX Release Notes: - N/A *or* Added/Fixed/Improved ... 
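The underlying issue is re-entrancy: a platform callback stored in a `RefCell` was being invoked while the cell was still mutably borrowed, so a callback that touched platform state again hit `RefCell already borrowed`. A minimal sketch of the take-call-reinsert pattern behind the new `with_callback` helper (simplified types here, not the actual gpui definitions):

```rust
use std::cell::RefCell;

struct Callbacks {
    quit: Option<Box<dyn FnMut()>>,
}

struct State {
    callbacks: Callbacks,
}

// Move the callback out, drop the borrow, run it, then put it back.
// While the callback runs nothing holds a borrow on `state`, so the
// callback is free to borrow it again without panicking.
fn fire_quit(state: &RefCell<State>) {
    let taken = state.borrow_mut().callbacks.quit.take();
    if let Some(mut callback) = taken {
        callback();
        state.borrow_mut().callbacks.quit = Some(callback);
    }
}

fn main() {
    let state = RefCell::new(State {
        callbacks: Callbacks {
            quit: Some(Box::new(|| println!("quit callback ran"))),
        },
    });
    fire_quit(&state);
}
```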
--- crates/gpui/src/platform/windows/events.rs | 14 ++---- crates/gpui/src/platform/windows/platform.rs | 50 +++++++++++--------- crates/gpui/src/platform/windows/window.rs | 10 +++- 3 files changed, 42 insertions(+), 32 deletions(-) diff --git a/crates/gpui/src/platform/windows/events.rs b/crates/gpui/src/platform/windows/events.rs index 4e6df63106f4c650ad3130e39d410670ddc4687d..cc17f19bcfac86a6f8ac31ec1059d76c24e79695 100644 --- a/crates/gpui/src/platform/windows/events.rs +++ b/crates/gpui/src/platform/windows/events.rs @@ -487,14 +487,12 @@ impl WindowsWindowInner { let scale_factor = lock.scale_factor; let wheel_scroll_amount = match modifiers.shift { true => { - self.system_settings - .borrow() + self.system_settings() .mouse_wheel_settings .wheel_scroll_chars } false => { - self.system_settings - .borrow() + self.system_settings() .mouse_wheel_settings .wheel_scroll_lines } @@ -541,8 +539,7 @@ impl WindowsWindowInner { }; let scale_factor = lock.scale_factor; let wheel_scroll_chars = self - .system_settings - .borrow() + .system_settings() .mouse_wheel_settings .wheel_scroll_chars; drop(lock); @@ -677,8 +674,7 @@ impl WindowsWindowInner { // used by Chrome. However, it may result in one row of pixels being obscured // in our client area. But as Chrome says, "there seems to be no better solution." if is_maximized - && let Some(ref taskbar_position) = - self.system_settings.borrow().auto_hide_taskbar_position + && let Some(ref taskbar_position) = self.system_settings().auto_hide_taskbar_position { // For the auto-hide taskbar, adjust in by 1 pixel on taskbar edge, // so the window isn't treated as a "fullscreen app", which would cause @@ -1072,7 +1068,7 @@ impl WindowsWindowInner { lock.border_offset.update(handle).log_err(); // system settings may emit a window message which wants to take the refcell lock, so drop it drop(lock); - self.system_settings.borrow_mut().update(display, wparam.0); + self.system_settings_mut().update(display, wparam.0); } else { self.handle_system_theme_changed(handle, lparam)?; }; diff --git a/crates/gpui/src/platform/windows/platform.rs b/crates/gpui/src/platform/windows/platform.rs index b985cc14b01b1171d4013bf5c41a0c5199565503..72f427beb55b18ff5b94a1a90e334e07045b8726 100644 --- a/crates/gpui/src/platform/windows/platform.rs +++ b/crates/gpui/src/platform/windows/platform.rs @@ -342,9 +342,8 @@ impl Platform for WindowsPlatform { } } - if let Some(ref mut callback) = self.inner.state.borrow_mut().callbacks.quit { - callback(); - } + self.inner + .with_callback(|callbacks| &mut callbacks.quit, |callback| callback()); } fn quit(&self) { @@ -578,14 +577,13 @@ impl Platform for WindowsPlatform { fn set_cursor_style(&self, style: CursorStyle) { let hcursor = load_cursor(style); - let mut lock = self.inner.state.borrow_mut(); - if lock.current_cursor.map(|c| c.0) != hcursor.map(|c| c.0) { + if self.inner.state.borrow_mut().current_cursor.map(|c| c.0) != hcursor.map(|c| c.0) { self.post_message( WM_GPUI_CURSOR_STYLE_CHANGED, WPARAM(0), LPARAM(hcursor.map_or(0, |c| c.0 as isize)), ); - lock.current_cursor = hcursor; + self.inner.state.borrow_mut().current_cursor = hcursor; } } @@ -724,6 +722,18 @@ impl WindowsPlatformInner { })) } + /// Calls `project` to project to the corresponding callback field, removes it from callbacks, calls `f` with the callback and then puts the callback back. 
+ fn with_callback( + &self, + project: impl Fn(&mut PlatformCallbacks) -> &mut Option, + f: impl FnOnce(&mut T), + ) { + if let Some(mut callback) = project(&mut self.state.borrow_mut().callbacks).take() { + f(&mut callback); + *project(&mut self.state.borrow_mut().callbacks) = Some(callback) + } + } + fn handle_msg( self: &Rc, handle: HWND, @@ -807,40 +817,36 @@ impl WindowsPlatformInner { } fn handle_dock_action_event(&self, action_idx: usize) -> Option { - let mut lock = self.state.borrow_mut(); - let mut callback = lock.callbacks.app_menu_action.take()?; - let Some(action) = lock + let Some(action) = self + .state + .borrow_mut() .jump_list .dock_menus .get(action_idx) .map(|dock_menu| dock_menu.action.boxed_clone()) else { - lock.callbacks.app_menu_action = Some(callback); log::error!("Dock menu for index {action_idx} not found"); return Some(1); }; - drop(lock); - callback(&*action); - self.state.borrow_mut().callbacks.app_menu_action = Some(callback); + self.with_callback( + |callbacks| &mut callbacks.app_menu_action, + |callback| callback(&*action), + ); Some(0) } fn handle_keyboard_layout_change(&self) -> Option { - let mut callback = self - .state - .borrow_mut() - .callbacks - .keyboard_layout_change - .take()?; - callback(); - self.state.borrow_mut().callbacks.keyboard_layout_change = Some(callback); + self.with_callback( + |callbacks| &mut callbacks.keyboard_layout_change, + |callback| callback(), + ); Some(0) } fn handle_device_lost(&self, lparam: LPARAM) -> Option { - let mut lock = self.state.borrow_mut(); let directx_devices = lparam.0 as *const DirectXDevices; let directx_devices = unsafe { &*directx_devices }; + let mut lock = self.state.borrow_mut(); lock.directx_devices.take(); lock.directx_devices = Some(directx_devices.clone()); diff --git a/crates/gpui/src/platform/windows/window.rs b/crates/gpui/src/platform/windows/window.rs index 0050fa4bc0e96b8702314f33637db67998b5941d..4b89fcffb39d9bfbc0734977cec16a00984f5c9a 100644 --- a/crates/gpui/src/platform/windows/window.rs +++ b/crates/gpui/src/platform/windows/window.rs @@ -63,7 +63,7 @@ pub(crate) struct WindowsWindowInner { hwnd: HWND, drop_target_helper: IDropTargetHelper, pub(crate) state: RefCell, - pub(crate) system_settings: RefCell, + system_settings: RefCell, pub(crate) handle: AnyWindowHandle, pub(crate) hide_title_bar: bool, pub(crate) is_movable: bool, @@ -321,6 +321,14 @@ impl WindowsWindowInner { } Ok(()) } + + pub(crate) fn system_settings(&self) -> std::cell::Ref<'_, WindowsSystemSettings> { + self.system_settings.borrow() + } + + pub(crate) fn system_settings_mut(&self) -> std::cell::RefMut<'_, WindowsSystemSettings> { + self.system_settings.borrow_mut() + } } #[derive(Default)] From dfd7e85d5de70f642ca3cbef7a1a1535fd108362 Mon Sep 17 00:00:00 2001 From: Terra Date: Tue, 11 Nov 2025 22:43:25 +0900 Subject: [PATCH 58/74] Replace deprecated json.schemastore.org with www.schemastore.org (#42336) Release Notes: - N/A According to [microsoft/vscode#254689](https://github.com/microsoft/vscode/issues/254689), the json.schemastore.org domain has been deprecated and should now use www.schemastore.org (or schemastore.org) instead. This PR updates all occurrences of the old domain within the Zed codebase, including code, documentation, and configuration files. 
--- .github/ISSUE_TEMPLATE/config.yml | 2 +- crates/json_schema_store/src/schemas/package.json | 14 +++++++------- crates/json_schema_store/src/schemas/tsconfig.json | 2 +- docs/src/languages/deno.md | 2 +- docs/src/languages/yaml.md | 4 ++-- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 3d0b2ce0af79944c9c86dba6187b0fd7d91c5b8c..8602daf90ddd0fe49d9db27c39f3d52d6e7da032 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,4 +1,4 @@ -# yaml-language-server: $schema=https://json.schemastore.org/github-issue-config.json +# yaml-language-server: $schema=https://www.schemastore.org/github-issue-config.json blank_issues_enabled: false contact_links: - name: Feature Request diff --git a/crates/json_schema_store/src/schemas/package.json b/crates/json_schema_store/src/schemas/package.json index a24583fa8848891d661114291951d4df28f463fd..0906dcf36e30dcc9bdb64153a963368d3647a6d9 100644 --- a/crates/json_schema_store/src/schemas/package.json +++ b/crates/json_schema_store/src/schemas/package.json @@ -1030,22 +1030,22 @@ "$ref": "#" }, "eslintConfig": { - "$ref": "https://json.schemastore.org/eslintrc.json" + "$ref": "https://www.schemastore.org/eslintrc.json" }, "prettier": { - "$ref": "https://json.schemastore.org/prettierrc.json" + "$ref": "https://www.schemastore.org/prettierrc.json" }, "stylelint": { - "$ref": "https://json.schemastore.org/stylelintrc.json" + "$ref": "https://www.schemastore.org/stylelintrc.json" }, "ava": { - "$ref": "https://json.schemastore.org/ava.json" + "$ref": "https://www.schemastore.org/ava.json" }, "release": { - "$ref": "https://json.schemastore.org/semantic-release.json" + "$ref": "https://www.schemastore.org/semantic-release.json" }, "jscpd": { - "$ref": "https://json.schemastore.org/jscpd.json" + "$ref": "https://www.schemastore.org/jscpd.json" }, "pnpm": { "description": "Defines pnpm specific configuration.", @@ -1305,5 +1305,5 @@ ] } ], - "$id": "https://json.schemastore.org/package.json" + "$id": "https://www.schemastore.org/package.json" } diff --git a/crates/json_schema_store/src/schemas/tsconfig.json b/crates/json_schema_store/src/schemas/tsconfig.json index 4b9088725401e27dfc24c14d7c58acfae4355631..9484c027df59c2efe0d2c4024046fb6a839e78a9 100644 --- a/crates/json_schema_store/src/schemas/tsconfig.json +++ b/crates/json_schema_store/src/schemas/tsconfig.json @@ -1466,7 +1466,7 @@ } } }, - "id": "https://json.schemastore.org/tsconfig", + "id": "https://www.schemastore.org/tsconfig", "title": "JSON schema for the TypeScript compiler's configuration file", "type": "object" } diff --git a/docs/src/languages/deno.md b/docs/src/languages/deno.md index a4192257765d6aa131232ff8a80a3af452a38d57..0fa645291e4c4788ae126bc3ccb0120f26a78cb0 100644 --- a/docs/src/languages/deno.md +++ b/docs/src/languages/deno.md @@ -78,7 +78,7 @@ To get completions for `deno.json` or `package.json` you can add the following t "fileMatch": [ "package.json" ], - "url": "http://json.schemastore.org/package" + "url": "https://www.schemastore.org/package" } ] } diff --git a/docs/src/languages/yaml.md b/docs/src/languages/yaml.md index 477d197d11fa4f0ad0e62ee25e416eee7c35ee67..33b92df94ec59f6f4cdf9a0afe83c3ad74dc3bda 100644 --- a/docs/src/languages/yaml.md +++ b/docs/src/languages/yaml.md @@ -19,7 +19,7 @@ You can configure various [yaml-language-server settings](https://github.com/red "singleQuote": true }, "schemas": { - "http://json.schemastore.org/composer": 
["/*"], + "https://getcomposer.org/schema.json": ["/*"], "../relative/path/schema.json": ["/config*.yaml"] } } @@ -70,7 +70,7 @@ By default yaml-language-server will attempt to determine the correct schema for You can override any auto-detected schema via the `schemas` settings key (demonstrated above) or by providing an [inlined schema](https://github.com/redhat-developer/yaml-language-server#using-inlined-schema) reference via a modeline comment at the top of your yaml file: ```yaml -# yaml-language-server: $schema=https://json.schemastore.org/github-action.json +# yaml-language-server: $schema=https://www.schemastore.org/github-action.json name: Issue Assignment on: issues: From 10d5d78dedce7526739c8d7c488b820bef33c7c6 Mon Sep 17 00:00:00 2001 From: liuyanghejerry Date: Tue, 11 Nov 2025 21:45:03 +0800 Subject: [PATCH 59/74] Improve error messages on extension loading (#42266) This pull request improves error message when extension loading goes wrong. Before: ``` 2025-11-08T21:16:02+08:00 ERROR [extension_host::extension_host] failed to load arkts extension.toml Caused by: No such file or directory (os error 2) ``` Now: ``` 2025-11-08T22:57:00+08:00 ERROR [extension_host::extension_host] failed to load arkts extension.toml, "/Users/user_name_placeholder/Library/Application Support/Zed/extensions/installed/arkts/extension.toml" Caused by: No such file or directory (os error 2) ``` Release Notes: - N/A --- crates/extension/src/extension_manifest.rs | 14 ++++++-------- crates/extension_host/src/wasm_host.rs | 6 +++--- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/crates/extension/src/extension_manifest.rs b/crates/extension/src/extension_manifest.rs index 7e074ffcab77ceb2a63fd92448faa2e13f4ec8c4..7a15a3c58b7a907fa56a12633343a48d150b6bcf 100644 --- a/crates/extension/src/extension_manifest.rs +++ b/crates/extension/src/extension_manifest.rs @@ -267,10 +267,9 @@ impl ExtensionManifest { let mut extension_manifest_path = extension_dir.join("extension.json"); if fs.is_file(&extension_manifest_path).await { - let manifest_content = fs - .load(&extension_manifest_path) - .await - .with_context(|| format!("failed to load {extension_name} extension.json"))?; + let manifest_content = fs.load(&extension_manifest_path).await.with_context(|| { + format!("loading {extension_name} extension.json, {extension_manifest_path:?}") + })?; let manifest_json = serde_json::from_str::(&manifest_content) .with_context(|| { format!("invalid extension.json for extension {extension_name}") @@ -279,10 +278,9 @@ impl ExtensionManifest { Ok(manifest_from_old_manifest(manifest_json, extension_name)) } else { extension_manifest_path.set_extension("toml"); - let manifest_content = fs - .load(&extension_manifest_path) - .await - .with_context(|| format!("failed to load {extension_name} extension.toml"))?; + let manifest_content = fs.load(&extension_manifest_path).await.with_context(|| { + format!("loading {extension_name} extension.toml, {extension_manifest_path:?}") + })?; toml::from_str(&manifest_content).map_err(|err| { anyhow!("Invalid extension.toml for extension {extension_name}:\n{err}") }) diff --git a/crates/extension_host/src/wasm_host.rs b/crates/extension_host/src/wasm_host.rs index eb26c44f20519b7cdb3a38859f23ce99365fe505..1e4bed7a50b44c710384f19c901e4e74854df0e2 100644 --- a/crates/extension_host/src/wasm_host.rs +++ b/crates/extension_host/src/wasm_host.rs @@ -763,17 +763,17 @@ impl WasmExtension { .fs .open_sync(&path) .await - .context("failed to open wasm file")?; + 
.context(format!("opening wasm file, path: {path:?}"))?; let mut wasm_bytes = Vec::new(); wasm_file .read_to_end(&mut wasm_bytes) - .context("failed to read wasm")?; + .context(format!("reading wasm file, path: {path:?}"))?; wasm_host .load_extension(wasm_bytes, manifest, cx) .await - .with_context(|| format!("failed to load wasm extension {}", manifest.id)) + .with_context(|| format!("loading wasm extension: {}", manifest.id)) } pub async fn call(&self, f: Fn) -> Result From 38e2c7aa66a999a3e1064cf12dea03a48f774927 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 11 Nov 2025 14:56:04 +0100 Subject: [PATCH 60/74] editor: Hide file blame on editor cancel (ESC) (#42436) Release Notes: - N/A *or* Added/Fixed/Improved ... --- crates/editor/src/editor.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/crates/editor/src/editor.rs b/crates/editor/src/editor.rs index 223dbb776550e949d0ce86dca6f68aff6482433d..f7eb309fd1b67272103133d47303ef7f0b9e5f35 100644 --- a/crates/editor/src/editor.rs +++ b/crates/editor/src/editor.rs @@ -4076,17 +4076,24 @@ impl Editor { self.selection_mark_mode = false; self.selection_drag_state = SelectionDragState::None; + if self.dismiss_menus_and_popups(true, window, cx) { + cx.notify(); + return; + } if self.clear_expanded_diff_hunks(cx) { cx.notify(); return; } - if self.dismiss_menus_and_popups(true, window, cx) { + if self.show_git_blame_gutter { + self.show_git_blame_gutter = false; + cx.notify(); return; } if self.mode.is_full() && self.change_selections(Default::default(), window, cx, |s| s.try_cancel()) { + cx.notify(); return; } From a19d11184dcff70e3473ac9d43da721af310583a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 11 Nov 2025 16:09:56 +0100 Subject: [PATCH 61/74] remote: Add more context to error logging in wsl (#42450) cc https://github.com/zed-industries/zed/issues/40892 Release Notes: - N/A *or* Added/Fixed/Improved ... 
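The pattern throughout the change is to attach the failing command to the error chain with `anyhow::Context` rather than returning a bare error, so WSL failures in the logs say what was being run. A rough standalone sketch of that pattern (using `std::process::Command` for brevity; the crate itself goes through its own WSL command helpers):

```rust
use anyhow::{Context, Result, bail};
use std::process::Command;

// Attach the command that failed to the error so the log line says *what*
// was being run, not just that something went wrong.
fn run_logged(program: &str, args: &[&str]) -> Result<String> {
    let output = Command::new(program)
        .args(args)
        .output()
        .with_context(|| format!("Failed to run command '{program} {args:?}'"))?;
    if !output.status.success() {
        bail!(
            "Command '{program} {args:?}' failed: {}",
            String::from_utf8_lossy(&output.stderr).trim()
        );
    }
    Ok(String::from_utf8_lossy(&output.stdout).into_owned())
}

fn main() -> Result<()> {
    println!("{}", run_logged("uname", &["-m"])?);
    Ok(())
}
```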
--- crates/remote/src/transport/wsl.rs | 41 ++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/crates/remote/src/transport/wsl.rs b/crates/remote/src/transport/wsl.rs index 702b2dd0601ab94969d1c88de6b6eaae8951c9b2..3e14fcfc8b7b8a7f74b1de2a10107a143461108b 100644 --- a/crates/remote/src/transport/wsl.rs +++ b/crates/remote/src/transport/wsl.rs @@ -19,6 +19,7 @@ use std::{ time::Instant, }; use util::{ + ResultExt as _, paths::{PathStyle, RemotePathBuf}, rel_path::RelPath, shell::ShellKind, @@ -79,20 +80,27 @@ impl WslRemoteConnection { can_exec: true, }; delegate.set_status(Some("Detecting WSL environment"), cx); - this.shell = this.detect_shell().await?; + this.shell = this + .detect_shell() + .await + .context("failed detecting shell")?; this.shell_kind = ShellKind::new(&this.shell, false); - this.can_exec = this.detect_can_exec().await?; - this.platform = this.detect_platform().await?; + this.can_exec = this.detect_can_exec().await; + this.platform = this + .detect_platform() + .await + .context("failed detecting platform")?; this.remote_binary_path = Some( this.ensure_server_binary(&delegate, release_channel, version, commit, cx) - .await?, + .await + .context("failed ensuring server binary")?, ); log::debug!("Detected WSL environment: {this:#?}"); Ok(this) } - async fn detect_can_exec(&self) -> Result { + async fn detect_can_exec(&self) -> bool { let options = &self.connection_options; let program = self.shell_kind.prepend_command_prefix("uname"); let args = &["-m"]; @@ -101,10 +109,13 @@ impl WslRemoteConnection { .await; if !output.is_ok_and(|output| output.status.success()) { - run_wsl_command_impl(options, &program, args, false).await?; - Ok(false) + run_wsl_command_impl(options, &program, args, false) + .await + .context("failed detecting exec status") + .log_err(); + false } else { - Ok(true) + true } } async fn detect_platform(&self) -> Result { @@ -504,7 +515,9 @@ impl RemoteConnection for WslRemoteConnection { /// `wslpath` is a executable available in WSL, it's a linux binary. /// So it doesn't support Windows style paths. async fn sanitize_path(path: &Path) -> Result { - let path = smol::fs::canonicalize(path).await?; + let path = smol::fs::canonicalize(path) + .await + .with_context(|| format!("Failed to canonicalize path {}", path.display()))?; let path_str = path.to_string_lossy(); let sanitized = path_str.strip_prefix(r"\\?\").unwrap_or(&path_str); @@ -526,14 +539,16 @@ async fn run_wsl_command_impl( args: &[&str], exec: bool, ) -> Result { - let output = wsl_command_impl(options, program, args, exec) + let mut command = wsl_command_impl(options, program, args, exec); + let output = command .output() - .await?; + .await + .with_context(|| format!("Failed to run command '{:?}'", command))?; if !output.status.success() { return Err(anyhow!( - "Command '{}' failed: {}", - program, + "Command '{:?}' failed: {}", + command, String::from_utf8_lossy(&output.stderr).trim() )); } From 28d019be2e5ba7208874c5779aefc1d8f6f07ae7 Mon Sep 17 00:00:00 2001 From: tidely <43219534+tidely@users.noreply.github.com> Date: Tue, 11 Nov 2025 17:10:47 +0200 Subject: [PATCH 62/74] ollama: Fix tool calling (#42275) Closes #42303 Ollama added tool call identifiers (https://github.com/ollama/ollama/pull/12956) in its latest version [v0.12.10](https://github.com/ollama/ollama/releases/tag/v0.12.10). This broke our json schema and made all tool calls fail. This PR fixes the schema and uses the Ollama provided tool call identifier when available. 
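For reference, a trimmed-down sketch of the payload shape the deserializer now has to accept — `id` is kept optional so responses from older Ollama builds, which omit it, still parse. Field names mirror the structs touched in the diff below, but this is a simplified sketch rather than the crate's exact definitions:

```rust
use serde::{Deserialize, Serialize};
use serde_json::Value;

#[derive(Serialize, Deserialize, Debug)]
struct OllamaToolCall {
    // Present on Ollama >= v0.12.10, absent on older versions.
    id: Option<String>,
    function: OllamaFunctionCall,
}

#[derive(Serialize, Deserialize, Debug)]
struct OllamaFunctionCall {
    name: String,
    arguments: Value,
}

fn main() -> serde_json::Result<()> {
    let with_id =
        r#"{"id":"call_llama3.2:3b_145155","function":{"name":"weather","arguments":{"city":"london"}}}"#;
    let without_id = r#"{"function":{"name":"weather","arguments":{"city":"london"}}}"#;

    let new_style: OllamaToolCall = serde_json::from_str(with_id)?;
    let old_style: OllamaToolCall = serde_json::from_str(without_id)?;
    assert!(new_style.id.is_some());
    assert!(old_style.id.is_none());
    Ok(())
}
```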
We remain backwards compatible and still use our own identifier with older versions of Ollama. I added a `TODO` to remove the `Option` around the new field when most users have updated their installations to v0.12.10 or above. Note to reviewer: The fix to this issue should likely get cherry-picked into the next release, since Ollama becomes unusable as an agent without it. Release Notes: - Fixed tool calling when using the latest version of Ollama --- crates/language_models/src/provider/ollama.rs | 47 +++++++-------- crates/ollama/src/ollama.rs | 59 ++++++++++++++++++- 2 files changed, 80 insertions(+), 26 deletions(-) diff --git a/crates/language_models/src/provider/ollama.rs b/crates/language_models/src/provider/ollama.rs index a0aada7d1a7b557e1e5aa07f19dd3e38492fc972..b6870f5f72b08d2ca4decc101deae59b6a56c224 100644 --- a/crates/language_models/src/provider/ollama.rs +++ b/crates/language_models/src/provider/ollama.rs @@ -381,10 +381,13 @@ impl OllamaLanguageModel { thinking = Some(text) } MessageContent::ToolUse(tool_use) => { - tool_calls.push(OllamaToolCall::Function(OllamaFunctionCall { - name: tool_use.name.to_string(), - arguments: tool_use.input, - })); + tool_calls.push(OllamaToolCall { + id: Some(tool_use.id.to_string()), + function: OllamaFunctionCall { + name: tool_use.name.to_string(), + arguments: tool_use.input, + }, + }); } _ => (), } @@ -575,25 +578,23 @@ fn map_to_language_model_completion_events( } if let Some(tool_call) = tool_calls.and_then(|v| v.into_iter().next()) { - match tool_call { - OllamaToolCall::Function(function) => { - let tool_id = format!( - "{}-{}", - &function.name, - TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed) - ); - let event = - LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { - id: LanguageModelToolUseId::from(tool_id), - name: Arc::from(function.name), - raw_input: function.arguments.to_string(), - input: function.arguments, - is_input_complete: true, - }); - events.push(Ok(event)); - state.used_tools = true; - } - } + let OllamaToolCall { id, function } = tool_call; + let id = id.unwrap_or_else(|| { + format!( + "{}-{}", + &function.name, + TOOL_CALL_COUNTER.fetch_add(1, Ordering::Relaxed) + ) + }); + let event = LanguageModelCompletionEvent::ToolUse(LanguageModelToolUse { + id: LanguageModelToolUseId::from(id), + name: Arc::from(function.name), + raw_input: function.arguments.to_string(), + input: function.arguments, + is_input_complete: true, + }); + events.push(Ok(event)); + state.used_tools = true; } else if !content.is_empty() { events.push(Ok(LanguageModelCompletionEvent::Text(content))); } diff --git a/crates/ollama/src/ollama.rs b/crates/ollama/src/ollama.rs index 0ed3b6da17d952cc874485337ec380ef3ca990a8..f6614379fa999883405a20d17328c61d7da448f2 100644 --- a/crates/ollama/src/ollama.rs +++ b/crates/ollama/src/ollama.rs @@ -102,9 +102,11 @@ pub enum ChatMessage { } #[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "lowercase")] -pub enum OllamaToolCall { - Function(OllamaFunctionCall), +pub struct OllamaToolCall { + // TODO: Remove `Option` after most users have updated to Ollama v0.12.10, + // which was released on the 4th of November 2025 + pub id: Option, + pub function: OllamaFunctionCall, } #[derive(Serialize, Deserialize, Debug)] @@ -444,6 +446,7 @@ mod tests { "content": "", "tool_calls": [ { + "id": "call_llama3.2:3b_145155", "function": { "name": "weather", "arguments": { @@ -479,6 +482,56 @@ mod tests { } } + // Backwards compatibility with Ollama versions prior to v0.12.10 November 2025 + // This 
test is a copy of `parse_tool_call()` with the `id` field omitted. + #[test] + fn parse_tool_call_pre_0_12_10() { + let response = serde_json::json!({ + "model": "llama3.2:3b", + "created_at": "2025-04-28T20:02:02.140489Z", + "message": { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "function": { + "name": "weather", + "arguments": { + "city": "london", + } + } + } + ] + }, + "done_reason": "stop", + "done": true, + "total_duration": 2758629166u64, + "load_duration": 1770059875, + "prompt_eval_count": 147, + "prompt_eval_duration": 684637583, + "eval_count": 16, + "eval_duration": 302561917, + }); + + let result: ChatResponseDelta = serde_json::from_value(response).unwrap(); + match result.message { + ChatMessage::Assistant { + content, + tool_calls: Some(tool_calls), + images: _, + thinking, + } => { + assert!(content.is_empty()); + assert!(thinking.is_none()); + + // When the `Option` around `id` is removed, this test should complain + // and be subsequently deleted in favor of `parse_tool_call()` + assert!(tool_calls.first().is_some_and(|call| call.id.is_none())) + } + _ => panic!("Deserialized wrong role"), + } + } + #[test] fn parse_show_model() { let response = serde_json::json!({ From ee2e69065714297679d199c25bb25bde558dacdc Mon Sep 17 00:00:00 2001 From: Bennet Bo Fenner Date: Tue, 11 Nov 2025 16:25:27 +0100 Subject: [PATCH 63/74] agent_servers: Fix panic when setting default mode (#42452) Closes ZED-35A Release Notes: - Fixed an issue where Zed would panic when trying to set the default mode for ACP agents --- crates/agent_servers/src/custom.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs index a51ed8a51a24d28aa6f2867797207bb15643a67d..7d36cc758389a828b819a822c91c9bb4b3444985 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -50,13 +50,14 @@ impl crate::AgentServer for CustomAgentServer { fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { let name = self.name(); update_settings_file(fs, cx, move |settings, _| { - settings + if let Some(settings) = settings .agent_servers .get_or_insert_default() .custom .get_mut(&name) - .unwrap() - .default_mode = mode_id.map(|m| m.to_string()) + { + settings.default_mode = mode_id.map(|m| m.to_string()) + } }); } From 8467a3dbd6aa3af529de021226d9196f2dc621d6 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 11 Nov 2025 12:47:08 -0300 Subject: [PATCH 64/74] agent_ui: Allow to uninstall agent servers from the settings view (#42445) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR also adds items within the "Add Agent" menu to: 1. Add more agent servers from extensions, opening up the extensions page with "Agent Servers" already filtered 2. Go to the agent server + ACP docs to learn more about them I feel like having them there is a nice way to promote this knowledge from within the product and have users learn more about them. Screenshot 2025-11-11 at 10  46 3@2x Release Notes: - agent: Enabled uninstalled agent servers from the agent panel's settings view. 
--- crates/agent_ui/src/agent_configuration.rs | 201 ++++++++++++++++----- crates/client/src/zed_urls.rs | 8 + crates/project/src/agent_server_store.rs | 12 ++ 3 files changed, 174 insertions(+), 47 deletions(-) diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index 8ace684234e90c5203528cae360a28b30798bea3..125dc223796f6d9b7e96bee452bee25a2409adb1 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -8,6 +8,7 @@ use std::{ops::Range, sync::Arc}; use agent::ContextServerRegistry; use anyhow::Result; +use client::zed_urls; use cloud_llm_client::{Plan, PlanV1, PlanV2}; use collections::HashMap; use context_server::ContextServerId; @@ -26,18 +27,20 @@ use language_model::{ use language_models::AllLanguageModelSettings; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{ - agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME}, + agent_server_store::{ + AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, ExternalAgentServerName, GEMINI_NAME, + }, context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore}, }; use settings::{Settings, SettingsStore, update_settings_file}; use ui::{ - Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, - ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch, - SwitchColor, Tooltip, WithScrollbar, prelude::*, + Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, ContextMenuEntry, Disclosure, + Divider, DividerColor, ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, + PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*, }; use util::ResultExt as _; use workspace::{Workspace, create_and_open_local_file}; -use zed_actions::ExtensionCategoryFilter; +use zed_actions::{ExtensionCategoryFilter, OpenBrowser}; pub(crate) use configure_context_server_modal::ConfigureContextServerModal; pub(crate) use configure_context_server_tools_modal::ConfigureContextServerToolsModal; @@ -415,6 +418,7 @@ impl AgentConfiguration { cx: &mut Context, ) -> impl IntoElement { let providers = LanguageModelRegistry::read_global(cx).providers(); + let popover_menu = PopoverMenu::new("add-provider-popover") .trigger( Button::new("add-provider", "Add Provider") @@ -425,7 +429,6 @@ impl AgentConfiguration { .icon_color(Color::Muted) .label_size(LabelSize::Small), ) - .anchor(gpui::Corner::TopRight) .menu({ let workspace = self.workspace.clone(); move |window, cx| { @@ -447,6 +450,11 @@ impl AgentConfiguration { }) })) } + }) + .anchor(gpui::Corner::TopRight) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), }); v_flex() @@ -541,7 +549,6 @@ impl AgentConfiguration { .icon_color(Color::Muted) .label_size(LabelSize::Small), ) - .anchor(gpui::Corner::TopRight) .menu({ move |window, cx| { Some(ContextMenu::build(window, cx, |menu, _window, _cx| { @@ -564,6 +571,11 @@ impl AgentConfiguration { }) })) } + }) + .anchor(gpui::Corner::TopRight) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), }); v_flex() @@ -943,7 +955,7 @@ impl AgentConfiguration { .cloned() .collect::>(); - let user_defined_agents = user_defined_agents + let user_defined_agents: Vec<_> = user_defined_agents .into_iter() .map(|name| { let icon = if let Some(icon_path) = agent_server_store.agent_icon(&name) { @@ -951,27 +963,93 @@ impl AgentConfiguration { } else { AgentIcon::Name(IconName::Ai) }; - self.render_agent_server(icon, 
name, true) - .into_any_element() + (name, icon) }) - .collect::>(); + .collect(); - let add_agens_button = Button::new("add-agent", "Add Agent") - .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Plus) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) - .label_size(LabelSize::Small) - .on_click(move |_, window, cx| { - if let Some(workspace) = window.root().flatten() { - let workspace = workspace.downgrade(); - window - .spawn(cx, async |cx| { - open_new_agent_servers_entry_in_settings_editor(workspace, cx).await + let add_agent_popover = PopoverMenu::new("add-agent-server-popover") + .trigger( + Button::new("add-agent", "Add Agent") + .style(ButtonStyle::Outlined) + .icon_position(IconPosition::Start) + .icon(IconName::Plus) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .label_size(LabelSize::Small), + ) + .menu({ + move |window, cx| { + Some(ContextMenu::build(window, cx, |menu, _window, _cx| { + menu.entry("Install from Extensions", None, { + |window, cx| { + window.dispatch_action( + zed_actions::Extensions { + category_filter: Some( + ExtensionCategoryFilter::AgentServers, + ), + id: None, + } + .boxed_clone(), + cx, + ) + } }) - .detach_and_log_err(cx); + .entry("Add Custom Agent", None, { + move |window, cx| { + if let Some(workspace) = window.root().flatten() { + let workspace = workspace.downgrade(); + window + .spawn(cx, async |cx| { + open_new_agent_servers_entry_in_settings_editor( + workspace, cx, + ) + .await + }) + .detach_and_log_err(cx); + } + } + }) + .separator() + .header("Learn More") + .item( + ContextMenuEntry::new("Agent Servers Docs") + .icon(IconName::ArrowUpRight) + .icon_color(Color::Muted) + .icon_position(IconPosition::End) + .handler({ + move |window, cx| { + window.dispatch_action( + Box::new(OpenBrowser { + url: zed_urls::agent_server_docs(cx), + }), + cx, + ); + } + }), + ) + .item( + ContextMenuEntry::new("ACP Docs") + .icon(IconName::ArrowUpRight) + .icon_color(Color::Muted) + .icon_position(IconPosition::End) + .handler({ + move |window, cx| { + window.dispatch_action( + Box::new(OpenBrowser { + url: "https://agentclientprotocol.com/".into(), + }), + cx, + ); + } + }), + ) + })) } + }) + .anchor(gpui::Corner::TopRight) + .offset(gpui::Point { + x: px(0.0), + y: px(2.0), }); v_flex() @@ -982,7 +1060,7 @@ impl AgentConfiguration { .child(self.render_section_title( "External Agents", "All agents connected through the Agent Client Protocol.", - add_agens_button.into_any_element(), + add_agent_popover.into_any_element(), )) .child( v_flex() @@ -993,26 +1071,29 @@ impl AgentConfiguration { AgentIcon::Name(IconName::AiClaude), "Claude Code", false, + cx, )) .child(Divider::horizontal().color(DividerColor::BorderFaded)) .child(self.render_agent_server( AgentIcon::Name(IconName::AiOpenAi), "Codex CLI", false, + cx, )) .child(Divider::horizontal().color(DividerColor::BorderFaded)) .child(self.render_agent_server( AgentIcon::Name(IconName::AiGemini), "Gemini CLI", false, + cx, )) .map(|mut parent| { - for agent in user_defined_agents { + for (name, icon) in user_defined_agents { parent = parent .child( Divider::horizontal().color(DividerColor::BorderFaded), ) - .child(agent); + .child(self.render_agent_server(icon, name, true, cx)); } parent }), @@ -1025,6 +1106,7 @@ impl AgentConfiguration { icon: AgentIcon, name: impl Into, external: bool, + cx: &mut Context, ) -> impl IntoElement { let name = name.into(); let icon = match icon { @@ -1039,28 +1121,53 @@ impl AgentConfiguration { let tooltip_id = 
SharedString::new(format!("agent-source-{}", name)); let tooltip_message = format!("The {} agent was installed from an extension.", name); + let agent_server_name = ExternalAgentServerName(name.clone()); + + let uninstall_btn_id = SharedString::from(format!("uninstall-{}", name)); + let uninstall_button = IconButton::new(uninstall_btn_id, IconName::Trash) + .icon_color(Color::Muted) + .icon_size(IconSize::Small) + .tooltip(Tooltip::text("Uninstall Agent Extension")) + .on_click(cx.listener(move |this, _, _window, cx| { + let agent_name = agent_server_name.clone(); + + if let Some(ext_id) = this.agent_server_store.update(cx, |store, _cx| { + store.get_extension_id_for_agent(&agent_name) + }) { + ExtensionStore::global(cx) + .update(cx, |store, cx| store.uninstall_extension(ext_id, cx)) + .detach_and_log_err(cx); + } + })); + h_flex() - .gap_1p5() - .child(icon) - .child(Label::new(name)) - .when(external, |this| { - this.child( - div() - .id(tooltip_id) - .flex_none() - .tooltip(Tooltip::text(tooltip_message)) - .child( - Icon::new(IconName::ZedSrcExtension) - .size(IconSize::Small) - .color(Color::Muted), - ), - ) - }) + .gap_1() + .justify_between() .child( - Icon::new(IconName::Check) - .color(Color::Success) - .size(IconSize::Small), + h_flex() + .gap_1p5() + .child(icon) + .child(Label::new(name)) + .when(external, |this| { + this.child( + div() + .id(tooltip_id) + .flex_none() + .tooltip(Tooltip::text(tooltip_message)) + .child( + Icon::new(IconName::ZedSrcExtension) + .size(IconSize::Small) + .color(Color::Muted), + ), + ) + }) + .child( + Icon::new(IconName::Check) + .color(Color::Success) + .size(IconSize::Small), + ), ) + .when(external, |this| this.child(uninstall_button)) } } diff --git a/crates/client/src/zed_urls.rs b/crates/client/src/zed_urls.rs index 7193c099473c95794796c2fc4d3eaaf2f06eb1ac..957d6c68f773db025b4ee604666f5b3d8101148b 100644 --- a/crates/client/src/zed_urls.rs +++ b/crates/client/src/zed_urls.rs @@ -51,3 +51,11 @@ pub fn external_agents_docs(cx: &App) -> String { server_url = server_url(cx) ) } + +/// Returns the URL to Zed agent servers documentation. +pub fn agent_server_docs(cx: &App) -> String { + format!( + "{server_url}/docs/extensions/agent-servers", + server_url = server_url(cx) + ) +} diff --git a/crates/project/src/agent_server_store.rs b/crates/project/src/agent_server_store.rs index ef8079bd014ecc7b26102aafc931029f9ab1cafa..d3c078ffa101c8c66d1c5ab75fb8b59d7748127a 100644 --- a/crates/project/src/agent_server_store.rs +++ b/crates/project/src/agent_server_store.rs @@ -759,6 +759,18 @@ impl AgentServerStore { } }) } + + pub fn get_extension_id_for_agent( + &mut self, + name: &ExternalAgentServerName, + ) -> Option> { + self.external_agents.get_mut(name).and_then(|agent| { + agent + .as_any_mut() + .downcast_ref::() + .map(|ext_agent| ext_agent.extension_id.clone()) + }) + } } fn get_or_npm_install_builtin_agent( From 993919d360da1af40af7578b0d099cf52a512f19 Mon Sep 17 00:00:00 2001 From: Danilo Leal <67129314+danilo-leal@users.noreply.github.com> Date: Tue, 11 Nov 2025 12:50:56 -0300 Subject: [PATCH 65/74] agent_ui: Add icon button to trigger the @-mention completions menu (#42449) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Closes https://github.com/zed-industries/zed/issues/37087 This PR adds an icon button to the footer of the message editor enabling to trigger and interact with the @-mention completions menu with the mouse. 
This is a first step towards making other types of context you can add in Zed's agent panel more discoverable. Next, I want to improve the discoverability of images and selections, given that you wouldn't necessarily know they work in Zed without a clear way to see them. But I think that for now, this is enough to close the issue above, which had lots of productive comments and discussion! Screenshot 2025-11-11 at 10  46 3@2x Release Notes: - agent: Added an icon button in the agent panel that allows to trigger the @-mention menu (for adding context) now also with the mouse. --- assets/icons/at_sign.svg | 4 ++ .../agent_ui/src/acp/completion_provider.rs | 8 ++- crates/agent_ui/src/acp/message_editor.rs | 49 +++++++++++++++++++ crates/agent_ui/src/acp/thread_view.rs | 25 ++++++++++ crates/icons/src/icons.rs | 1 + 5 files changed, 85 insertions(+), 2 deletions(-) create mode 100644 assets/icons/at_sign.svg diff --git a/assets/icons/at_sign.svg b/assets/icons/at_sign.svg new file mode 100644 index 0000000000000000000000000000000000000000..531c10c8dc151fb27f2a53d424ab57acecd7d03c --- /dev/null +++ b/assets/icons/at_sign.svg @@ -0,0 +1,4 @@ + + + + diff --git a/crates/agent_ui/src/acp/completion_provider.rs b/crates/agent_ui/src/acp/completion_provider.rs index 84d75ebe4133b3145b892eec659867b137bce2f0..408dbedcfdd4998ca8d2e094aab4799bad168629 100644 --- a/crates/agent_ui/src/acp/completion_provider.rs +++ b/crates/agent_ui/src/acp/completion_provider.rs @@ -694,14 +694,18 @@ fn build_symbol_label(symbol_name: &str, file_name: &str, line: u32, cx: &App) - } fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel { - let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId); + let path = cx + .theme() + .syntax() + .highlight_id("variable") + .map(HighlightId); let mut label = CodeLabelBuilder::default(); label.push_str(file_name, None); label.push_str(" ", None); if let Some(directory) = directory { - label.push_str(directory, comment_id); + label.push_str(directory, path); } label.build() diff --git a/crates/agent_ui/src/acp/message_editor.rs b/crates/agent_ui/src/acp/message_editor.rs index 4f919a6c0425e48575d09380339730d7ddb26172..b7037a6413d93fb4ee538af7062049df9f58e818 100644 --- a/crates/agent_ui/src/acp/message_editor.rs +++ b/crates/agent_ui/src/acp/message_editor.rs @@ -15,6 +15,7 @@ use editor::{ EditorEvent, EditorMode, EditorSnapshot, EditorStyle, ExcerptId, FoldPlaceholder, Inlay, MultiBuffer, ToOffset, actions::Paste, + code_context_menus::CodeContextMenu, display_map::{Crease, CreaseId, FoldId}, scroll::Autoscroll, }; @@ -272,6 +273,15 @@ impl MessageEditor { self.editor.read(cx).is_empty(cx) } + pub fn is_completions_menu_visible(&self, cx: &App) -> bool { + self.editor + .read(cx) + .context_menu() + .borrow() + .as_ref() + .is_some_and(|menu| matches!(menu, CodeContextMenu::Completions(_)) && menu.visible()) + } + pub fn mentions(&self) -> HashSet { self.mention_set .mentions @@ -836,6 +846,45 @@ impl MessageEditor { cx.emit(MessageEditorEvent::Send) } + pub fn trigger_completion_menu(&mut self, window: &mut Window, cx: &mut Context) { + let editor = self.editor.clone(); + + cx.spawn_in(window, async move |_, cx| { + editor + .update_in(cx, |editor, window, cx| { + let menu_is_open = + editor.context_menu().borrow().as_ref().is_some_and(|menu| { + matches!(menu, CodeContextMenu::Completions(_)) && menu.visible() + }); + + let has_at_sign = { + let snapshot = editor.display_snapshot(cx); + let cursor = 
editor.selections.newest::(&snapshot).head(); + let offset = cursor.to_offset(&snapshot); + if offset > 0 { + snapshot + .buffer_snapshot() + .reversed_chars_at(offset) + .next() + .map(|sign| sign == '@') + .unwrap_or(false) + } else { + false + } + }; + + if menu_is_open && has_at_sign { + return; + } + + editor.insert("@", window, cx); + editor.show_completions(&editor::actions::ShowCompletions, window, cx); + }) + .log_err(); + }) + .detach(); + } + fn chat(&mut self, _: &Chat, _: &mut Window, cx: &mut Context) { self.send(cx); } diff --git a/crates/agent_ui/src/acp/thread_view.rs b/crates/agent_ui/src/acp/thread_view.rs index 17daf5a18e97829d5e4d64d30d266b5d5d271e7b..4f3bbe718d3c6265f54f3cc4a949256b81c25572 100644 --- a/crates/agent_ui/src/acp/thread_view.rs +++ b/crates/agent_ui/src/acp/thread_view.rs @@ -4188,6 +4188,8 @@ impl AcpThreadView { .justify_between() .child( h_flex() + .gap_0p5() + .child(self.render_add_context_button(cx)) .child(self.render_follow_toggle(cx)) .children(self.render_burn_mode_toggle(cx)), ) @@ -4502,6 +4504,29 @@ impl AcpThreadView { })) } + fn render_add_context_button(&self, cx: &mut Context) -> impl IntoElement { + let message_editor = self.message_editor.clone(); + let menu_visible = message_editor.read(cx).is_completions_menu_visible(cx); + + IconButton::new("add-context", IconName::AtSign) + .icon_size(IconSize::Small) + .icon_color(Color::Muted) + .when(!menu_visible, |this| { + this.tooltip(move |_window, cx| { + Tooltip::with_meta("Add Context", None, "Or type @ to include context", cx) + }) + }) + .on_click(cx.listener(move |_this, _, window, cx| { + let message_editor_clone = message_editor.clone(); + + window.defer(cx, move |window, cx| { + message_editor_clone.update(cx, |message_editor, cx| { + message_editor.trigger_completion_menu(window, cx); + }); + }); + })) + } + fn render_markdown(&self, markdown: Entity, style: MarkdownStyle) -> MarkdownElement { let workspace = self.workspace.clone(); MarkdownElement::new(markdown, style).on_url_click(move |text, window, cx| { diff --git a/crates/icons/src/icons.rs b/crates/icons/src/icons.rs index fb45ca1eb5f8334190c11ad811a31128396ba23a..a0865773ac394722c113a43fe323de218b2f145a 100644 --- a/crates/icons/src/icons.rs +++ b/crates/icons/src/icons.rs @@ -35,6 +35,7 @@ pub enum IconName { ArrowUp, ArrowUpRight, Attach, + AtSign, AudioOff, AudioOn, Backspace, From 0268b170964f22bd91bac197bd1b803554fb547c Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 11 Nov 2025 11:07:57 -0500 Subject: [PATCH 66/74] Add more secrets to eval workflows (#42459) Release Notes: - N/A --- .github/workflows/run_agent_evals.yml | 3 +++ crates/eval/src/eval.rs | 2 +- tooling/xtask/src/tasks/workflows/run_agent_evals.rs | 3 +++ tooling/xtask/src/tasks/workflows/vars.rs | 3 +++ 4 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml index 1a875aa2c463d264002f14264993b9c99ae1f49c..034a69c276d7b11325dcc4493b25d3c61d3f6455 100644 --- a/.github/workflows/run_agent_evals.yml +++ b/.github/workflows/run_agent_evals.yml @@ -6,6 +6,9 @@ env: CARGO_INCREMENTAL: '0' RUST_BACKTRACE: '1' ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} + GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_EVAL_TELEMETRY: '1' MODEL_NAME: ${{ inputs.model_name }} diff --git 
a/crates/eval/src/eval.rs b/crates/eval/src/eval.rs index d04dad8e99961480ce5f08328fa97aeabf5eda10..3166a7321782069153deb74b90e6b5b71fc99e06 100644 --- a/crates/eval/src/eval.rs +++ b/crates/eval/src/eval.rs @@ -463,8 +463,8 @@ pub fn find_model( .ok_or_else(|| { anyhow::anyhow!( "No language model with ID {}/{} was available. Available models: {}", - selected.model.0, selected.provider.0, + selected.model.0, model_registry .available_models(cx) .map(|model| format!("{}/{}", model.provider_id().0, model.id().0)) diff --git a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs index 4601d5a5bf6a60435a87edab9cd6d62b77ef52a8..341ca1d4a517489bc9248a0d52e39f75e8da7074 100644 --- a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs +++ b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs @@ -19,6 +19,9 @@ pub(crate) fn run_agent_evals() -> Workflow { .add_env(("CARGO_INCREMENTAL", 0)) .add_env(("RUST_BACKTRACE", 1)) .add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY)) + .add_env(("OPENAI_API_KEY", vars::OPENAI_API_KEY)) + .add_env(("GOOGLE_API_KEY", vars::GOOGLE_API_KEY)) + .add_env(("GOOGLE_CLOUD_PROJECT", vars::GOOGLE_CLOUD_PROJECT)) .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) .add_env(("ZED_EVAL_TELEMETRY", 1)) .add_env(("MODEL_NAME", model_name.to_string())) diff --git a/tooling/xtask/src/tasks/workflows/vars.rs b/tooling/xtask/src/tasks/workflows/vars.rs index 51f6383b1eddafd2a723e31ea23247b90b3a955c..58f2ba7b63aabcff80fb2c1073da892da2e7b25f 100644 --- a/tooling/xtask/src/tasks/workflows/vars.rs +++ b/tooling/xtask/src/tasks/workflows/vars.rs @@ -17,6 +17,9 @@ macro_rules! var { } secret!(ANTHROPIC_API_KEY); +secret!(OPENAI_API_KEY); +secret!(GOOGLE_API_KEY); +secret!(GOOGLE_CLOUD_PROJECT); secret!(APPLE_NOTARIZATION_ISSUER_ID); secret!(APPLE_NOTARIZATION_KEY); secret!(APPLE_NOTARIZATION_KEY_ID); From 03acbb7de35afc27ae7eb32b43b1c4121875da8f Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 11 Nov 2025 11:13:59 -0500 Subject: [PATCH 67/74] collab: Remove unused embeddings queries and model (#42463) This PR removes the queries and database model for embeddings, as they're no longer used. 
Release Notes: - N/A --- crates/collab/src/db/queries.rs | 1 - crates/collab/src/db/queries/embeddings.rs | 94 ------------------- crates/collab/src/db/tables.rs | 1 - crates/collab/src/db/tables/embedding.rs | 18 ---- crates/collab/src/db/tests.rs | 3 - crates/collab/src/db/tests/embedding_tests.rs | 87 ----------------- crates/collab/src/main.rs | 4 +- 7 files changed, 1 insertion(+), 207 deletions(-) delete mode 100644 crates/collab/src/db/queries/embeddings.rs delete mode 100644 crates/collab/src/db/tables/embedding.rs delete mode 100644 crates/collab/src/db/tests/embedding_tests.rs diff --git a/crates/collab/src/db/queries.rs b/crates/collab/src/db/queries.rs index 7b457a5da438e0a9ab7c6cd79368b2845e962318..db91021c22b35b7b7159bd5cd54e28f8fa1a14e2 100644 --- a/crates/collab/src/db/queries.rs +++ b/crates/collab/src/db/queries.rs @@ -5,7 +5,6 @@ pub mod buffers; pub mod channels; pub mod contacts; pub mod contributors; -pub mod embeddings; pub mod extensions; pub mod notifications; pub mod projects; diff --git a/crates/collab/src/db/queries/embeddings.rs b/crates/collab/src/db/queries/embeddings.rs deleted file mode 100644 index 6ae8013284f4652d5cb0d4a19214c3a5c1a42df0..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/queries/embeddings.rs +++ /dev/null @@ -1,94 +0,0 @@ -use super::*; -use time::Duration; -use time::OffsetDateTime; - -impl Database { - pub async fn get_embeddings( - &self, - model: &str, - digests: &[Vec], - ) -> Result, Vec>> { - self.transaction(|tx| async move { - let embeddings = { - let mut db_embeddings = embedding::Entity::find() - .filter( - embedding::Column::Model.eq(model).and( - embedding::Column::Digest - .is_in(digests.iter().map(|digest| digest.as_slice())), - ), - ) - .stream(&*tx) - .await?; - - let mut embeddings = HashMap::default(); - while let Some(db_embedding) = db_embeddings.next().await { - let db_embedding = db_embedding?; - embeddings.insert(db_embedding.digest, db_embedding.dimensions); - } - embeddings - }; - - if !embeddings.is_empty() { - let now = OffsetDateTime::now_utc(); - let retrieved_at = PrimitiveDateTime::new(now.date(), now.time()); - - embedding::Entity::update_many() - .filter( - embedding::Column::Digest - .is_in(embeddings.keys().map(|digest| digest.as_slice())), - ) - .col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at)) - .exec(&*tx) - .await?; - } - - Ok(embeddings) - }) - .await - } - - pub async fn save_embeddings( - &self, - model: &str, - embeddings: &HashMap, Vec>, - ) -> Result<()> { - self.transaction(|tx| async move { - embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| { - let now_offset_datetime = OffsetDateTime::now_utc(); - let retrieved_at = - PrimitiveDateTime::new(now_offset_datetime.date(), now_offset_datetime.time()); - - embedding::ActiveModel { - model: ActiveValue::set(model.to_string()), - digest: ActiveValue::set(digest.clone()), - dimensions: ActiveValue::set(dimensions.clone()), - retrieved_at: ActiveValue::set(retrieved_at), - } - })) - .on_conflict( - OnConflict::columns([embedding::Column::Model, embedding::Column::Digest]) - .do_nothing() - .to_owned(), - ) - .exec_without_returning(&*tx) - .await?; - Ok(()) - }) - .await - } - - pub async fn purge_old_embeddings(&self) -> Result<()> { - self.transaction(|tx| async move { - embedding::Entity::delete_many() - .filter( - embedding::Column::RetrievedAt - .lte(OffsetDateTime::now_utc() - Duration::days(60)), - ) - .exec(&*tx) - .await?; - - Ok(()) - }) - .await - } -} diff --git 
a/crates/collab/src/db/tables.rs b/crates/collab/src/db/tables.rs index e619acaaf2bc237caac67dedcb5c738114d260d5..0220955824af30f489afe32f9695af3dbb52cdc9 100644 --- a/crates/collab/src/db/tables.rs +++ b/crates/collab/src/db/tables.rs @@ -8,7 +8,6 @@ pub mod channel_chat_participant; pub mod channel_member; pub mod contact; pub mod contributor; -pub mod embedding; pub mod extension; pub mod extension_version; pub mod follower; diff --git a/crates/collab/src/db/tables/embedding.rs b/crates/collab/src/db/tables/embedding.rs deleted file mode 100644 index 8743b4b9e65751bf350bff1db532de38ce73f368..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tables/embedding.rs +++ /dev/null @@ -1,18 +0,0 @@ -use sea_orm::entity::prelude::*; -use time::PrimitiveDateTime; - -#[derive(Clone, Debug, PartialEq, DeriveEntityModel)] -#[sea_orm(table_name = "embeddings")] -pub struct Model { - #[sea_orm(primary_key)] - pub model: String, - #[sea_orm(primary_key)] - pub digest: Vec, - pub dimensions: Vec, - pub retrieved_at: PrimitiveDateTime, -} - -#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)] -pub enum Relation {} - -impl ActiveModelBehavior for ActiveModel {} diff --git a/crates/collab/src/db/tests.rs b/crates/collab/src/db/tests.rs index 67c36576aac0f938bbc040202d7fa83e35af2d3b..7aed2ebc2dd16f31cde4116a70377b40b1cb8b2f 100644 --- a/crates/collab/src/db/tests.rs +++ b/crates/collab/src/db/tests.rs @@ -2,9 +2,6 @@ mod buffer_tests; mod channel_tests; mod contributor_tests; mod db_tests; -// we only run postgres tests on macos right now -#[cfg(target_os = "macos")] -mod embedding_tests; mod extension_tests; use crate::migrations::run_database_migrations; diff --git a/crates/collab/src/db/tests/embedding_tests.rs b/crates/collab/src/db/tests/embedding_tests.rs deleted file mode 100644 index 5d8d69c0304d3a16b55e9d7b1477fe62cc22024a..0000000000000000000000000000000000000000 --- a/crates/collab/src/db/tests/embedding_tests.rs +++ /dev/null @@ -1,87 +0,0 @@ -use super::TestDb; -use crate::db::embedding; -use collections::HashMap; -use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, sea_query::Expr}; -use std::ops::Sub; -use time::{Duration, OffsetDateTime, PrimitiveDateTime}; - -// SQLite does not support array arguments, so we only test this against a real postgres instance -#[gpui::test] -async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) { - let test_db = TestDb::postgres(cx.executor()); - let db = test_db.db(); - - let provider = "test_model"; - let digest1 = vec![1, 2, 3]; - let digest2 = vec![4, 5, 6]; - let embeddings = HashMap::from_iter([ - (digest1.clone(), vec![0.1, 0.2, 0.3]), - (digest2.clone(), vec![0.4, 0.5, 0.6]), - ]); - - // Save embeddings - db.save_embeddings(provider, &embeddings).await.unwrap(); - - // Retrieve embeddings - let retrieved_embeddings = db - .get_embeddings(provider, &[digest1.clone(), digest2.clone()]) - .await - .unwrap(); - assert_eq!(retrieved_embeddings.len(), 2); - assert!(retrieved_embeddings.contains_key(&digest1)); - assert!(retrieved_embeddings.contains_key(&digest2)); - - // Check if the retrieved embeddings are correct - assert_eq!(retrieved_embeddings[&digest1], vec![0.1, 0.2, 0.3]); - assert_eq!(retrieved_embeddings[&digest2], vec![0.4, 0.5, 0.6]); -} - -#[gpui::test] -async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) { - let test_db = TestDb::postgres(cx.executor()); - let db = test_db.db(); - - let model = "test_model"; - let digest = vec![7, 8, 9]; - let embeddings = 
HashMap::from_iter([(digest.clone(), vec![0.7, 0.8, 0.9])]); - - // Save old embeddings - db.save_embeddings(model, &embeddings).await.unwrap(); - - // Reach into the DB and change the retrieved at to be > 60 days - db.transaction(|tx| { - let digest = digest.clone(); - async move { - let sixty_days_ago = OffsetDateTime::now_utc().sub(Duration::days(61)); - let retrieved_at = PrimitiveDateTime::new(sixty_days_ago.date(), sixty_days_ago.time()); - - embedding::Entity::update_many() - .filter( - embedding::Column::Model - .eq(model) - .and(embedding::Column::Digest.eq(digest)), - ) - .col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at)) - .exec(&*tx) - .await - .unwrap(); - - Ok(()) - } - }) - .await - .unwrap(); - - // Purge old embeddings - db.purge_old_embeddings().await.unwrap(); - - // Try to retrieve the purged embeddings - let retrieved_embeddings = db - .get_embeddings(model, std::slice::from_ref(&digest)) - .await - .unwrap(); - assert!( - retrieved_embeddings.is_empty(), - "Old embeddings should have been purged" - ); -} diff --git a/crates/collab/src/main.rs b/crates/collab/src/main.rs index 6b94459910647c1e48ee69f2b0dd38afd3723821..08047c56e55c016f3fd2b34d0935fb33a61b5dad 100644 --- a/crates/collab/src/main.rs +++ b/crates/collab/src/main.rs @@ -13,7 +13,7 @@ use collab::llm::db::LlmDatabase; use collab::migrations::run_database_migrations; use collab::{ AppState, Config, Result, api::fetch_extensions_from_blob_store_periodically, db, env, - executor::Executor, rpc::ResultExt, + executor::Executor, }; use db::Database; use std::{ @@ -95,8 +95,6 @@ async fn main() -> Result<()> { let state = AppState::new(config, Executor::Production).await?; if mode.is_collab() { - state.db.purge_old_embeddings().await.trace_err(); - let epoch = state .db .create_server(&state.config.zed_environment) From 83351283e4f3690d12ca2d9fc005f381bf028704 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 11 Nov 2025 17:15:12 +0100 Subject: [PATCH 68/74] settings: Skip terminal env vars with substitutions in vscode import (#42464) Closes https://github.com/zed-industries/zed/issues/40547 Release Notes: - Fixed vscode import creating faulty terminal env vars in terminal settings --- crates/auto_update/src/auto_update.rs | 4 +--- crates/settings/src/vscode_import.rs | 8 +++++++- crates/terminal/src/terminal.rs | 10 +++++++++- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/crates/auto_update/src/auto_update.rs b/crates/auto_update/src/auto_update.rs index facb55e0df951633b082f23477e35ce2b55f6f84..accda1b1ce4b09db0bc4cc0fb5824290725cb8ee 100644 --- a/crates/auto_update/src/auto_update.rs +++ b/crates/auto_update/src/auto_update.rs @@ -350,8 +350,7 @@ impl AutoUpdater { pub fn start_polling(&self, cx: &mut Context) -> Task> { cx.spawn(async move |this, cx| { - #[cfg(target_os = "windows")] - { + if cfg!(target_os = "windows") { use util::ResultExt; cleanup_windows() @@ -903,7 +902,6 @@ async fn install_release_macos( Ok(None) } -#[cfg(target_os = "windows")] async fn cleanup_windows() -> Result<()> { let parent = std::env::current_exe()? 
.parent() diff --git a/crates/settings/src/vscode_import.rs b/crates/settings/src/vscode_import.rs index 36bd84e1a145a9a64eadbaec9411f904b9a881c9..cbffb33b1795dbf71e48df8089c472ee534306c1 100644 --- a/crates/settings/src/vscode_import.rs +++ b/crates/settings/src/vscode_import.rs @@ -753,7 +753,13 @@ impl VsCodeSettings { let env = self .read_value(&format!("terminal.integrated.env.{platform}")) .and_then(|v| v.as_object()) - .map(|v| v.iter().map(|(k, v)| (k.clone(), v.to_string())).collect()); + .map(|v| { + v.iter() + .map(|(k, v)| (k.clone(), v.to_string())) + // zed does not support substitutions, so this can break env vars + .filter(|(_, v)| !v.contains('$')) + .collect() + }); ProjectTerminalSettingsContent { // TODO: handle arguments diff --git a/crates/terminal/src/terminal.rs b/crates/terminal/src/terminal.rs index c635cb9b417f422ad0ddd91611233cb04a771679..59c71474d47b9c79d33b28bd7cbbc1a187b4ebfd 100644 --- a/crates/terminal/src/terminal.rs +++ b/crates/terminal/src/terminal.rs @@ -1386,7 +1386,15 @@ impl Terminal { /// (This is a no-op for display-only terminals.) fn write_to_pty(&self, input: impl Into>) { if let TerminalType::Pty { pty_tx, .. } = &self.terminal_type { - pty_tx.notify(input.into()); + let input = input.into(); + if log::log_enabled!(log::Level::Debug) { + if let Ok(str) = str::from_utf8(&input) { + log::debug!("Writing to PTY: {:?}", str); + } else { + log::debug!("Writing to PTY: {:?}", input); + } + } + pty_tx.notify(input); } } From 9e1e732db86076a21465baa9e0ab75a417ed64a0 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 11 Nov 2025 11:37:20 -0500 Subject: [PATCH 69/74] Use longer timeout on evals (#42465) The GPT-5 ones in particular can take a long time! Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner --- .github/workflows/run_agent_evals.yml | 2 +- .github/workflows/run_unit_evals.yml | 4 ++-- crates/gpui/src/executor.rs | 6 +++++- script/run-unit-evals | 2 +- tooling/xtask/src/tasks/workflows/run_agent_evals.rs | 3 ++- tooling/xtask/src/tasks/workflows/vars.rs | 2 +- 6 files changed, 12 insertions(+), 7 deletions(-) diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml index 034a69c276d7b11325dcc4493b25d3c61d3f6455..0ee8f3c5150589bc4565cd93326cf437d384c428 100644 --- a/.github/workflows/run_agent_evals.yml +++ b/.github/workflows/run_agent_evals.yml @@ -7,7 +7,7 @@ env: RUST_BACKTRACE: '1' ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} - GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} + GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} ZED_EVAL_TELEMETRY: '1' diff --git a/.github/workflows/run_unit_evals.yml b/.github/workflows/run_unit_evals.yml index a41b4fb6d7058a97dcd5a98894a0d2c4687ceed4..e3f03c2f9ecd7a48423939f315ce41e13b934d7d 100644 --- a/.github/workflows/run_unit_evals.yml +++ b/.github/workflows/run_unit_evals.yml @@ -1,6 +1,6 @@ -# Generated from xtask::workflows::run_agent_evals +# Generated from xtask::workflows::run_unit_evals # Rebuild with `cargo xtask workflows`. 
-name: run_agent_evals +name: run_unit_evals env: CARGO_TERM_COLOR: always CARGO_INCREMENTAL: '0' diff --git a/crates/gpui/src/executor.rs b/crates/gpui/src/executor.rs index c05cf5745d6e19172191e298fa4f31e76513a00b..86cd7451fb3559ffd7da4001bdf6f6bd121e8b39 100644 --- a/crates/gpui/src/executor.rs +++ b/crates/gpui/src/executor.rs @@ -281,7 +281,11 @@ impl BackgroundExecutor { }); let mut cx = std::task::Context::from_waker(&waker); - let duration = Duration::from_secs(180); + let duration = Duration::from_secs( + option_env!("GPUI_TEST_TIMEOUT") + .and_then(|s| s.parse::().ok()) + .unwrap_or(180), + ); let mut test_should_end_by = Instant::now() + duration; loop { diff --git a/script/run-unit-evals b/script/run-unit-evals index 02481e1ce9dde7d2cbde9603f663093bf7a2ee38..7a72d0b6a64b9ae9f3dcf340c16d7426d88d6a0b 100755 --- a/script/run-unit-evals +++ b/script/run-unit-evals @@ -2,4 +2,4 @@ set -euxo pipefail -cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)' +GPUI_TEST_TIMEOUT=1500 cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)' diff --git a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs index 341ca1d4a517489bc9248a0d52e39f75e8da7074..846001201f62fd65bf9d05af53ace59646ea197c 100644 --- a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs +++ b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs @@ -20,7 +20,7 @@ pub(crate) fn run_agent_evals() -> Workflow { .add_env(("RUST_BACKTRACE", 1)) .add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY)) .add_env(("OPENAI_API_KEY", vars::OPENAI_API_KEY)) - .add_env(("GOOGLE_API_KEY", vars::GOOGLE_API_KEY)) + .add_env(("GOOGLE_AI_API_KEY", vars::GOOGLE_AI_API_KEY)) .add_env(("GOOGLE_CLOUD_PROJECT", vars::GOOGLE_CLOUD_PROJECT)) .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) .add_env(("ZED_EVAL_TELEMETRY", 1)) @@ -53,6 +53,7 @@ pub(crate) fn run_unit_evals() -> Workflow { let unit_evals = unit_evals(); named::workflow() + .name("run_unit_evals") .on(Event::default() .schedule([ // GitHub might drop jobs at busy times, so we choose a random time in the middle of the night. diff --git a/tooling/xtask/src/tasks/workflows/vars.rs b/tooling/xtask/src/tasks/workflows/vars.rs index 58f2ba7b63aabcff80fb2c1073da892da2e7b25f..6493bd0ab62699454af6c100d526143c54f8774f 100644 --- a/tooling/xtask/src/tasks/workflows/vars.rs +++ b/tooling/xtask/src/tasks/workflows/vars.rs @@ -18,7 +18,7 @@ macro_rules! var { secret!(ANTHROPIC_API_KEY); secret!(OPENAI_API_KEY); -secret!(GOOGLE_API_KEY); +secret!(GOOGLE_AI_API_KEY); secret!(GOOGLE_CLOUD_PROJECT); secret!(APPLE_NOTARIZATION_ISSUER_ID); secret!(APPLE_NOTARIZATION_KEY); From 7e491ac5001c374616b9aa87593ca1aeccac09e9 Mon Sep 17 00:00:00 2001 From: Marshall Bowers Date: Tue, 11 Nov 2025 11:44:04 -0500 Subject: [PATCH 70/74] collab: Drop `embeddings` table (#42466) This PR drops the `embeddings` table, as it is no longer used. 
Release Notes: - N/A --- crates/collab/migrations/20251111161644_drop_embeddings.sql | 1 + 1 file changed, 1 insertion(+) create mode 100644 crates/collab/migrations/20251111161644_drop_embeddings.sql diff --git a/crates/collab/migrations/20251111161644_drop_embeddings.sql b/crates/collab/migrations/20251111161644_drop_embeddings.sql new file mode 100644 index 0000000000000000000000000000000000000000..80f42c7d2c88b258ef8cc63757694a7e229643c7 --- /dev/null +++ b/crates/collab/migrations/20251111161644_drop_embeddings.sql @@ -0,0 +1 @@ +drop table embeddings; From c50f8216138e6069ddb51af07b612e1ec041c522 Mon Sep 17 00:00:00 2001 From: brequet <125278669+brequet@users.noreply.github.com> Date: Tue, 11 Nov 2025 17:58:18 +0100 Subject: [PATCH 71/74] docs: Fix typo in `configuring-zed.md` (#42454) Fix a minor typo in the setting key: `auto_install_extension` should be `auto_install_extensions`. Release Notes: - N/A --- docs/src/configuring-zed.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 145620c3962984407db73bf7ac4c0a3bbfa75324..14f11df256167928931280cb906cd996615b101b 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -213,7 +213,7 @@ Note: This setting has no effect in Vim mode, as rewrap is already allowed every ## Auto Install extensions - Description: Define extensions to be autoinstalled or never be installed. -- Setting: `auto_install_extension` +- Setting: `auto_install_extensions` - Default: `{ "html": true }` **Options** From 5f4d0dbaab48190ae17a173c614a2b9025bf4ba4 Mon Sep 17 00:00:00 2001 From: feeiyu <158308373+feeiyu@users.noreply.github.com> Date: Wed, 12 Nov 2025 01:20:38 +0800 Subject: [PATCH 72/74] Fix circular reference issue around PopoverMenu (#42461) Follow up to https://github.com/zed-industries/zed/pull/42351 Release Notes: - N/A --- .../src/edit_prediction_button.rs | 24 +++++++++++-------- crates/language_tools/src/lsp_button.rs | 9 +++++-- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/crates/edit_prediction_button/src/edit_prediction_button.rs b/crates/edit_prediction_button/src/edit_prediction_button.rs index 6e9000bc62eea94d5c48dca2416781f46428522c..a0bbe18b4bcaaf433b15db333b3e5a7eb8dfb4d0 100644 --- a/crates/edit_prediction_button/src/edit_prediction_button.rs +++ b/crates/edit_prediction_button/src/edit_prediction_button.rs @@ -128,20 +128,21 @@ impl Render for EditPredictionButton { }), ); } - let this = cx.entity(); + let this = cx.weak_entity(); div().child( PopoverMenu::new("copilot") .menu(move |window, cx| { let current_status = Copilot::global(cx)?.read(cx).status(); - Some(match current_status { + match current_status { Status::Authorized => this.update(cx, |this, cx| { this.build_copilot_context_menu(window, cx) }), _ => this.update(cx, |this, cx| { this.build_copilot_start_menu(window, cx) }), - }) + } + .ok() }) .anchor(Corner::BottomRight) .trigger_with_tooltip( @@ -182,7 +183,7 @@ impl Render for EditPredictionButton { let icon = status.to_icon(); let tooltip_text = status.to_tooltip(); let has_menu = status.has_menu(); - let this = cx.entity(); + let this = cx.weak_entity(); let fs = self.fs.clone(); div().child( @@ -209,9 +210,11 @@ impl Render for EditPredictionButton { ) })) } - SupermavenButtonStatus::Ready => Some(this.update(cx, |this, cx| { - this.build_supermaven_context_menu(window, cx) - })), + SupermavenButtonStatus::Ready => this + .update(cx, |this, cx| { + this.build_supermaven_context_menu(window, cx) + }) 
+ .ok(), _ => None, }) .anchor(Corner::BottomRight) @@ -233,15 +236,16 @@ impl Render for EditPredictionButton { let enabled = self.editor_enabled.unwrap_or(true); let has_api_key = CodestralCompletionProvider::has_api_key(cx); let fs = self.fs.clone(); - let this = cx.entity(); + let this = cx.weak_entity(); div().child( PopoverMenu::new("codestral") .menu(move |window, cx| { if has_api_key { - Some(this.update(cx, |this, cx| { + this.update(cx, |this, cx| { this.build_codestral_context_menu(window, cx) - })) + }) + .ok() } else { Some(ContextMenu::build(window, cx, |menu, _, _| { let fs = fs.clone(); diff --git a/crates/language_tools/src/lsp_button.rs b/crates/language_tools/src/lsp_button.rs index 7dc2e93a5c707eaa3829caba6d6d2a04773883b1..ee49114b787e764989453fae1d12f61253eea099 100644 --- a/crates/language_tools/src/lsp_button.rs +++ b/crates/language_tools/src/lsp_button.rs @@ -1053,11 +1053,16 @@ impl Render for LspButton { (None, "All Servers Operational") }; - let lsp_button = cx.entity(); + let lsp_button = cx.weak_entity(); div().child( PopoverMenu::new("lsp-tool") - .menu(move |_, cx| lsp_button.read(cx).lsp_menu.clone()) + .menu(move |_, cx| { + lsp_button + .read_with(cx, |lsp_button, _| lsp_button.lsp_menu.clone()) + .ok() + .flatten() + }) .anchor(Corner::BottomLeft) .with_handle(self.popover_menu_handle.clone()) .trigger_with_tooltip( From 908ef035025335ebc94e04e807ee0a0ce5311ee6 Mon Sep 17 00:00:00 2001 From: Richard Feldman Date: Tue, 11 Nov 2025 13:45:48 -0500 Subject: [PATCH 73/74] Split out cron and non-cron unit evals (#42472) Release Notes: - N/A --------- Co-authored-by: Bennet Bo Fenner --- .github/workflows/run_agent_evals.yml | 5 + .github/workflows/run_cron_unit_evals.yml | 78 ++++++++++++++++ .github/workflows/run_unit_evals.yml | 21 ++++- script/run-unit-evals | 4 + tooling/xtask/src/tasks/workflows.rs | 4 + .../src/tasks/workflows/run_agent_evals.rs | 91 ++++++++++++++----- 6 files changed, 178 insertions(+), 25 deletions(-) create mode 100644 .github/workflows/run_cron_unit_evals.yml diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml index 0ee8f3c5150589bc4565cd93326cf437d384c428..421d5a1c8003eaa42977339b4ab8e5e0df7ee014 100644 --- a/.github/workflows/run_agent_evals.yml +++ b/.github/workflows/run_agent_evals.yml @@ -51,6 +51,11 @@ jobs: - name: run_agent_evals::agent_evals::run_eval run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}" shell: bash -euxo pipefail {0} + env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} + GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} - name: steps::cleanup_cargo_config if: always() run: | diff --git a/.github/workflows/run_cron_unit_evals.yml b/.github/workflows/run_cron_unit_evals.yml new file mode 100644 index 0000000000000000000000000000000000000000..9137d1599c920d5f3c72ba7c884bc76d9aed6f54 --- /dev/null +++ b/.github/workflows/run_cron_unit_evals.yml @@ -0,0 +1,78 @@ +# Generated from xtask::workflows::run_cron_unit_evals +# Rebuild with `cargo xtask workflows`. 
+name: run_cron_unit_evals +env: + CARGO_TERM_COLOR: always + CARGO_INCREMENTAL: '0' + RUST_BACKTRACE: '1' + ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} +on: + schedule: + - cron: 47 1 * * 2 + workflow_dispatch: {} +jobs: + cron_unit_evals: + runs-on: namespace-profile-16x32-ubuntu-2204 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + shell: bash -euxo pipefail {0} + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + - name: steps::setup_linux + run: ./script/linux + shell: bash -euxo pipefail {0} + - name: steps::install_mold + run: ./script/install-mold + shell: bash -euxo pipefail {0} + - name: steps::download_wasi_sdk + run: ./script/download-wasi-sdk + shell: bash -euxo pipefail {0} + - name: steps::cargo_install_nextest + run: cargo install cargo-nextest --locked + shell: bash -euxo pipefail {0} + - name: steps::clear_target_dir_if_large + run: ./script/clear-target-dir-if-larger-than 250 + shell: bash -euxo pipefail {0} + - name: ./script/run-unit-evals + run: ./script/run-unit-evals + shell: bash -euxo pipefail {0} + env: + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} + GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} + - name: run_agent_evals::unit_evals::send_failure_to_slack + if: ${{ failure() }} + uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52 + with: + method: chat.postMessage + token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }} + payload: | + channel: C04UDRNNJFQ + text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" + - name: steps::cleanup_cargo_config + if: always() + run: | + rm -rf ./../.cargo + shell: bash -euxo pipefail {0} + - name: run_agent_evals::cron_unit_evals::send_failure_to_slack + if: ${{ failure() }} + uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52 + with: + method: chat.postMessage + token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }} + payload: | + channel: C04UDRNNJFQ + text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true diff --git a/.github/workflows/run_unit_evals.yml b/.github/workflows/run_unit_evals.yml index e3f03c2f9ecd7a48423939f315ce41e13b934d7d..9f2af35dca5429488e169fd1fe6d9ac098a5059a 100644 --- a/.github/workflows/run_unit_evals.yml +++ b/.github/workflows/run_unit_evals.yml @@ -6,12 +6,21 @@ env: CARGO_INCREMENTAL: '0' RUST_BACKTRACE: '1' ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }} + ZED_EVAL_TELEMETRY: '1' + MODEL_NAME: ${{ inputs.model_name }} on: - schedule: - - cron: 47 1 * * 2 - workflow_dispatch: {} + workflow_dispatch: + inputs: + model_name: + description: model_name + required: true + type: string + commit_sha: + description: commit_sha + required: true + type: string jobs: - unit_evals: + run_unit_evals: runs-on: namespace-profile-16x32-ubuntu-2204 steps: - name: steps::checkout_repo @@ -47,6 +56,10 @@ jobs: shell: bash -euxo pipefail {0} env: ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + 
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }} + GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }} + UNIT_EVAL_COMMIT: ${{ inputs.commit_sha }} - name: run_agent_evals::unit_evals::send_failure_to_slack if: ${{ failure() }} uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52 diff --git a/script/run-unit-evals b/script/run-unit-evals index 7a72d0b6a64b9ae9f3dcf340c16d7426d88d6a0b..c5178add7a1e4c76151b3907771abe81ba46aaaf 100755 --- a/script/run-unit-evals +++ b/script/run-unit-evals @@ -2,4 +2,8 @@ set -euxo pipefail +if [ -n "${UNIT_EVAL_COMMIT:-}" ]; then + git checkout "$UNIT_EVAL_COMMIT" +fi + GPUI_TEST_TIMEOUT=1500 cargo nextest run --workspace --no-fail-fast --features unit-eval --no-capture -E 'test(::eval_)' diff --git a/tooling/xtask/src/tasks/workflows.rs b/tooling/xtask/src/tasks/workflows.rs index bf6a332075c52cd08dcc44d73fc37239bd60a740..374a22f3ea9c65dcfc9743f77448a5c29117cedf 100644 --- a/tooling/xtask/src/tasks/workflows.rs +++ b/tooling/xtask/src/tasks/workflows.rs @@ -33,6 +33,10 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> { ("cherry_pick.yml", cherry_pick::cherry_pick()), ("compare_perf.yml", compare_perf::compare_perf()), ("run_unit_evals.yml", run_agent_evals::run_unit_evals()), + ( + "run_cron_unit_evals.yml", + run_agent_evals::run_cron_unit_evals(), + ), ("run_agent_evals.yml", run_agent_evals::run_agent_evals()), ("after_release.yml", after_release::after_release()), ]; diff --git a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs index 846001201f62fd65bf9d05af53ace59646ea197c..b69216e5a00a61762625e92b2592fd4cbe0cef30 100644 --- a/tooling/xtask/src/tasks/workflows/run_agent_evals.rs +++ b/tooling/xtask/src/tasks/workflows/run_agent_evals.rs @@ -28,6 +28,36 @@ pub(crate) fn run_agent_evals() -> Workflow { .add_job(agent_evals.name, agent_evals.job) } +pub(crate) fn run_unit_evals() -> Workflow { + let model_name = Input::string("model_name", None); + let commit_sha = Input::string("commit_sha", None); + + let unit_evals = named::job(unit_evals(Some(&commit_sha))); + + named::workflow() + .name("run_unit_evals") + .on(Event::default().workflow_dispatch( + WorkflowDispatch::default() + .add_input(model_name.name, model_name.input()) + .add_input(commit_sha.name, commit_sha.input()), + )) + .concurrency(vars::one_workflow_per_non_main_branch()) + .add_env(("CARGO_TERM_COLOR", "always")) + .add_env(("CARGO_INCREMENTAL", 0)) + .add_env(("RUST_BACKTRACE", 1)) + .add_env(("ZED_CLIENT_CHECKSUM_SEED", vars::ZED_CLIENT_CHECKSUM_SEED)) + .add_env(("ZED_EVAL_TELEMETRY", 1)) + .add_env(("MODEL_NAME", model_name.to_string())) + .add_job(unit_evals.name, unit_evals.job) +} + +fn add_api_keys(step: Step) -> Step { + step.add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY)) + .add_env(("OPENAI_API_KEY", vars::OPENAI_API_KEY)) + .add_env(("GOOGLE_AI_API_KEY", vars::GOOGLE_AI_API_KEY)) + .add_env(("GOOGLE_CLOUD_PROJECT", vars::GOOGLE_CLOUD_PROJECT)) +} + fn agent_evals() -> NamedJob { fn run_eval() -> Step { named::bash( @@ -44,16 +74,16 @@ fn agent_evals() -> NamedJob { .map(steps::install_linux_dependencies) .add_step(setup_cargo_config(Platform::Linux)) .add_step(steps::script("cargo build --package=eval")) - .add_step(run_eval()) + .add_step(add_api_keys(run_eval())) .add_step(steps::cleanup_cargo_config(Platform::Linux)), ) } -pub(crate) fn run_unit_evals() -> Workflow { - let unit_evals = unit_evals(); +pub(crate) fn 
run_cron_unit_evals() -> Workflow { + let unit_evals = cron_unit_evals(); named::workflow() - .name("run_unit_evals") + .name("run_cron_unit_evals") .on(Event::default() .schedule([ // GitHub might drop jobs at busy times, so we choose a random time in the middle of the night. @@ -68,7 +98,7 @@ pub(crate) fn run_unit_evals() -> Workflow { .add_job(unit_evals.name, unit_evals.job) } -fn unit_evals() -> NamedJob { +fn cron_unit_evals() -> NamedJob { fn send_failure_to_slack() -> Step { named::uses( "slackapi", @@ -84,20 +114,39 @@ fn unit_evals() -> NamedJob { "#})) } - named::job( - Job::default() - .runs_on(runners::LINUX_DEFAULT) - .add_step(steps::checkout_repo()) - .add_step(steps::setup_cargo_config(Platform::Linux)) - .add_step(steps::cache_rust_dependencies_namespace()) - .map(steps::install_linux_dependencies) - .add_step(steps::cargo_install_nextest(Platform::Linux)) - .add_step(steps::clear_target_dir_if_large(Platform::Linux)) - .add_step( - steps::script("./script/run-unit-evals") - .add_env(("ANTHROPIC_API_KEY", vars::ANTHROPIC_API_KEY)), - ) - .add_step(send_failure_to_slack()) - .add_step(steps::cleanup_cargo_config(Platform::Linux)), - ) + named::job(unit_evals(None).add_step(send_failure_to_slack())) +} + +fn unit_evals(commit: Option<&Input>) -> Job { + fn send_failure_to_slack() -> Step { + named::uses( + "slackapi", + "slack-github-action", + "b0fa283ad8fea605de13dc3f449259339835fc52", + ) + .if_condition(Expression::new("${{ failure() }}")) + .add_with(("method", "chat.postMessage")) + .add_with(("token", vars::SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN)) + .add_with(("payload", indoc::indoc!{r#" + channel: C04UDRNNJFQ + text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}" + "#})) + } + + let script_step = add_api_keys(steps::script("./script/run-unit-evals")); + + Job::default() + .runs_on(runners::LINUX_DEFAULT) + .add_step(steps::checkout_repo()) + .add_step(steps::setup_cargo_config(Platform::Linux)) + .add_step(steps::cache_rust_dependencies_namespace()) + .map(steps::install_linux_dependencies) + .add_step(steps::cargo_install_nextest(Platform::Linux)) + .add_step(steps::clear_target_dir_if_large(Platform::Linux)) + .add_step(match commit { + Some(commit) => script_step.add_env(("UNIT_EVAL_COMMIT", commit)), + None => script_step, + }) + .add_step(send_failure_to_slack()) + .add_step(steps::cleanup_cargo_config(Platform::Linux)) } From e01e0b83c4ceeba28fef70385362ebf98aab92ed Mon Sep 17 00:00:00 2001 From: Lay Sheth Date: Wed, 12 Nov 2025 00:21:57 +0530 Subject: [PATCH 74/74] Avoid panics in LSP store path handling (#42117) Release Notes: - Fixed incorrect journal paths handling --- crates/journal/src/journal.rs | 73 +++++++++++++++++++++++++++++++-- crates/project/src/lsp_store.rs | 5 +-- docs/src/configuring-zed.md | 3 +- 3 files changed, 74 insertions(+), 7 deletions(-) diff --git a/crates/journal/src/journal.rs b/crates/journal/src/journal.rs index 9e73e0da550e806b4a642942766414a4b28249ae..2e30b91dab833d18f5fc9c35ad7ea4934d197fa8 100644 --- a/crates/journal/src/journal.rs +++ b/crates/journal/src/journal.rs @@ -173,9 +173,15 @@ pub fn new_journal_entry(workspace: &Workspace, window: &mut Window, cx: &mut Ap } fn journal_dir(path: &str) -> Option { - shellexpand::full(path) //TODO handle this better - .ok() - .map(|dir| Path::new(&dir.to_string()).to_path_buf().join("journal")) + let expanded = shellexpand::full(path).ok()?; + let base_path = Path::new(expanded.as_ref()); + let absolute_path = if base_path.is_absolute() { + 
base_path.to_path_buf() + } else { + log::warn!("Invalid journal path {path:?} (not absolute), falling back to home directory",); + std::env::home_dir()? + }; + Some(absolute_path.join("journal")) } fn heading_entry(now: NaiveTime, hour_format: &HourFormat) -> String { @@ -224,4 +230,65 @@ mod tests { assert_eq!(actual_heading_entry, expected_heading_entry); } } + + mod journal_dir_tests { + use super::super::*; + + #[test] + #[cfg(target_family = "unix")] + fn test_absolute_unix_path() { + let result = journal_dir("/home/user"); + assert!(result.is_some()); + let path = result.unwrap(); + assert!(path.is_absolute()); + assert_eq!(path, PathBuf::from("/home/user/journal")); + } + + #[test] + fn test_tilde_expansion() { + let result = journal_dir("~/documents"); + assert!(result.is_some()); + let path = result.unwrap(); + + assert!(path.is_absolute(), "Tilde should expand to absolute path"); + + if let Some(home) = std::env::home_dir() { + assert_eq!(path, home.join("documents").join("journal")); + } + } + + #[test] + fn test_relative_path_falls_back_to_home() { + for relative_path in ["relative/path", "NONEXT/some/path", "../some/path"] { + let result = journal_dir(relative_path); + assert!(result.is_some(), "Failed for path: {}", relative_path); + let path = result.unwrap(); + + assert!( + path.is_absolute(), + "Path should be absolute for input '{}', got: {:?}", + relative_path, + path + ); + + if let Some(home) = std::env::home_dir() { + assert_eq!( + path, + home.join("journal"), + "Should fall back to home directory for input '{}'", + relative_path + ); + } + } + } + + #[test] + #[cfg(target_os = "windows")] + fn test_absolute_path_windows_style() { + let result = journal_dir("C:\\Users\\user\\Documents"); + assert!(result.is_some()); + let path = result.unwrap(); + assert_eq!(path, PathBuf::from("C:\\Users\\user\\Documents\\journal")); + } + } } diff --git a/crates/project/src/lsp_store.rs b/crates/project/src/lsp_store.rs index 90675e364b4d962b5c67cafb941b2b6cb9e1df9b..540a1a8eb0ac205d5f777e1728bbe7322bbe6187 100644 --- a/crates/project/src/lsp_store.rs +++ b/crates/project/src/lsp_store.rs @@ -7654,14 +7654,13 @@ impl LspStore { let uri = lsp::Uri::from_file_path(&abs_path) .ok() .with_context(|| format!("Failed to convert path to URI: {}", abs_path.display())) - .unwrap(); + .log_err()?; let next_snapshot = buffer.text_snapshot(); for language_server in language_servers { let language_server = language_server.clone(); let buffer_snapshots = self - .as_local_mut() - .unwrap() + .as_local_mut()? .buffer_snapshots .get_mut(&buffer.remote_id()) .and_then(|m| m.get_mut(&language_server.server_id()))?; diff --git a/docs/src/configuring-zed.md b/docs/src/configuring-zed.md index 14f11df256167928931280cb906cd996615b101b..07d93fd6d167bafeb0a8e4bc72f80f52265edee1 100644 --- a/docs/src/configuring-zed.md +++ b/docs/src/configuring-zed.md @@ -2519,11 +2519,12 @@ Unspecified values have a `false` value, hints won't be toggled if all the modif "path": "~", "hour_format": "hour12" } + ``` ### Path -- Description: The path of the directory where journal entries are stored. +- Description: The path of the directory where journal entries are stored. If an invalid path is specified, the journal will fall back to using `~` (the home directory). - Setting: `path` - Default: `~`
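
The `journal_dir` change above reduces to a single rule: honor the configured path only when it expands to an absolute path, otherwise warn and fall back to the home directory before appending `journal`. Below is a minimal standalone sketch of that decision flow, not taken from any patch in this series: it assumes the `shellexpand` crate is available as a dependency, substitutes `eprintln!` for the `log::warn!` call used in the patch, and uses `std::env::home_dir()` just as the patched code does.

```rust
use std::path::{Path, PathBuf};

// Simplified stand-in for the patched `journal_dir`: expand the configured
// path, keep it only if it is absolute, otherwise warn and fall back to the
// home directory, then append the `journal` subdirectory.
fn resolve_journal_dir(configured: &str) -> Option<PathBuf> {
    let expanded = shellexpand::full(configured).ok()?;
    let base = Path::new(expanded.as_ref());
    let absolute = if base.is_absolute() {
        base.to_path_buf()
    } else {
        eprintln!("invalid journal path {configured:?} (not absolute), using home directory");
        std::env::home_dir()?
    };
    Some(absolute.join("journal"))
}

fn main() {
    // "~/documents" and "/var/notes" stay where they point; "relative/path"
    // triggers the fallback and resolves under the home directory.
    for input in ["~/documents", "/var/notes", "relative/path"] {
        println!("{input:>16} -> {:?}", resolve_journal_dir(input));
    }
}
```

Falling back instead of returning `None` keeps journal entries usable even when the `journal.path` setting is misconfigured, which is the same trade-off the patch makes and the accompanying `configuring-zed.md` change documents.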