Detailed changes
@@ -0,0 +1,35 @@
+name: Bug Report (Git)
+description: Zed Git-related bugs
+type: "Bug"
+labels: ["git"]
+title: "Git: <a short description of the Git bug>"
+body:
+ - type: textarea
+ attributes:
+ label: Summary
+ description: Describe the bug with a one-line summary, and provide detailed reproduction steps
+ value: |
+ <!-- Please insert a one-line summary of the issue below -->
+ SUMMARY_SENTENCE_HERE
+
+ ### Description
+ <!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
+ Steps to trigger the problem:
+ 1.
+ 2.
+ 3.
+
+ **Expected Behavior**:
+ **Actual Behavior**:
+
+ validations:
+ required: true
+ - type: textarea
+ id: environment
+ attributes:
+ label: Zed Version and System Specs
+ description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
+ placeholder: |
+ Output of "zed: copy system specs into clipboard"
+ validations:
+ required: true
@@ -4132,6 +4132,7 @@ dependencies = [
"bincode 1.3.3",
"cfg-if",
"crash-handler",
+ "extension_host",
"log",
"mach2 0.5.0",
"minidumper",
@@ -4531,7 +4532,6 @@ dependencies = [
"paths",
"serde",
"serde_json",
- "shlex",
"smol",
"task",
"util",
@@ -4757,7 +4757,6 @@ dependencies = [
"serde_json",
"serde_json_lenient",
"settings",
- "shlex",
"sysinfo 0.37.2",
"task",
"tasks_ui",
@@ -7070,7 +7069,6 @@ dependencies = [
"notifications",
"panel",
"picker",
- "postage",
"pretty_assertions",
"project",
"schemars 1.0.4",
@@ -12926,7 +12924,6 @@ dependencies = [
"settings",
"sha2",
"shellexpand 2.1.2",
- "shlex",
"smallvec",
"smol",
"snippet",
@@ -13839,7 +13836,6 @@ dependencies = [
"serde",
"serde_json",
"settings",
- "shlex",
"smol",
"tempfile",
"thiserror 2.0.17",
@@ -14226,6 +14222,7 @@ dependencies = [
"log",
"rand 0.9.2",
"rayon",
+ "regex",
"smallvec",
"sum_tree",
"unicode-segmentation",
@@ -15333,6 +15330,7 @@ dependencies = [
"picker",
"pretty_assertions",
"project",
+ "release_channel",
"schemars 1.0.4",
"search",
"serde",
@@ -17032,7 +17030,6 @@ dependencies = [
"parking_lot",
"postage",
"rand 0.9.2",
- "regex",
"rope",
"smallvec",
"sum_tree",
@@ -366,7 +366,7 @@
}
},
{
- "context": "PromptLibrary",
+ "context": "RulesLibrary",
"bindings": {
"new": "rules_library::NewRule",
"ctrl-n": "rules_library::NewRule",
@@ -423,7 +423,7 @@
}
},
{
- "context": "PromptLibrary",
+ "context": "RulesLibrary",
"use_key_equivalents": true,
"bindings": {
"cmd-n": "rules_library::NewRule",
@@ -375,7 +375,7 @@
}
},
{
- "context": "PromptLibrary",
+ "context": "RulesLibrary",
"use_key_equivalents": true,
"bindings": {
"ctrl-n": "rules_library::NewRule",
@@ -1772,6 +1772,9 @@
"allow_rewrap": "anywhere"
},
"Python": {
+ "code_actions_on_format": {
+ "source.organizeImports.ruff": true
+ },
"formatter": {
"language_server": {
"name": "ruff"
@@ -1483,11 +1483,11 @@ impl EditAgentTest {
fs.insert_tree("/root", json!({})).await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let agent_model = SelectedModel::from_str(
- &std::env::var("ZED_AGENT_MODEL").unwrap_or("anthropic/claude-4-sonnet-latest".into()),
+ &std::env::var("ZED_AGENT_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()),
)
.unwrap();
let judge_model = SelectedModel::from_str(
- &std::env::var("ZED_JUDGE_MODEL").unwrap_or("anthropic/claude-4-sonnet-latest".into()),
+ &std::env::var("ZED_JUDGE_MODEL").unwrap_or("anthropic/claude-sonnet-4-latest".into()),
)
.unwrap();
@@ -1547,7 +1547,7 @@ impl EditAgentTest {
model.provider_id() == selected_model.provider
&& model.id() == selected_model.model
})
- .expect("Model not found");
+ .unwrap_or_else(|| panic!("Model {} not found", selected_model.model.0));
model
})
}
@@ -581,13 +581,11 @@ impl Item for AgentDiffPane {
_workspace_id: Option<workspace::WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| {
- Self::new(self.thread.clone(), self.workspace.clone(), window, cx)
- })))
+ Some(cx.new(|cx| Self::new(self.thread.clone(), self.workspace.clone(), window, cx)))
}
fn is_dirty(&self, cx: &App) -> bool {
@@ -6,8 +6,11 @@ use std::sync::Arc;
use acp_thread::AcpThread;
use agent::{ContextServerRegistry, DbThreadMetadata, HistoryEntry, HistoryStore};
use db::kvp::{Dismissable, KEY_VALUE_STORE};
-use project::agent_server_store::{
- AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME,
+use project::{
+ ExternalAgentServerName,
+ agent_server_store::{
+ AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME,
+ },
};
use serde::{Deserialize, Serialize};
use settings::{
@@ -41,6 +44,8 @@ use assistant_text_thread::{TextThread, TextThreadEvent, TextThreadSummary};
use client::{UserStore, zed_urls};
use cloud_llm_client::{Plan, PlanV1, PlanV2, UsageLimit};
use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
+use extension::ExtensionEvents;
+use extension_host::ExtensionStore;
use fs::Fs;
use gpui::{
Action, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, Entity, EventEmitter,
@@ -422,6 +427,7 @@ pub struct AgentPanel {
agent_panel_menu_handle: PopoverMenuHandle<ContextMenu>,
agent_navigation_menu_handle: PopoverMenuHandle<ContextMenu>,
agent_navigation_menu: Option<Entity<ContextMenu>>,
+ _extension_subscription: Option<Subscription>,
width: Option<Pixels>,
height: Option<Pixels>,
zoomed: bool,
@@ -632,7 +638,24 @@ impl AgentPanel {
)
});
- Self {
+ // Subscribe to extension events to sync agent servers when extensions change
+ let extension_subscription = if let Some(extension_events) = ExtensionEvents::try_global(cx)
+ {
+ Some(
+ cx.subscribe(&extension_events, |this, _source, event, cx| match event {
+ extension::Event::ExtensionInstalled(_)
+ | extension::Event::ExtensionUninstalled(_)
+ | extension::Event::ExtensionsInstalledChanged => {
+ this.sync_agent_servers_from_extensions(cx);
+ }
+ _ => {}
+ }),
+ )
+ } else {
+ None
+ };
+
+ let mut panel = Self {
active_view,
workspace,
user_store,
@@ -650,6 +673,7 @@ impl AgentPanel {
agent_panel_menu_handle: PopoverMenuHandle::default(),
agent_navigation_menu_handle: PopoverMenuHandle::default(),
agent_navigation_menu: None,
+ _extension_subscription: extension_subscription,
width: None,
height: None,
zoomed: false,
@@ -659,7 +683,11 @@ impl AgentPanel {
history_store,
selected_agent: AgentType::default(),
loading: false,
- }
+ };
+
+ // Initial sync of agent servers from extensions
+ panel.sync_agent_servers_from_extensions(cx);
+ panel
}
pub fn toggle_focus(
@@ -1309,6 +1337,31 @@ impl AgentPanel {
self.selected_agent.clone()
}
+ fn sync_agent_servers_from_extensions(&mut self, cx: &mut Context<Self>) {
+ if let Some(extension_store) = ExtensionStore::try_global(cx) {
+ let (manifests, extensions_dir) = {
+ let store = extension_store.read(cx);
+ let installed = store.installed_extensions();
+ let manifests: Vec<_> = installed
+ .iter()
+ .map(|(id, entry)| (id.clone(), entry.manifest.clone()))
+ .collect();
+ let extensions_dir = paths::extensions_dir().join("installed");
+ (manifests, extensions_dir)
+ };
+
+ self.project.update(cx, |project, cx| {
+ project.agent_server_store().update(cx, |store, cx| {
+ let manifest_refs: Vec<_> = manifests
+ .iter()
+ .map(|(id, manifest)| (id.as_ref(), manifest.as_ref()))
+ .collect();
+ store.sync_extension_agents(manifest_refs, extensions_dir, cx);
+ });
+ });
+ }
+ }
+
pub fn new_agent_thread(
&mut self,
agent: AgentType,
@@ -1744,6 +1797,16 @@ impl AgentPanel {
let agent_server_store = self.project.read(cx).agent_server_store().clone();
let focus_handle = self.focus_handle(cx);
+ // Get custom icon path for selected agent before building menu (to avoid borrow issues)
+ let selected_agent_custom_icon =
+ if let AgentType::Custom { name, .. } = &self.selected_agent {
+ agent_server_store
+ .read(cx)
+ .agent_icon(&ExternalAgentServerName(name.clone()))
+ } else {
+ None
+ };
+
let active_thread = match &self.active_view {
ActiveView::ExternalAgentThread { thread_view } => {
thread_view.read(cx).as_native_thread(cx)
@@ -1757,12 +1820,7 @@ impl AgentPanel {
{
let focus_handle = focus_handle.clone();
move |_window, cx| {
- Tooltip::for_action_in(
-                                "New…",
- &ToggleNewThreadMenu,
- &focus_handle,
- cx,
- )
+                            Tooltip::for_action_in("New…", &ToggleNewThreadMenu, &focus_handle, cx)
}
},
)
@@ -1781,8 +1839,7 @@ impl AgentPanel {
let active_thread = active_thread.clone();
Some(ContextMenu::build(window, cx, |menu, _window, cx| {
- menu
- .context(focus_handle.clone())
+ menu.context(focus_handle.clone())
.header("Zed Agent")
.when_some(active_thread, |this, active_thread| {
let thread = active_thread.read(cx);
@@ -1939,77 +1996,110 @@ impl AgentPanel {
}),
)
.map(|mut menu| {
- let agent_names = agent_server_store
- .read(cx)
+ let agent_server_store_read = agent_server_store.read(cx);
+ let agent_names = agent_server_store_read
.external_agents()
.filter(|name| {
- name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
+ name.0 != GEMINI_NAME
+ && name.0 != CLAUDE_CODE_NAME
+ && name.0 != CODEX_NAME
})
.cloned()
.collect::<Vec<_>>();
- let custom_settings = cx.global::<SettingsStore>().get::<AllAgentServersSettings>(None).custom.clone();
+ let custom_settings = cx
+ .global::<SettingsStore>()
+ .get::<AllAgentServersSettings>(None)
+ .custom
+ .clone();
for agent_name in agent_names {
- menu = menu.item(
- ContextMenuEntry::new(format!("New {} Thread", agent_name))
- .icon(IconName::Terminal)
- .icon_color(Color::Muted)
- .disabled(is_via_collab)
- .handler({
- let workspace = workspace.clone();
- let agent_name = agent_name.clone();
- let custom_settings = custom_settings.clone();
- move |window, cx| {
- if let Some(workspace) = workspace.upgrade() {
- workspace.update(cx, |workspace, cx| {
- if let Some(panel) =
- workspace.panel::<AgentPanel>(cx)
- {
- panel.update(cx, |panel, cx| {
- panel.new_agent_thread(
- AgentType::Custom {
- name: agent_name.clone().into(),
- command: custom_settings
- .get(&agent_name.0)
- .map(|settings| {
- settings.command.clone()
- })
- .unwrap_or(placeholder_command()),
- },
- window,
- cx,
- );
- });
- }
- });
- }
+ let icon_path = agent_server_store_read.agent_icon(&agent_name);
+ let mut entry =
+ ContextMenuEntry::new(format!("New {} Thread", agent_name));
+ if let Some(icon_path) = icon_path {
+ entry = entry.custom_icon_path(icon_path);
+ } else {
+ entry = entry.icon(IconName::Terminal);
+ }
+ entry = entry
+ .icon_color(Color::Muted)
+ .disabled(is_via_collab)
+ .handler({
+ let workspace = workspace.clone();
+ let agent_name = agent_name.clone();
+ let custom_settings = custom_settings.clone();
+ move |window, cx| {
+ if let Some(workspace) = workspace.upgrade() {
+ workspace.update(cx, |workspace, cx| {
+ if let Some(panel) =
+ workspace.panel::<AgentPanel>(cx)
+ {
+ panel.update(cx, |panel, cx| {
+ panel.new_agent_thread(
+ AgentType::Custom {
+ name: agent_name
+ .clone()
+ .into(),
+ command: custom_settings
+ .get(&agent_name.0)
+ .map(|settings| {
+ settings
+ .command
+ .clone()
+ })
+ .unwrap_or(
+ placeholder_command(
+ ),
+ ),
+ },
+ window,
+ cx,
+ );
+ });
+ }
+ });
}
- }),
- );
+ }
+ });
+ menu = menu.item(entry);
}
menu
})
- .separator().link(
- "Add Other Agents",
- OpenBrowser {
- url: zed_urls::external_agents_docs(cx),
- }
- .boxed_clone(),
- )
+ .separator()
+ .link(
+ "Add Other Agents",
+ OpenBrowser {
+ url: zed_urls::external_agents_docs(cx),
+ }
+ .boxed_clone(),
+ )
}))
}
});
let selected_agent_label = self.selected_agent.label();
+
+ let has_custom_icon = selected_agent_custom_icon.is_some();
let selected_agent = div()
.id("selected_agent_icon")
- .when_some(self.selected_agent.icon(), |this, icon| {
+ .when_some(selected_agent_custom_icon, |this, icon_path| {
+ let label = selected_agent_label.clone();
this.px(DynamicSpacing::Base02.rems(cx))
- .child(Icon::new(icon).color(Color::Muted))
+ .child(Icon::from_path(icon_path).color(Color::Muted))
.tooltip(move |_window, cx| {
- Tooltip::with_meta(selected_agent_label.clone(), None, "Selected Agent", cx)
+ Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
})
})
+ .when(!has_custom_icon, |this| {
+ this.when_some(self.selected_agent.icon(), |this, icon| {
+ let label = selected_agent_label.clone();
+ this.px(DynamicSpacing::Base02.rems(cx))
+ .child(Icon::new(icon).color(Color::Muted))
+ .tooltip(move |_window, cx| {
+ Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
+ })
+ })
+ })
.into_any_element();
h_flex()
@@ -20,7 +20,7 @@ use futures::{
};
use gpui::{AsyncApp, BackgroundExecutor, Task};
use smol::fs;
-use util::{ResultExt as _, debug_panic, maybe, paths::PathExt};
+use util::{ResultExt as _, debug_panic, maybe, paths::PathExt, shell::ShellKind};
/// Path to the program used for askpass
///
@@ -199,9 +199,15 @@ impl PasswordProxy {
let current_exec =
std::env::current_exe().context("Failed to determine current zed executable path.")?;
+    // TODO: the shell kind is inferred from the use of powershell.exe in askpass_helper_script
+ let shell_kind = if cfg!(windows) {
+ ShellKind::PowerShell
+ } else {
+ ShellKind::Posix
+ };
let askpass_program = ASKPASS_PROGRAM
.get_or_init(|| current_exec)
- .try_shell_safe()
+ .try_shell_safe(shell_kind)
.context("Failed to shell-escape Askpass program path.")?
.to_string();
// Create an askpass script that communicates back to this process.
@@ -343,7 +349,7 @@ fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Pa
format!(
r#"
$ErrorActionPreference = 'Stop';
- ($args -join [char]0) | & "{askpass_program}" --askpass={askpass_socket} 2> $null
+ ($args -join [char]0) | & {askpass_program} --askpass={askpass_socket} 2> $null
"#,
askpass_socket = askpass_socket.display(),
)
@@ -1162,34 +1162,22 @@ impl BufferDiff {
self.hunks_intersecting_range(start..end, buffer, cx)
}
- pub fn set_base_text_buffer(
- &mut self,
- base_buffer: Entity<language::Buffer>,
- buffer: text::BufferSnapshot,
- cx: &mut Context<Self>,
- ) -> oneshot::Receiver<()> {
- let base_buffer = base_buffer.read(cx);
- let language_registry = base_buffer.language_registry();
- let base_buffer = base_buffer.snapshot();
- self.set_base_text(base_buffer, language_registry, buffer, cx)
- }
-
/// Used in cases where the change set isn't derived from git.
pub fn set_base_text(
&mut self,
- base_buffer: language::BufferSnapshot,
+ base_text: Option<Arc<String>>,
+ language: Option<Arc<Language>>,
language_registry: Option<Arc<LanguageRegistry>>,
buffer: text::BufferSnapshot,
cx: &mut Context<Self>,
) -> oneshot::Receiver<()> {
let (tx, rx) = oneshot::channel();
let this = cx.weak_entity();
- let base_text = Arc::new(base_buffer.text());
let snapshot = BufferDiffSnapshot::new_with_base_text(
buffer.clone(),
- Some(base_text),
- base_buffer.language().cloned(),
+ base_text,
+ language,
language_registry,
cx,
);
@@ -467,6 +467,7 @@ CREATE TABLE extension_versions (
provides_grammars BOOLEAN NOT NULL DEFAULT FALSE,
provides_language_servers BOOLEAN NOT NULL DEFAULT FALSE,
provides_context_servers BOOLEAN NOT NULL DEFAULT FALSE,
+ provides_agent_servers BOOLEAN NOT NULL DEFAULT FALSE,
provides_slash_commands BOOLEAN NOT NULL DEFAULT FALSE,
provides_indexed_docs_providers BOOLEAN NOT NULL DEFAULT FALSE,
provides_snippets BOOLEAN NOT NULL DEFAULT FALSE,
@@ -0,0 +1,2 @@
+alter table extension_versions
+add column provides_agent_servers bool not null default false
@@ -310,6 +310,9 @@ impl Database {
.provides
.contains(&ExtensionProvides::ContextServers),
),
+ provides_agent_servers: ActiveValue::Set(
+ version.provides.contains(&ExtensionProvides::AgentServers),
+ ),
provides_slash_commands: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::SlashCommands),
),
@@ -422,6 +425,10 @@ fn apply_provides_filter(
condition = condition.add(extension_version::Column::ProvidesContextServers.eq(true));
}
+ if provides_filter.contains(&ExtensionProvides::AgentServers) {
+ condition = condition.add(extension_version::Column::ProvidesAgentServers.eq(true));
+ }
+
if provides_filter.contains(&ExtensionProvides::SlashCommands) {
condition = condition.add(extension_version::Column::ProvidesSlashCommands.eq(true));
}
@@ -24,6 +24,7 @@ pub struct Model {
pub provides_grammars: bool,
pub provides_language_servers: bool,
pub provides_context_servers: bool,
+ pub provides_agent_servers: bool,
pub provides_slash_commands: bool,
pub provides_indexed_docs_providers: bool,
pub provides_snippets: bool,
@@ -57,6 +58,10 @@ impl Model {
provides.insert(ExtensionProvides::ContextServers);
}
+ if self.provides_agent_servers {
+ provides.insert(ExtensionProvides::AgentServers);
+ }
+
if self.provides_slash_commands {
provides.insert(ExtensionProvides::SlashCommands);
}
@@ -16,6 +16,72 @@ test_both_dbs!(
test_extensions_sqlite
);
+test_both_dbs!(
+ test_agent_servers_filter,
+ test_agent_servers_filter_postgres,
+ test_agent_servers_filter_sqlite
+);
+
+async fn test_agent_servers_filter(db: &Arc<Database>) {
+ // No extensions initially
+ let versions = db.get_known_extension_versions().await.unwrap();
+ assert!(versions.is_empty());
+
+ // Shared timestamp
+ let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
+ let t0 = time::PrimitiveDateTime::new(t0.date(), t0.time());
+
+ // Insert two extensions, only one provides AgentServers
+ db.insert_extension_versions(
+ &[
+ (
+ "ext_agent_servers",
+ vec![NewExtensionVersion {
+ name: "Agent Servers Provider".into(),
+ version: semver::Version::parse("1.0.0").unwrap(),
+ description: "has agent servers".into(),
+ authors: vec!["author".into()],
+ repository: "org/agent-servers".into(),
+ schema_version: 1,
+ wasm_api_version: None,
+ provides: BTreeSet::from_iter([ExtensionProvides::AgentServers]),
+ published_at: t0,
+ }],
+ ),
+ (
+ "ext_plain",
+ vec![NewExtensionVersion {
+ name: "Plain Extension".into(),
+ version: semver::Version::parse("0.1.0").unwrap(),
+ description: "no agent servers".into(),
+ authors: vec!["author2".into()],
+ repository: "org/plain".into(),
+ schema_version: 1,
+ wasm_api_version: None,
+ provides: BTreeSet::default(),
+ published_at: t0,
+ }],
+ ),
+ ]
+ .into_iter()
+ .collect(),
+ )
+ .await
+ .unwrap();
+
+ // Filter by AgentServers provides
+ let provides_filter = BTreeSet::from_iter([ExtensionProvides::AgentServers]);
+
+ let filtered = db
+ .get_extensions(None, Some(&provides_filter), 1, 10)
+ .await
+ .unwrap();
+
+ // Expect only the extension that declared AgentServers
+ assert_eq!(filtered.len(), 1);
+ assert_eq!(filtered[0].id.as_ref(), "ext_agent_servers");
+}
+
async fn test_extensions(db: &Arc<Database>) {
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());
@@ -347,6 +347,7 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::GetColorPresentation>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitGetBranches>)
+ .add_request_handler(forward_read_only_project_request::<proto::GetDefaultBranch>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedDiff>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUncommittedDiff>)
.add_request_handler(forward_read_only_project_request::<proto::LspExtExpandMacro>)
@@ -461,6 +462,8 @@ impl Server {
.add_message_handler(broadcast_project_message_from_host::<proto::BreakpointsForFile>)
.add_request_handler(forward_mutating_project_request::<proto::OpenCommitMessageBuffer>)
.add_request_handler(forward_mutating_project_request::<proto::GitDiff>)
+ .add_request_handler(forward_mutating_project_request::<proto::GetTreeDiff>)
+ .add_request_handler(forward_mutating_project_request::<proto::GetBlobContent>)
.add_request_handler(forward_mutating_project_request::<proto::GitCreateBranch>)
.add_request_handler(forward_mutating_project_request::<proto::GitChangeBranch>)
.add_request_handler(forward_mutating_project_request::<proto::CheckForPushedCommits>)
@@ -776,30 +776,26 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
.unwrap();
// Clients A and B follow each other in split panes
- workspace_a
- .update_in(cx_a, |workspace, window, cx| {
- workspace.split_and_clone(
- workspace.active_pane().clone(),
- SplitDirection::Right,
- window,
- cx,
- )
- })
- .await;
+ workspace_a.update_in(cx_a, |workspace, window, cx| {
+ workspace.split_and_clone(
+ workspace.active_pane().clone(),
+ SplitDirection::Right,
+ window,
+ cx,
+ );
+ });
workspace_a.update_in(cx_a, |workspace, window, cx| {
workspace.follow(client_b.peer_id().unwrap(), window, cx)
});
executor.run_until_parked();
- workspace_b
- .update_in(cx_b, |workspace, window, cx| {
- workspace.split_and_clone(
- workspace.active_pane().clone(),
- SplitDirection::Right,
- window,
- cx,
- )
- })
- .await;
+ workspace_b.update_in(cx_b, |workspace, window, cx| {
+ workspace.split_and_clone(
+ workspace.active_pane().clone(),
+ SplitDirection::Right,
+ window,
+ cx,
+ );
+ });
workspace_b.update_in(cx_b, |workspace, window, cx| {
workspace.follow(client_a.peer_id().unwrap(), window, cx)
});
@@ -1373,11 +1369,9 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
);
// When client B activates a different pane, it continues following client A in the original pane.
- workspace_b
- .update_in(cx_b, |workspace, window, cx| {
- workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, window, cx)
- })
- .await;
+ workspace_b.update_in(cx_b, |workspace, window, cx| {
+ workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, window, cx)
+ });
assert_eq!(
workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
Some(leader_id.into())
@@ -6748,7 +6748,7 @@ async fn test_preview_tabs(cx: &mut TestAppContext) {
pane.update(cx, |pane, cx| {
pane.split(workspace::SplitDirection::Right, cx);
});
- cx.run_until_parked();
+
let right_pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
pane.update(cx, |pane, cx| {
@@ -498,8 +498,8 @@ impl Item for ChannelView {
_: Option<WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>> {
- Task::ready(Some(cx.new(|cx| {
+ ) -> Option<Entity<Self>> {
+ Some(cx.new(|cx| {
Self::new(
self.project.clone(),
self.workspace.clone(),
@@ -508,7 +508,7 @@ impl Item for ChannelView {
window,
cx,
)
- })))
+ }))
}
fn navigate(
@@ -9,6 +9,7 @@ license = "GPL-3.0-or-later"
bincode.workspace = true
cfg-if.workspace = true
crash-handler.workspace = true
+extension_host.workspace = true
log.workspace = true
minidumper.workspace = true
paths.workspace = true
@@ -286,6 +286,11 @@ impl minidumper::ServerHandler for CrashServer {
}
pub fn panic_hook(info: &PanicHookInfo) {
+    // Don't handle panics on threads that are not relevant to the main execution.
+ if extension_host::wasm_host::IS_WASM_THREAD.with(|v| v.load(Ordering::Acquire)) {
+ return;
+ }
+
let message = info
.payload()
.downcast_ref::<&str>()
@@ -35,7 +35,6 @@ log.workspace = true
paths.workspace = true
serde.workspace = true
serde_json.workspace = true
-shlex.workspace = true
smol.workspace = true
task.workspace = true
util.workspace = true
@@ -6,7 +6,7 @@ use gpui::AsyncApp;
use serde_json::Value;
use std::{path::PathBuf, sync::OnceLock};
use task::DebugRequest;
-use util::{ResultExt, maybe};
+use util::{ResultExt, maybe, shell::ShellKind};
use crate::*;
@@ -67,7 +67,7 @@ impl JsDebugAdapter {
.get("type")
.filter(|value| value == &"node-terminal")?;
let command = configuration.get("command")?.as_str()?.to_owned();
- let mut args = shlex::split(&command)?.into_iter();
+ let mut args = ShellKind::Posix.split(&command)?.into_iter();
let program = args.next()?;
configuration.insert("runtimeExecutable".to_owned(), program.into());
configuration.insert(
@@ -60,7 +60,6 @@ serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
-shlex.workspace = true
sysinfo.workspace = true
task.workspace = true
tasks_ui.workspace = true
@@ -9,7 +9,7 @@ use task::ZedDebugConfig;
use util::debug_panic;
use std::sync::Arc;
-use sysinfo::System;
+use sysinfo::{ProcessRefreshKind, RefreshKind, System, UpdateKind};
use ui::{Context, Tooltip, prelude::*};
use ui::{ListItem, ListItemSpacing};
use workspace::{ModalView, Workspace};
@@ -362,7 +362,12 @@ fn get_processes_for_project(project: &Entity<Project>, cx: &mut App) -> Task<Ar
Arc::from(processes.into_boxed_slice())
})
} else {
- let mut processes: Box<[_]> = System::new_all()
+ let refresh_kind = RefreshKind::nothing().with_processes(
+ ProcessRefreshKind::nothing()
+ .without_tasks()
+ .with_cmd(UpdateKind::Always),
+ );
+ let mut processes: Box<[_]> = System::new_with_specifics(refresh_kind)
.processes()
.values()
.map(|process| {
@@ -32,7 +32,7 @@ use ui::{
SharedString, Styled, StyledExt, ToggleButton, ToggleState, Toggleable, Tooltip, Window, div,
h_flex, relative, rems, v_flex,
};
-use util::{ResultExt, rel_path::RelPath};
+use util::{ResultExt, rel_path::RelPath, shell::ShellKind};
use workspace::{ModalView, Workspace, notifications::DetachAndPromptErr, pane};
use crate::{attach_modal::AttachModal, debugger_panel::DebugPanel};
@@ -839,7 +839,11 @@ impl ConfigureMode {
};
}
let command = self.program.read(cx).text(cx);
- let mut args = shlex::split(&command).into_iter().flatten().peekable();
+ let mut args = ShellKind::Posix
+ .split(&command)
+ .into_iter()
+ .flatten()
+ .peekable();
let mut env = FxHashMap::default();
while args.peek().is_some_and(|arg| arg.contains('=')) {
let arg = args.next().unwrap();
@@ -1265,7 +1269,11 @@ impl PickerDelegate for DebugDelegate {
})
.unwrap_or_default();
- let mut args = shlex::split(&text).into_iter().flatten().peekable();
+ let mut args = ShellKind::Posix
+ .split(&text)
+ .into_iter()
+ .flatten()
+ .peekable();
let mut env = HashMap::default();
while args.peek().is_some_and(|arg| arg.contains('=')) {
let arg = args.next().unwrap();
@@ -693,11 +693,11 @@ impl Item for BufferDiagnosticsEditor {
_workspace_id: Option<workspace::WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| {
+ Some(cx.new(|cx| {
BufferDiagnosticsEditor::new(
self.project_path.clone(),
self.project.clone(),
@@ -706,7 +706,7 @@ impl Item for BufferDiagnosticsEditor {
window,
cx,
)
- })))
+ }))
}
fn deactivated(&mut self, window: &mut Window, cx: &mut Context<Self>) {
@@ -732,11 +732,11 @@ impl Item for ProjectDiagnosticsEditor {
_workspace_id: Option<workspace::WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| {
+ Some(cx.new(|cx| {
ProjectDiagnosticsEditor::new(
self.include_warnings,
self.project.clone(),
@@ -744,7 +744,7 @@ impl Item for ProjectDiagnosticsEditor {
window,
cx,
)
- })))
+ }))
}
fn is_dirty(&self, cx: &App) -> bool {
@@ -32,7 +32,6 @@ mod lsp_ext;
mod mouse_context_menu;
pub mod movement;
mod persistence;
-mod proposed_changes_editor;
mod rust_analyzer_ext;
pub mod scroll;
mod selections_collection;
@@ -68,14 +67,12 @@ pub use multi_buffer::{
Anchor, AnchorRangeExt, ExcerptId, ExcerptRange, MultiBuffer, MultiBufferSnapshot, PathKey,
RowInfo, ToOffset, ToPoint,
};
-pub use proposed_changes_editor::{
- ProposedChangeLocation, ProposedChangesEditor, ProposedChangesEditorToolbar,
-};
pub use text::Bias;
use ::git::{
Restore,
blame::{BlameEntry, ParsedCommitMessage},
+ status::FileStatus,
};
use aho_corasick::AhoCorasick;
use anyhow::{Context as _, Result, anyhow};
@@ -847,6 +844,10 @@ pub trait Addon: 'static {
None
}
+ fn override_status_for_buffer_id(&self, _: BufferId, _: &App) -> Option<FileStatus> {
+ None
+ }
+
fn to_any(&self) -> &dyn std::any::Any;
fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> {
@@ -10671,6 +10672,20 @@ impl Editor {
}
}
+ pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
+ if let Some(status) = self
+ .addons
+ .iter()
+ .find_map(|(_, addon)| addon.override_status_for_buffer_id(buffer_id, cx))
+ {
+ return Some(status);
+ }
+ self.project
+ .as_ref()?
+ .read(cx)
+ .status_for_buffer_id(buffer_id, cx)
+ }
+
pub fn open_active_item_in_terminal(
&mut self,
_: &OpenInTerminal,
@@ -11581,7 +11596,7 @@ impl Editor {
end
} else {
text.push('\n');
- Point::new(rows.end.0, 0)
+ Point::new(rows.start.0, 0)
}
} else {
text.push('\n');
@@ -11597,11 +11612,57 @@ impl Editor {
}
}
- self.transact(window, cx, |this, _, cx| {
+ self.transact(window, cx, |this, window, cx| {
this.buffer.update(cx, |buffer, cx| {
buffer.edit(edits, None, cx);
});
+ // When duplicating upward with whole lines, move the cursor to the duplicated line
+ if upwards && whole_lines {
+ let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx));
+
+ this.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ let mut new_ranges = Vec::new();
+ let selections = s.all::<Point>(&display_map);
+ let mut selections_iter = selections.iter().peekable();
+
+ while let Some(first_selection) = selections_iter.next() {
+ // Group contiguous selections together to find the total row span
+ let mut group_selections = vec![first_selection];
+ let mut rows = first_selection.spanned_rows(false, &display_map);
+
+ while let Some(next_selection) = selections_iter.peek() {
+ let next_rows = next_selection.spanned_rows(false, &display_map);
+ if next_rows.start < rows.end {
+ rows.end = next_rows.end;
+ group_selections.push(selections_iter.next().unwrap());
+ } else {
+ break;
+ }
+ }
+
+ let row_count = rows.end.0 - rows.start.0;
+
+ // Move all selections in this group up by the total number of duplicated rows
+ for selection in group_selections {
+ let new_start = Point::new(
+ selection.start.row.saturating_sub(row_count),
+ selection.start.column,
+ );
+
+ let new_end = Point::new(
+ selection.end.row.saturating_sub(row_count),
+ selection.end.column,
+ );
+
+ new_ranges.push(new_start..new_end);
+ }
+ }
+
+ s.select_ranges(new_ranges);
+ });
+ }
+
this.request_autoscroll(Autoscroll::fit(), cx);
});
}
@@ -20995,65 +21056,6 @@ impl Editor {
self.searchable
}
- fn open_proposed_changes_editor(
- &mut self,
- _: &OpenProposedChangesEditor,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let Some(workspace) = self.workspace() else {
- cx.propagate();
- return;
- };
-
- let selections = self.selections.all::<usize>(&self.display_snapshot(cx));
- let multi_buffer = self.buffer.read(cx);
- let multi_buffer_snapshot = multi_buffer.snapshot(cx);
- let mut new_selections_by_buffer = HashMap::default();
- for selection in selections {
- for (buffer, range, _) in
- multi_buffer_snapshot.range_to_buffer_ranges(selection.start..selection.end)
- {
- let mut range = range.to_point(buffer);
- range.start.column = 0;
- range.end.column = buffer.line_len(range.end.row);
- new_selections_by_buffer
- .entry(multi_buffer.buffer(buffer.remote_id()).unwrap())
- .or_insert(Vec::new())
- .push(range)
- }
- }
-
- let proposed_changes_buffers = new_selections_by_buffer
- .into_iter()
- .map(|(buffer, ranges)| ProposedChangeLocation { buffer, ranges })
- .collect::<Vec<_>>();
- let proposed_changes_editor = cx.new(|cx| {
- ProposedChangesEditor::new(
- "Proposed changes",
- proposed_changes_buffers,
- self.project.clone(),
- window,
- cx,
- )
- });
-
- window.defer(cx, move |window, cx| {
- workspace.update(cx, |workspace, cx| {
- workspace.active_pane().update(cx, |pane, cx| {
- pane.add_item(
- Box::new(proposed_changes_editor),
- true,
- true,
- None,
- window,
- cx,
- );
- });
- });
- });
- }
-
pub fn open_excerpts_in_split(
&mut self,
_: &OpenExcerptsSplit,
@@ -5646,8 +5646,8 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
);
});
- // With `move_upwards` the selections stay in place, except for
- // the lines inserted above them
+ // With `duplicate_line_up` the selections move to the duplicated lines,
+ // which are inserted above the original lines
let editor = cx.add_window(|window, cx| {
let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx);
build_editor(buffer, window, cx)
@@ -5669,7 +5669,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1),
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2),
DisplayPoint::new(DisplayRow(2), 0)..DisplayPoint::new(DisplayRow(2), 0),
- DisplayPoint::new(DisplayRow(6), 0)..DisplayPoint::new(DisplayRow(6), 0),
+ DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(5), 0),
]
);
});
@@ -12676,6 +12676,12 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
);
}
});
+
+ #[cfg(target_os = "windows")]
+ let line_ending = "\r\n";
+ #[cfg(not(target_os = "windows"))]
+ let line_ending = "\n";
+
// Handle formatting requests to the language server.
cx.lsp
.set_request_handler::<lsp::request::Formatting, _, _>({
@@ -12699,7 +12705,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
),
(
lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)),
- "\n".into()
+ line_ending.into()
),
]
);
@@ -12710,14 +12716,14 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
lsp::Position::new(1, 0),
lsp::Position::new(1, 0),
),
- new_text: "\n".into(),
+ new_text: line_ending.into(),
},
lsp::TextEdit {
range: lsp::Range::new(
lsp::Position::new(2, 0),
lsp::Position::new(2, 0),
),
- new_text: "\n".into(),
+ new_text: line_ending.into(),
},
]))
}
@@ -26823,6 +26829,83 @@ async fn test_paste_url_from_other_app_creates_markdown_link_selectively_in_mult
));
}
+#[gpui::test]
+async fn test_non_linux_line_endings_registration(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let unix_newlines_file_text = "fn main() {
+ let a = 5;
+ }";
+    let crlf_file_text = unix_newlines_file_text.lines().join("\r\n");
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/a"),
+ json!({
+            "first.rs": &crlf_file_text,
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
+ let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let cx = &mut VisualTestContext::from_window(*workspace, cx);
+
+ let registered_text = Arc::new(Mutex::new(Vec::new()));
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+ language_registry.add(rust_lang());
+ let mut fake_servers = language_registry.register_fake_lsp(
+ "Rust",
+ FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ color_provider: Some(lsp::ColorProviderCapability::Simple(true)),
+ ..lsp::ServerCapabilities::default()
+ },
+ name: "rust-analyzer",
+ initializer: Some({
+ let registered_text = registered_text.clone();
+ Box::new(move |fake_server| {
+ fake_server.handle_notification::<lsp::notification::DidOpenTextDocument, _>({
+ let registered_text = registered_text.clone();
+ move |params, _| {
+ registered_text.lock().push(params.text_document.text);
+ }
+ });
+ })
+ }),
+ ..FakeLspAdapter::default()
+ },
+ );
+
+ let editor = workspace
+ .update(cx, |workspace, window, cx| {
+ workspace.open_abs_path(
+ PathBuf::from(path!("/a/first.rs")),
+ OpenOptions::default(),
+ window,
+ cx,
+ )
+ })
+ .unwrap()
+ .await
+ .unwrap()
+ .downcast::<Editor>()
+ .unwrap();
+ let _fake_language_server = fake_servers.next().await.unwrap();
+ cx.executor().run_until_parked();
+
+ assert_eq!(
+ editor.update(cx, |editor, cx| editor.text(cx)),
+ unix_newlines_file_text,
+ "Default text API returns \n-separated text",
+ );
+ assert_eq!(
+        vec![crlf_file_text],
+ registered_text.lock().drain(..).collect::<Vec<_>>(),
+ "Expected the language server to receive the exact same text from the FS",
+ );
+}
+
#[gpui::test]
async fn test_race_in_multibuffer_save(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -26972,8 +27055,8 @@ fn test_duplicate_line_up_on_last_line_without_newline(cx: &mut TestAppContext)
assert_eq!(
editor.selections.display_ranges(cx),
- vec![DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)],
- "Selection should remain on the original line"
+ vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)],
+ "Selection should move to the duplicated line"
);
})
.unwrap();
@@ -458,7 +458,6 @@ impl EditorElement {
register_action(editor, window, Editor::toggle_code_actions);
register_action(editor, window, Editor::open_excerpts);
register_action(editor, window, Editor::open_excerpts_in_split);
- register_action(editor, window, Editor::open_proposed_changes_editor);
register_action(editor, window, Editor::toggle_soft_wrap);
register_action(editor, window, Editor::toggle_tab_bar);
register_action(editor, window, Editor::toggle_line_numbers);
@@ -3828,13 +3827,7 @@ impl EditorElement {
let multi_buffer = editor.buffer.read(cx);
let file_status = multi_buffer
.all_diff_hunks_expanded()
- .then(|| {
- editor
- .project
- .as_ref()?
- .read(cx)
- .status_for_buffer_id(for_excerpt.buffer_id, cx)
- })
+ .then(|| editor.status_for_buffer_id(for_excerpt.buffer_id, cx))
.flatten();
let indicator = multi_buffer
.buffer(for_excerpt.buffer_id)
@@ -762,11 +762,11 @@ impl Item for Editor {
_workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Editor>>>
+ ) -> Option<Entity<Editor>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| self.clone(window, cx))))
+ Some(cx.new(|cx| self.clone(window, cx)))
}
fn set_nav_history(
@@ -1,523 +0,0 @@
-use crate::{ApplyAllDiffHunks, Editor, EditorEvent, SelectionEffects, SemanticsProvider};
-use buffer_diff::BufferDiff;
-use collections::{HashMap, HashSet};
-use futures::{channel::mpsc, future::join_all};
-use gpui::{App, Entity, EventEmitter, Focusable, Render, Subscription, Task};
-use language::{Buffer, BufferEvent, BufferRow, Capability};
-use multi_buffer::{ExcerptRange, MultiBuffer};
-use project::{InvalidationStrategy, Project, lsp_store::CacheInlayHints};
-use smol::stream::StreamExt;
-use std::{any::TypeId, ops::Range, rc::Rc, time::Duration};
-use text::{BufferId, ToOffset};
-use ui::{ButtonLike, KeyBinding, prelude::*};
-use workspace::{
- Item, ItemHandle as _, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
- item::SaveOptions, searchable::SearchableItemHandle,
-};
-
-pub struct ProposedChangesEditor {
- editor: Entity<Editor>,
- multibuffer: Entity<MultiBuffer>,
- title: SharedString,
- buffer_entries: Vec<BufferEntry>,
- _recalculate_diffs_task: Task<Option<()>>,
- recalculate_diffs_tx: mpsc::UnboundedSender<RecalculateDiff>,
-}
-
-pub struct ProposedChangeLocation<T> {
- pub buffer: Entity<Buffer>,
- pub ranges: Vec<Range<T>>,
-}
-
-struct BufferEntry {
- base: Entity<Buffer>,
- branch: Entity<Buffer>,
- _subscription: Subscription,
-}
-
-pub struct ProposedChangesEditorToolbar {
- current_editor: Option<Entity<ProposedChangesEditor>>,
-}
-
-struct RecalculateDiff {
- buffer: Entity<Buffer>,
- debounce: bool,
-}
-
-/// A provider of code semantics for branch buffers.
-///
-/// Requests in edited regions will return nothing, but requests in unchanged
-/// regions will be translated into the base buffer's coordinates.
-struct BranchBufferSemanticsProvider(Rc<dyn SemanticsProvider>);
-
-impl ProposedChangesEditor {
- pub fn new<T: Clone + ToOffset>(
- title: impl Into<SharedString>,
- locations: Vec<ProposedChangeLocation<T>>,
- project: Option<Entity<Project>>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) -> Self {
- let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
- let (recalculate_diffs_tx, mut recalculate_diffs_rx) = mpsc::unbounded();
- let mut this = Self {
- editor: cx.new(|cx| {
- let mut editor = Editor::for_multibuffer(multibuffer.clone(), project, window, cx);
- editor.set_expand_all_diff_hunks(cx);
- editor.set_completion_provider(None);
- editor.clear_code_action_providers();
- editor.set_semantics_provider(
- editor
- .semantics_provider()
- .map(|provider| Rc::new(BranchBufferSemanticsProvider(provider)) as _),
- );
- editor
- }),
- multibuffer,
- title: title.into(),
- buffer_entries: Vec::new(),
- recalculate_diffs_tx,
- _recalculate_diffs_task: cx.spawn_in(window, async move |this, cx| {
- let mut buffers_to_diff = HashSet::default();
- while let Some(mut recalculate_diff) = recalculate_diffs_rx.next().await {
- buffers_to_diff.insert(recalculate_diff.buffer);
-
- while recalculate_diff.debounce {
- cx.background_executor()
- .timer(Duration::from_millis(50))
- .await;
- let mut had_further_changes = false;
- while let Ok(next_recalculate_diff) = recalculate_diffs_rx.try_next() {
- let next_recalculate_diff = next_recalculate_diff?;
- recalculate_diff.debounce &= next_recalculate_diff.debounce;
- buffers_to_diff.insert(next_recalculate_diff.buffer);
- had_further_changes = true;
- }
- if !had_further_changes {
- break;
- }
- }
-
- let recalculate_diff_futures = this
- .update(cx, |this, cx| {
- buffers_to_diff
- .drain()
- .filter_map(|buffer| {
- let buffer = buffer.read(cx);
- let base_buffer = buffer.base_buffer()?;
- let buffer = buffer.text_snapshot();
- let diff =
- this.multibuffer.read(cx).diff_for(buffer.remote_id())?;
- Some(diff.update(cx, |diff, cx| {
- diff.set_base_text_buffer(base_buffer.clone(), buffer, cx)
- }))
- })
- .collect::<Vec<_>>()
- })
- .ok()?;
-
- join_all(recalculate_diff_futures).await;
- }
- None
- }),
- };
- this.reset_locations(locations, window, cx);
- this
- }
-
- pub fn branch_buffer_for_base(&self, base_buffer: &Entity<Buffer>) -> Option<Entity<Buffer>> {
- self.buffer_entries.iter().find_map(|entry| {
- if &entry.base == base_buffer {
- Some(entry.branch.clone())
- } else {
- None
- }
- })
- }
-
- pub fn set_title(&mut self, title: SharedString, cx: &mut Context<Self>) {
- self.title = title;
- cx.notify();
- }
-
- pub fn reset_locations<T: Clone + ToOffset>(
- &mut self,
- locations: Vec<ProposedChangeLocation<T>>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- // Undo all branch changes
- for entry in &self.buffer_entries {
- let base_version = entry.base.read(cx).version();
- entry.branch.update(cx, |buffer, cx| {
- let undo_counts = buffer
- .operations()
- .iter()
- .filter_map(|(timestamp, _)| {
- if !base_version.observed(*timestamp) {
- Some((*timestamp, u32::MAX))
- } else {
- None
- }
- })
- .collect();
- buffer.undo_operations(undo_counts, cx);
- });
- }
-
- self.multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.clear(cx);
- });
-
- let mut buffer_entries = Vec::new();
- let mut new_diffs = Vec::new();
- for location in locations {
- let branch_buffer;
- if let Some(ix) = self
- .buffer_entries
- .iter()
- .position(|entry| entry.base == location.buffer)
- {
- let entry = self.buffer_entries.remove(ix);
- branch_buffer = entry.branch.clone();
- buffer_entries.push(entry);
- } else {
- branch_buffer = location.buffer.update(cx, |buffer, cx| buffer.branch(cx));
- new_diffs.push(cx.new(|cx| {
- let mut diff = BufferDiff::new(&branch_buffer.read(cx).snapshot(), cx);
- let _ = diff.set_base_text_buffer(
- location.buffer.clone(),
- branch_buffer.read(cx).text_snapshot(),
- cx,
- );
- diff
- }));
- buffer_entries.push(BufferEntry {
- branch: branch_buffer.clone(),
- base: location.buffer.clone(),
- _subscription: cx.subscribe(&branch_buffer, Self::on_buffer_event),
- });
- }
-
- self.multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.push_excerpts(
- branch_buffer,
- location
- .ranges
- .into_iter()
- .map(|range| ExcerptRange::new(range)),
- cx,
- );
- });
- }
-
- self.buffer_entries = buffer_entries;
- self.editor.update(cx, |editor, cx| {
- editor.change_selections(SelectionEffects::no_scroll(), window, cx, |selections| {
- selections.refresh()
- });
- editor.buffer.update(cx, |buffer, cx| {
- for diff in new_diffs {
- buffer.add_diff(diff, cx)
- }
- })
- });
- }
-
- pub fn recalculate_all_buffer_diffs(&self) {
- for (ix, entry) in self.buffer_entries.iter().enumerate().rev() {
- self.recalculate_diffs_tx
- .unbounded_send(RecalculateDiff {
- buffer: entry.branch.clone(),
- debounce: ix > 0,
- })
- .ok();
- }
- }
-
- fn on_buffer_event(
- &mut self,
- buffer: Entity<Buffer>,
- event: &BufferEvent,
- _cx: &mut Context<Self>,
- ) {
- if let BufferEvent::Operation { .. } = event {
- self.recalculate_diffs_tx
- .unbounded_send(RecalculateDiff {
- buffer,
- debounce: true,
- })
- .ok();
- }
- }
-}
-
-impl Render for ProposedChangesEditor {
- fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
- div()
- .size_full()
- .key_context("ProposedChangesEditor")
- .child(self.editor.clone())
- }
-}
-
-impl Focusable for ProposedChangesEditor {
- fn focus_handle(&self, cx: &App) -> gpui::FocusHandle {
- self.editor.focus_handle(cx)
- }
-}
-
-impl EventEmitter<EditorEvent> for ProposedChangesEditor {}
-
-impl Item for ProposedChangesEditor {
- type Event = EditorEvent;
-
- fn tab_icon(&self, _window: &Window, _cx: &App) -> Option<Icon> {
- Some(Icon::new(IconName::Diff))
- }
-
- fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString {
- self.title.clone()
- }
-
- fn as_searchable(&self, _: &Entity<Self>) -> Option<Box<dyn SearchableItemHandle>> {
- Some(Box::new(self.editor.clone()))
- }
-
- fn act_as_type<'a>(
- &'a self,
- type_id: TypeId,
- self_handle: &'a Entity<Self>,
- _: &'a App,
- ) -> Option<gpui::AnyView> {
- if type_id == TypeId::of::<Self>() {
- Some(self_handle.to_any())
- } else if type_id == TypeId::of::<Editor>() {
- Some(self.editor.to_any())
- } else {
- None
- }
- }
-
- fn added_to_workspace(
- &mut self,
- workspace: &mut Workspace,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- self.editor.update(cx, |editor, cx| {
- Item::added_to_workspace(editor, workspace, window, cx)
- });
- }
-
- fn deactivated(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- self.editor
- .update(cx, |editor, cx| editor.deactivated(window, cx));
- }
-
- fn navigate(
- &mut self,
- data: Box<dyn std::any::Any>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) -> bool {
- self.editor
- .update(cx, |editor, cx| Item::navigate(editor, data, window, cx))
- }
-
- fn set_nav_history(
- &mut self,
- nav_history: workspace::ItemNavHistory,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- self.editor.update(cx, |editor, cx| {
- Item::set_nav_history(editor, nav_history, window, cx)
- });
- }
-
- fn can_save(&self, cx: &App) -> bool {
- self.editor.read(cx).can_save(cx)
- }
-
- fn save(
- &mut self,
- options: SaveOptions,
- project: Entity<Project>,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) -> Task<anyhow::Result<()>> {
- self.editor.update(cx, |editor, cx| {
- Item::save(editor, options, project, window, cx)
- })
- }
-}
-
-impl ProposedChangesEditorToolbar {
- pub fn new() -> Self {
- Self {
- current_editor: None,
- }
- }
-
- fn get_toolbar_item_location(&self) -> ToolbarItemLocation {
- if self.current_editor.is_some() {
- ToolbarItemLocation::PrimaryRight
- } else {
- ToolbarItemLocation::Hidden
- }
- }
-}
-
-impl Render for ProposedChangesEditorToolbar {
- fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- let button_like = ButtonLike::new("apply-changes").child(Label::new("Apply All"));
-
- match &self.current_editor {
- Some(editor) => {
- let focus_handle = editor.focus_handle(cx);
- let keybinding = KeyBinding::for_action_in(&ApplyAllDiffHunks, &focus_handle, cx);
-
- button_like.child(keybinding).on_click({
- move |_event, window, cx| {
- focus_handle.dispatch_action(&ApplyAllDiffHunks, window, cx)
- }
- })
- }
- None => button_like.disabled(true),
- }
- }
-}
-
-impl EventEmitter<ToolbarItemEvent> for ProposedChangesEditorToolbar {}
-
-impl ToolbarItemView for ProposedChangesEditorToolbar {
- fn set_active_pane_item(
- &mut self,
- active_pane_item: Option<&dyn workspace::ItemHandle>,
- _window: &mut Window,
- _cx: &mut Context<Self>,
- ) -> workspace::ToolbarItemLocation {
- self.current_editor =
- active_pane_item.and_then(|item| item.downcast::<ProposedChangesEditor>());
- self.get_toolbar_item_location()
- }
-}
-
-impl BranchBufferSemanticsProvider {
- fn to_base(
- &self,
- buffer: &Entity<Buffer>,
- positions: &[text::Anchor],
- cx: &App,
- ) -> Option<Entity<Buffer>> {
- let base_buffer = buffer.read(cx).base_buffer()?;
- let version = base_buffer.read(cx).version();
- if positions
- .iter()
- .any(|position| !version.observed(position.timestamp))
- {
- return None;
- }
- Some(base_buffer)
- }
-}
-
-impl SemanticsProvider for BranchBufferSemanticsProvider {
- fn hover(
- &self,
- buffer: &Entity<Buffer>,
- position: text::Anchor,
- cx: &mut App,
- ) -> Option<Task<Option<Vec<project::Hover>>>> {
- let buffer = self.to_base(buffer, &[position], cx)?;
- self.0.hover(&buffer, position, cx)
- }
-
- fn applicable_inlay_chunks(
- &self,
- buffer: &Entity<Buffer>,
- ranges: &[Range<text::Anchor>],
- cx: &mut App,
- ) -> Vec<Range<BufferRow>> {
- self.0.applicable_inlay_chunks(buffer, ranges, cx)
- }
-
- fn invalidate_inlay_hints(&self, for_buffers: &HashSet<BufferId>, cx: &mut App) {
- self.0.invalidate_inlay_hints(for_buffers, cx);
- }
-
- fn inlay_hints(
- &self,
- invalidate: InvalidationStrategy,
- buffer: Entity<Buffer>,
- ranges: Vec<Range<text::Anchor>>,
- known_chunks: Option<(clock::Global, HashSet<Range<BufferRow>>)>,
- cx: &mut App,
- ) -> Option<HashMap<Range<BufferRow>, Task<anyhow::Result<CacheInlayHints>>>> {
- let positions = ranges
- .iter()
- .flat_map(|range| [range.start, range.end])
- .collect::<Vec<_>>();
- let buffer = self.to_base(&buffer, &positions, cx)?;
- self.0
- .inlay_hints(invalidate, buffer, ranges, known_chunks, cx)
- }
-
- fn inline_values(
- &self,
- _: Entity<Buffer>,
- _: Range<text::Anchor>,
- _: &mut App,
- ) -> Option<Task<anyhow::Result<Vec<project::InlayHint>>>> {
- None
- }
-
- fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
- if let Some(buffer) = self.to_base(buffer, &[], cx) {
- self.0.supports_inlay_hints(&buffer, cx)
- } else {
- false
- }
- }
-
- fn document_highlights(
- &self,
- buffer: &Entity<Buffer>,
- position: text::Anchor,
- cx: &mut App,
- ) -> Option<Task<anyhow::Result<Vec<project::DocumentHighlight>>>> {
- let buffer = self.to_base(buffer, &[position], cx)?;
- self.0.document_highlights(&buffer, position, cx)
- }
-
- fn definitions(
- &self,
- buffer: &Entity<Buffer>,
- position: text::Anchor,
- kind: crate::GotoDefinitionKind,
- cx: &mut App,
- ) -> Option<Task<anyhow::Result<Option<Vec<project::LocationLink>>>>> {
- let buffer = self.to_base(buffer, &[position], cx)?;
- self.0.definitions(&buffer, position, kind, cx)
- }
-
- fn range_for_rename(
- &self,
- _: &Entity<Buffer>,
- _: text::Anchor,
- _: &mut App,
- ) -> Option<Task<anyhow::Result<Option<Range<text::Anchor>>>>> {
- None
- }
-
- fn perform_rename(
- &self,
- _: &Entity<Buffer>,
- _: text::Anchor,
- _: String,
- _: &mut App,
- ) -> Option<Task<anyhow::Result<project::ProjectTransaction>>> {
- None
- }
-}
@@ -82,6 +82,8 @@ pub struct ExtensionManifest {
#[serde(default)]
pub context_servers: BTreeMap<Arc<str>, ContextServerManifestEntry>,
#[serde(default)]
+ pub agent_servers: BTreeMap<Arc<str>, AgentServerManifestEntry>,
+ #[serde(default)]
pub slash_commands: BTreeMap<Arc<str>, SlashCommandManifestEntry>,
#[serde(default)]
pub snippets: Option<PathBuf>,
@@ -138,6 +140,48 @@ pub struct LibManifestEntry {
pub version: Option<SemanticVersion>,
}
+#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
+pub struct AgentServerManifestEntry {
+ /// Display name for the agent (shown in menus).
+ pub name: String,
+ /// Environment variables to set when launching the agent server.
+ #[serde(default)]
+ pub env: HashMap<String, String>,
+ /// Optional icon path (relative to extension root, e.g., "ai.svg").
+ /// Should be a small SVG icon for display in menus.
+ #[serde(default)]
+ pub icon: Option<String>,
+ /// Per-target configuration for archive-based installation.
+ /// The key format is "{os}-{arch}" where:
+ /// - os: "darwin" (macOS), "linux", "windows"
+ /// - arch: "aarch64" (arm64), "x86_64"
+ ///
+ /// Example:
+ /// ```toml
+ /// [agent_servers.myagent.targets.darwin-aarch64]
+ /// archive = "https://example.com/myagent-darwin-arm64.zip"
+ /// cmd = "./myagent"
+ /// args = ["--serve"]
+ /// sha256 = "abc123..." # optional
+ /// ```
+ pub targets: HashMap<String, TargetConfig>,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
+pub struct TargetConfig {
+ /// URL to download the archive from (e.g., "https://github.com/owner/repo/releases/download/v1.0.0/myagent-darwin-arm64.zip")
+ pub archive: String,
+ /// Command to run (e.g., "./myagent" or "./myagent.exe")
+ pub cmd: String,
+ /// Command-line arguments to pass to the agent server.
+ #[serde(default)]
+ pub args: Vec<String>,
+ /// Optional SHA-256 hash of the archive for verification.
+ /// If not provided and the URL is a GitHub release, we'll attempt to fetch it from GitHub.
+ #[serde(default)]
+ pub sha256: Option<String>,
+}
+
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub enum ExtensionLibraryKind {
Rust,
@@ -266,6 +310,7 @@ fn manifest_from_old_manifest(
.collect(),
language_servers: Default::default(),
context_servers: BTreeMap::default(),
+ agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),
@@ -298,6 +343,7 @@ mod tests {
grammars: BTreeMap::default(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
+ agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: vec![],
@@ -404,4 +450,31 @@ mod tests {
);
assert!(manifest.allow_exec("docker", &["ps"]).is_err()); // wrong first arg
}
+ #[test]
+ fn parse_manifest_with_agent_server_archive_launcher() {
+ let toml_src = r#"
+id = "example.agent-server-ext"
+name = "Agent Server Example"
+version = "1.0.0"
+schema_version = 0
+
+[agent_servers.foo]
+name = "Foo Agent"
+
+[agent_servers.foo.targets.linux-x86_64]
+archive = "https://example.com/agent-linux-x64.tar.gz"
+cmd = "./agent"
+args = ["--serve"]
+"#;
+
+ let manifest: ExtensionManifest = toml::from_str(toml_src).expect("manifest should parse");
+ assert_eq!(manifest.id.as_ref(), "example.agent-server-ext");
+ assert!(manifest.agent_servers.contains_key("foo"));
+ let entry = manifest.agent_servers.get("foo").unwrap();
+ assert!(entry.targets.contains_key("linux-x86_64"));
+ let target = entry.targets.get("linux-x86_64").unwrap();
+ assert_eq!(target.archive, "https://example.com/agent-linux-x64.tar.gz");
+ assert_eq!(target.cmd, "./agent");
+ assert_eq!(target.args, vec!["--serve"]);
+ }
}
@@ -235,6 +235,21 @@ async fn copy_extension_resources(
.with_context(|| "failed to copy icons")?;
}
+ for (_, agent_entry) in &manifest.agent_servers {
+ if let Some(icon_path) = &agent_entry.icon {
+ let source_icon = extension_path.join(icon_path);
+ let dest_icon = output_dir.join(icon_path);
+
+ // Create parent directory if needed
+ if let Some(parent) = dest_icon.parent() {
+ fs::create_dir_all(parent)?;
+ }
+
+ fs::copy(&source_icon, &dest_icon)
+ .with_context(|| format!("failed to copy agent server icon '{}'", icon_path))?;
+ }
+ }
+
if !manifest.languages.is_empty() {
let output_languages_dir = output_dir.join("languages");
fs::create_dir_all(&output_languages_dir)?;
@@ -132,6 +132,7 @@ fn manifest() -> ExtensionManifest {
.into_iter()
.collect(),
context_servers: BTreeMap::default(),
+ agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: vec![ExtensionCapability::ProcessExec(
@@ -107,6 +107,7 @@ mod tests {
grammars: BTreeMap::default(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
+ agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: vec![],
@@ -159,6 +159,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
.collect(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
+ agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),
@@ -189,6 +190,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
grammars: BTreeMap::default(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
+ agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),
@@ -368,6 +370,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
grammars: BTreeMap::default(),
language_servers: BTreeMap::default(),
context_servers: BTreeMap::default(),
+ agent_servers: BTreeMap::default(),
slash_commands: BTreeMap::default(),
snippets: None,
capabilities: Vec::new(),
@@ -30,12 +30,14 @@ use node_runtime::NodeRuntime;
use release_channel::ReleaseChannel;
use semantic_version::SemanticVersion;
use settings::Settings;
-use std::borrow::Cow;
-use std::sync::{LazyLock, OnceLock};
-use std::time::Duration;
use std::{
+ borrow::Cow,
path::{Path, PathBuf},
- sync::Arc,
+ sync::{
+ Arc, LazyLock, OnceLock,
+ atomic::{AtomicBool, Ordering},
+ },
+ time::Duration,
};
use task::{DebugScenario, SpawnInTerminal, TaskTemplate, ZedDebugConfig};
use util::paths::SanitizedPath;
@@ -495,6 +497,11 @@ pub struct WasmState {
pub(crate) capability_granter: CapabilityGranter,
}
+std::thread_local! {
+ /// Used by the crash handler to ignore panics in extension-related threads.
+ pub static IS_WASM_THREAD: AtomicBool = const { AtomicBool::new(false) };
+}
+
type MainThreadCall = Box<dyn Send + for<'a> FnOnce(&'a mut AsyncApp) -> LocalBoxFuture<'a, ()>>;
type ExtensionCall = Box<
@@ -529,6 +536,7 @@ fn wasm_engine(executor: &BackgroundExecutor) -> wasmtime::Engine {
let engine_ref = engine.weak();
executor
.spawn(async move {
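+ // Mark the thread running this task so the crash handler can ignore panics from extension (wasm) work.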
+ IS_WASM_THREAD.with(|v| v.store(true, Ordering::Release));
// Somewhat arbitrary interval, as it isn't a guaranteed interval.
// But this is a rough upper bound for how long the extension execution can block on
// `Future::poll`.
@@ -66,6 +66,7 @@ pub fn init(cx: &mut App) {
ExtensionCategoryFilter::ContextServers => {
ExtensionProvides::ContextServers
}
+ ExtensionCategoryFilter::AgentServers => ExtensionProvides::AgentServers,
ExtensionCategoryFilter::SlashCommands => ExtensionProvides::SlashCommands,
ExtensionCategoryFilter::IndexedDocsProviders => {
ExtensionProvides::IndexedDocsProviders
@@ -189,6 +190,7 @@ fn extension_provides_label(provides: ExtensionProvides) -> &'static str {
ExtensionProvides::Grammars => "Grammars",
ExtensionProvides::LanguageServers => "Language Servers",
ExtensionProvides::ContextServers => "MCP Servers",
+ ExtensionProvides::AgentServers => "Agent Servers",
ExtensionProvides::SlashCommands => "Slash Commands",
ExtensionProvides::IndexedDocsProviders => "Indexed Docs Providers",
ExtensionProvides::Snippets => "Snippets",
@@ -9,7 +9,10 @@ use git::{
AskPassDelegate, Branch, CommitDetails, CommitOptions, FetchOptions, GitRepository,
GitRepositoryCheckpoint, PushOptions, Remote, RepoPath, ResetMode,
},
- status::{FileStatus, GitStatus, StatusCode, TrackedStatus, UnmergedStatus},
+ status::{
+ DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
+ UnmergedStatus,
+ },
};
use gpui::{AsyncApp, BackgroundExecutor, SharedString, Task, TaskLabel};
use ignore::gitignore::GitignoreBuilder;
@@ -41,6 +44,9 @@ pub struct FakeGitRepositoryState {
pub unmerged_paths: HashMap<RepoPath, UnmergedStatus>,
pub head_contents: HashMap<RepoPath, String>,
pub index_contents: HashMap<RepoPath, String>,
+ // merge_base_contents maps each path to a blob oid; the blob text itself lives in the oids map below
+ pub merge_base_contents: HashMap<RepoPath, Oid>,
+ pub oids: HashMap<Oid, String>,
pub blames: HashMap<RepoPath, Blame>,
pub current_branch_name: Option<String>,
pub branches: HashSet<String>,
@@ -60,6 +66,8 @@ impl FakeGitRepositoryState {
branches: Default::default(),
simulated_index_write_error_message: Default::default(),
refs: HashMap::from_iter([("HEAD".into(), "abc".into())]),
+ merge_base_contents: Default::default(),
+ oids: Default::default(),
}
}
}
@@ -110,6 +118,13 @@ impl GitRepository for FakeGitRepository {
.boxed()
}
+ fn load_blob_content(&self, oid: git::Oid) -> BoxFuture<'_, Result<String>> {
+ self.with_state_async(false, move |state| {
+ state.oids.get(&oid).cloned().context("oid does not exist")
+ })
+ .boxed()
+ }
+
fn load_commit(
&self,
_commit: String,
@@ -140,6 +155,34 @@ impl GitRepository for FakeGitRepository {
None
}
+ fn diff_tree(&self, _request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
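+ // The fake implementation synthesizes the diff by comparing head_contents against merge_base_contents.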
+ let mut entries = HashMap::default();
+ self.with_state_async(false, |state| {
+ for (path, content) in &state.head_contents {
+ let status = if let Some((oid, original)) = state
+ .merge_base_contents
+ .get(path)
+ .map(|oid| (oid, &state.oids[oid]))
+ {
+ if original == content {
+ continue;
+ }
+ TreeDiffStatus::Modified { old: *oid }
+ } else {
+ TreeDiffStatus::Added
+ };
+ entries.insert(path.clone(), status);
+ }
+ for (path, oid) in &state.merge_base_contents {
+ if !entries.contains_key(path) {
+ entries.insert(path.clone(), TreeDiffStatus::Deleted { old: *oid });
+ }
+ }
+ Ok(TreeDiff { entries })
+ })
+ .boxed()
+ }
+
fn revparse_batch(&self, revs: Vec<String>) -> BoxFuture<'_, Result<Vec<Option<String>>>> {
self.with_state_async(false, |state| {
Ok(revs
@@ -523,7 +566,7 @@ impl GitRepository for FakeGitRepository {
let repository_dir_path = self.repository_dir_path.parent().unwrap().to_path_buf();
async move {
executor.simulate_random_delay().await;
- let oid = Oid::random(&mut executor.rng());
+ let oid = git::Oid::random(&mut executor.rng());
let entry = fs.entry(&repository_dir_path)?;
checkpoints.lock().insert(oid, entry);
Ok(GitRepositoryCheckpoint { commit_sha: oid })
@@ -579,7 +622,7 @@ impl GitRepository for FakeGitRepository {
}
fn default_branch(&self) -> BoxFuture<'_, Result<Option<SharedString>>> {
- unimplemented!()
+ async { Ok(Some("main".into())) }.boxed()
}
}
@@ -1752,6 +1752,26 @@ impl FakeFs {
.unwrap();
}
+ pub fn set_merge_base_content_for_repo(
+ &self,
+ dot_git: &Path,
+ contents_by_path: &[(&str, String)],
+ ) {
+ self.with_git_state(dot_git, true, |state| {
+ use git::Oid;
+
+ state.merge_base_contents.clear();
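+ // Assign deterministic fake oids (repeating ASCII digits) so tests get stable blob ids.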
+ let oids = (1..)
+ .map(|n| n.to_string())
+ .map(|n| Oid::from_bytes(n.repeat(20).as_bytes()).unwrap());
+ for ((path, content), oid) in contents_by_path.iter().zip(oids) {
+ state.merge_base_contents.insert(repo_path(path), oid);
+ state.oids.insert(oid, content.clone());
+ }
+ })
+ .unwrap();
+ }
+
pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(RepoPath, git::blame::Blame)>) {
self.with_git_state(dot_git, true, |state| {
state.blames.clear();
@@ -1,6 +1,6 @@
use crate::commit::parse_git_diff_name_status;
use crate::stash::GitStash;
-use crate::status::{GitStatus, StatusCode};
+use crate::status::{DiffTreeType, GitStatus, StatusCode, TreeDiff};
use crate::{Oid, SHORT_SHA_LENGTH};
use anyhow::{Context as _, Result, anyhow, bail};
use collections::HashMap;
@@ -350,6 +350,7 @@ pub trait GitRepository: Send + Sync {
///
/// Also returns `None` for symlinks.
fn load_committed_text(&self, path: RepoPath) -> BoxFuture<'_, Option<String>>;
+ fn load_blob_content(&self, oid: Oid) -> BoxFuture<'_, Result<String>>;
fn set_index_text(
&self,
@@ -379,6 +380,7 @@ pub trait GitRepository: Send + Sync {
fn merge_message(&self) -> BoxFuture<'_, Option<String>>;
fn status(&self, path_prefixes: &[RepoPath]) -> Task<Result<GitStatus>>;
+ fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>>;
fn stash_entries(&self) -> BoxFuture<'_, Result<GitStash>>;
@@ -908,6 +910,17 @@ impl GitRepository for RealGitRepository {
.boxed()
}
+ fn load_blob_content(&self, oid: Oid) -> BoxFuture<'_, Result<String>> {
+ let repo = self.repository.clone();
+ self.executor
+ .spawn(async move {
+ let repo = repo.lock();
+ let content = repo.find_blob(oid.0)?.content().to_owned();
+ Ok(String::from_utf8(content)?)
+ })
+ .boxed()
+ }
+
fn set_index_text(
&self,
path: RepoPath,
@@ -1060,6 +1073,50 @@ impl GitRepository for RealGitRepository {
})
}
+ fn diff_tree(&self, request: DiffTreeType) -> BoxFuture<'_, Result<TreeDiff>> {
+ let git_binary_path = self.any_git_binary_path.clone();
+ let working_directory = match self.working_directory() {
+ Ok(working_directory) => working_directory,
+ Err(e) => return Task::ready(Err(e)).boxed(),
+ };
+
+ let mut args = vec![
+ OsString::from("--no-optional-locks"),
+ OsString::from("diff-tree"),
+ OsString::from("-r"),
+ OsString::from("-z"),
+ OsString::from("--no-renames"),
+ ];
+ match request {
+ DiffTreeType::MergeBase { base, head } => {
+ args.push("--merge-base".into());
+ args.push(OsString::from(base.as_str()));
+ args.push(OsString::from(head.as_str()));
+ }
+ DiffTreeType::Since { base, head } => {
+ args.push(OsString::from(base.as_str()));
+ args.push(OsString::from(head.as_str()));
+ }
+ }
+
+ self.executor
+ .spawn(async move {
+ let output = new_smol_command(&git_binary_path)
+ .current_dir(working_directory)
+ .args(args)
+ .output()
+ .await?;
+ if output.status.success() {
+ let stdout = String::from_utf8_lossy(&output.stdout);
+ stdout.parse()
+ } else {
+ let stderr = String::from_utf8_lossy(&output.stderr);
+ anyhow::bail!("git status failed: {stderr}");
+ }
+ })
+ .boxed()
+ }
+
fn stash_entries(&self) -> BoxFuture<'_, Result<GitStash>> {
let git_binary_path = self.any_git_binary_path.clone();
let working_directory = self.working_directory();
@@ -1827,13 +1884,23 @@ impl GitRepository for RealGitRepository {
return Ok(output);
}
- let output = git
- .run(&["symbolic-ref", "refs/remotes/origin/HEAD"])
- .await?;
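+ // Fall back in order: origin/HEAD, then the configured init.defaultBranch (if it resolves), then "master".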
+ if let Ok(output) = git.run(&["symbolic-ref", "refs/remotes/origin/HEAD"]).await {
+ return Ok(output
+ .strip_prefix("refs/remotes/origin/")
+ .map(|s| SharedString::from(s.to_owned())));
+ }
+
+ if let Ok(default_branch) = git.run(&["config", "init.defaultBranch"]).await {
+ if git.run(&["rev-parse", &default_branch]).await.is_ok() {
+ return Ok(Some(default_branch.into()));
+ }
+ }
+
+ if git.run(&["rev-parse", "master"]).await.is_ok() {
+ return Ok(Some("master".into()));
+ }
- Ok(output
- .strip_prefix("refs/remotes/origin/")
- .map(|s| SharedString::from(s.to_owned())))
+ Ok(None)
})
.boxed()
}
@@ -1,5 +1,7 @@
-use crate::repository::RepoPath;
-use anyhow::Result;
+use crate::{Oid, repository::RepoPath};
+use anyhow::{Result, anyhow};
+use collections::HashMap;
+use gpui::SharedString;
use serde::{Deserialize, Serialize};
use std::{str::FromStr, sync::Arc};
use util::{ResultExt, rel_path::RelPath};
@@ -190,7 +192,11 @@ impl FileStatus {
}
pub fn is_deleted(self) -> bool {
- matches!(self, FileStatus::Tracked(tracked) if matches!((tracked.index_status, tracked.worktree_status), (StatusCode::Deleted, _) | (_, StatusCode::Deleted)))
+ let FileStatus::Tracked(tracked) = self else {
+ return false;
+ };
+ tracked.index_status == StatusCode::Deleted && tracked.worktree_status != StatusCode::Added
+ || tracked.worktree_status == StatusCode::Deleted
}
pub fn is_untracked(self) -> bool {
@@ -486,3 +492,128 @@ impl Default for GitStatus {
}
}
}
+
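+/// Selects which two trees `diff_tree` compares: `MergeBase` passes `--merge-base`, so changes are
+/// measured from the merge base of `base` and `head`, while `Since` diffs `base` against `head` directly.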
+pub enum DiffTreeType {
+ MergeBase {
+ base: SharedString,
+ head: SharedString,
+ },
+ Since {
+ base: SharedString,
+ head: SharedString,
+ },
+}
+
+impl DiffTreeType {
+ pub fn base(&self) -> &SharedString {
+ match self {
+ DiffTreeType::MergeBase { base, .. } => base,
+ DiffTreeType::Since { base, .. } => base,
+ }
+ }
+
+ pub fn head(&self) -> &SharedString {
+ match self {
+ DiffTreeType::MergeBase { head, .. } => head,
+ DiffTreeType::Since { head, .. } => head,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq)]
+pub struct TreeDiff {
+ pub entries: HashMap<RepoPath, TreeDiffStatus>,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum TreeDiffStatus {
+ Added,
+ Modified { old: Oid },
+ Deleted { old: Oid },
+}
+
+impl FromStr for TreeDiff {
+ type Err = anyhow::Error;
+
+ fn from_str(s: &str) -> Result<Self> {
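+ // `git diff-tree -z` emits records of the form
+ // ":<old_mode> <new_mode> <old_sha> <new_sha> <status>\0<path>\0",
+ // so the NUL-split stream alternates between a status field and its path.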
+ let mut fields = s.split('\0');
+ let mut parsed = HashMap::default();
+ while let Some((status, path)) = fields.next().zip(fields.next()) {
+ let path = RepoPath(RelPath::unix(path)?.into());
+
+ let mut fields = status.split(" ").skip(2);
+ let old_sha = fields
+ .next()
+ .ok_or_else(|| anyhow!("expected to find old_sha"))?
+ .to_owned()
+ .parse()?;
+ let _new_sha = fields
+ .next()
+ .ok_or_else(|| anyhow!("expected to find new_sha"))?;
+ let status = fields
+ .next()
+ .and_then(|s| {
+ if s.len() == 1 {
+ s.as_bytes().first()
+ } else {
+ None
+ }
+ })
+ .ok_or_else(|| anyhow!("expected to find status"))?;
+
+ let result = match StatusCode::from_byte(*status)? {
+ StatusCode::Modified => TreeDiffStatus::Modified { old: old_sha },
+ StatusCode::Added => TreeDiffStatus::Added,
+ StatusCode::Deleted => TreeDiffStatus::Deleted { old: old_sha },
+ _status => continue,
+ };
+
+ parsed.insert(path, result);
+ }
+
+ Ok(Self { entries: parsed })
+ }
+}
+
+#[cfg(test)]
+mod tests {
+
+ use crate::{
+ repository::RepoPath,
+ status::{TreeDiff, TreeDiffStatus},
+ };
+
+ #[test]
+ fn test_tree_diff_parsing() {
+ let input = ":000000 100644 0000000000000000000000000000000000000000 0062c311b8727c3a2e3cd7a41bc9904feacf8f98 A\x00.zed/settings.json\x00".to_owned() +
+ ":100644 000000 bb3e9ed2e97a8c02545bae243264d342c069afb3 0000000000000000000000000000000000000000 D\x00README.md\x00" +
+ ":100644 100644 42f097005a1f21eb2260fad02ec8c991282beee8 a437d85f63bb8c62bd78f83f40c506631fabf005 M\x00parallel.go\x00";
+
+ let output: TreeDiff = input.parse().unwrap();
+ assert_eq!(
+ output,
+ TreeDiff {
+ entries: [
+ (
+ RepoPath::new(".zed/settings.json").unwrap(),
+ TreeDiffStatus::Added,
+ ),
+ (
+ RepoPath::new("README.md").unwrap(),
+ TreeDiffStatus::Deleted {
+ old: "bb3e9ed2e97a8c02545bae243264d342c069afb3".parse().unwrap()
+ }
+ ),
+ (
+ RepoPath::new("parallel.go").unwrap(),
+ TreeDiffStatus::Modified {
+ old: "42f097005a1f21eb2260fad02ec8c991282beee8".parse().unwrap(),
+ }
+ ),
+ ]
+ .into_iter()
+ .collect()
+ }
+ )
+ }
+}
@@ -44,7 +44,6 @@ multi_buffer.workspace = true
notifications.workspace = true
panel.workspace = true
picker.workspace = true
-postage.workspace = true
project.workspace = true
schemars.workspace = true
serde.workspace = true
@@ -4,8 +4,8 @@ use editor::{Editor, EditorEvent, MultiBuffer, SelectionEffects, multibuffer_con
use git::repository::{CommitDetails, CommitDiff, RepoPath};
use gpui::{
Action, AnyElement, AnyView, App, AppContext as _, AsyncApp, AsyncWindowContext, Context,
- Entity, EventEmitter, FocusHandle, Focusable, IntoElement, PromptLevel, Render, Task,
- WeakEntity, Window, actions,
+ Entity, EventEmitter, FocusHandle, Focusable, IntoElement, PromptLevel, Render, WeakEntity,
+ Window, actions,
};
use language::{
Anchor, Buffer, Capability, DiskState, File, LanguageRegistry, LineEnding, OffsetRangeExt as _,
@@ -561,11 +561,11 @@ impl Item for CommitView {
_workspace_id: Option<workspace::WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| {
+ Some(cx.new(|cx| {
let editor = cx.new(|cx| {
self.editor
.update(cx, |editor, cx| editor.clone(window, cx))
@@ -577,7 +577,7 @@ impl Item for CommitView {
commit: self.commit.clone(),
stash: self.stash,
}
- })))
+ }))
}
}
@@ -4,16 +4,15 @@ use crate::{
git_panel_settings::GitPanelSettings,
remote_button::{render_publish_button, render_push_button},
};
-use anyhow::Result;
+use anyhow::{Context as _, Result, anyhow};
use buffer_diff::{BufferDiff, DiffHunkSecondaryStatus};
use collections::{HashMap, HashSet};
use editor::{
- Editor, EditorEvent, SelectionEffects,
+ Addon, Editor, EditorEvent, SelectionEffects,
actions::{GoToHunk, GoToPreviousHunk},
multibuffer_context_lines,
scroll::Autoscroll,
};
-use futures::StreamExt;
use git::{
Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext,
repository::{Branch, RepoPath, Upstream, UpstreamTracking, UpstreamTrackingStatus},
@@ -27,18 +26,23 @@ use language::{Anchor, Buffer, Capability, OffsetRangeExt};
use multi_buffer::{MultiBuffer, PathKey};
use project::{
Project, ProjectPath,
- git_store::{GitStore, GitStoreEvent, Repository, RepositoryEvent},
+ git_store::{
+ Repository,
+ branch_diff::{self, BranchDiffEvent, DiffBase},
+ },
};
use settings::{Settings, SettingsStore};
use std::any::{Any, TypeId};
use std::ops::Range;
+use std::sync::Arc;
use theme::ActiveTheme;
use ui::{KeyBinding, Tooltip, prelude::*, vertical_divider};
-use util::ResultExt as _;
+use util::{ResultExt as _, rel_path::RelPath};
use workspace::{
CloseActiveItem, ItemNavHistory, SerializableItem, ToolbarItemEvent, ToolbarItemLocation,
ToolbarItemView, Workspace,
item::{BreadcrumbText, Item, ItemEvent, ItemHandle, SaveOptions, TabContentParams},
+ notifications::NotifyTaskExt,
searchable::SearchableItemHandle,
};
@@ -48,30 +52,24 @@ actions!(
/// Shows the diff between the working directory and the index.
Diff,
/// Adds files to the git staging area.
- Add
+ Add,
+ /// Shows the diff between the working directory and your default
+ /// branch (typically main or master).
+ BranchDiff
]
);
pub struct ProjectDiff {
project: Entity<Project>,
multibuffer: Entity<MultiBuffer>,
+ branch_diff: Entity<branch_diff::BranchDiff>,
editor: Entity<Editor>,
- git_store: Entity<GitStore>,
- buffer_diff_subscriptions: HashMap<RepoPath, (Entity<BufferDiff>, Subscription)>,
+ buffer_diff_subscriptions: HashMap<Arc<RelPath>, (Entity<BufferDiff>, Subscription)>,
workspace: WeakEntity<Workspace>,
focus_handle: FocusHandle,
- update_needed: postage::watch::Sender<()>,
pending_scroll: Option<PathKey>,
_task: Task<Result<()>>,
- _git_store_subscription: Subscription,
-}
-
-#[derive(Debug)]
-struct DiffBuffer {
- path_key: PathKey,
- buffer: Entity<Buffer>,
- diff: Entity<BufferDiff>,
- file_status: FileStatus,
+ _subscription: Subscription,
}
const CONFLICT_SORT_PREFIX: u64 = 1;
@@ -81,6 +79,7 @@ const NEW_SORT_PREFIX: u64 = 3;
impl ProjectDiff {
pub(crate) fn register(workspace: &mut Workspace, cx: &mut Context<Workspace>) {
workspace.register_action(Self::deploy);
+ workspace.register_action(Self::deploy_branch_diff);
workspace.register_action(|workspace, _: &Add, window, cx| {
Self::deploy(workspace, &Diff, window, cx);
});
@@ -96,6 +95,40 @@ impl ProjectDiff {
Self::deploy_at(workspace, None, window, cx)
}
+ fn deploy_branch_diff(
+ workspace: &mut Workspace,
+ _: &BranchDiff,
+ window: &mut Window,
+ cx: &mut Context<Workspace>,
+ ) {
+ telemetry::event!("Git Branch Diff Opened");
+ let project = workspace.project().clone();
+
+ let existing = workspace
+ .items_of_type::<Self>(cx)
+ .find(|item| matches!(item.read(cx).diff_base(cx), DiffBase::Merge { .. }));
+ if let Some(existing) = existing {
+ workspace.activate_item(&existing, true, true, window, cx);
+ return;
+ }
+ let workspace = cx.entity();
+ window
+ .spawn(cx, async move |cx| {
+ let this = cx
+ .update(|window, cx| {
+ Self::new_with_default_branch(project, workspace.clone(), window, cx)
+ })?
+ .await?;
+ workspace
+ .update_in(cx, |workspace, window, cx| {
+ workspace.add_item_to_active_pane(Box::new(this), None, true, window, cx);
+ })
+ .ok();
+ anyhow::Ok(())
+ })
+ .detach_and_notify_err(window, cx);
+ }
+
pub fn deploy_at(
workspace: &mut Workspace,
entry: Option<GitStatusEntry>,
@@ -110,7 +143,10 @@ impl ProjectDiff {
"Action"
}
);
- let project_diff = if let Some(existing) = workspace.item_of_type::<Self>(cx) {
+ let existing = workspace
+ .items_of_type::<Self>(cx)
+ .find(|item| matches!(item.read(cx).diff_base(cx), DiffBase::Head));
+ let project_diff = if let Some(existing) = existing {
workspace.activate_item(&existing, true, true, window, cx);
existing
} else {
@@ -139,11 +175,54 @@ impl ProjectDiff {
})
}
+ fn new_with_default_branch(
+ project: Entity<Project>,
+ workspace: Entity<Workspace>,
+ window: &mut Window,
+ cx: &mut App,
+ ) -> Task<Result<Entity<Self>>> {
+ let Some(repo) = project.read(cx).git_store().read(cx).active_repository() else {
+ return Task::ready(Err(anyhow!("No active repository")));
+ };
+ let main_branch = repo.update(cx, |repo, _| repo.default_branch());
+ window.spawn(cx, async move |cx| {
+ let main_branch = main_branch
+ .await??
+ .context("Could not determine default branch")?;
+
+ let branch_diff = cx.new_window_entity(|window, cx| {
+ branch_diff::BranchDiff::new(
+ DiffBase::Merge {
+ base_ref: main_branch,
+ },
+ project.clone(),
+ window,
+ cx,
+ )
+ })?;
+ cx.new_window_entity(|window, cx| {
+ Self::new_impl(branch_diff, project, workspace, window, cx)
+ })
+ })
+ }
+
fn new(
project: Entity<Project>,
workspace: Entity<Workspace>,
window: &mut Window,
cx: &mut Context<Self>,
+ ) -> Self {
+ let branch_diff =
+ cx.new(|cx| branch_diff::BranchDiff::new(DiffBase::Head, project.clone(), window, cx));
+ Self::new_impl(branch_diff, project, workspace, window, cx)
+ }
+
+ fn new_impl(
+ branch_diff: Entity<branch_diff::BranchDiff>,
+ project: Entity<Project>,
+ workspace: Entity<Workspace>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
) -> Self {
let focus_handle = cx.focus_handle();
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
@@ -153,9 +232,25 @@ impl ProjectDiff {
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
diff_display_editor.disable_diagnostics(cx);
diff_display_editor.set_expand_all_diff_hunks(cx);
- diff_display_editor.register_addon(GitPanelAddon {
- workspace: workspace.downgrade(),
- });
+
+ match branch_diff.read(cx).diff_base() {
+ DiffBase::Head => {
+ diff_display_editor.register_addon(GitPanelAddon {
+ workspace: workspace.downgrade(),
+ });
+ }
+ DiffBase::Merge { .. } => {
+ diff_display_editor.register_addon(BranchDiffAddon {
+ branch_diff: branch_diff.clone(),
+ });
+ diff_display_editor.start_temporary_diff_override();
+ diff_display_editor.set_render_diff_hunk_controls(
+ Arc::new(|_, _, _, _, _, _, _, _| gpui::Empty.into_any_element()),
+ cx,
+ );
+ }
+ }
diff_display_editor
});
window.defer(cx, {
@@ -172,71 +267,71 @@ impl ProjectDiff {
cx.subscribe_in(&editor, window, Self::handle_editor_event)
.detach();
- let git_store = project.read(cx).git_store().clone();
- let git_store_subscription = cx.subscribe_in(
- &git_store,
+ let branch_diff_subscription = cx.subscribe_in(
+ &branch_diff,
window,
- move |this, _git_store, event, _window, _cx| match event {
- GitStoreEvent::ActiveRepositoryChanged(_)
- | GitStoreEvent::RepositoryUpdated(
- _,
- RepositoryEvent::StatusesChanged { full_scan: _ },
- true,
- )
- | GitStoreEvent::ConflictsUpdated => {
- *this.update_needed.borrow_mut() = ();
+ move |this, _git_store, event, window, cx| match event {
+ BranchDiffEvent::FileListChanged => {
+ this._task = window.spawn(cx, {
+ let this = cx.weak_entity();
+ async |cx| Self::refresh(this, cx).await
+ })
}
- _ => {}
},
);
let mut was_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
let mut was_collapse_untracked_diff =
GitPanelSettings::get_global(cx).collapse_untracked_diff;
- cx.observe_global::<SettingsStore>(move |this, cx| {
+ cx.observe_global_in::<SettingsStore>(window, move |this, window, cx| {
let is_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
let is_collapse_untracked_diff =
GitPanelSettings::get_global(cx).collapse_untracked_diff;
if is_sort_by_path != was_sort_by_path
|| is_collapse_untracked_diff != was_collapse_untracked_diff
{
- *this.update_needed.borrow_mut() = ();
+ this._task = {
+ window.spawn(cx, {
+ let this = cx.weak_entity();
+ async |cx| Self::refresh(this, cx).await
+ })
+ }
}
was_sort_by_path = is_sort_by_path;
was_collapse_untracked_diff = is_collapse_untracked_diff;
})
.detach();
- let (mut send, recv) = postage::watch::channel::<()>();
- let worker = window.spawn(cx, {
+ let task = window.spawn(cx, {
let this = cx.weak_entity();
- async |cx| Self::handle_status_updates(this, recv, cx).await
+ async |cx| Self::refresh(this, cx).await
});
- // Kick off a refresh immediately
- *send.borrow_mut() = ();
Self {
project,
- git_store: git_store.clone(),
workspace: workspace.downgrade(),
+ branch_diff,
focus_handle,
editor,
multibuffer,
buffer_diff_subscriptions: Default::default(),
pending_scroll: None,
- update_needed: send,
- _task: worker,
- _git_store_subscription: git_store_subscription,
+ _task: task,
+ _subscription: branch_diff_subscription,
}
}
+ pub fn diff_base<'a>(&'a self, cx: &'a App) -> &'a DiffBase {
+ self.branch_diff.read(cx).diff_base()
+ }
+
pub fn move_to_entry(
&mut self,
entry: GitStatusEntry,
window: &mut Window,
cx: &mut Context<Self>,
) {
- let Some(git_repo) = self.git_store.read(cx).active_repository() else {
+ let Some(git_repo) = self.branch_diff.read(cx).repo() else {
return;
};
let repo = git_repo.read(cx);
@@ -366,77 +461,28 @@ impl ProjectDiff {
}
}
- fn load_buffers(&mut self, cx: &mut Context<Self>) -> Vec<Task<Result<DiffBuffer>>> {
- let Some(repo) = self.git_store.read(cx).active_repository() else {
- self.multibuffer.update(cx, |multibuffer, cx| {
- multibuffer.clear(cx);
- });
- self.buffer_diff_subscriptions.clear();
- return vec![];
- };
-
- let mut previous_paths = self.multibuffer.read(cx).paths().collect::<HashSet<_>>();
-
- let mut result = vec![];
- repo.update(cx, |repo, cx| {
- for entry in repo.cached_status() {
- if !entry.status.has_changes() {
- continue;
- }
- let Some(project_path) = repo.repo_path_to_project_path(&entry.repo_path, cx)
- else {
- continue;
- };
- let sort_prefix = sort_prefix(repo, &entry.repo_path, entry.status, cx);
- let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone());
-
- previous_paths.remove(&path_key);
- let load_buffer = self
- .project
- .update(cx, |project, cx| project.open_buffer(project_path, cx));
-
- let project = self.project.clone();
- result.push(cx.spawn(async move |_, cx| {
- let buffer = load_buffer.await?;
- let changes = project
- .update(cx, |project, cx| {
- project.open_uncommitted_diff(buffer.clone(), cx)
- })?
- .await?;
- Ok(DiffBuffer {
- path_key,
- buffer,
- diff: changes,
- file_status: entry.status,
- })
- }));
- }
- });
- self.multibuffer.update(cx, |multibuffer, cx| {
- for path in previous_paths {
- self.buffer_diff_subscriptions
- .remove(&path.path.clone().into());
- multibuffer.remove_excerpts_for_path(path, cx);
- }
- });
- result
- }
-
fn register_buffer(
&mut self,
- diff_buffer: DiffBuffer,
+ path_key: PathKey,
+ file_status: FileStatus,
+ buffer: Entity<Buffer>,
+ diff: Entity<BufferDiff>,
window: &mut Window,
cx: &mut Context<Self>,
) {
- let path_key = diff_buffer.path_key.clone();
- let buffer = diff_buffer.buffer.clone();
- let diff = diff_buffer.diff.clone();
-
- let subscription = cx.subscribe(&diff, move |this, _, _, _| {
- *this.update_needed.borrow_mut() = ();
+ if self.branch_diff.read(cx).diff_base().is_merge_base() {
+ self.multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.add_diff(diff.clone(), cx);
+ });
+ }
+ let subscription = cx.subscribe_in(&diff, window, move |this, _, _, window, cx| {
+ this._task = window.spawn(cx, {
+ let this = cx.weak_entity();
+ async |cx| Self::refresh(this, cx).await
+ })
});
self.buffer_diff_subscriptions
- .insert(path_key.path.clone().into(), (diff.clone(), subscription));
+ .insert(path_key.path.clone(), (diff.clone(), subscription));
let conflict_addon = self
.editor
@@ -480,8 +526,8 @@ impl ProjectDiff {
});
}
if is_excerpt_newly_added
- && (diff_buffer.file_status.is_deleted()
- || (diff_buffer.file_status.is_untracked()
+ && (file_status.is_deleted()
+ || (file_status.is_untracked()
&& GitPanelSettings::get_global(cx).collapse_untracked_diff))
{
editor.fold_buffer(snapshot.text.remote_id(), cx)
@@ -506,26 +552,51 @@ impl ProjectDiff {
}
}
- pub async fn handle_status_updates(
- this: WeakEntity<Self>,
- mut recv: postage::watch::Receiver<()>,
- cx: &mut AsyncWindowContext,
- ) -> Result<()> {
- while (recv.next().await).is_some() {
- let buffers_to_load = this.update(cx, |this, cx| this.load_buffers(cx))?;
- for buffer_to_load in buffers_to_load {
- if let Some(buffer) = buffer_to_load.await.log_err() {
- cx.update(|window, cx| {
- this.update(cx, |this, cx| this.register_buffer(buffer, window, cx))
- .ok();
- })?;
+ pub async fn refresh(this: WeakEntity<Self>, cx: &mut AsyncWindowContext) -> Result<()> {
+ let mut path_keys = Vec::new();
+ let buffers_to_load = this.update(cx, |this, cx| {
+ let (repo, buffers_to_load) = this.branch_diff.update(cx, |branch_diff, cx| {
+ let load_buffers = branch_diff.load_buffers(cx);
+ (branch_diff.repo().cloned(), load_buffers)
+ });
+ let mut previous_paths = this.multibuffer.read(cx).paths().collect::<HashSet<_>>();
+
+ if let Some(repo) = repo {
+ let repo = repo.read(cx);
+
+ path_keys = Vec::with_capacity(buffers_to_load.len());
+ for entry in buffers_to_load.iter() {
+ let sort_prefix = sort_prefix(&repo, &entry.repo_path, entry.file_status, cx);
+ let path_key =
+ PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone());
+ previous_paths.remove(&path_key);
+ path_keys.push(path_key)
}
}
- this.update(cx, |this, cx| {
- this.pending_scroll.take();
- cx.notify();
- })?;
+
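+ // previous_paths now holds only paths that dropped out of the diff; remove their excerpts and subscriptions.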
+ this.multibuffer.update(cx, |multibuffer, cx| {
+ for path in previous_paths {
+ this.buffer_diff_subscriptions.remove(&path.path);
+ multibuffer.remove_excerpts_for_path(path, cx);
+ }
+ });
+ buffers_to_load
+ })?;
+
+ for (entry, path_key) in buffers_to_load.into_iter().zip(path_keys.into_iter()) {
+ if let Some((buffer, diff)) = entry.load.await.log_err() {
+ cx.update(|window, cx| {
+ this.update(cx, |this, cx| {
+ this.register_buffer(path_key, entry.file_status, buffer, diff, window, cx)
+ })
+ .ok();
+ })?;
+ }
}
+ this.update(cx, |this, cx| {
+ this.pending_scroll.take();
+ cx.notify();
+ })?;
Ok(())
}
@@ -594,8 +665,8 @@ impl Item for ProjectDiff {
Some("Project Diff".into())
}
- fn tab_content(&self, params: TabContentParams, _window: &Window, _: &App) -> AnyElement {
- Label::new("Uncommitted Changes")
+ fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
+ Label::new(self.tab_content_text(0, cx))
.color(if params.selected {
Color::Default
} else {
@@ -604,8 +675,11 @@ impl Item for ProjectDiff {
.into_any_element()
}
- fn tab_content_text(&self, _detail: usize, _: &App) -> SharedString {
- "Uncommitted Changes".into()
+ fn tab_content_text(&self, _detail: usize, cx: &App) -> SharedString {
+ match self.branch_diff.read(cx).diff_base() {
+ DiffBase::Head => "Uncommitted Changes".into(),
+ DiffBase::Merge { base_ref } => format!("Changes since {}", base_ref).into(),
+ }
}
fn telemetry_event_text(&self) -> Option<&'static str> {
@@ -640,16 +714,12 @@ impl Item for ProjectDiff {
_workspace_id: Option<workspace::WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- let Some(workspace) = self.workspace.upgrade() else {
- return Task::ready(None);
- };
- Task::ready(Some(cx.new(|cx| {
- ProjectDiff::new(self.project.clone(), workspace, window, cx)
- })))
+ let workspace = self.workspace.upgrade()?;
+ Some(cx.new(|cx| ProjectDiff::new(self.project.clone(), workspace, window, cx)))
}
fn is_dirty(&self, cx: &App) -> bool {
@@ -806,30 +876,47 @@ impl SerializableItem for ProjectDiff {
}
fn deserialize(
- _project: Entity<Project>,
+ project: Entity<Project>,
workspace: WeakEntity<Workspace>,
- _workspace_id: workspace::WorkspaceId,
- _item_id: workspace::ItemId,
+ workspace_id: workspace::WorkspaceId,
+ item_id: workspace::ItemId,
window: &mut Window,
cx: &mut App,
) -> Task<Result<Entity<Self>>> {
window.spawn(cx, async move |cx| {
- workspace.update_in(cx, |workspace, window, cx| {
- let workspace_handle = cx.entity();
- cx.new(|cx| Self::new(workspace.project().clone(), workspace_handle, window, cx))
- })
+ let diff_base = persistence::PROJECT_DIFF_DB.get_diff_base(item_id, workspace_id)?;
+
+ let diff = cx.update(|window, cx| {
+ let branch_diff = cx
+ .new(|cx| branch_diff::BranchDiff::new(diff_base, project.clone(), window, cx));
+ let workspace = workspace.upgrade().context("workspace gone")?;
+ anyhow::Ok(
+ cx.new(|cx| ProjectDiff::new_impl(branch_diff, project, workspace, window, cx)),
+ )
+ })??;
+
+ Ok(diff)
})
}
fn serialize(
&mut self,
- _workspace: &mut Workspace,
- _item_id: workspace::ItemId,
+ workspace: &mut Workspace,
+ item_id: workspace::ItemId,
_closing: bool,
_window: &mut Window,
- _cx: &mut Context<Self>,
+ cx: &mut Context<Self>,
) -> Option<Task<Result<()>>> {
- None
+ let workspace_id = workspace.database_id()?;
+ let diff_base = self.diff_base(cx).clone();
+
+ Some(cx.background_spawn({
+ async move {
+ persistence::PROJECT_DIFF_DB
+ .save_diff_base(item_id, workspace_id, diff_base.clone())
+ .await
+ }
+ }))
}
fn should_serialize(&self, _: &Self::Event) -> bool {
@@ -837,6 +924,80 @@ impl SerializableItem for ProjectDiff {
}
}
+mod persistence {
+
+ use anyhow::Context as _;
+ use db::{
+ sqlez::{domain::Domain, thread_safe_connection::ThreadSafeConnection},
+ sqlez_macros::sql,
+ };
+ use project::git_store::branch_diff::DiffBase;
+ use workspace::{ItemId, WorkspaceDb, WorkspaceId};
+
+ pub struct ProjectDiffDb(ThreadSafeConnection);
+
+ impl Domain for ProjectDiffDb {
+ const NAME: &str = stringify!(ProjectDiffDb);
+
+ const MIGRATIONS: &[&str] = &[sql!(
+ CREATE TABLE project_diffs(
+ workspace_id INTEGER,
+ item_id INTEGER UNIQUE,
+
+ diff_base TEXT,
+
+ PRIMARY KEY(workspace_id, item_id),
+ FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+ ON DELETE CASCADE
+ ) STRICT;
+ )];
+ }
+
+ db::static_connection!(PROJECT_DIFF_DB, ProjectDiffDb, [WorkspaceDb]);
+
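+ // The diff base is stored as a JSON-serialized DiffBase in the diff_base column;
+ // get_diff_base falls back to DiffBase::Head when no row exists for the item.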
+ impl ProjectDiffDb {
+ pub async fn save_diff_base(
+ &self,
+ item_id: ItemId,
+ workspace_id: WorkspaceId,
+ diff_base: DiffBase,
+ ) -> anyhow::Result<()> {
+ self.write(move |connection| {
+ let sql_stmt = sql!(
+ INSERT OR REPLACE INTO project_diffs(item_id, workspace_id, diff_base) VALUES (?, ?, ?)
+ );
+ let diff_base_str = serde_json::to_string(&diff_base)?;
+ let mut query = connection.exec_bound::<(ItemId, WorkspaceId, String)>(sql_stmt)?;
+ query((item_id, workspace_id, diff_base_str)).context(format!(
+ "exec_bound failed to execute or parse for: {}",
+ sql_stmt
+ ))
+ })
+ .await
+ }
+
+ pub fn get_diff_base(
+ &self,
+ item_id: ItemId,
+ workspace_id: WorkspaceId,
+ ) -> anyhow::Result<DiffBase> {
+ let sql_stmt =
+ sql!(SELECT diff_base FROM project_diffs WHERE item_id = ? AND workspace_id = ?);
+ let diff_base_str = self.select_row_bound::<(ItemId, WorkspaceId), String>(sql_stmt)?(
+ (item_id, workspace_id),
+ )
+ .context(::std::format!(
+ "Error in get_diff_base, select_row_bound failed to execute or parse for: {}",
+ sql_stmt
+ ))?;
+ let Some(diff_base_str) = diff_base_str else {
+ return Ok(DiffBase::Head);
+ };
+ serde_json::from_str(&diff_base_str).context("deserializing diff base")
+ }
+ }
+}
+
pub struct ProjectDiffToolbar {
project_diff: Option<WeakEntity<ProjectDiff>>,
workspace: WeakEntity<Workspace>,
@@ -901,6 +1062,7 @@ impl ToolbarItemView for ProjectDiffToolbar {
) -> ToolbarItemLocation {
self.project_diff = active_pane_item
.and_then(|item| item.act_as::<ProjectDiff>(cx))
+ .filter(|item| item.read(cx).diff_base(cx) == &DiffBase::Head)
.map(|entity| entity.downgrade());
if self.project_diff.is_some() {
ToolbarItemLocation::PrimaryRight
@@ -1370,18 +1532,42 @@ fn merge_anchor_ranges<'a>(
})
}
+struct BranchDiffAddon {
+ branch_diff: Entity<branch_diff::BranchDiff>,
+}
+
+impl Addon for BranchDiffAddon {
+ fn to_any(&self) -> &dyn std::any::Any {
+ self
+ }
+
+ fn override_status_for_buffer_id(
+ &self,
+ buffer_id: language::BufferId,
+ cx: &App,
+ ) -> Option<FileStatus> {
+ self.branch_diff
+ .read(cx)
+ .status_for_buffer_id(buffer_id, cx)
+ }
+}
+
#[cfg(test)]
mod tests {
+ use collections::HashMap;
use db::indoc;
use editor::test::editor_test_context::{EditorTestContext, assert_state_with_diff};
- use git::status::{UnmergedStatus, UnmergedStatusCode};
+ use git::status::{TrackedStatus, UnmergedStatus, UnmergedStatusCode};
use gpui::TestAppContext;
use project::FakeFs;
use serde_json::json;
use settings::SettingsStore;
use std::path::Path;
use unindent::Unindent as _;
- use util::{path, rel_path::rel_path};
+ use util::{
+ path,
+ rel_path::{RelPath, rel_path},
+ };
use super::*;
@@ -2019,6 +2205,99 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_branch_diff(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ path!("/project"),
+ json!({
+ ".git": {},
+ "a.txt": "C",
+ "b.txt": "new",
+ "c.txt": "in-merge-base-and-work-tree",
+ "d.txt": "created-in-head",
+ }),
+ )
+ .await;
+ let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
+ let (workspace, cx) =
+ cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let diff = cx
+ .update(|window, cx| {
+ ProjectDiff::new_with_default_branch(project.clone(), workspace, window, cx)
+ })
+ .await
+ .unwrap();
+ cx.run_until_parked();
+
+ fs.set_head_for_repo(
+ Path::new(path!("/project/.git")),
+ &[("a.txt", "B".into()), ("d.txt", "created-in-head".into())],
+ "sha",
+ );
+ fs.set_merge_base_content_for_repo(
+ Path::new(path!("/project/.git")),
+ &[
+ ("a.txt", "A".into()),
+ ("c.txt", "in-merge-base-and-work-tree".into()),
+ ],
+ );
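+ // Merge base: a.txt = "A", c.txt unchanged. HEAD rewrites a.txt to "B" and adds d.txt.
+ // The worktree has a.txt = "C" and a new b.txt, so relative to the merge base we expect
+ // a.txt modified, b.txt and d.txt added, and c.txt omitted.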
+ cx.run_until_parked();
+
+ let editor = diff.read_with(cx, |diff, _| diff.editor.clone());
+
+ assert_state_with_diff(
+ &editor,
+ cx,
+ &"
+ - A
+ + ˇC
+ + new
+ + created-in-head"
+ .unindent(),
+ );
+
+ let statuses: HashMap<Arc<RelPath>, Option<FileStatus>> =
+ editor.update(cx, |editor, cx| {
+ editor
+ .buffer()
+ .read(cx)
+ .all_buffers()
+ .iter()
+ .map(|buffer| {
+ (
+ buffer.read(cx).file().unwrap().path().clone(),
+ editor.status_for_buffer_id(buffer.read(cx).remote_id(), cx),
+ )
+ })
+ .collect()
+ });
+
+ assert_eq!(
+ statuses,
+ HashMap::from_iter([
+ (
+ rel_path("a.txt").into_arc(),
+ Some(FileStatus::Tracked(TrackedStatus {
+ index_status: git::status::StatusCode::Modified,
+ worktree_status: git::status::StatusCode::Modified
+ }))
+ ),
+ (rel_path("b.txt").into_arc(), Some(FileStatus::Untracked)),
+ (
+ rel_path("d.txt").into_arc(),
+ Some(FileStatus::Tracked(TrackedStatus {
+ index_status: git::status::StatusCode::Added,
+ worktree_status: git::status::StatusCode::Added
+ }))
+ )
+ ])
+ );
+ }
+
#[gpui::test]
async fn test_update_on_uncommit(cx: &mut TestAppContext) {
init_test(cx);
@@ -2,21 +2,20 @@ use crate::{App, PlatformDispatcher};
use async_task::Runnable;
use futures::channel::mpsc;
use smol::prelude::*;
-use std::mem::ManuallyDrop;
-use std::panic::Location;
-use std::thread::{self, ThreadId};
use std::{
fmt::Debug,
marker::PhantomData,
- mem,
+ mem::{self, ManuallyDrop},
num::NonZeroUsize,
+ panic::Location,
pin::Pin,
rc::Rc,
sync::{
Arc,
- atomic::{AtomicUsize, Ordering::SeqCst},
+ atomic::{AtomicUsize, Ordering},
},
task::{Context, Poll},
+ thread::{self, ThreadId},
time::{Duration, Instant},
};
use util::TryFutureExt;
@@ -123,7 +122,12 @@ impl TaskLabel {
/// Construct a new task label.
pub fn new() -> Self {
static NEXT_TASK_LABEL: AtomicUsize = AtomicUsize::new(1);
- Self(NEXT_TASK_LABEL.fetch_add(1, SeqCst).try_into().unwrap())
+ Self(
+ NEXT_TASK_LABEL
+ .fetch_add(1, Ordering::SeqCst)
+ .try_into()
+ .unwrap(),
+ )
}
}
@@ -271,7 +275,7 @@ impl BackgroundExecutor {
let awoken = awoken.clone();
let unparker = unparker.clone();
move || {
- awoken.store(true, SeqCst);
+ awoken.store(true, Ordering::SeqCst);
unparker.unpark();
}
});
@@ -287,7 +291,7 @@ impl BackgroundExecutor {
max_ticks -= 1;
if !dispatcher.tick(background_only) {
- if awoken.swap(false, SeqCst) {
+ if awoken.swap(false, Ordering::SeqCst) {
continue;
}
@@ -711,6 +711,41 @@ impl PlatformTextSystem for NoopTextSystem {
}
}
+// Adapted from https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.cpp
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+#[allow(dead_code)]
+pub(crate) fn get_gamma_correction_ratios(gamma: f32) -> [f32; 4] {
+ const GAMMA_INCORRECT_TARGET_RATIOS: [[f32; 4]; 13] = [
+ [0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0], // gamma = 1.0
+ [0.0166 / 4.0, -0.0807 / 4.0, 0.2227 / 4.0, -0.0751 / 4.0], // gamma = 1.1
+ [0.0350 / 4.0, -0.1760 / 4.0, 0.4325 / 4.0, -0.1370 / 4.0], // gamma = 1.2
+ [0.0543 / 4.0, -0.2821 / 4.0, 0.6302 / 4.0, -0.1876 / 4.0], // gamma = 1.3
+ [0.0739 / 4.0, -0.3963 / 4.0, 0.8167 / 4.0, -0.2287 / 4.0], // gamma = 1.4
+ [0.0933 / 4.0, -0.5161 / 4.0, 0.9926 / 4.0, -0.2616 / 4.0], // gamma = 1.5
+ [0.1121 / 4.0, -0.6395 / 4.0, 1.1588 / 4.0, -0.2877 / 4.0], // gamma = 1.6
+ [0.1300 / 4.0, -0.7649 / 4.0, 1.3159 / 4.0, -0.3080 / 4.0], // gamma = 1.7
+ [0.1469 / 4.0, -0.8911 / 4.0, 1.4644 / 4.0, -0.3234 / 4.0], // gamma = 1.8
+ [0.1627 / 4.0, -1.0170 / 4.0, 1.6051 / 4.0, -0.3347 / 4.0], // gamma = 1.9
+ [0.1773 / 4.0, -1.1420 / 4.0, 1.7385 / 4.0, -0.3426 / 4.0], // gamma = 2.0
+ [0.1908 / 4.0, -1.2652 / 4.0, 1.8650 / 4.0, -0.3476 / 4.0], // gamma = 2.1
+ [0.2031 / 4.0, -1.3864 / 4.0, 1.9851 / 4.0, -0.3501 / 4.0], // gamma = 2.2
+ ];
+
+ const NORM13: f32 = ((0x10000 as f64) / (255.0 * 255.0) * 4.0) as f32;
+ const NORM24: f32 = ((0x100 as f64) / (255.0) * 4.0) as f32;
+
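+ // The ratio table covers gamma 1.0..=2.2 in 0.1 steps; out-of-range values clamp to the nearest entry.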
+ let index = ((gamma * 10.0).round() as usize).clamp(10, 22) - 10;
+ let ratios = GAMMA_INCORRECT_TARGET_RATIOS[index];
+
+ [
+ ratios[0] * NORM13,
+ ratios[1] * NORM24,
+ ratios[2] * NORM13,
+ ratios[3] * NORM24,
+ ]
+}
+
#[derive(PartialEq, Eq, Hash, Clone)]
pub(crate) enum AtlasKey {
Glyph(RenderGlyphParams),
@@ -5,6 +5,7 @@ use super::{BladeAtlas, BladeContext};
use crate::{
Background, Bounds, DevicePixels, GpuSpecs, MonochromeSprite, Path, Point, PolychromeSprite,
PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Underline,
+ get_gamma_correction_ratios,
};
use blade_graphics as gpu;
use blade_util::{BufferBelt, BufferBeltDescriptor};
@@ -1023,7 +1024,7 @@ impl RenderingParameters {
.and_then(|v| v.parse().ok())
.unwrap_or(1.8_f32)
.clamp(1.0, 2.2);
- let gamma_ratios = Self::get_gamma_ratios(gamma);
+ let gamma_ratios = get_gamma_correction_ratios(gamma);
let grayscale_enhanced_contrast = env::var("ZED_FONTS_GRAYSCALE_ENHANCED_CONTRAST")
.ok()
.and_then(|v| v.parse().ok())
@@ -1036,37 +1037,4 @@ impl RenderingParameters {
grayscale_enhanced_contrast,
}
}
-
- // Gamma ratios for brightening/darkening edges for better contrast
- // https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.cpp#L50
- fn get_gamma_ratios(gamma: f32) -> [f32; 4] {
- const GAMMA_INCORRECT_TARGET_RATIOS: [[f32; 4]; 13] = [
- [0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0], // gamma = 1.0
- [0.0166 / 4.0, -0.0807 / 4.0, 0.2227 / 4.0, -0.0751 / 4.0], // gamma = 1.1
- [0.0350 / 4.0, -0.1760 / 4.0, 0.4325 / 4.0, -0.1370 / 4.0], // gamma = 1.2
- [0.0543 / 4.0, -0.2821 / 4.0, 0.6302 / 4.0, -0.1876 / 4.0], // gamma = 1.3
- [0.0739 / 4.0, -0.3963 / 4.0, 0.8167 / 4.0, -0.2287 / 4.0], // gamma = 1.4
- [0.0933 / 4.0, -0.5161 / 4.0, 0.9926 / 4.0, -0.2616 / 4.0], // gamma = 1.5
- [0.1121 / 4.0, -0.6395 / 4.0, 1.1588 / 4.0, -0.2877 / 4.0], // gamma = 1.6
- [0.1300 / 4.0, -0.7649 / 4.0, 1.3159 / 4.0, -0.3080 / 4.0], // gamma = 1.7
- [0.1469 / 4.0, -0.8911 / 4.0, 1.4644 / 4.0, -0.3234 / 4.0], // gamma = 1.8
- [0.1627 / 4.0, -1.0170 / 4.0, 1.6051 / 4.0, -0.3347 / 4.0], // gamma = 1.9
- [0.1773 / 4.0, -1.1420 / 4.0, 1.7385 / 4.0, -0.3426 / 4.0], // gamma = 2.0
- [0.1908 / 4.0, -1.2652 / 4.0, 1.8650 / 4.0, -0.3476 / 4.0], // gamma = 2.1
- [0.2031 / 4.0, -1.3864 / 4.0, 1.9851 / 4.0, -0.3501 / 4.0], // gamma = 2.2
- ];
-
- const NORM13: f32 = ((0x10000 as f64) / (255.0 * 255.0) * 4.0) as f32;
- const NORM24: f32 = ((0x100 as f64) / (255.0) * 4.0) as f32;
-
- let index = ((gamma * 10.0).round() as usize).clamp(10, 22) - 10;
- let ratios = GAMMA_INCORRECT_TARGET_RATIOS[index];
-
- [
- ratios[0] * NORM13,
- ratios[1] * NORM24,
- ratios[2] * NORM13,
- ratios[3] * NORM24,
- ]
- }
}
@@ -28,6 +28,9 @@ fn heat_map_color(value: f32, minValue: f32, maxValue: f32, position: vec2<f32>)
*/
+// Contrast and gamma correction adapted from https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.hlsl
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
fn color_brightness(color: vec3<f32>) -> f32 {
// REC. 601 luminance coefficients for perceived brightness
return dot(color, vec3<f32>(0.30, 0.59, 0.11));
@@ -67,8 +67,6 @@ struct MacTextSystemState {
font_ids_by_postscript_name: HashMap<String, FontId>,
font_ids_by_font_key: HashMap<FontKey, SmallVec<[FontId; 4]>>,
postscript_names_by_font_id: HashMap<FontId, String>,
- /// UTF-16 indices of ZWNJS
- zwnjs_scratch_space: Vec<usize>,
}
impl MacTextSystem {
@@ -81,7 +79,6 @@ impl MacTextSystem {
font_ids_by_postscript_name: HashMap::default(),
font_ids_by_font_key: HashMap::default(),
postscript_names_by_font_id: HashMap::default(),
- zwnjs_scratch_space: Vec::new(),
}))
}
}
@@ -431,11 +428,6 @@ impl MacTextSystemState {
}
fn layout_line(&mut self, text: &str, font_size: Pixels, font_runs: &[FontRun]) -> LineLayout {
- const ZWNJ: char = '\u{200C}';
- const ZWNJ_STR: &str = "\u{200C}";
- const ZWNJ_SIZE_16: usize = ZWNJ.len_utf16();
-
- self.zwnjs_scratch_space.clear();
// Construct the attributed string, converting UTF8 ranges to UTF16 ranges.
let mut string = CFMutableAttributedString::new();
let mut max_ascent = 0.0f32;
@@ -443,26 +435,14 @@ impl MacTextSystemState {
{
let mut ix_converter = StringIndexConverter::new(&text);
- let mut last_font_run = None;
for run in font_runs {
let text = &text[ix_converter.utf8_ix..][..run.len];
- // if the fonts are the same, we need to disconnect the text with a ZWNJ
- // to prevent core text from forming ligatures between them
- let needs_zwnj = last_font_run.replace(run.font_id) == Some(run.font_id);
let utf16_start = string.char_len(); // insert at end of string
ix_converter.advance_to_utf8_ix(ix_converter.utf8_ix + run.len);
// note: replace_str may silently ignore codepoints it dislikes (e.g., BOM at start of string)
string.replace_str(&CFString::new(text), CFRange::init(utf16_start, 0));
- if needs_zwnj {
- let zwnjs_pos = string.char_len();
- self.zwnjs_scratch_space.push(zwnjs_pos as usize);
- string.replace_str(
- &CFString::from_static_string(ZWNJ_STR),
- CFRange::init(zwnjs_pos, 0),
- );
- }
let utf16_end = string.char_len();
let cf_range = CFRange::init(utf16_start, utf16_end - utf16_start);
@@ -514,15 +494,6 @@ impl MacTextSystemState {
.zip(run.string_indices().iter())
{
let mut glyph_utf16_ix = usize::try_from(glyph_utf16_ix).unwrap();
- let r = self
- .zwnjs_scratch_space
- .binary_search_by(|&it| it.cmp(&glyph_utf16_ix));
- match r {
- // this glyph is a ZWNJ, skip it
- Ok(_) => continue,
- // adjust the index to account for the ZWNJs we've inserted
- Err(idx) => glyph_utf16_ix -= idx * ZWNJ_SIZE_16,
- }
if ix_converter.utf16_ix > glyph_utf16_ix {
// We cannot reuse current index converter, as it can only seek forward. Restart the search.
ix_converter = StringIndexConverter::new(text);
@@ -1,3 +1,7 @@
+// Adapted from https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.hlsl
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
float color_brightness(float3 color) {
// REC. 601 luminance coefficients for perceived brightness
return dot(color, float3(0.30f, 0.59f, 0.11f));
@@ -612,44 +612,11 @@ impl DirectXRenderer {
let render_params: IDWriteRenderingParams1 =
factory.CreateRenderingParams().unwrap().cast().unwrap();
FontInfo {
- gamma_ratios: Self::get_gamma_ratios(render_params.GetGamma()),
+ gamma_ratios: get_gamma_correction_ratios(render_params.GetGamma()),
grayscale_enhanced_contrast: render_params.GetGrayscaleEnhancedContrast(),
}
})
}
-
- // Gamma ratios for brightening/darkening edges for better contrast
- // https://github.com/microsoft/terminal/blob/1283c0f5b99a2961673249fa77c6b986efb5086c/src/renderer/atlas/dwrite.cpp#L50
- fn get_gamma_ratios(gamma: f32) -> [f32; 4] {
- const GAMMA_INCORRECT_TARGET_RATIOS: [[f32; 4]; 13] = [
- [0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0, 0.0000 / 4.0], // gamma = 1.0
- [0.0166 / 4.0, -0.0807 / 4.0, 0.2227 / 4.0, -0.0751 / 4.0], // gamma = 1.1
- [0.0350 / 4.0, -0.1760 / 4.0, 0.4325 / 4.0, -0.1370 / 4.0], // gamma = 1.2
- [0.0543 / 4.0, -0.2821 / 4.0, 0.6302 / 4.0, -0.1876 / 4.0], // gamma = 1.3
- [0.0739 / 4.0, -0.3963 / 4.0, 0.8167 / 4.0, -0.2287 / 4.0], // gamma = 1.4
- [0.0933 / 4.0, -0.5161 / 4.0, 0.9926 / 4.0, -0.2616 / 4.0], // gamma = 1.5
- [0.1121 / 4.0, -0.6395 / 4.0, 1.1588 / 4.0, -0.2877 / 4.0], // gamma = 1.6
- [0.1300 / 4.0, -0.7649 / 4.0, 1.3159 / 4.0, -0.3080 / 4.0], // gamma = 1.7
- [0.1469 / 4.0, -0.8911 / 4.0, 1.4644 / 4.0, -0.3234 / 4.0], // gamma = 1.8
- [0.1627 / 4.0, -1.0170 / 4.0, 1.6051 / 4.0, -0.3347 / 4.0], // gamma = 1.9
- [0.1773 / 4.0, -1.1420 / 4.0, 1.7385 / 4.0, -0.3426 / 4.0], // gamma = 2.0
- [0.1908 / 4.0, -1.2652 / 4.0, 1.8650 / 4.0, -0.3476 / 4.0], // gamma = 2.1
- [0.2031 / 4.0, -1.3864 / 4.0, 1.9851 / 4.0, -0.3501 / 4.0], // gamma = 2.2
- ];
-
- const NORM13: f32 = ((0x10000 as f64) / (255.0 * 255.0) * 4.0) as f32;
- const NORM24: f32 = ((0x100 as f64) / (255.0) * 4.0) as f32;
-
- let index = ((gamma * 10.0).round() as usize).clamp(10, 22) - 10;
- let ratios = GAMMA_INCORRECT_TARGET_RATIOS[index];
-
- [
- ratios[0] * NORM13,
- ratios[1] * NORM24,
- ratios[2] * NORM13,
- ratios[3] * NORM24,
- ]
- }
}
impl DirectXResources {
@@ -179,15 +179,15 @@ impl Item for ImageView {
_workspace_id: Option<WorkspaceId>,
_: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| Self {
+ Some(cx.new(|cx| Self {
image_item: self.image_item.clone(),
project: self.project.clone(),
focus_handle: cx.focus_handle(),
- })))
+ }))
}
fn has_deleted_file(&self, cx: &App) -> bool {
@@ -2008,7 +2008,7 @@ impl Buffer {
self.end_transaction(cx)
}
- fn has_unsaved_edits(&self) -> bool {
+ pub fn has_unsaved_edits(&self) -> bool {
let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
if last_version == self.version {
@@ -1,7 +1,6 @@
use gpui::{
Action, App, AppContext as _, Entity, EventEmitter, FocusHandle, Focusable,
- KeyBindingContextPredicate, KeyContext, Keystroke, MouseButton, Render, Subscription, Task,
- actions,
+ KeyBindingContextPredicate, KeyContext, Keystroke, MouseButton, Render, Subscription, actions,
};
use itertools::Itertools;
use serde_json::json;
@@ -158,11 +157,11 @@ impl Item for KeyContextView {
_workspace_id: Option<workspace::WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| KeyContextView::new(window, cx))))
+ Some(cx.new(|cx| KeyContextView::new(window, cx)))
}
}
@@ -3,7 +3,7 @@ use copilot::Copilot;
use editor::{Editor, EditorEvent, actions::MoveToEnd, scroll::Autoscroll};
use gpui::{
AnyView, App, Context, Corner, Entity, EventEmitter, FocusHandle, Focusable, IntoElement,
- ParentElement, Render, Styled, Subscription, Task, WeakEntity, Window, actions, div,
+ ParentElement, Render, Styled, Subscription, WeakEntity, Window, actions, div,
};
use itertools::Itertools;
use language::{LanguageServerId, language_settings::SoftWrap};
@@ -763,11 +763,11 @@ impl Item for LspLogView {
_workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| {
+ Some(cx.new(|cx| {
let mut new_view = Self::new(self.project.clone(), self.log_store.clone(), window, cx);
if let Some(server_id) = self.current_server_id {
match self.active_entry_kind {
@@ -778,7 +778,7 @@ impl Item for LspLogView {
}
}
new_view
- })))
+ }))
}
}
@@ -3,7 +3,7 @@ use editor::{Anchor, Editor, ExcerptId, SelectionEffects, scroll::Autoscroll};
use gpui::{
App, AppContext as _, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
Hsla, InteractiveElement, IntoElement, MouseButton, MouseDownEvent, MouseMoveEvent,
- ParentElement, Render, ScrollStrategy, SharedString, Styled, Task, UniformListScrollHandle,
+ ParentElement, Render, ScrollStrategy, SharedString, Styled, UniformListScrollHandle,
WeakEntity, Window, actions, div, rems, uniform_list,
};
use language::{Buffer, OwnedSyntaxLayer};
@@ -573,17 +573,17 @@ impl Item for SyntaxTreeView {
_: Option<workspace::WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| {
+ Some(cx.new(|cx| {
let mut clone = Self::new(self.workspace_handle.clone(), None, window, cx);
if let Some(editor) = &self.editor {
clone.set_editor(editor.editor.clone(), window, cx)
}
clone
- })))
+ }))
}
}
@@ -17,6 +17,7 @@ pub fn make_file_finder_include_ignored_an_enum(value: &mut Value) -> Result<()>
Value::Bool(true) => Value::String("all".to_string()),
Value::Bool(false) => Value::String("indexed".to_string()),
Value::Null => Value::String("smart".to_string()),
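+ // Already-migrated string values pass through unchanged, keeping the migration idempotent.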
+ Value::String(s) if s == "all" || s == "indexed" || s == "smart" => return Ok(()),
_ => anyhow::bail!("Expected include_ignored to be a boolean or null"),
};
Ok(())
@@ -366,7 +366,13 @@ mod tests {
#[track_caller]
fn assert_migrate_settings(input: &str, output: Option<&str>) {
let migrated = migrate_settings(input).unwrap();
- assert_migrated_correctly(migrated, output);
+ assert_migrated_correctly(migrated.clone(), output);
+
+ // expect that rerunning the migration does not result in another migration
+ if let Some(migrated) = migrated {
+ let rerun = migrate_settings(&migrated).unwrap();
+ assert_migrated_correctly(rerun, None);
+ }
}
#[track_caller]
@@ -376,7 +382,13 @@ mod tests {
output: Option<&str>,
) {
let migrated = run_migrations(input, migrations).unwrap();
- assert_migrated_correctly(migrated, output);
+ assert_migrated_correctly(migrated.clone(), output);
+
+ // expect that rerunning the migration does not result in another migration
+ if let Some(migrated) = migrated {
+ let rerun = run_migrations(&migrated, migrations).unwrap();
+ assert_migrated_correctly(rerun, None);
+ }
}
#[test]
@@ -383,14 +383,14 @@ impl Item for Onboarding {
_workspace_id: Option<WorkspaceId>,
_: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>> {
- Task::ready(Some(cx.new(|cx| Onboarding {
+ ) -> Option<Entity<Self>> {
+ Some(cx.new(|cx| Onboarding {
workspace: self.workspace.clone(),
user_store: self.user_store.clone(),
scroll_handle: ScrollHandle::new(),
focus_handle: cx.focus_handle(),
_settings_subscription: cx.observe_global::<SettingsStore>(move |_, cx| cx.notify()),
- })))
+ }))
}
fn to_item_events(event: &Self::Event, mut f: impl FnMut(workspace::item::ItemEvent)) {
@@ -73,7 +73,6 @@ serde_json.workspace = true
settings.workspace = true
sha2.workspace = true
shellexpand.workspace = true
-shlex.workspace = true
smallvec.workspace = true
smol.workspace = true
snippet.workspace = true
@@ -1,6 +1,7 @@
use std::{
any::Any,
borrow::Borrow,
+ collections::HashSet,
path::{Path, PathBuf},
str::FromStr as _,
sync::Arc,
@@ -126,13 +127,198 @@ enum AgentServerStoreState {
pub struct AgentServerStore {
state: AgentServerStoreState,
external_agents: HashMap<ExternalAgentServerName, Box<dyn ExternalAgentServer>>,
+ agent_icons: HashMap<ExternalAgentServerName, SharedString>,
}
pub struct AgentServersUpdated;
impl EventEmitter<AgentServersUpdated> for AgentServerStore {}
+#[cfg(test)]
+mod ext_agent_tests {
+ use super::*;
+ use std::fmt::Write as _;
+
+ // Helper to build a store in Collab mode so we can mutate internal maps without
+ // needing to spin up a full project environment.
+ fn collab_store() -> AgentServerStore {
+ AgentServerStore {
+ state: AgentServerStoreState::Collab,
+ external_agents: HashMap::default(),
+ agent_icons: HashMap::default(),
+ }
+ }
+
+ // A simple fake that implements ExternalAgentServer without needing async plumbing.
+ struct NoopExternalAgent;
+
+ impl ExternalAgentServer for NoopExternalAgent {
+ fn get_command(
+ &mut self,
+ _root_dir: Option<&str>,
+ _extra_env: HashMap<String, String>,
+ _status_tx: Option<watch::Sender<SharedString>>,
+ _new_version_available_tx: Option<watch::Sender<Option<String>>>,
+ _cx: &mut AsyncApp,
+ ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+ Task::ready(Ok((
+ AgentServerCommand {
+ path: PathBuf::from("noop"),
+ args: Vec::new(),
+ env: None,
+ },
+ "".to_string(),
+ None,
+ )))
+ }
+
+ fn as_any_mut(&mut self) -> &mut dyn Any {
+ self
+ }
+ }
+
+ #[test]
+ fn external_agent_server_name_display() {
+ let name = ExternalAgentServerName(SharedString::from("Ext: Tool"));
+ let mut s = String::new();
+ write!(&mut s, "{name}").unwrap();
+ assert_eq!(s, "Ext: Tool");
+ }
+
+ #[test]
+ fn sync_extension_agents_removes_previous_extension_entries() {
+ let mut store = collab_store();
+
+ // Seed with a couple of agents that will be replaced by extensions
+ store.external_agents.insert(
+ ExternalAgentServerName(SharedString::from("foo-agent")),
+ Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+ );
+ store.external_agents.insert(
+ ExternalAgentServerName(SharedString::from("bar-agent")),
+ Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+ );
+ store.external_agents.insert(
+ ExternalAgentServerName(SharedString::from("custom")),
+ Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+ );
+
+ // Simulate the removal phase: if we're syncing extensions that provide
+ // "foo-agent" and "bar-agent", those should be removed first
+ let extension_agent_names: HashSet<String> =
+ ["foo-agent".to_string(), "bar-agent".to_string()]
+ .into_iter()
+ .collect();
+
+ let keys_to_remove: Vec<_> = store
+ .external_agents
+ .keys()
+ .filter(|name| extension_agent_names.contains(name.0.as_ref()))
+ .cloned()
+ .collect();
+
+ for key in keys_to_remove {
+ store.external_agents.remove(&key);
+ }
+
+ // Only the custom entry should remain.
+ let remaining: Vec<_> = store
+ .external_agents
+ .keys()
+ .map(|k| k.0.to_string())
+ .collect();
+ assert_eq!(remaining, vec!["custom".to_string()]);
+ }
+}
+
impl AgentServerStore {
+ /// Synchronizes extension-provided agent servers with the store.
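+ /// Any agents previously registered under names provided by these manifests are
+ /// removed and re-registered, and extension icon paths are resolved to absolute paths.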
+ pub fn sync_extension_agents<'a, I>(
+ &mut self,
+ manifests: I,
+ extensions_dir: PathBuf,
+ cx: &mut Context<Self>,
+ ) where
+ I: IntoIterator<Item = (&'a str, &'a extension::ExtensionManifest)>,
+ {
+ // Collect manifests first so we can iterate twice
+ let manifests: Vec<_> = manifests.into_iter().collect();
+
+ // Remove existing extension-provided agents by tracking which ones we're about to add
+ let extension_agent_names: HashSet<_> = manifests
+ .iter()
+ .flat_map(|(_, manifest)| manifest.agent_servers.keys().map(|k| k.to_string()))
+ .collect();
+
+ let keys_to_remove: Vec<_> = self
+ .external_agents
+ .keys()
+ .filter(|name| {
+ // Remove if it matches an extension agent name from any extension
+ extension_agent_names.contains(name.0.as_ref())
+ })
+ .cloned()
+ .collect();
+ for key in &keys_to_remove {
+ self.external_agents.remove(key);
+ self.agent_icons.remove(key);
+ }
+
+ // Insert agent servers from extension manifests
+ match &self.state {
+ AgentServerStoreState::Local {
+ project_environment,
+ fs,
+ http_client,
+ ..
+ } => {
+ for (ext_id, manifest) in manifests {
+ for (agent_name, agent_entry) in &manifest.agent_servers {
+ let display = SharedString::from(agent_entry.name.clone());
+
+ // Store absolute icon path if provided, resolving symlinks for dev extensions
+ if let Some(icon) = &agent_entry.icon {
+ let icon_path = extensions_dir.join(ext_id).join(icon);
+ // Canonicalize to resolve symlinks (dev extensions are symlinked)
+ let absolute_icon_path = icon_path
+ .canonicalize()
+ .unwrap_or(icon_path)
+ .to_string_lossy()
+ .to_string();
+ self.agent_icons.insert(
+ ExternalAgentServerName(display.clone()),
+ SharedString::from(absolute_icon_path),
+ );
+ }
+
+ // Archive-based launcher (download from URL)
+ self.external_agents.insert(
+ ExternalAgentServerName(display),
+ Box::new(LocalExtensionArchiveAgent {
+ fs: fs.clone(),
+ http_client: http_client.clone(),
+ project_environment: project_environment.clone(),
+ extension_id: Arc::from(ext_id),
+ agent_id: agent_name.clone(),
+ targets: agent_entry.targets.clone(),
+ env: agent_entry.env.clone(),
+ }) as Box<dyn ExternalAgentServer>,
+ );
+ }
+ }
+ }
+ _ => {
+ // Only local projects support local extension agents
+ }
+ }
+
+ cx.emit(AgentServersUpdated);
+ }
+
+ pub fn agent_icon(&self, name: &ExternalAgentServerName) -> Option<SharedString> {
+ self.agent_icons.get(name).cloned()
+ }
+
pub fn init_remote(session: &AnyProtoClient) {
session.add_entity_message_handler(Self::handle_external_agents_updated);
session.add_entity_message_handler(Self::handle_loading_status_updated);
@@ -202,7 +388,7 @@ impl AgentServerStore {
.gemini
.as_ref()
.and_then(|settings| settings.ignore_system_version)
- .unwrap_or(true),
+ .unwrap_or(false),
}),
);
self.external_agents.insert(
@@ -279,7 +465,9 @@ impl AgentServerStore {
_subscriptions: [subscription],
},
external_agents: Default::default(),
+ agent_icons: Default::default(),
};
+ if let Some(_events) = extension::ExtensionEvents::try_global(cx) {}
this.agent_servers_settings_changed(cx);
this
}
@@ -288,7 +476,7 @@ impl AgentServerStore {
// Set up the builtin agents here so they're immediately available in
// remote projects--we know that the HeadlessProject on the other end
// will have them.
- let external_agents = [
+ let external_agents: [(ExternalAgentServerName, Box<dyn ExternalAgentServer>); 3] = [
(
CLAUDE_CODE_NAME.into(),
Box::new(RemoteExternalAgentServer {
@@ -319,16 +507,15 @@ impl AgentServerStore {
new_version_available_tx: None,
}) as Box<dyn ExternalAgentServer>,
),
- ]
- .into_iter()
- .collect();
+ ];
Self {
state: AgentServerStoreState::Remote {
project_id,
upstream_client,
},
- external_agents,
+ external_agents: external_agents.into_iter().collect(),
+ agent_icons: HashMap::default(),
}
}
@@ -336,6 +523,7 @@ impl AgentServerStore {
Self {
state: AgentServerStoreState::Collab,
external_agents: Default::default(),
+ agent_icons: Default::default(),
}
}
@@ -392,7 +580,7 @@ impl AgentServerStore {
envelope: TypedEnvelope<proto::GetAgentServerCommand>,
mut cx: AsyncApp,
) -> Result<proto::AgentServerCommand> {
- let (command, root_dir, login) = this
+ let (command, root_dir, login_command) = this
.update(&mut cx, |this, cx| {
let AgentServerStoreState::Local {
downstream_client, ..
@@ -466,7 +654,7 @@ impl AgentServerStore {
.map(|env| env.into_iter().collect())
.unwrap_or_default(),
root_dir: root_dir,
- login: login.map(|login| login.to_proto()),
+ login: login_command.map(|cmd| cmd.to_proto()),
})
}
@@ -811,9 +999,7 @@ impl ExternalAgentServer for RemoteExternalAgentServer {
env: Some(command.env),
},
root_dir,
- response
- .login
- .map(|login| task::SpawnInTerminal::from_proto(login)),
+ None,
))
})
}
@@ -959,7 +1145,7 @@ impl ExternalAgentServer for LocalClaudeCode {
.unwrap_or_default();
env.insert("ANTHROPIC_API_KEY".into(), "".into());
- let (mut command, login) = if let Some(mut custom_command) = custom_command {
+ let (mut command, login_command) = if let Some(mut custom_command) = custom_command {
env.extend(custom_command.env.unwrap_or_default());
custom_command.env = Some(env);
(custom_command, None)
@@ -1000,7 +1186,11 @@ impl ExternalAgentServer for LocalClaudeCode {
};
command.env.get_or_insert_default().extend(extra_env);
- Ok((command, root_dir.to_string_lossy().into_owned(), login))
+ Ok((
+ command,
+ root_dir.to_string_lossy().into_owned(),
+ login_command,
+ ))
})
}
@@ -1080,10 +1270,15 @@ impl ExternalAgentServer for LocalCodex {
.into_iter()
.find(|asset| asset.name == asset_name)
.with_context(|| format!("no asset found matching `{asset_name:?}`"))?;
+ // Strip "sha256:" prefix from digest if present (GitHub API format)
+ let digest = asset
+ .digest
+ .as_deref()
+ .and_then(|d| d.strip_prefix("sha256:").or(Some(d)));
::http_client::github_download::download_server_binary(
&*http,
&asset.browser_download_url,
- asset.digest.as_deref(),
+ digest,
&version_dir,
if cfg!(target_os = "windows") && cfg!(target_arch = "x86_64") {
AssetKind::Zip
@@ -1127,11 +1322,7 @@ impl ExternalAgentServer for LocalCodex {
pub const CODEX_ACP_REPO: &str = "zed-industries/codex-acp";
-/// Assemble Codex release URL for the current OS/arch and the given version number.
-/// Returns None if the current target is unsupported.
-/// Example output:
-/// https://github.com/zed-industries/codex-acp/releases/download/v{version}/codex-acp-{version}-{arch}-{platform}.{ext}
-fn asset_name(version: &str) -> Option<String> {
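+/// Returns the (arch, platform, archive extension) triple for the current target, or None if unsupported.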
+fn get_platform_info() -> Option<(&'static str, &'static str, &'static str)> {
let arch = if cfg!(target_arch = "x86_64") {
"x86_64"
} else if cfg!(target_arch = "aarch64") {
@@ -1157,14 +1348,220 @@ fn asset_name(version: &str) -> Option<String> {
"tar.gz"
};
+ Some((arch, platform, ext))
+}
+
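+/// Assembles the codex-acp release asset name for the current OS/arch and the given
+/// version, e.g. `codex-acp-{version}-{arch}-{platform}.{ext}`.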
+fn asset_name(version: &str) -> Option<String> {
+ let (arch, platform, ext) = get_platform_info()?;
Some(format!("codex-acp-{version}-{arch}-{platform}.{ext}"))
}
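+/// An agent server provided by an extension manifest. The binary is downloaded as a
+/// prebuilt archive for the current platform and cached under the Zed data directory.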
+struct LocalExtensionArchiveAgent {
+ fs: Arc<dyn Fs>,
+ http_client: Arc<dyn HttpClient>,
+ project_environment: Entity<ProjectEnvironment>,
+ extension_id: Arc<str>,
+ agent_id: Arc<str>,
+ targets: HashMap<String, extension::TargetConfig>,
+ env: HashMap<String, String>,
+}
+
struct LocalCustomAgent {
project_environment: Entity<ProjectEnvironment>,
command: AgentServerCommand,
}
+impl ExternalAgentServer for LocalExtensionArchiveAgent {
+ fn get_command(
+ &mut self,
+ root_dir: Option<&str>,
+ extra_env: HashMap<String, String>,
+ _status_tx: Option<watch::Sender<SharedString>>,
+ _new_version_available_tx: Option<watch::Sender<Option<String>>>,
+ cx: &mut AsyncApp,
+ ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+ let fs = self.fs.clone();
+ let http_client = self.http_client.clone();
+ let project_environment = self.project_environment.downgrade();
+ let extension_id = self.extension_id.clone();
+ let agent_id = self.agent_id.clone();
+ let targets = self.targets.clone();
+ let base_env = self.env.clone();
+
+ let root_dir: Arc<Path> = root_dir
+ .map(|root_dir| Path::new(root_dir))
+ .unwrap_or(paths::home_dir())
+ .into();
+
+ cx.spawn(async move |cx| {
+ // Get project environment
+ let mut env = project_environment
+ .update(cx, |project_environment, cx| {
+ project_environment.get_local_directory_environment(
+ &Shell::System,
+ root_dir.clone(),
+ cx,
+ )
+ })?
+ .await
+ .unwrap_or_default();
+
+ // Merge manifest env and extra env
+ env.extend(base_env);
+ env.extend(extra_env);
+
+ let cache_key = format!("{}/{}", extension_id, agent_id);
+ let dir = paths::data_dir().join("external_agents").join(&cache_key);
+ fs.create_dir(&dir).await?;
+
+ // Determine platform key
+ let os = if cfg!(target_os = "macos") {
+ "darwin"
+ } else if cfg!(target_os = "linux") {
+ "linux"
+ } else if cfg!(target_os = "windows") {
+ "windows"
+ } else {
+ anyhow::bail!("unsupported OS");
+ };
+
+ let arch = if cfg!(target_arch = "aarch64") {
+ "aarch64"
+ } else if cfg!(target_arch = "x86_64") {
+ "x86_64"
+ } else {
+ anyhow::bail!("unsupported architecture");
+ };
+
+ let platform_key = format!("{}-{}", os, arch);
+ let target_config = targets.get(&platform_key).with_context(|| {
+ format!(
+ "no target specified for platform '{}'. Available platforms: {}",
+ platform_key,
+ targets
+ .keys()
+ .map(|k| k.as_str())
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ })?;
+
+ let archive_url = &target_config.archive;
+
+ // Use URL as version identifier for caching
+ // Hash the URL to get a stable directory name
+ use std::collections::hash_map::DefaultHasher;
+ use std::hash::{Hash, Hasher};
+ let mut hasher = DefaultHasher::new();
+ archive_url.hash(&mut hasher);
+ let url_hash = hasher.finish();
+ let version_dir = dir.join(format!("v_{:x}", url_hash));
+
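+ // Only download and extract the archive if this URL has not been cached yet.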
+ if !fs.is_dir(&version_dir).await {
+ // Determine SHA256 for verification
+ let sha256 = if let Some(provided_sha) = &target_config.sha256 {
+ // Use provided SHA256
+ Some(provided_sha.clone())
+ } else if archive_url.starts_with("https://github.com/") {
+ // Try to fetch SHA256 from GitHub API
+ // Parse URL to extract repo and tag/file info
+ // Format: https://github.com/owner/repo/releases/download/tag/file.zip
+ if let Some(caps) = archive_url.strip_prefix("https://github.com/") {
+ let parts: Vec<&str> = caps.split('/').collect();
+ if parts.len() >= 6 && parts[2] == "releases" && parts[3] == "download" {
+ let repo = format!("{}/{}", parts[0], parts[1]);
+ let tag = parts[4];
+ let filename = parts[5..].join("/");
+
+ // Try to get release info from GitHub
+ if let Ok(release) = ::http_client::github::get_release_by_tag_name(
+ &repo,
+ tag,
+ http_client.clone(),
+ )
+ .await
+ {
+ // Find matching asset
+ if let Some(asset) =
+ release.assets.iter().find(|a| a.name == filename)
+ {
+ // Strip "sha256:" prefix if present
+ asset.digest.as_ref().and_then(|d| {
+ d.strip_prefix("sha256:")
+ .map(|s| s.to_string())
+ .or_else(|| Some(d.clone()))
+ })
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ } else {
+ None
+ };
+
+ // Determine archive type from URL
+ let asset_kind = if archive_url.ends_with(".zip") {
+ AssetKind::Zip
+ } else if archive_url.ends_with(".tar.gz") || archive_url.ends_with(".tgz") {
+ AssetKind::TarGz
+ } else {
+ anyhow::bail!("unsupported archive type in URL: {}", archive_url);
+ };
+
+ // Download and extract
+ ::http_client::github_download::download_server_binary(
+ &*http_client,
+ archive_url,
+ sha256.as_deref(),
+ &version_dir,
+ asset_kind,
+ )
+ .await?;
+ }
+
+ // Validate and resolve cmd path
+ let cmd = &target_config.cmd;
+ if cmd.contains("..") {
+ anyhow::bail!("command path cannot contain '..': {}", cmd);
+ }
+
+ let cmd_path = if cmd.starts_with("./") || cmd.starts_with(".\\") {
+ // Relative to extraction directory
+ version_dir.join(&cmd[2..])
+ } else {
+ // Reject commands that are not relative to the extraction directory.
+ anyhow::bail!("command must be relative (start with './'): {}", cmd);
+ };
+
+ anyhow::ensure!(
+ fs.is_file(&cmd_path).await,
+ "Missing command {} after extraction",
+ cmd_path.to_string_lossy()
+ );
+
+ let command = AgentServerCommand {
+ path: cmd_path,
+ args: target_config.args.clone(),
+ env: Some(env),
+ };
+
+ Ok((command, root_dir.to_string_lossy().into_owned(), None))
+ })
+ }
+
+ fn as_any_mut(&mut self) -> &mut dyn Any {
+ self
+ }
+}
+
impl ExternalAgentServer for LocalCustomAgent {
fn get_command(
&mut self,
@@ -1203,42 +1600,6 @@ impl ExternalAgentServer for LocalCustomAgent {
}
}
-#[cfg(test)]
-mod tests {
- #[test]
- fn assembles_codex_release_url_for_current_target() {
- let version_number = "0.1.0";
-
- // This test fails the build if we are building a version of Zed
- // which does not have a known build of codex-acp, to prevent us
- // from accidentally doing a release on a new target without
- // realizing that codex-acp support will not work on that target!
- //
- // Additionally, it verifies that our logic for assembling URLs
- // correctly resolves to a known-good URL on each of our targets.
- let allowed = [
- "codex-acp-0.1.0-aarch64-apple-darwin.tar.gz",
- "codex-acp-0.1.0-aarch64-pc-windows-msvc.tar.gz",
- "codex-acp-0.1.0-aarch64-unknown-linux-gnu.tar.gz",
- "codex-acp-0.1.0-x86_64-apple-darwin.tar.gz",
- "codex-acp-0.1.0-x86_64-pc-windows-msvc.zip",
- "codex-acp-0.1.0-x86_64-unknown-linux-gnu.tar.gz",
- ];
-
- if let Some(url) = super::asset_name(version_number) {
- assert!(
- allowed.contains(&url.as_str()),
- "Assembled asset name {} not in allowed list",
- url
- );
- } else {
- panic!(
- "This target does not have a known codex-acp release! We should fix this by building a release of codex-acp for this target, as otherwise codex-acp will not be usable with this Zed build."
- );
- }
- }
-}
-
pub const GEMINI_NAME: &'static str = "gemini";
pub const CLAUDE_CODE_NAME: &'static str = "claude";
pub const CODEX_NAME: &'static str = "codex";
@@ -1331,3 +1692,200 @@ impl settings::Settings for AllAgentServersSettings {
}
}
}
+
+#[cfg(test)]
+mod extension_agent_tests {
+ use super::*;
+ use gpui::TestAppContext;
+ use std::sync::Arc;
+
+ #[test]
+ fn extension_agent_constructs_proper_display_names() {
+ // Verify the display name format for extension-provided agents
+ let name1 = ExternalAgentServerName(SharedString::from("Extension: Agent"));
+ assert!(name1.0.contains(": "));
+
+ let name2 = ExternalAgentServerName(SharedString::from("MyExt: MyAgent"));
+ assert_eq!(name2.0, "MyExt: MyAgent");
+
+ // Non-extension agents shouldn't have the separator
+ let custom = ExternalAgentServerName(SharedString::from("custom"));
+ assert!(!custom.0.contains(": "));
+ }
+
+ struct NoopExternalAgent;
+
+ impl ExternalAgentServer for NoopExternalAgent {
+ fn get_command(
+ &mut self,
+ _root_dir: Option<&str>,
+ _extra_env: HashMap<String, String>,
+ _status_tx: Option<watch::Sender<SharedString>>,
+ _new_version_available_tx: Option<watch::Sender<Option<String>>>,
+ _cx: &mut AsyncApp,
+ ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+ Task::ready(Ok((
+ AgentServerCommand {
+ path: PathBuf::from("noop"),
+ args: Vec::new(),
+ env: None,
+ },
+ "".to_string(),
+ None,
+ )))
+ }
+
+ fn as_any_mut(&mut self) -> &mut dyn Any {
+ self
+ }
+ }
+
+ #[test]
+ fn sync_removes_only_extension_provided_agents() {
+ let mut store = AgentServerStore {
+ state: AgentServerStoreState::Collab,
+ external_agents: HashMap::default(),
+ agent_icons: HashMap::default(),
+ };
+
+ // Seed with extension agents (contain ": ") and custom agents (don't contain ": ")
+ store.external_agents.insert(
+ ExternalAgentServerName(SharedString::from("Ext1: Agent1")),
+ Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+ );
+ store.external_agents.insert(
+ ExternalAgentServerName(SharedString::from("Ext2: Agent2")),
+ Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+ );
+ store.external_agents.insert(
+ ExternalAgentServerName(SharedString::from("custom-agent")),
+ Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+ );
+
+ // Simulate removal phase
+ let keys_to_remove: Vec<_> = store
+ .external_agents
+ .keys()
+ .filter(|name| name.0.contains(": "))
+ .cloned()
+ .collect();
+
+ for key in keys_to_remove {
+ store.external_agents.remove(&key);
+ }
+
+ // Only custom-agent should remain
+ assert_eq!(store.external_agents.len(), 1);
+ assert!(
+ store
+ .external_agents
+ .contains_key(&ExternalAgentServerName(SharedString::from("custom-agent")))
+ );
+ }
+
+ #[test]
+ fn archive_launcher_constructs_with_all_fields() {
+ use extension::AgentServerManifestEntry;
+
+ let mut env = HashMap::default();
+ env.insert("GITHUB_TOKEN".into(), "secret".into());
+
+ let mut targets = HashMap::default();
+ targets.insert(
+ "darwin-aarch64".to_string(),
+ extension::TargetConfig {
+ archive:
+ "https://github.com/owner/repo/releases/download/v1.0.0/agent-darwin-arm64.zip"
+ .into(),
+ cmd: "./agent".into(),
+ args: vec![],
+ sha256: None,
+ },
+ );
+
+ let _entry = AgentServerManifestEntry {
+ name: "GitHub Agent".into(),
+ targets,
+ env,
+ icon: None,
+ };
+
+ // Verify display name construction
+ let expected_name = ExternalAgentServerName(SharedString::from("GitHub Agent"));
+ assert_eq!(expected_name.0, "GitHub Agent");
+ }
+
+ #[gpui::test]
+ async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAppContext) {
+ let fs = fs::FakeFs::new(cx.background_executor.clone());
+ let http_client = http_client::FakeHttpClient::with_404_response();
+ let project_environment = cx.new(|cx| crate::ProjectEnvironment::new(None, cx));
+
+ let agent = LocalExtensionArchiveAgent {
+ fs,
+ http_client,
+ project_environment,
+ extension_id: Arc::from("my-extension"),
+ agent_id: Arc::from("my-agent"),
+ targets: {
+ let mut map = HashMap::default();
+ map.insert(
+ "darwin-aarch64".to_string(),
+ extension::TargetConfig {
+ archive: "https://example.com/my-agent-darwin-arm64.zip".into(),
+ cmd: "./my-agent".into(),
+ args: vec!["--serve".into()],
+ sha256: None,
+ },
+ );
+ map
+ },
+ env: {
+ let mut map = HashMap::default();
+ map.insert("PORT".into(), "8080".into());
+ map
+ },
+ };
+
+ // Verify agent is properly constructed
+ assert_eq!(agent.extension_id.as_ref(), "my-extension");
+ assert_eq!(agent.agent_id.as_ref(), "my-agent");
+ assert_eq!(agent.env.get("PORT"), Some(&"8080".to_string()));
+ assert!(agent.targets.contains_key("darwin-aarch64"));
+ }
+
+ #[test]
+ fn sync_extension_agents_registers_archive_launcher() {
+ use extension::AgentServerManifestEntry;
+
+ let expected_name = ExternalAgentServerName(SharedString::from("Release Agent"));
+ assert_eq!(expected_name.0, "Release Agent");
+
+ // Verify the manifest entry structure for archive-based installation
+ let mut env = HashMap::default();
+ env.insert("API_KEY".into(), "secret".into());
+
+ let mut targets = HashMap::default();
+ targets.insert(
+ "linux-x86_64".to_string(),
+ extension::TargetConfig {
+ archive: "https://github.com/org/project/releases/download/v2.1.0/release-agent-linux-x64.tar.gz".into(),
+ cmd: "./release-agent".into(),
+ args: vec!["serve".into()],
+ sha256: None,
+ },
+ );
+
+ let manifest_entry = AgentServerManifestEntry {
+ name: "Release Agent".into(),
+ targets: targets.clone(),
+ env,
+ icon: None,
+ };
+
+ // Verify target config is present
+ assert!(manifest_entry.targets.contains_key("linux-x86_64"));
+ let target = manifest_entry.targets.get("linux-x86_64").unwrap();
+ assert_eq!(target.cmd, "./release-agent");
+ }
+}
@@ -909,7 +909,14 @@ impl BufferStore {
};
cx.spawn(async move |this, cx| {
task.await?;
- this.update(cx, |_, cx| {
+ this.update(cx, |this, cx| {
+ if let Some(file) = old_file.clone() {
+ this.path_to_buffer_id.remove(&ProjectPath {
+ worktree_id: file.worktree_id(cx),
+ path: file.path().clone(),
+ });
+ }
+
cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
})
})
@@ -4,7 +4,7 @@ use language::Buffer;
use remote::RemoteClient;
use rpc::proto::{self, REMOTE_SERVER_PROJECT_ID};
use std::{collections::VecDeque, path::Path, sync::Arc};
-use task::Shell;
+use task::{Shell, shell_to_proto};
use util::ResultExt;
use worktree::Worktree;
@@ -198,7 +198,7 @@ impl ProjectEnvironment {
.proto_client()
.request(proto::GetDirectoryEnvironment {
project_id: REMOTE_SERVER_PROJECT_ID,
- shell: Some(shell.clone().to_proto()),
+ shell: Some(shell_to_proto(shell.clone())),
directory: abs_path.to_string_lossy().to_string(),
});
cx.spawn(async move |_, _| {
@@ -1,3 +1,4 @@
+pub mod branch_diff;
mod conflict_set;
pub mod git_traversal;
@@ -30,7 +31,8 @@ use git::{
},
stash::{GitStash, StashEntry},
status::{
- FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
+ DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
+ UnmergedStatus, UnmergedStatusCode,
},
};
use gpui::{
@@ -55,6 +57,7 @@ use std::{
mem,
ops::Range,
path::{Path, PathBuf},
+ str::FromStr,
sync::{
Arc,
atomic::{self, AtomicU64},
@@ -432,6 +435,8 @@ impl GitStore {
client.add_entity_request_handler(Self::handle_askpass);
client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
client.add_entity_request_handler(Self::handle_git_diff);
+ client.add_entity_request_handler(Self::handle_tree_diff);
+ client.add_entity_request_handler(Self::handle_get_blob_content);
client.add_entity_request_handler(Self::handle_open_unstaged_diff);
client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
client.add_entity_message_handler(Self::handle_update_diff_bases);
@@ -619,6 +624,52 @@ impl GitStore {
cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
}
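+ /// Opens a diff of `buffer` against the blob at `oid`; when `oid` is `None`, no base
+ /// text is set (the file did not exist at the base). The buffer's unstaged diff is
+ /// attached as the secondary diff.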
+ pub fn open_diff_since(
+ &mut self,
+ oid: Option<git::Oid>,
+ buffer: Entity<Buffer>,
+ repo: Entity<Repository>,
+ languages: Arc<LanguageRegistry>,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<BufferDiff>>> {
+ cx.spawn(async move |this, cx| {
+ let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
+ let content = match oid {
+ None => None,
+ Some(oid) => Some(
+ repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
+ .await?,
+ ),
+ };
+ let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
+
+ buffer_diff
+ .update(cx, |buffer_diff, cx| {
+ buffer_diff.set_base_text(
+ content.map(Arc::new),
+ buffer_snapshot.language().cloned(),
+ Some(languages.clone()),
+ buffer_snapshot.text,
+ cx,
+ )
+ })?
+ .await?;
+ let unstaged_diff = this
+ .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
+ .await?;
+ buffer_diff.update(cx, |buffer_diff, _| {
+ buffer_diff.set_secondary_diff(unstaged_diff);
+ })?;
+
+ this.update(cx, |_, cx| {
+ cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
+ .detach();
+ })?;
+
+ Ok(buffer_diff)
+ })
+ }
+
pub fn open_uncommitted_diff(
&mut self,
buffer: Entity<Buffer>,
@@ -2168,6 +2219,75 @@ impl GitStore {
Ok(proto::GitDiffResponse { diff })
}
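+ /// Handles a GetTreeDiff request from a remote peer by running the tree diff on the
+ /// local repository and serializing the resulting entries into proto.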
+ async fn handle_tree_diff(
+ this: Entity<Self>,
+ request: TypedEnvelope<proto::GetTreeDiff>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::GetTreeDiffResponse> {
+ let repository_id = RepositoryId(request.payload.repository_id);
+ let diff_type = if request.payload.is_merge {
+ DiffTreeType::MergeBase {
+ base: request.payload.base.into(),
+ head: request.payload.head.into(),
+ }
+ } else {
+ DiffTreeType::Since {
+ base: request.payload.base.into(),
+ head: request.payload.head.into(),
+ }
+ };
+
+ let diff = this
+ .update(&mut cx, |this, cx| {
+ let repository = this.repositories().get(&repository_id)?;
+ Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
+ })?
+ .context("missing repository")?
+ .await??;
+
+ Ok(proto::GetTreeDiffResponse {
+ entries: diff
+ .entries
+ .into_iter()
+ .map(|(path, status)| proto::TreeDiffStatus {
+ path: path.0.to_proto(),
+ status: match status {
+ TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
+ TreeDiffStatus::Modified { .. } => {
+ proto::tree_diff_status::Status::Modified.into()
+ }
+ TreeDiffStatus::Deleted { .. } => {
+ proto::tree_diff_status::Status::Deleted.into()
+ }
+ },
+ oid: match status {
+ TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
+ Some(old.to_string())
+ }
+ TreeDiffStatus::Added => None,
+ },
+ })
+ .collect(),
+ })
+ }
+
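+ /// Handles a GetBlobContent request from a remote peer by loading the blob from the local repository.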
+ async fn handle_get_blob_content(
+ this: Entity<Self>,
+ request: TypedEnvelope<proto::GetBlobContent>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::GetBlobContentResponse> {
+ let oid = git::Oid::from_str(&request.payload.oid)?;
+ let repository_id = RepositoryId(request.payload.repository_id);
+ let content = this
+ .update(&mut cx, |this, cx| {
+ let repository = this.repositories().get(&repository_id)?;
+ Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
+ })?
+ .context("missing repository")?
+ .await?;
+ Ok(proto::GetBlobContentResponse { content })
+ }
+
async fn handle_open_unstaged_diff(
this: Entity<Self>,
request: TypedEnvelope<proto::OpenUnstagedDiff>,
@@ -3618,6 +3738,7 @@ impl Repository {
.read(cx)
.file()
.is_some_and(|file| file.disk_state().exists())
+ && buffer.read(cx).has_unsaved_edits()
{
save_futures.push(buffer_store.save_buffer(buffer, cx));
}
@@ -3684,6 +3805,7 @@ impl Repository {
.read(cx)
.file()
.is_some_and(|file| file.disk_state().exists())
+ && buffer.read(cx).has_unsaved_edits()
{
save_futures.push(buffer_store.save_buffer(buffer, cx));
}
@@ -4301,6 +4423,62 @@ impl Repository {
})
}
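+ /// Computes which paths differ between two trees, running the diff locally when
+ /// possible and otherwise proxying the request to the host over RPC.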
+ pub fn diff_tree(
+ &mut self,
+ diff_type: DiffTreeType,
+ _cx: &App,
+ ) -> oneshot::Receiver<Result<TreeDiff>> {
+ let repository_id = self.snapshot.id;
+ self.send_job(None, move |repo, _cx| async move {
+ match repo {
+ RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
+ RepositoryState::Remote { client, project_id } => {
+ let response = client
+ .request(proto::GetTreeDiff {
+ project_id: project_id.0,
+ repository_id: repository_id.0,
+ is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
+ base: diff_type.base().to_string(),
+ head: diff_type.head().to_string(),
+ })
+ .await?;
+
+ let entries = response
+ .entries
+ .into_iter()
+ .filter_map(|entry| {
+ let status = match entry.status() {
+ proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
+ proto::tree_diff_status::Status::Modified => {
+ TreeDiffStatus::Modified {
+ old: git::Oid::from_str(
+ &entry.oid.context("missing oid").log_err()?,
+ )
+ .log_err()?,
+ }
+ }
+ proto::tree_diff_status::Status::Deleted => {
+ TreeDiffStatus::Deleted {
+ old: git::Oid::from_str(
+ &entry.oid.context("missing oid").log_err()?,
+ )
+ .log_err()?,
+ }
+ }
+ };
+ Some((
+ RepoPath(RelPath::from_proto(&entry.path).log_err()?),
+ status,
+ ))
+ })
+ .collect();
+
+ Ok(TreeDiff { entries })
+ }
+ }
+ })
+ }
+
pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
let id = self.id;
self.send_job(None, move |repo, _cx| async move {
@@ -4773,6 +4951,25 @@ impl Repository {
cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
}
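+ /// Loads the content of the blob with the given object id, proxying to the host for remote repositories.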
+ fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
+ let repository_id = self.snapshot.id;
+ let rx = self.send_job(None, move |state, _| async move {
+ match state {
+ RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
+ RepositoryState::Remote { client, project_id } => {
+ let response = client
+ .request(proto::GetBlobContent {
+ project_id: project_id.to_proto(),
+ repository_id: repository_id.0,
+ oid: oid.to_string(),
+ })
+ .await?;
+ Ok(response.content)
+ }
+ }
+ });
+ cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
+ }
fn paths_changed(
&mut self,
@@ -0,0 +1,386 @@
+use anyhow::Result;
+use buffer_diff::BufferDiff;
+use collections::HashSet;
+use futures::StreamExt;
+use git::{
+ repository::RepoPath,
+ status::{DiffTreeType, FileStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus},
+};
+use gpui::{
+ App, AsyncWindowContext, Context, Entity, EventEmitter, SharedString, Subscription, Task,
+ WeakEntity, Window,
+};
+
+use language::Buffer;
+use text::BufferId;
+use util::ResultExt;
+
+use crate::{
+ Project,
+ git_store::{GitStoreEvent, Repository, RepositoryEvent},
+};
+
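+/// What the working tree is compared against: the current HEAD, or the merge base
+/// between HEAD and another ref.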
+#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
+pub enum DiffBase {
+ Head,
+ Merge { base_ref: SharedString },
+}
+
+impl DiffBase {
+ pub fn is_merge_base(&self) -> bool {
+ matches!(self, DiffBase::Merge { .. })
+ }
+}
+
+pub struct BranchDiff {
+ diff_base: DiffBase,
+ repo: Option<Entity<Repository>>,
+ project: Entity<Project>,
+ base_commit: Option<SharedString>,
+ head_commit: Option<SharedString>,
+ tree_diff: Option<TreeDiff>,
+ _subscription: Subscription,
+ update_needed: postage::watch::Sender<()>,
+ _task: Task<()>,
+}
+
+pub enum BranchDiffEvent {
+ FileListChanged,
+}
+
+impl EventEmitter<BranchDiffEvent> for BranchDiff {}
+
+impl BranchDiff {
+ pub fn new(
+ source: DiffBase,
+ project: Entity<Project>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Self {
+ let git_store = project.read(cx).git_store().clone();
+ let git_store_subscription = cx.subscribe_in(
+ &git_store,
+ window,
+ move |this, _git_store, event, _window, cx| match event {
+ GitStoreEvent::ActiveRepositoryChanged(_)
+ | GitStoreEvent::RepositoryUpdated(
+ _,
+ RepositoryEvent::StatusesChanged { full_scan: _ },
+ true,
+ )
+ | GitStoreEvent::ConflictsUpdated => {
+ cx.emit(BranchDiffEvent::FileListChanged);
+ *this.update_needed.borrow_mut() = ();
+ }
+ _ => {}
+ },
+ );
+
+ let (send, recv) = postage::watch::channel::<()>();
+ let worker = window.spawn(cx, {
+ let this = cx.weak_entity();
+ async |cx| Self::handle_status_updates(this, recv, cx).await
+ });
+ let repo = git_store.read(cx).active_repository();
+
+ Self {
+ diff_base: source,
+ repo,
+ project,
+ tree_diff: None,
+ base_commit: None,
+ head_commit: None,
+ _subscription: git_store_subscription,
+ _task: worker,
+ update_needed: send,
+ }
+ }
+
+ pub fn diff_base(&self) -> &DiffBase {
+ &self.diff_base
+ }
+
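+ /// Listens for repository changes and reloads the tree diff whenever the active
+ /// repository, its head commit, or the tracked base commit changes.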
+ pub async fn handle_status_updates(
+ this: WeakEntity<Self>,
+ mut recv: postage::watch::Receiver<()>,
+ cx: &mut AsyncWindowContext,
+ ) {
+ Self::reload_tree_diff(this.clone(), cx).await.log_err();
+ while recv.next().await.is_some() {
+ let Ok(needs_update) = this.update(cx, |this, cx| {
+ let mut needs_update = false;
+ let active_repo = this
+ .project
+ .read(cx)
+ .git_store()
+ .read(cx)
+ .active_repository();
+ if active_repo != this.repo {
+ needs_update = true;
+ this.repo = active_repo;
+ } else if let Some(repo) = this.repo.as_ref() {
+ repo.update(cx, |repo, _| {
+ if let Some(branch) = &repo.branch
+ && let DiffBase::Merge { base_ref } = &this.diff_base
+ && let Some(commit) = branch.most_recent_commit.as_ref()
+ && &branch.ref_name == base_ref
+ && this.base_commit.as_ref() != Some(&commit.sha)
+ {
+ this.base_commit = Some(commit.sha.clone());
+ needs_update = true;
+ }
+
+ if repo.head_commit.as_ref().map(|c| &c.sha) != this.head_commit.as_ref() {
+ this.head_commit = repo.head_commit.as_ref().map(|c| c.sha.clone());
+ needs_update = true;
+ }
+ })
+ }
+ needs_update
+ }) else {
+ return;
+ };
+
+ if needs_update {
+ Self::reload_tree_diff(this.clone(), cx).await.log_err();
+ }
+ }
+ }
+
+ pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
+ let (repo, path) = self
+ .project
+ .read(cx)
+ .git_store()
+ .read(cx)
+ .repository_and_path_for_buffer_id(buffer_id, cx)?;
+ if self.repo() == Some(&repo) {
+ return self.merge_statuses(
+ repo.read(cx)
+ .status_for_path(&path)
+ .map(|status| status.status),
+ self.tree_diff
+ .as_ref()
+ .and_then(|diff| diff.entries.get(&path)),
+ );
+ }
+ None
+ }
+
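+ /// Combines a file's working-tree status (relative to HEAD) with its tree-diff entry
+ /// (merge base to HEAD) into a single status relative to the merge base. Returns
+ /// `None` when there is no net change against the merge base.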
+ pub fn merge_statuses(
+ &self,
+ diff_from_head: Option<FileStatus>,
+ diff_from_merge_base: Option<&TreeDiffStatus>,
+ ) -> Option<FileStatus> {
+ match (diff_from_head, diff_from_merge_base) {
+ (None, None) => None,
+ (Some(diff_from_head), None) => Some(diff_from_head),
+ (Some(diff_from_head @ FileStatus::Unmerged(_)), _) => Some(diff_from_head),
+
+ // file does not exist in HEAD
+ // but *does* exist in work-tree
+ // and *does* exist in merge-base
+ (
+ Some(FileStatus::Untracked)
+ | Some(FileStatus::Tracked(TrackedStatus {
+ index_status: StatusCode::Added,
+ worktree_status: _,
+ })),
+ Some(_),
+ ) => Some(FileStatus::Tracked(TrackedStatus {
+ index_status: StatusCode::Modified,
+ worktree_status: StatusCode::Modified,
+ })),
+
+ // file exists in HEAD
+ // but *does not* exist in work-tree
+ (Some(diff_from_head), Some(diff_from_merge_base)) if diff_from_head.is_deleted() => {
+ match diff_from_merge_base {
+ TreeDiffStatus::Added => None, // unchanged, didn't exist in merge base or worktree
+ _ => Some(diff_from_head),
+ }
+ }
+
+ // file exists in HEAD
+ // and *does* exist in work-tree
+ (Some(FileStatus::Tracked(_)), Some(tree_status)) => {
+ Some(FileStatus::Tracked(TrackedStatus {
+ index_status: match tree_status {
+ TreeDiffStatus::Added { .. } => StatusCode::Added,
+ _ => StatusCode::Modified,
+ },
+ worktree_status: match tree_status {
+ TreeDiffStatus::Added => StatusCode::Added,
+ _ => StatusCode::Modified,
+ },
+ }))
+ }
+
+ (_, Some(diff_from_merge_base)) => {
+ Some(diff_status_to_file_status(diff_from_merge_base))
+ }
+ }
+ }
+
+ pub async fn reload_tree_diff(
+ this: WeakEntity<Self>,
+ cx: &mut AsyncWindowContext,
+ ) -> Result<()> {
+ let task = this.update(cx, |this, cx| {
+ let DiffBase::Merge { base_ref } = this.diff_base.clone() else {
+ return None;
+ };
+ let Some(repo) = this.repo.as_ref() else {
+ this.tree_diff.take();
+ return None;
+ };
+ repo.update(cx, |repo, cx| {
+ Some(repo.diff_tree(
+ DiffTreeType::MergeBase {
+ base: base_ref,
+ head: "HEAD".into(),
+ },
+ cx,
+ ))
+ })
+ })?;
+ let Some(task) = task else { return Ok(()) };
+
+ let diff = task.await??;
+ this.update(cx, |this, cx| {
+ this.tree_diff = Some(diff);
+ cx.emit(BranchDiffEvent::FileListChanged);
+ cx.notify();
+ })
+ }
+
+ pub fn repo(&self) -> Option<&Entity<Repository>> {
+ self.repo.as_ref()
+ }
+
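+ /// Builds one `DiffBuffer` per changed path, covering both files with working-tree
+ /// changes and files that only changed between the merge base and HEAD.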
+ pub fn load_buffers(&mut self, cx: &mut Context<Self>) -> Vec<DiffBuffer> {
+ let mut output = Vec::default();
+ let Some(repo) = self.repo.clone() else {
+ return output;
+ };
+
+ self.project.update(cx, |_project, cx| {
+ let mut seen = HashSet::default();
+
+ for item in repo.read(cx).cached_status() {
+ seen.insert(item.repo_path.clone());
+ let branch_diff = self
+ .tree_diff
+ .as_ref()
+ .and_then(|t| t.entries.get(&item.repo_path))
+ .cloned();
+ // `merge_statuses` returns None when there is no net change relative to the
+ // merge base, so skip such entries rather than unwrapping.
+ let Some(status) =
+ self.merge_statuses(Some(item.status), branch_diff.as_ref())
+ else {
+ continue;
+ };
+ if !status.has_changes() {
+ continue;
+ }
+
+ let Some(project_path) =
+ repo.read(cx).repo_path_to_project_path(&item.repo_path, cx)
+ else {
+ continue;
+ };
+ let task = Self::load_buffer(branch_diff, project_path, repo.clone(), cx);
+
+ output.push(DiffBuffer {
+ repo_path: item.repo_path.clone(),
+ load: task,
+ file_status: item.status,
+ });
+ }
+ let Some(tree_diff) = self.tree_diff.as_ref() else {
+ return;
+ };
+
+ for (path, branch_diff) in tree_diff.entries.iter() {
+ if seen.contains(path) {
+ continue;
+ }
+
+ let Some(project_path) = repo.read(cx).repo_path_to_project_path(&path, cx) else {
+ continue;
+ };
+ let task =
+ Self::load_buffer(Some(branch_diff.clone()), project_path, repo.clone(), cx);
+
+ let file_status = diff_status_to_file_status(branch_diff);
+
+ output.push(DiffBuffer {
+ repo_path: path.clone(),
+ load: task,
+ file_status,
+ });
+ }
+ });
+ output
+ }
+
+ fn load_buffer(
+ branch_diff: Option<git::status::TreeDiffStatus>,
+ project_path: crate::ProjectPath,
+ repo: Entity<Repository>,
+ cx: &Context<'_, Project>,
+ ) -> Task<Result<(Entity<Buffer>, Entity<BufferDiff>)>> {
+ let task = cx.spawn(async move |project, cx| {
+ let buffer = project
+ .update(cx, |project, cx| project.open_buffer(project_path, cx))?
+ .await?;
+
+ let languages = project.update(cx, |project, _cx| project.languages().clone())?;
+
+ let changes = if let Some(entry) = branch_diff {
+ let oid = match entry {
+ git::status::TreeDiffStatus::Added { .. } => None,
+ git::status::TreeDiffStatus::Modified { old, .. }
+ | git::status::TreeDiffStatus::Deleted { old } => Some(old),
+ };
+ project
+ .update(cx, |project, cx| {
+ project.git_store().update(cx, |git_store, cx| {
+ git_store.open_diff_since(oid, buffer.clone(), repo, languages, cx)
+ })
+ })?
+ .await?
+ } else {
+ project
+ .update(cx, |project, cx| {
+ project.open_uncommitted_diff(buffer.clone(), cx)
+ })?
+ .await?
+ };
+ Ok((buffer, changes))
+ });
+ task
+ }
+}
+
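+/// Maps a tree-diff entry onto the `FileStatus` representation used by the rest of the git UI.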
+fn diff_status_to_file_status(branch_diff: &git::status::TreeDiffStatus) -> FileStatus {
+ let file_status = match branch_diff {
+ git::status::TreeDiffStatus::Added { .. } => FileStatus::Tracked(TrackedStatus {
+ index_status: StatusCode::Added,
+ worktree_status: StatusCode::Added,
+ }),
+ git::status::TreeDiffStatus::Modified { .. } => FileStatus::Tracked(TrackedStatus {
+ index_status: StatusCode::Modified,
+ worktree_status: StatusCode::Modified,
+ }),
+ git::status::TreeDiffStatus::Deleted { .. } => FileStatus::Tracked(TrackedStatus {
+ index_status: StatusCode::Deleted,
+ worktree_status: StatusCode::Deleted,
+ }),
+ };
+ file_status
+}
+
+#[derive(Debug)]
+pub struct DiffBuffer {
+ pub repo_path: RepoPath,
+ pub file_status: FileStatus,
+ pub load: Task<Result<(Entity<Buffer>, Entity<BufferDiff>)>>,
+}
@@ -2487,7 +2487,7 @@ impl LocalLspStore {
uri.clone(),
adapter.language_id(&language.name()),
0,
- initial_snapshot.text(),
+ initial_snapshot.text_with_original_line_endings(),
);
vec![snapshot]
@@ -7522,6 +7522,7 @@ impl LspStore {
let previous_snapshot = buffer_snapshots.last()?;
let build_incremental_change = || {
+ let line_ending = next_snapshot.line_ending();
buffer
.edits_since::<Dimensions<PointUtf16, usize>>(
previous_snapshot.snapshot.version(),
@@ -7529,16 +7530,18 @@ impl LspStore {
.map(|edit| {
let edit_start = edit.new.start.0;
let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
- let new_text = next_snapshot
- .text_for_range(edit.new.start.1..edit.new.end.1)
- .collect();
lsp::TextDocumentContentChangeEvent {
range: Some(lsp::Range::new(
point_to_lsp(edit_start),
point_to_lsp(edit_end),
)),
range_length: None,
- text: new_text,
+ // Collect changed text and preserve line endings.
+ // text_for_range returns chunks with normalized \n, so we need to
+ // convert to the buffer's actual line ending for LSP.
+ text: line_ending.into_string(
+ next_snapshot.text_for_range(edit.new.start.1..edit.new.end.1),
+ ),
}
})
.collect()
@@ -7558,7 +7561,7 @@ impl LspStore {
vec![lsp::TextDocumentContentChangeEvent {
range: None,
range_length: None,
- text: next_snapshot.text(),
+ text: next_snapshot.text_with_original_line_endings(),
}]
}
Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
@@ -10923,13 +10926,12 @@ impl LspStore {
let snapshot = versions.last().unwrap();
let version = snapshot.version;
- let initial_snapshot = &snapshot.snapshot;
let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap();
language_server.register_buffer(
uri,
adapter.language_id(&language.name()),
version,
- initial_snapshot.text(),
+ buffer_handle.read(cx).text_with_original_line_endings(),
);
buffer_paths_registered.push((buffer_id, file.abs_path(cx)));
local
@@ -657,6 +657,7 @@ impl LspCommand for GetLspRunnables {
);
task_template.args.extend(cargo.cargo_args);
if !cargo.executable_args.is_empty() {
+ let shell_kind = task_template.shell.shell_kind(cfg!(windows));
task_template.args.push("--".to_string());
task_template.args.extend(
cargo
@@ -682,7 +683,7 @@ impl LspCommand for GetLspRunnables {
// That bit is not auto-expanded when using single quotes.
// Escape extra cargo args unconditionally as those are unlikely to contain `~`.
.flat_map(|extra_arg| {
- shlex::try_quote(&extra_arg).ok().map(|s| s.to_string())
+ shell_kind.try_quote(&extra_arg).map(|s| s.to_string())
}),
);
}
@@ -40,7 +40,7 @@ use crate::{
git_store::GitStore,
lsp_store::{SymbolLocation, log_store::LogKind},
};
-pub use agent_server_store::{AgentServerStore, AgentServersUpdated};
+pub use agent_server_store::{AgentServerStore, AgentServersUpdated, ExternalAgentServerName};
pub use git_store::{
ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate,
git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal},
@@ -4251,6 +4251,73 @@ async fn test_save_as(cx: &mut gpui::TestAppContext) {
assert_eq!(opened_buffer, buffer);
}
+#[gpui::test]
+async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+
+ fs.insert_tree(
+ path!("/dir"),
+ json!({
+ "data_a.txt": "data about a"
+ }),
+ )
+ .await;
+
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/data_a.txt"), cx)
+ })
+ .await
+ .unwrap();
+
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(11..12, "b")], None, cx);
+ });
+
+ // Save the buffer's contents as a new file and confirm that the buffer is now
+ // associated with `data_b.txt` instead of `data_a.txt`.
+ project
+ .update(cx, |project, cx| {
+ let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
+ let new_path = ProjectPath {
+ worktree_id,
+ path: rel_path("data_b.txt").into(),
+ };
+
+ project.save_buffer_as(buffer.clone(), new_path, cx)
+ })
+ .await
+ .unwrap();
+
+ buffer.update(cx, |buffer, cx| {
+ assert_eq!(
+ buffer.file().unwrap().full_path(cx),
+ Path::new("dir/data_b.txt")
+ )
+ });
+
+ // Open the original `data_a.txt` file, confirming that its contents are
+ // unchanged and the resulting buffer's associated file is `data_a.txt`.
+ let original_buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/dir/data_a.txt"), cx)
+ })
+ .await
+ .unwrap();
+
+ original_buffer.update(cx, |buffer, cx| {
+ assert_eq!(buffer.text(), "data about a");
+ assert_eq!(
+ buffer.file().unwrap().full_path(cx),
+ Path::new("dir/data_a.txt")
+ )
+ });
+}
+
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
use worktree::WorktreeModelHandle as _;
@@ -139,146 +139,142 @@ impl Project {
.await
.unwrap_or_default();
- let builder = project
- .update(cx, move |_, cx| {
- let format_to_run = || {
- if let Some(command) = &spawn_task.command {
- let mut command: Option<Cow<str>> = shell_kind.try_quote(command);
- if let Some(command) = &mut command
- && command.starts_with('"')
- && let Some(prefix) = shell_kind.command_prefix()
- {
- *command = Cow::Owned(format!("{prefix}{command}"));
- }
-
- let args = spawn_task
- .args
- .iter()
- .filter_map(|arg| shell_kind.try_quote(&arg));
-
- command.into_iter().chain(args).join(" ")
- } else {
- // todo: this breaks for remotes to windows
- format!("exec {shell} -l")
+ project.update(cx, move |this, cx| {
+ let format_to_run = || {
+ if let Some(command) = &spawn_task.command {
+ let mut command: Option<Cow<str>> = shell_kind.try_quote(command);
+ if let Some(command) = &mut command
+ && command.starts_with('"')
+ && let Some(prefix) = shell_kind.command_prefix()
+ {
+ *command = Cow::Owned(format!("{prefix}{command}"));
}
- };
- let (shell, env) = {
- env.extend(spawn_task.env);
- match remote_client {
- Some(remote_client) => match activation_script.clone() {
- activation_script if !activation_script.is_empty() => {
- let activation_script = activation_script.join("; ");
- let to_run = format_to_run();
- let args = vec![
- "-c".to_owned(),
- format!("{activation_script}; {to_run}"),
- ];
- create_remote_shell(
- Some((
- &remote_client
- .read(cx)
- .shell()
- .unwrap_or_else(get_default_system_shell),
- &args,
- )),
- env,
- path,
- remote_client,
- cx,
- )?
- }
- _ => create_remote_shell(
- spawn_task
- .command
- .as_ref()
- .map(|command| (command, &spawn_task.args)),
+ let args = spawn_task
+ .args
+ .iter()
+ .filter_map(|arg| shell_kind.try_quote(&arg));
+
+ command.into_iter().chain(args).join(" ")
+ } else {
+ // todo: this breaks for remotes to windows
+ format!("exec {shell} -l")
+ }
+ };
+
+ let (shell, env) = {
+ env.extend(spawn_task.env);
+ match remote_client {
+ Some(remote_client) => match activation_script.clone() {
+ activation_script if !activation_script.is_empty() => {
+ let separator = shell_kind.sequential_commands_separator();
+ let activation_script =
+ activation_script.join(&format!("{separator} "));
+ let to_run = format_to_run();
+ let shell = remote_client
+ .read(cx)
+ .shell()
+ .unwrap_or_else(get_default_system_shell);
+ let arg = format!("{activation_script}{separator} {to_run}");
+ let args = shell_kind.args_for_shell(false, arg);
+
+ create_remote_shell(
+ Some((&shell, &args)),
env,
path,
remote_client,
cx,
- )?,
- },
- None => match activation_script.clone() {
- activation_script if !activation_script.is_empty() => {
- let separator = shell_kind.sequential_commands_separator();
- let activation_script =
- activation_script.join(&format!("{separator} "));
- let to_run = format_to_run();
-
- let mut arg =
- format!("{activation_script}{separator} {to_run}");
- if shell_kind == ShellKind::Cmd {
- // We need to put the entire command in quotes since otherwise CMD tries to execute them
- // as separate commands rather than chaining one after another.
- arg = format!("\"{arg}\"");
- }
+ )?
+ }
+ _ => create_remote_shell(
+ spawn_task
+ .command
+ .as_ref()
+ .map(|command| (command, &spawn_task.args)),
+ env,
+ path,
+ remote_client,
+ cx,
+ )?,
+ },
+ None => match activation_script.clone() {
+ activation_script if !activation_script.is_empty() => {
+ let separator = shell_kind.sequential_commands_separator();
+ let activation_script =
+ activation_script.join(&format!("{separator} "));
+ let to_run = format_to_run();
+
+ let mut arg = format!("{activation_script}{separator} {to_run}");
+ if shell_kind == ShellKind::Cmd {
+ // We need to put the entire command line in quotes, since otherwise CMD treats
+ // the chained commands as separate invocations rather than running them in sequence.
+ arg = format!("\"{arg}\"");
+ }
- let args = shell_kind.args_for_shell(false, arg);
+ let args = shell_kind.args_for_shell(false, arg);
- (
- Shell::WithArguments {
- program: shell,
- args,
- title_override: None,
- },
- env,
- )
- }
- _ => (
- if let Some(program) = spawn_task.command {
- Shell::WithArguments {
- program,
- args: spawn_task.args,
- title_override: None,
- }
- } else {
- Shell::System
+ (
+ Shell::WithArguments {
+ program: shell,
+ args,
+ title_override: None,
},
env,
- ),
- },
+ )
+ }
+ _ => (
+ if let Some(program) = spawn_task.command {
+ Shell::WithArguments {
+ program,
+ args: spawn_task.args,
+ title_override: None,
+ }
+ } else {
+ Shell::System
+ },
+ env,
+ ),
+ },
+ }
+ };
+ TerminalBuilder::new(
+ local_path.map(|path| path.to_path_buf()),
+ task_state,
+ shell,
+ env,
+ settings.cursor_shape,
+ settings.alternate_scroll,
+ settings.max_scroll_history_lines,
+ is_via_remote,
+ cx.entity_id().as_u64(),
+ Some(completion_tx),
+ cx,
+ activation_script,
+ )
+ .map(|builder| {
+ let terminal_handle = cx.new(|cx| builder.subscribe(cx));
+
+ this.terminals
+ .local_handles
+ .push(terminal_handle.downgrade());
+
+ let id = terminal_handle.entity_id();
+ cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
+ let handles = &mut project.terminals.local_handles;
+
+ if let Some(index) = handles
+ .iter()
+ .position(|terminal| terminal.entity_id() == id)
+ {
+ handles.remove(index);
+ cx.notify();
}
- };
- anyhow::Ok(TerminalBuilder::new(
- local_path.map(|path| path.to_path_buf()),
- task_state,
- shell,
- env,
- settings.cursor_shape,
- settings.alternate_scroll,
- settings.max_scroll_history_lines,
- is_via_remote,
- cx.entity_id().as_u64(),
- Some(completion_tx),
- cx,
- activation_script,
- ))
- })??
- .await?;
- project.update(cx, move |this, cx| {
- let terminal_handle = cx.new(|cx| builder.subscribe(cx));
+ })
+ .detach();
- this.terminals
- .local_handles
- .push(terminal_handle.downgrade());
-
- let id = terminal_handle.entity_id();
- cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
- let handles = &mut project.terminals.local_handles;
-
- if let Some(index) = handles
- .iter()
- .position(|terminal| terminal.entity_id() == id)
- {
- handles.remove(index);
- cx.notify();
- }
+ terminal_handle
})
- .detach();
-
- terminal_handle
- })
+ })?
})
}
@@ -359,55 +355,53 @@ impl Project {
})
.await
.unwrap_or_default();
- let builder = project
- .update(cx, move |_, cx| {
- let (shell, env) = {
- match remote_client {
- Some(remote_client) => {
- create_remote_shell(None, env, path, remote_client, cx)?
- }
- None => (settings.shell, env),
- }
- };
- anyhow::Ok(TerminalBuilder::new(
- local_path.map(|path| path.to_path_buf()),
- None,
- shell,
- env,
- settings.cursor_shape,
- settings.alternate_scroll,
- settings.max_scroll_history_lines,
- is_via_remote,
- cx.entity_id().as_u64(),
- None,
- cx,
- activation_script,
- ))
- })??
- .await?;
project.update(cx, move |this, cx| {
- let terminal_handle = cx.new(|cx| builder.subscribe(cx));
-
- this.terminals
- .local_handles
- .push(terminal_handle.downgrade());
-
- let id = terminal_handle.entity_id();
- cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
- let handles = &mut project.terminals.local_handles;
-
- if let Some(index) = handles
- .iter()
- .position(|terminal| terminal.entity_id() == id)
- {
- handles.remove(index);
- cx.notify();
+ let (shell, env) = {
+ match remote_client {
+ Some(remote_client) => {
+ create_remote_shell(None, env, path, remote_client, cx)?
+ }
+ None => (settings.shell, env),
}
- })
- .detach();
+ };
+ TerminalBuilder::new(
+ local_path.map(|path| path.to_path_buf()),
+ None,
+ shell,
+ env,
+ settings.cursor_shape,
+ settings.alternate_scroll,
+ settings.max_scroll_history_lines,
+ is_via_remote,
+ cx.entity_id().as_u64(),
+ None,
+ cx,
+ activation_script,
+ )
+ .map(|builder| {
+ let terminal_handle = cx.new(|cx| builder.subscribe(cx));
+
+ this.terminals
+ .local_handles
+ .push(terminal_handle.downgrade());
+
+ let id = terminal_handle.entity_id();
+ cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
+ let handles = &mut project.terminals.local_handles;
+
+ if let Some(index) = handles
+ .iter()
+ .position(|terminal| terminal.entity_id() == id)
+ {
+ handles.remove(index);
+ cx.notify();
+ }
+ })
+ .detach();
- terminal_handle
- })
+ terminal_handle
+ })
+ })?
})
}
@@ -416,27 +410,20 @@ impl Project {
terminal: &Entity<Terminal>,
cx: &mut Context<'_, Project>,
cwd: Option<PathBuf>,
- ) -> Task<Result<Entity<Terminal>>> {
- // We cannot clone the task's terminal, as it will effectively re-spawn the task, which might not be desirable.
- // For now, create a new shell instead.
- if terminal.read(cx).task().is_some() {
- return self.create_terminal_shell(cwd, cx);
- }
-
+ ) -> Result<Entity<Terminal>> {
let local_path = if self.is_via_remote_server() {
None
} else {
cwd
};
- let builder = terminal.read(cx).clone_builder(cx, local_path);
- cx.spawn(async |project, cx| {
- let terminal = builder.await?;
- project.update(cx, |project, cx| {
- let terminal_handle = cx.new(|cx| terminal.subscribe(cx));
+ terminal
+ .read(cx)
+ .clone_builder(cx, local_path)
+ .map(|builder| {
+ let terminal_handle = cx.new(|cx| builder.subscribe(cx));
- project
- .terminals
+ self.terminals
.local_handles
.push(terminal_handle.downgrade());
@@ -456,7 +443,6 @@ impl Project {
terminal_handle
})
- })
}
pub fn terminal_settings<'a>(
@@ -562,7 +548,7 @@ fn create_remote_shell(
Shell::WithArguments {
program: command.program,
args: command.args,
- title_override: Some(format!("{} β Terminal", host).into()),
+ title_override: Some(format!("{} β Terminal", host)),
},
command.env,
))
@@ -472,3 +472,37 @@ message GetDefaultBranch {
message GetDefaultBranchResponse {
optional string branch = 1;
}
+
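+// Requests the set of paths that differ between two commits. When is_merge is set, the
+// comparison is made against the merge base of `base` and `head`; otherwise it is a
+// direct diff from `base` to `head`.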
+message GetTreeDiff {
+ uint64 project_id = 1;
+ uint64 repository_id = 2;
+ bool is_merge = 3;
+ string base = 4;
+ string head = 5;
+}
+
+message GetTreeDiffResponse {
+ repeated TreeDiffStatus entries = 1;
+}
+
+message TreeDiffStatus {
+ enum Status {
+ ADDED = 0;
+ MODIFIED = 1;
+ DELETED = 2;
+ }
+
+ Status status = 1;
+ string path = 2;
+ optional string oid = 3;
+}
+
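+// Requests the raw content of a blob, identified by object id, from the given repository.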
+message GetBlobContent {
+ uint64 project_id = 1;
+ uint64 repository_id = 2;
+ string oid = 3;
+}
+
+message GetBlobContentResponse {
+ string content = 1;
+}
@@ -421,7 +421,13 @@ message Envelope {
RemoteStarted remote_started = 381;
GetDirectoryEnvironment get_directory_environment = 382;
- DirectoryEnvironment directory_environment = 383; // current max
+ DirectoryEnvironment directory_environment = 383;
+
+ GetTreeDiff get_tree_diff = 384;
+ GetTreeDiffResponse get_tree_diff_response = 385;
+
+ GetBlobContent get_blob_content = 386;
+ GetBlobContentResponse get_blob_content_response = 387; // current max
}
reserved 87 to 88;
@@ -316,6 +316,10 @@ messages!(
(PullWorkspaceDiagnostics, Background),
(GetDefaultBranch, Background),
(GetDefaultBranchResponse, Background),
+ (GetTreeDiff, Background),
+ (GetTreeDiffResponse, Background),
+ (GetBlobContent, Background),
+ (GetBlobContentResponse, Background),
(GitClone, Background),
(GitCloneResponse, Background),
(ToggleLspLogs, Background),
@@ -497,6 +501,8 @@ request_messages!(
(GetDocumentDiagnostics, GetDocumentDiagnosticsResponse),
(PullWorkspaceDiagnostics, Ack),
(GetDefaultBranch, GetDefaultBranchResponse),
+ (GetBlobContent, GetBlobContentResponse),
+ (GetTreeDiff, GetTreeDiffResponse),
(GitClone, GitCloneResponse),
(ToggleLspLogs, Ack),
(GetDirectoryEnvironment, DirectoryEnvironment),
@@ -659,6 +665,8 @@ entity_messages!(
GetDocumentDiagnostics,
PullWorkspaceDiagnostics,
GetDefaultBranch,
+ GetTreeDiff,
+ GetBlobContent,
GitClone,
GetAgentServerCommand,
ExternalAgentsUpdated,
@@ -34,7 +34,6 @@ rpc = { workspace = true, features = ["gpui"] }
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
-shlex.workspace = true
smol.workspace = true
tempfile.workspace = true
thiserror.workspace = true
@@ -125,7 +125,10 @@ async fn build_remote_server_from_source(
use std::env::VarError;
use std::path::Path;
- let build_remote_server = std::env::var("ZED_BUILD_REMOTE_SERVER").unwrap_or_default();
+ // By default, build the remote server from source (opt out by setting
+ // ZED_BUILD_REMOTE_SERVER to "false" or "no") and skip artifact compression for quicker builds.
+ let build_remote_server =
+ std::env::var("ZED_BUILD_REMOTE_SERVER").unwrap_or("nocompress".into());
if build_remote_server == "false"
|| build_remote_server == "no"
@@ -203,17 +203,6 @@ impl AsMut<Child> for MasterProcess {
}
}
-macro_rules! shell_script {
- ($fmt:expr, $($name:ident = $arg:expr),+ $(,)?) => {{
- format!(
- $fmt,
- $(
- $name = shlex::try_quote($arg).unwrap()
- ),+
- )
- }};
-}
-
#[async_trait(?Send)]
impl RemoteConnection for SshRemoteConnection {
async fn kill(&self) -> Result<()> {
@@ -738,21 +727,23 @@ impl SshRemoteConnection {
delegate.set_status(Some("Extracting remote development server"), cx);
let server_mode = 0o755;
+ let shell_kind = ShellKind::Posix;
let orig_tmp_path = tmp_path.display(self.path_style());
+ let server_mode = format!("{:o}", server_mode);
+ let server_mode = shell_kind
+ .try_quote(&server_mode)
+ .context("shell quoting")?;
+ let dst_path = dst_path.display(self.path_style());
+ let dst_path = shell_kind.try_quote(&dst_path).context("shell quoting")?;
let script = if let Some(tmp_path) = orig_tmp_path.strip_suffix(".gz") {
- shell_script!(
+ format!(
"gunzip -f {orig_tmp_path} && chmod {server_mode} {tmp_path} && mv {tmp_path} {dst_path}",
- server_mode = &format!("{:o}", server_mode),
- dst_path = &dst_path.display(self.path_style()),
)
} else {
- shell_script!(
- "chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}",
- server_mode = &format!("{:o}", server_mode),
- dst_path = &dst_path.display(self.path_style())
- )
+ format!("chmod {server_mode} {orig_tmp_path} && mv {orig_tmp_path} {dst_path}")
};
- self.socket.run_command("sh", &["-c", &script]).await?;
+ let args = shell_kind.args_for_shell(false, script.to_string());
+ self.socket.run_command("sh", &args).await?;
Ok(())
}
@@ -886,8 +877,12 @@ impl SshSocket {
// into a machine. You must use `cd` to get back to $HOME.
// You need to do it like this: $ ssh host "cd; sh -c 'ls -l /tmp'"
fn ssh_command(&self, program: &str, args: &[impl AsRef<str>]) -> process::Command {
+ let shell_kind = ShellKind::Posix;
let mut command = util::command::new_smol_command("ssh");
- let mut to_run = shlex::try_quote(program).unwrap().into_owned();
+ let mut to_run = shell_kind
+ .try_quote(program)
+ .expect("shell quoting")
+ .into_owned();
for arg in args {
// We're trying to work with: sh, bash, zsh, fish, tcsh, ...?
debug_assert!(
@@ -895,9 +890,10 @@ impl SshSocket {
"multiline arguments do not work in all shells"
);
to_run.push(' ');
- to_run.push_str(&shlex::try_quote(arg.as_ref()).unwrap());
+ to_run.push_str(&shell_kind.try_quote(arg.as_ref()).expect("shell quoting"));
}
- let to_run = format!("cd; {to_run}");
+ let separator = shell_kind.sequential_commands_separator();
+ let to_run = format!("cd{separator} {to_run}");
self.ssh_options(&mut command, true)
.arg(self.connection_options.ssh_url())
.arg("-T")
@@ -906,7 +902,7 @@ impl SshSocket {
command
}
- async fn run_command(&self, program: &str, args: &[&str]) -> Result<String> {
+ async fn run_command(&self, program: &str, args: &[impl AsRef<str>]) -> Result<String> {
let output = self.ssh_command(program, args).output().await?;
anyhow::ensure!(
output.status.success(),
@@ -1080,7 +1076,10 @@ impl SshConnectionOptions {
"-w",
];
- let mut tokens = shlex::split(input).context("invalid input")?.into_iter();
+ let mut tokens = ShellKind::Posix
+ .split(input)
+ .context("invalid input")?
+ .into_iter();
'outer: while let Some(arg) = tokens.next() {
if ALLOWED_OPTS.contains(&(&arg as &str)) {
@@ -1243,6 +1242,7 @@ fn build_command(
) -> Result<CommandTemplate> {
use std::fmt::Write as _;
+ let shell_kind = ShellKind::new(ssh_shell, false);
let mut exec = String::new();
if let Some(working_dir) = working_dir {
let working_dir = RemotePathBuf::new(working_dir, ssh_path_style).to_string();
@@ -1252,29 +1252,38 @@ fn build_command(
const TILDE_PREFIX: &'static str = "~/";
if working_dir.starts_with(TILDE_PREFIX) {
let working_dir = working_dir.trim_start_matches("~").trim_start_matches("/");
- write!(exec, "cd \"$HOME/{working_dir}\" && ",).unwrap();
+ write!(exec, "cd \"$HOME/{working_dir}\" && ",)?;
} else {
- write!(exec, "cd \"{working_dir}\" && ",).unwrap();
+ write!(exec, "cd \"{working_dir}\" && ",)?;
}
} else {
- write!(exec, "cd && ").unwrap();
+ write!(exec, "cd && ")?;
};
- write!(exec, "exec env ").unwrap();
+ write!(exec, "exec env ")?;
for (k, v) in input_env.iter() {
- if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) {
- write!(exec, "{}={} ", k, v).unwrap();
- }
+ write!(
+ exec,
+ "{}={} ",
+ k,
+ shell_kind.try_quote(v).context("shell quoting")?
+ )?;
}
if let Some(input_program) = input_program {
- write!(exec, "{}", shlex::try_quote(&input_program).unwrap()).unwrap();
+ write!(
+ exec,
+ "{}",
+ shell_kind
+ .try_quote(&input_program)
+ .context("shell quoting")?
+ )?;
for arg in input_args {
- let arg = shlex::try_quote(&arg)?;
- write!(exec, " {}", &arg).unwrap();
+ let arg = shell_kind.try_quote(&arg).context("shell quoting")?;
+ write!(exec, " {}", &arg)?;
}
} else {
- write!(exec, "{ssh_shell} -l").unwrap();
+ write!(exec, "{ssh_shell} -l")?;
};
let mut args = Vec::new();
@@ -2,7 +2,7 @@ use crate::{
RemoteClientDelegate, RemotePlatform,
remote_client::{CommandTemplate, RemoteConnection, RemoteConnectionOptions},
};
-use anyhow::{Result, anyhow, bail};
+use anyhow::{Context, Result, anyhow, bail};
use async_trait::async_trait;
use collections::HashMap;
use futures::channel::mpsc::{Sender, UnboundedReceiver, UnboundedSender};
@@ -441,6 +441,7 @@ impl RemoteConnection for WslRemoteConnection {
bail!("WSL shares the network interface with the host system");
}
+ let shell_kind = ShellKind::new(&self.shell, false);
let working_dir = working_dir
.map(|working_dir| RemotePathBuf::new(working_dir, PathStyle::Posix).to_string())
.unwrap_or("~".to_string());
@@ -448,19 +449,26 @@ impl RemoteConnection for WslRemoteConnection {
let mut exec = String::from("exec env ");
for (k, v) in env.iter() {
- if let Some((k, v)) = shlex::try_quote(k).ok().zip(shlex::try_quote(v).ok()) {
- write!(exec, "{}={} ", k, v).unwrap();
- }
+ write!(
+ exec,
+ "{}={} ",
+ k,
+ shell_kind.try_quote(v).context("shell quoting")?
+ )?;
}
if let Some(program) = program {
- write!(exec, "{}", shlex::try_quote(&program)?).unwrap();
+ write!(
+ exec,
+ "{}",
+ shell_kind.try_quote(&program).context("shell quoting")?
+ )?;
for arg in args {
- let arg = shlex::try_quote(&arg)?;
- write!(exec, " {}", &arg).unwrap();
+ let arg = shell_kind.try_quote(&arg).context("shell quoting")?;
+ write!(exec, " {}", &arg)?;
}
} else {
- write!(&mut exec, "{} -l", self.shell).unwrap();
+ write!(&mut exec, "{} -l", self.shell)?;
}
let wsl_args = if let Some(user) = &self.connection_options.user {
@@ -32,7 +32,7 @@ use std::{
path::{Path, PathBuf},
sync::{Arc, atomic::AtomicUsize},
};
-use sysinfo::System;
+use sysinfo::{ProcessRefreshKind, RefreshKind, System, UpdateKind};
use util::{ResultExt, paths::PathStyle, rel_path::RelPath};
use worktree::Worktree;
@@ -747,9 +747,16 @@ impl HeadlessProject {
_cx: AsyncApp,
) -> Result<proto::GetProcessesResponse> {
let mut processes = Vec::new();
- let system = System::new_all();
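+ // Refresh only process information (always including command lines) and skip per-thread tasks to keep this cheap.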
+ let refresh_kind = RefreshKind::nothing().with_processes(
+ ProcessRefreshKind::nothing()
+ .without_tasks()
+ .with_cmd(UpdateKind::Always),
+ );
- for (_pid, process) in system.processes() {
+ for process in System::new_with_specifics(refresh_kind)
+ .processes()
+ .values()
+ {
let name = process.name().to_string_lossy().into_owned();
let command = process
.cmd()
@@ -774,7 +781,7 @@ impl HeadlessProject {
envelope: TypedEnvelope<proto::GetDirectoryEnvironment>,
mut cx: AsyncApp,
) -> Result<proto::DirectoryEnvironment> {
- let shell = task::Shell::from_proto(envelope.payload.shell.context("missing shell")?)?;
+ let shell = task::shell_from_proto(envelope.payload.shell.context("missing shell")?)?;
let directory = PathBuf::from(envelope.payload.directory);
let environment = this
.update(&mut cx, |this, cx| {
@@ -699,13 +699,11 @@ impl Item for NotebookEditor {
_workspace_id: Option<workspace::WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| {
- Self::new(self.project.clone(), self.notebook_item.clone(), window, cx)
- })))
+ Some(cx.new(|cx| Self::new(self.project.clone(), self.notebook_item.clone(), window, cx)))
}
fn buffer_kind(&self, _: &App) -> workspace::item::ItemBufferKind {
@@ -15,6 +15,7 @@ path = "src/rope.rs"
arrayvec = "0.7.1"
log.workspace = true
rayon.workspace = true
+regex.workspace = true
smallvec.workspace = true
sum_tree.workspace = true
unicode-segmentation.workspace = true
@@ -5,11 +5,14 @@ mod point_utf16;
mod unclipped;
use rayon::iter::{IntoParallelIterator, ParallelIterator as _};
+use regex::Regex;
use smallvec::SmallVec;
use std::{
+ borrow::Cow,
cmp, fmt, io, mem,
ops::{self, AddAssign, Range},
str,
+ sync::{Arc, LazyLock},
};
use sum_tree::{Bias, Dimension, Dimensions, SumTree};
@@ -21,6 +24,95 @@ pub use unclipped::Unclipped;
use crate::chunk::Bitmap;
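+/// Matches `\r\n` and bare `\r` so all line separators can be normalized to `\n`.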
+static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
+ LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum LineEnding {
+ Unix,
+ Windows,
+}
+
+impl Default for LineEnding {
+ fn default() -> Self {
+ #[cfg(unix)]
+ return Self::Unix;
+
+ #[cfg(not(unix))]
+ return Self::Windows;
+ }
+}
+
+impl LineEnding {
+ pub fn as_str(&self) -> &'static str {
+ match self {
+ LineEnding::Unix => "\n",
+ LineEnding::Windows => "\r\n",
+ }
+ }
+
+ pub fn label(&self) -> &'static str {
+ match self {
+ LineEnding::Unix => "LF",
+ LineEnding::Windows => "CRLF",
+ }
+ }
+
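+ /// Detects the line ending style from the first newline found within (at most) the first 1000 bytes of `text`,
+ /// falling back to the platform default when no newline is present.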
+ pub fn detect(text: &str) -> Self {
+ let mut max_ix = cmp::min(text.len(), 1000);
+ while !text.is_char_boundary(max_ix) {
+ max_ix -= 1;
+ }
+
+ if let Some(ix) = text[..max_ix].find(['\n']) {
+ if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
+ Self::Windows
+ } else {
+ Self::Unix
+ }
+ } else {
+ Self::default()
+ }
+ }
+
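+ /// Replaces all `\r\n` and `\r` sequences with `\n` in place, leaving the string untouched when nothing needs replacing.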
+ pub fn normalize(text: &mut String) {
+ if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
+ *text = replaced;
+ }
+ }
+
+ pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
+ if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
+ replaced.into()
+ } else {
+ text
+ }
+ }
+
+ pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
+ if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
+ replaced.into()
+ } else {
+ text
+ }
+ }
+
+ /// Converts text chunks into a [`String`] using the current line ending.
+ pub fn into_string(&self, chunks: Chunks<'_>) -> String {
+ match self {
+ LineEnding::Unix => chunks.collect(),
+ LineEnding::Windows => {
+ let line_ending = self.as_str();
+ let mut result = String::new();
+ for chunk in chunks {
+ result.push_str(&chunk.replace('\n', line_ending));
+ }
+ result
+ }
+ }
+ }
+}
+
#[derive(Clone, Default)]
pub struct Rope {
chunks: SumTree<Chunk>,
@@ -370,6 +462,16 @@ impl Rope {
Chunks::new(self, range, true)
}
+ /// Formats the rope's text with the specified line ending string.
+ /// This replaces all `\n` characters with the provided line ending.
+ ///
+ /// The rope internally stores all line breaks as `\n` (see `Display` impl).
+ /// Use this method to convert to different line endings for file operations,
+ /// LSP communication, or other scenarios requiring specific line ending formats.
+ pub fn to_string_with_line_ending(&self, line_ending: LineEnding) -> String {
+ line_ending.into_string(self.chunks())
+ }
+
pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
if offset >= self.summary().len {
return self.summary().len_utf16;
@@ -611,10 +713,16 @@ impl From<&String> for Rope {
}
}
+/// Display implementation for Rope.
+///
+/// Note: This always uses `\n` as the line separator, regardless of the original
+/// file's line endings. The rope internally normalizes all line breaks to `\n`.
+/// If you need to preserve original line endings (e.g., for LSP communication),
+/// use `to_string_with_line_ending` instead.
impl fmt::Display for Rope {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for chunk in self.chunks() {
- write!(f, "{}", chunk)?;
+ write!(f, "{chunk}")?;
}
Ok(())
}
@@ -2264,6 +2372,53 @@ mod tests {
}
}
+ #[test]
+ fn test_to_string_with_line_ending() {
+ // Test Unix line endings (no conversion)
+ let rope = Rope::from("line1\nline2\nline3");
+ assert_eq!(
+ rope.to_string_with_line_ending(LineEnding::Unix),
+ "line1\nline2\nline3"
+ );
+
+ // Test Windows line endings
+ assert_eq!(
+ rope.to_string_with_line_ending(LineEnding::Windows),
+ "line1\r\nline2\r\nline3"
+ );
+
+ // Test empty rope
+ let empty_rope = Rope::from("");
+ assert_eq!(
+ empty_rope.to_string_with_line_ending(LineEnding::Windows),
+ ""
+ );
+
+ // Test single line (no newlines)
+ let single_line = Rope::from("single line");
+ assert_eq!(
+ single_line.to_string_with_line_ending(LineEnding::Windows),
+ "single line"
+ );
+
+ // Test rope ending with newline
+ let ending_newline = Rope::from("line1\nline2\n");
+ assert_eq!(
+ ending_newline.to_string_with_line_ending(LineEnding::Windows),
+ "line1\r\nline2\r\n"
+ );
+
+ // Test large rope with multiple chunks
+ let mut large_rope = Rope::new();
+ for i in 0..100 {
+ large_rope.push(&format!("line{}\n", i));
+ }
+ let result = large_rope.to_string_with_line_ending(LineEnding::Windows);
+ assert!(result.contains("\r\n"));
+ assert!(!result.contains("\n\n"));
+ assert_eq!(result.matches("\r\n").count(), 100);
+ }
+
fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize {
while !text.is_char_boundary(offset) {
match bias {
@@ -42,6 +42,7 @@ pub enum ExtensionProvides {
Grammars,
LanguageServers,
ContextServers,
+ AgentServers,
SlashCommands,
IndexedDocsProviders,
Snippets,
@@ -1102,23 +1102,42 @@ impl RulesLibrary {
.w_64()
.overflow_x_hidden()
.bg(cx.theme().colors().panel_background)
- .child(
- h_flex()
- .p(DynamicSpacing::Base04.rems(cx))
- .h_9()
- .w_full()
- .flex_none()
- .justify_end()
- .child(
- IconButton::new("new-rule", IconName::Plus)
- .tooltip(move |_window, cx| {
- Tooltip::for_action("New Rule", &NewRule, cx)
- })
- .on_click(|_, window, cx| {
- window.dispatch_action(Box::new(NewRule), cx);
- }),
- ),
- )
+ .map(|this| {
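+ // macOS shows a compact "+" icon button in the header; other platforms get a full-width outlined "New Rule" button.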
+ if cfg!(target_os = "macos") {
+ this.child(
+ h_flex()
+ .p(DynamicSpacing::Base04.rems(cx))
+ .h_9()
+ .w_full()
+ .flex_none()
+ .justify_end()
+ .child(
+ IconButton::new("new-rule", IconName::Plus)
+ .tooltip(move |_window, cx| {
+ Tooltip::for_action("New Rule", &NewRule, cx)
+ })
+ .on_click(|_, window, cx| {
+ window.dispatch_action(Box::new(NewRule), cx);
+ }),
+ ),
+ )
+ } else {
+ this.child(
+ h_flex().p_1().w_full().child(
+ Button::new("new-rule", "New Rule")
+ .full_width()
+ .style(ButtonStyle::Outlined)
+ .icon(IconName::Plus)
+ .icon_size(IconSize::Small)
+ .icon_position(IconPosition::Start)
+ .icon_color(Color::Muted)
+ .on_click(|_, window, cx| {
+ window.dispatch_action(Box::new(NewRule), cx);
+ }),
+ ),
+ )
+ }
+ })
.child(div().flex_grow().child(self.picker.clone()))
}
@@ -1348,9 +1367,8 @@ impl Render for RulesLibrary {
client_side_decorations(
v_flex()
- .bg(theme.colors().background)
.id("rules-library")
- .key_context("PromptLibrary")
+ .key_context("RulesLibrary")
.on_action(cx.listener(|this, &NewRule, window, cx| this.new_rule(window, cx)))
.on_action(
cx.listener(|this, &DeleteRule, window, cx| {
@@ -1368,60 +1386,33 @@ impl Render for RulesLibrary {
.font(ui_font)
.text_color(theme.colors().text)
.children(self.title_bar.clone())
+ .bg(theme.colors().background)
.child(
h_flex()
.flex_1()
+ .when(!cfg!(target_os = "macos"), |this| {
+ this.border_t_1().border_color(cx.theme().colors().border)
+ })
.child(self.render_rule_list(cx))
.map(|el| {
if self.store.read(cx).prompt_count() == 0 {
el.child(
v_flex()
- .w_2_3()
.h_full()
+ .flex_1()
.items_center()
.justify_center()
- .gap_4()
+ .border_l_1()
+ .border_color(cx.theme().colors().border)
.bg(cx.theme().colors().editor_background)
.child(
- h_flex()
- .gap_2()
- .child(
- Icon::new(IconName::Book)
- .size(IconSize::Medium)
- .color(Color::Muted),
- )
- .child(
- Label::new("No rules yet")
- .size(LabelSize::Large)
- .color(Color::Muted),
- ),
- )
- .child(
- h_flex()
- .child(h_flex())
- .child(
- v_flex()
- .gap_1()
- .child(Label::new(
- "Create your first rule:",
- ))
- .child(
- Button::new("create-rule", "New Rule")
- .full_width()
- .key_binding(
- KeyBinding::for_action(
- &NewRule, cx,
- ),
- )
- .on_click(|_, window, cx| {
- window.dispatch_action(
- NewRule.boxed_clone(),
- cx,
- )
- }),
- ),
- )
- .child(h_flex()),
+ Button::new("create-rule", "New Rule")
+ .style(ButtonStyle::Outlined)
+ .key_binding(KeyBinding::for_action(&NewRule, cx))
+ .on_click(|_, window, cx| {
+ window
+ .dispatch_action(NewRule.boxed_clone(), cx)
+ }),
),
)
} else {
@@ -572,14 +572,12 @@ impl Item for ProjectSearchView {
_workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
let model = self.entity.update(cx, |model, cx| model.clone(cx));
- Task::ready(Some(cx.new(|cx| {
- Self::new(self.workspace.clone(), model, window, cx, None)
- })))
+ Some(cx.new(|cx| Self::new(self.workspace.clone(), model, window, cx, None)))
}
fn added_to_workspace(
@@ -3679,7 +3677,6 @@ pub mod tests {
)
})
.unwrap()
- .await
.unwrap();
assert_eq!(cx.update(|cx| second_pane.read(cx).items_len()), 1);
@@ -3875,7 +3872,6 @@ pub mod tests {
)
})
.unwrap()
- .await
.unwrap();
assert_eq!(cx.update(|cx| second_pane.read(cx).items_len()), 1);
assert!(
@@ -28,6 +28,7 @@ menu.workspace = true
paths.workspace = true
picker.workspace = true
project.workspace = true
+release_channel.workspace = true
schemars.workspace = true
search.workspace = true
serde.workspace = true
@@ -796,7 +796,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
SettingsPageItem::SettingItem(SettingItem {
files: USER,
title: "Font Features",
- description: "The Opentype features to enable for rendering in UI elements.",
+ description: "The OpenType features to enable for rendering in UI elements.",
field: Box::new(
SettingField {
json_path: Some("ui_font_features"),
@@ -13,6 +13,7 @@ use gpui::{
};
use heck::ToTitleCase as _;
use project::{Project, WorktreeId};
+use release_channel::ReleaseChannel;
use schemars::JsonSchema;
use serde::Deserialize;
use settings::{Settings, SettingsContent, SettingsStore};
@@ -579,6 +580,7 @@ pub fn open_settings_editor(
let scale_factor = current_rem_size / default_rem_size;
let scaled_bounds: gpui::Size<Pixels> = default_bounds.map(|axis| axis * scale_factor);
+ let app_id = ReleaseChannel::global(cx).app_id();
let window_decorations = match std::env::var("ZED_WINDOW_DECORATIONS") {
Ok(val) if val == "server" => gpui::WindowDecorations::Server,
Ok(val) if val == "client" => gpui::WindowDecorations::Client,
@@ -588,7 +590,7 @@ pub fn open_settings_editor(
cx.open_window(
WindowOptions {
titlebar: Some(TitlebarOptions {
- title: Some("Settings Window".into()),
+ title: Some("Zed β Settings".into()),
appears_transparent: true,
traffic_light_position: Some(point(px(12.0), px(12.0))),
}),
@@ -597,6 +599,7 @@ pub fn open_settings_editor(
is_movable: true,
kind: gpui::WindowKind::Floating,
window_background: cx.theme().window_background_appearance(),
+ app_id: Some(app_id.to_owned()),
window_decorations: Some(window_decorations),
window_min_size: Some(scaled_bounds),
window_bounds: Some(WindowBounds::centered(scaled_bounds, cx)),
@@ -764,14 +767,16 @@ impl SettingsPageItem {
});
let field = match field_renderer_or_warning {
- Ok(field_renderer) => field_renderer(
- settings_window,
- setting_item,
- file.clone(),
- setting_item.metadata.as_deref(),
- window,
- cx,
- ),
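+ // Wrap each item's renderer in its own element ID scope so any element state it creates is keyed per item.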
+ Ok(field_renderer) => window.with_id(item_index, |window| {
+ field_renderer(
+ settings_window,
+ setting_item,
+ file.clone(),
+ setting_item.metadata.as_deref(),
+ window,
+ cx,
+ )
+ }),
Err(warning) => render_settings_item(
settings_window,
setting_item,
@@ -3063,6 +3068,9 @@ impl Render for SettingsWindow {
.font(ui_font)
.bg(cx.theme().colors().background)
.text_color(cx.theme().colors().text)
+ .when(!cfg!(target_os = "macos"), |this| {
+ this.border_t_1().border_color(cx.theme().colors().border)
+ })
.child(self.render_nav(window, cx))
.child(self.render_page(window, cx)),
),
@@ -3187,7 +3195,8 @@ fn render_toggle_button<B: Into<bool> + From<bool> + Copy>(
};
Switch::new("toggle_button", toggle_state)
- .color(ui::SwitchColor::Accent)
+ .tab_index(0_isize)
+ .color(SwitchColor::Accent)
.on_click({
move |state, _window, cx| {
let state = *state == ui::ToggleState::Selected;
@@ -3197,8 +3206,6 @@ fn render_toggle_button<B: Into<bool> + From<bool> + Copy>(
.log_err(); // todo(settings_ui) don't log err
}
})
- .tab_index(0_isize)
- .color(SwitchColor::Accent)
.into_any_element()
}
@@ -3254,39 +3261,41 @@ where
} else {
current_value_label.to_string()
},
- ContextMenu::build(window, cx, move |mut menu, _, _| {
- for (&value, &label) in std::iter::zip(variants(), labels()) {
- let file = file.clone();
- menu = menu.toggleable_entry(
- if should_do_titlecase {
- label.to_title_case()
- } else {
- label.to_string()
- },
- value == current_value,
- IconPosition::End,
- None,
- move |_, cx| {
- if value == current_value {
- return;
- }
- update_settings_file(file.clone(), cx, move |settings, _cx| {
- (field.write)(settings, Some(value));
- })
- .log_err(); // todo(settings_ui) don't log err
- },
- );
- }
- menu
+ window.use_state(cx, |window, cx| {
+ ContextMenu::new(window, cx, move |mut menu, _, _| {
+ for (&value, &label) in std::iter::zip(variants(), labels()) {
+ let file = file.clone();
+ menu = menu.toggleable_entry(
+ if should_do_titlecase {
+ label.to_title_case()
+ } else {
+ label.to_string()
+ },
+ value == current_value,
+ IconPosition::End,
+ None,
+ move |_, cx| {
+ if value == current_value {
+ return;
+ }
+ update_settings_file(file.clone(), cx, move |settings, _cx| {
+ (field.write)(settings, Some(value));
+ })
+ .log_err(); // todo(settings_ui) don't log err
+ },
+ );
+ }
+ menu
+ })
}),
)
+ .tab_index(0)
.trigger_size(ButtonSize::Medium)
.style(DropdownStyle::Outlined)
.offset(gpui::Point {
x: px(0.0),
y: px(2.0),
})
- .tab_index(0)
.into_any_element()
}
@@ -3305,7 +3314,7 @@ fn render_font_picker(
field: SettingField<settings::FontFamilyName>,
file: SettingsUiFile,
_metadata: Option<&SettingsFieldMetadata>,
- window: &mut Window,
+ _window: &mut Window,
cx: &mut App,
) -> AnyElement {
let current_value = SettingsStore::global(cx)
@@ -3314,26 +3323,29 @@ fn render_font_picker(
.cloned()
.unwrap_or_else(|| SharedString::default().into());
- let font_picker = cx.new(|cx| {
- font_picker(
- current_value.clone().into(),
- move |font_name, cx| {
- update_settings_file(file.clone(), cx, move |settings, _cx| {
- (field.write)(settings, Some(font_name.into()));
- })
- .log_err(); // todo(settings_ui) don't log err
- },
- window,
- cx,
- )
- });
-
PopoverMenu::new("font-picker")
- .menu(move |_window, _cx| Some(font_picker.clone()))
.trigger(render_picker_trigger_button(
"font_family_picker_trigger".into(),
- current_value.into(),
+ current_value.clone().into(),
))
+ .menu(move |window, cx| {
+ let file = file.clone();
+ let current_value = current_value.clone();
+
+ Some(cx.new(move |cx| {
+ font_picker(
+ current_value.clone().into(),
+ move |font_name, cx| {
+ update_settings_file(file.clone(), cx, move |settings, _cx| {
+ (field.write)(settings, Some(font_name.into()));
+ })
+ .log_err(); // todo(settings_ui) don't log err
+ },
+ window,
+ cx,
+ )
+ }))
+ })
.anchor(gpui::Corner::TopLeft)
.offset(gpui::Point {
x: px(0.0),
@@ -3347,7 +3359,7 @@ fn render_theme_picker(
field: SettingField<settings::ThemeName>,
file: SettingsUiFile,
_metadata: Option<&SettingsFieldMetadata>,
- window: &mut Window,
+ _window: &mut Window,
cx: &mut App,
) -> AnyElement {
let (_, value) = SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick);
@@ -3356,26 +3368,28 @@ fn render_theme_picker(
.map(|theme_name| theme_name.0.into())
.unwrap_or_else(|| cx.theme().name.clone());
- let theme_picker = cx.new(|cx| {
- theme_picker(
- current_value.clone(),
- move |theme_name, cx| {
- update_settings_file(file.clone(), cx, move |settings, _cx| {
- (field.write)(settings, Some(settings::ThemeName(theme_name.into())));
- })
- .log_err(); // todo(settings_ui) don't log err
- },
- window,
- cx,
- )
- });
-
PopoverMenu::new("theme-picker")
- .menu(move |_window, _cx| Some(theme_picker.clone()))
.trigger(render_picker_trigger_button(
"theme_picker_trigger".into(),
- current_value,
+ current_value.clone(),
))
+ .menu(move |window, cx| {
+ Some(cx.new(|cx| {
+ let file = file.clone();
+ let current_value = current_value.clone();
+ theme_picker(
+ current_value,
+ move |theme_name, cx| {
+ update_settings_file(file.clone(), cx, move |settings, _cx| {
+ (field.write)(settings, Some(settings::ThemeName(theme_name.into())));
+ })
+ .log_err(); // todo(settings_ui) don't log err
+ },
+ window,
+ cx,
+ )
+ }))
+ })
.anchor(gpui::Corner::TopLeft)
.offset(gpui::Point {
x: px(0.0),
@@ -3389,7 +3403,7 @@ fn render_icon_theme_picker(
field: SettingField<settings::IconThemeName>,
file: SettingsUiFile,
_metadata: Option<&SettingsFieldMetadata>,
- window: &mut Window,
+ _window: &mut Window,
cx: &mut App,
) -> AnyElement {
let (_, value) = SettingsStore::global(cx).get_value_from_file(file.to_settings(), field.pick);
@@ -3398,26 +3412,31 @@ fn render_icon_theme_picker(
.map(|theme_name| theme_name.0.into())
.unwrap_or_else(|| cx.theme().name.clone());
- let icon_theme_picker = cx.new(|cx| {
- icon_theme_picker(
- current_value.clone(),
- move |theme_name, cx| {
- update_settings_file(file.clone(), cx, move |settings, _cx| {
- (field.write)(settings, Some(settings::IconThemeName(theme_name.into())));
- })
- .log_err(); // todo(settings_ui) don't log err
- },
- window,
- cx,
- )
- });
-
PopoverMenu::new("icon-theme-picker")
- .menu(move |_window, _cx| Some(icon_theme_picker.clone()))
.trigger(render_picker_trigger_button(
"icon_theme_picker_trigger".into(),
- current_value,
+ current_value.clone(),
))
+ .menu(move |window, cx| {
+ Some(cx.new(|cx| {
+ let file = file.clone();
+ let current_value = current_value.clone();
+ icon_theme_picker(
+ current_value,
+ move |theme_name, cx| {
+ update_settings_file(file.clone(), cx, move |settings, _cx| {
+ (field.write)(
+ settings,
+ Some(settings::IconThemeName(theme_name.into())),
+ );
+ })
+ .log_err(); // todo(settings_ui) don't log err
+ },
+ window,
+ cx,
+ )
+ }))
+ })
.anchor(gpui::Corner::TopLeft)
.offset(gpui::Point {
x: px(0.0),
@@ -3,7 +3,6 @@
mod adapter_schema;
mod debug_format;
mod serde_helpers;
-mod shell_builder;
pub mod static_source;
mod task_template;
mod vscode_debug_format;
@@ -12,23 +11,22 @@ mod vscode_format;
use anyhow::Context as _;
use collections::{HashMap, HashSet, hash_map};
use gpui::SharedString;
-use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::path::PathBuf;
use std::str::FromStr;
-use util::get_system_shell;
pub use adapter_schema::{AdapterSchema, AdapterSchemas};
pub use debug_format::{
AttachRequest, BuildTaskDefinition, DebugRequest, DebugScenario, DebugTaskFile, LaunchRequest,
Request, TcpArgumentsTemplate, ZedDebugConfig,
};
-pub use shell_builder::{ShellBuilder, ShellKind};
pub use task_template::{
DebugArgsRequest, HideStrategy, RevealStrategy, TaskTemplate, TaskTemplates,
substitute_variables_in_map, substitute_variables_in_str,
};
+pub use util::shell::{Shell, ShellKind};
+pub use util::shell_builder::ShellBuilder;
pub use vscode_debug_format::VsCodeDebugTaskFile;
pub use vscode_format::VsCodeTaskFile;
pub use zed_actions::RevealTarget;
@@ -318,81 +316,32 @@ pub struct TaskContext {
#[derive(Clone, Debug)]
pub struct RunnableTag(pub SharedString);
-/// Shell configuration to open the terminal with.
-#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, Hash)]
-#[serde(rename_all = "snake_case")]
-pub enum Shell {
- /// Use the system's default terminal configuration in /etc/passwd
- #[default]
- System,
- /// Use a specific program with no arguments.
- Program(String),
- /// Use a specific program with arguments.
- WithArguments {
- /// The program to run.
- program: String,
- /// The arguments to pass to the program.
- args: Vec<String>,
- /// An optional string to override the title of the terminal tab
- title_override: Option<SharedString>,
- },
+pub fn shell_from_proto(proto: proto::Shell) -> anyhow::Result<Shell> {
+ let shell_type = proto.shell_type.context("invalid shell type")?;
+ let shell = match shell_type {
+ proto::shell::ShellType::System(_) => Shell::System,
+ proto::shell::ShellType::Program(program) => Shell::Program(program),
+ proto::shell::ShellType::WithArguments(program) => Shell::WithArguments {
+ program: program.program,
+ args: program.args,
+ title_override: None,
+ },
+ };
+ Ok(shell)
}
-impl Shell {
- pub fn program(&self) -> String {
- match self {
- Shell::Program(program) => program.clone(),
- Shell::WithArguments { program, .. } => program.clone(),
- Shell::System => get_system_shell(),
- }
- }
-
- pub fn program_and_args(&self) -> (String, &[String]) {
- match self {
- Shell::Program(program) => (program.clone(), &[]),
- Shell::WithArguments { program, args, .. } => (program.clone(), args),
- Shell::System => (get_system_shell(), &[]),
- }
- }
-
- pub fn shell_kind(&self, is_windows: bool) -> ShellKind {
- match self {
- Shell::Program(program) => ShellKind::new(program, is_windows),
- Shell::WithArguments { program, .. } => ShellKind::new(program, is_windows),
- Shell::System => ShellKind::system(),
- }
- }
-
- pub fn from_proto(proto: proto::Shell) -> anyhow::Result<Self> {
- let shell_type = proto.shell_type.context("invalid shell type")?;
- let shell = match shell_type {
- proto::shell::ShellType::System(_) => Self::System,
- proto::shell::ShellType::Program(program) => Self::Program(program),
- proto::shell::ShellType::WithArguments(program) => Self::WithArguments {
- program: program.program,
- args: program.args,
- title_override: None,
- },
- };
- Ok(shell)
- }
-
- pub fn to_proto(self) -> proto::Shell {
- let shell_type = match self {
- Shell::System => proto::shell::ShellType::System(proto::System {}),
- Shell::Program(program) => proto::shell::ShellType::Program(program),
- Shell::WithArguments {
- program,
- args,
- title_override: _,
- } => proto::shell::ShellType::WithArguments(proto::shell::WithArguments {
- program,
- args,
- }),
- };
- proto::Shell {
- shell_type: Some(shell_type),
- }
+pub fn shell_to_proto(shell: Shell) -> proto::Shell {
+ let shell_type = match shell {
+ Shell::System => proto::shell::ShellType::System(proto::System {}),
+ Shell::Program(program) => proto::shell::ShellType::Program(program),
+ Shell::WithArguments {
+ program,
+ args,
+ title_override: _,
+ } => proto::shell::ShellType::WithArguments(proto::shell::WithArguments { program, args }),
+ };
+ proto::Shell {
+ shell_type: Some(shell_type),
}
}
@@ -67,7 +67,7 @@ use thiserror::Error;
use gpui::{
App, AppContext as _, Bounds, ClipboardItem, Context, EventEmitter, Hsla, Keystroke, Modifiers,
MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, Pixels, Point, Rgba,
- ScrollWheelEvent, SharedString, Size, Task, TouchPhase, Window, actions, black, px,
+ ScrollWheelEvent, Size, Task, TouchPhase, Window, actions, black, px,
};
use crate::mappings::{colors::to_alac_rgb, keys::to_esc_str};
@@ -277,7 +277,7 @@ pub struct TerminalError {
pub directory: Option<PathBuf>,
pub program: Option<String>,
pub args: Option<Vec<String>>,
- pub title_override: Option<SharedString>,
+ pub title_override: Option<String>,
pub source: std::io::Error,
}
@@ -423,233 +423,230 @@ impl TerminalBuilder {
completion_tx: Option<Sender<Option<ExitStatus>>>,
cx: &App,
activation_script: Vec<String>,
- ) -> Task<Result<TerminalBuilder>> {
- let version = release_channel::AppVersion::global(cx);
- cx.background_spawn(async move {
- // If the parent environment doesn't have a locale set
- // (As is the case when launched from a .app on MacOS),
- // and the Project doesn't have a locale set, then
- // set a fallback for our child environment to use.
- if std::env::var("LANG").is_err() {
- env.entry("LANG".to_string())
- .or_insert_with(|| "en_US.UTF-8".to_string());
- }
-
- env.insert("ZED_TERM".to_string(), "true".to_string());
- env.insert("TERM_PROGRAM".to_string(), "zed".to_string());
- env.insert("TERM".to_string(), "xterm-256color".to_string());
- env.insert("COLORTERM".to_string(), "truecolor".to_string());
- env.insert("TERM_PROGRAM_VERSION".to_string(), version.to_string());
-
- #[derive(Default)]
- struct ShellParams {
+ ) -> Result<TerminalBuilder> {
+ // If the parent environment doesn't have a locale set
+ // (As is the case when launched from a .app on MacOS),
+ // and the Project doesn't have a locale set, then
+ // set a fallback for our child environment to use.
+ if std::env::var("LANG").is_err() {
+ env.entry("LANG".to_string())
+ .or_insert_with(|| "en_US.UTF-8".to_string());
+ }
+
+ env.insert("ZED_TERM".to_string(), "true".to_string());
+ env.insert("TERM_PROGRAM".to_string(), "zed".to_string());
+ env.insert("TERM".to_string(), "xterm-256color".to_string());
+ env.insert("COLORTERM".to_string(), "truecolor".to_string());
+ env.insert(
+ "TERM_PROGRAM_VERSION".to_string(),
+ release_channel::AppVersion::global(cx).to_string(),
+ );
+
+ #[derive(Default)]
+ struct ShellParams {
+ program: String,
+ args: Option<Vec<String>>,
+ title_override: Option<String>,
+ }
+
+ impl ShellParams {
+ fn new(
program: String,
args: Option<Vec<String>>,
- title_override: Option<SharedString>,
- }
-
- impl ShellParams {
- fn new(
- program: String,
- args: Option<Vec<String>>,
- title_override: Option<SharedString>,
- ) -> Self {
- log::debug!("Using {program} as shell");
- Self {
- program,
- args,
- title_override,
- }
+ title_override: Option<String>,
+ ) -> Self {
+ log::info!("Using {program} as shell");
+ Self {
+ program,
+ args,
+ title_override,
}
}
+ }
- let shell_params = match shell.clone() {
- Shell::System => {
- if cfg!(windows) {
- Some(ShellParams::new(
- util::shell::get_windows_system_shell(),
- None,
- None,
- ))
- } else {
- None
- }
+ let shell_params = match shell.clone() {
+ Shell::System => {
+ if cfg!(windows) {
+ Some(ShellParams::new(
+ util::shell::get_windows_system_shell(),
+ None,
+ None,
+ ))
+ } else {
+ None
}
- Shell::Program(program) => Some(ShellParams::new(program, None, None)),
- Shell::WithArguments {
- program,
- args,
- title_override,
- } => Some(ShellParams::new(program, Some(args), title_override)),
- };
- let terminal_title_override =
- shell_params.as_ref().and_then(|e| e.title_override.clone());
+ }
+ Shell::Program(program) => Some(ShellParams::new(program, None, None)),
+ Shell::WithArguments {
+ program,
+ args,
+ title_override,
+ } => Some(ShellParams::new(program, Some(args), title_override)),
+ };
+ let terminal_title_override = shell_params.as_ref().and_then(|e| e.title_override.clone());
- #[cfg(windows)]
- let shell_program = shell_params.as_ref().map(|params| {
- use util::ResultExt;
+ #[cfg(windows)]
+ let shell_program = shell_params.as_ref().map(|params| {
+ use util::ResultExt;
- Self::resolve_path(¶ms.program)
- .log_err()
- .unwrap_or(params.program.clone())
- });
+ Self::resolve_path(¶ms.program)
+ .log_err()
+ .unwrap_or(params.program.clone())
+ });
- // Note: when remoting, this shell_kind will scrutinize `ssh` or
- // `wsl.exe` as a shell and fall back to posix or powershell based on
- // the compilation target. This is fine right now due to the restricted
- // way we use the return value, but would become incorrect if we
- // supported remoting into windows.
- let shell_kind = shell.shell_kind(cfg!(windows));
-
- let pty_options = {
- let alac_shell = shell_params.as_ref().map(|params| {
- alacritty_terminal::tty::Shell::new(
- params.program.clone(),
- params.args.clone().unwrap_or_default(),
- )
- });
+ // Note: when remoting, this shell_kind will scrutinize `ssh` or
+ // `wsl.exe` as a shell and fall back to posix or powershell based on
+ // the compilation target. This is fine right now due to the restricted
+ // way we use the return value, but would become incorrect if we
+ // supported remoting into windows.
+ let shell_kind = shell.shell_kind(cfg!(windows));
+
+ let pty_options = {
+ let alac_shell = shell_params.as_ref().map(|params| {
+ alacritty_terminal::tty::Shell::new(
+ params.program.clone(),
+ params.args.clone().unwrap_or_default(),
+ )
+ });
- alacritty_terminal::tty::Options {
- shell: alac_shell,
- working_directory: working_directory.clone(),
- drain_on_exit: true,
- env: env.clone().into_iter().collect(),
- // We do not want to escape arguments if we are using CMD as our shell.
- // If we do we end up with too many quotes/escaped quotes for CMD to handle.
- #[cfg(windows)]
- escape_args: shell_kind != util::shell::ShellKind::Cmd,
- }
- };
+ alacritty_terminal::tty::Options {
+ shell: alac_shell,
+ working_directory: working_directory.clone(),
+ drain_on_exit: true,
+ env: env.clone().into_iter().collect(),
+ #[cfg(windows)]
+ escape_args: shell_kind.tty_escape_args(),
+ }
+ };
- let default_cursor_style = AlacCursorStyle::from(cursor_shape);
- let scrolling_history = if task.is_some() {
- // Tasks like `cargo build --all` may produce a lot of output, ergo allow maximum scrolling.
- // After the task finishes, we do not allow appending to that terminal, so small tasks output should not
- // cause excessive memory usage over time.
- MAX_SCROLL_HISTORY_LINES
- } else {
- max_scroll_history_lines
- .unwrap_or(DEFAULT_SCROLL_HISTORY_LINES)
- .min(MAX_SCROLL_HISTORY_LINES)
- };
- let config = Config {
- scrolling_history,
- default_cursor_style,
- ..Config::default()
- };
+ let default_cursor_style = AlacCursorStyle::from(cursor_shape);
+ let scrolling_history = if task.is_some() {
+ // Tasks like `cargo build --all` may produce a lot of output, ergo allow maximum scrolling.
+ // After the task finishes, we do not allow appending to that terminal, so small tasks output should not
+ // cause excessive memory usage over time.
+ MAX_SCROLL_HISTORY_LINES
+ } else {
+ max_scroll_history_lines
+ .unwrap_or(DEFAULT_SCROLL_HISTORY_LINES)
+ .min(MAX_SCROLL_HISTORY_LINES)
+ };
+ let config = Config {
+ scrolling_history,
+ default_cursor_style,
+ ..Config::default()
+ };
- //Spawn a task so the Alacritty EventLoop can communicate with us
- //TODO: Remove with a bounded sender which can be dispatched on &self
- let (events_tx, events_rx) = unbounded();
- //Set up the terminal...
- let mut term = Term::new(
- config.clone(),
- &TerminalBounds::default(),
- ZedListener(events_tx.clone()),
- );
+ //Spawn a task so the Alacritty EventLoop can communicate with us
+ //TODO: Remove with a bounded sender which can be dispatched on &self
+ let (events_tx, events_rx) = unbounded();
+ //Set up the terminal...
+ let mut term = Term::new(
+ config.clone(),
+ &TerminalBounds::default(),
+ ZedListener(events_tx.clone()),
+ );
- //Alacritty defaults to alternate scrolling being on, so we just need to turn it off.
- if let AlternateScroll::Off = alternate_scroll {
- term.unset_private_mode(PrivateMode::Named(NamedPrivateMode::AlternateScroll));
- }
+ //Alacritty defaults to alternate scrolling being on, so we just need to turn it off.
+ if let AlternateScroll::Off = alternate_scroll {
+ term.unset_private_mode(PrivateMode::Named(NamedPrivateMode::AlternateScroll));
+ }
- let term = Arc::new(FairMutex::new(term));
+ let term = Arc::new(FairMutex::new(term));
- //Setup the pty...
- let pty = match tty::new(&pty_options, TerminalBounds::default().into(), window_id) {
- Ok(pty) => pty,
- Err(error) => {
- bail!(TerminalError {
- directory: working_directory,
- program: shell_params.as_ref().map(|params| params.program.clone()),
- args: shell_params.as_ref().and_then(|params| params.args.clone()),
- title_override: terminal_title_override,
- source: error,
- });
- }
- };
+ //Setup the pty...
+ let pty = match tty::new(&pty_options, TerminalBounds::default().into(), window_id) {
+ Ok(pty) => pty,
+ Err(error) => {
+ bail!(TerminalError {
+ directory: working_directory,
+ program: shell_params.as_ref().map(|params| params.program.clone()),
+ args: shell_params.as_ref().and_then(|params| params.args.clone()),
+ title_override: terminal_title_override,
+ source: error,
+ });
+ }
+ };
- let pty_info = PtyProcessInfo::new(&pty);
+ let pty_info = PtyProcessInfo::new(&pty);
- //And connect them together
- let event_loop = EventLoop::new(
- term.clone(),
- ZedListener(events_tx),
- pty,
- pty_options.drain_on_exit,
- false,
- )
- .context("failed to create event loop")?;
+ //And connect them together
+ let event_loop = EventLoop::new(
+ term.clone(),
+ ZedListener(events_tx),
+ pty,
+ pty_options.drain_on_exit,
+ false,
+ )
+ .context("failed to create event loop")?;
- //Kick things off
- let pty_tx = event_loop.channel();
- let _io_thread = event_loop.spawn(); // DANGER
+ //Kick things off
+ let pty_tx = event_loop.channel();
+ let _io_thread = event_loop.spawn(); // DANGER
- let no_task = task.is_none();
+ let no_task = task.is_none();
- let terminal = Terminal {
- task,
- terminal_type: TerminalType::Pty {
- pty_tx: Notifier(pty_tx),
- info: pty_info,
- },
- completion_tx,
- term,
- term_config: config,
- title_override: terminal_title_override,
- events: VecDeque::with_capacity(10), //Should never get this high.
- last_content: Default::default(),
- last_mouse: None,
- matches: Vec::new(),
- selection_head: None,
- breadcrumb_text: String::new(),
- scroll_px: px(0.),
- next_link_id: 0,
- selection_phase: SelectionPhase::Ended,
- hyperlink_regex_searches: RegexSearches::new(),
- vi_mode_enabled: false,
- is_ssh_terminal,
- last_mouse_move_time: Instant::now(),
- last_hyperlink_search_position: None,
- #[cfg(windows)]
- shell_program,
- activation_script: activation_script.clone(),
- template: CopyTemplate {
- shell,
- env,
- cursor_shape,
- alternate_scroll,
- max_scroll_history_lines,
- window_id,
- },
- child_exited: None,
- };
+ let terminal = Terminal {
+ task,
+ terminal_type: TerminalType::Pty {
+ pty_tx: Notifier(pty_tx),
+ info: pty_info,
+ },
+ completion_tx,
+ term,
+ term_config: config,
+ title_override: terminal_title_override,
+ events: VecDeque::with_capacity(10), //Should never get this high.
+ last_content: Default::default(),
+ last_mouse: None,
+ matches: Vec::new(),
+ selection_head: None,
+ breadcrumb_text: String::new(),
+ scroll_px: px(0.),
+ next_link_id: 0,
+ selection_phase: SelectionPhase::Ended,
+ hyperlink_regex_searches: RegexSearches::new(),
+ vi_mode_enabled: false,
+ is_ssh_terminal,
+ last_mouse_move_time: Instant::now(),
+ last_hyperlink_search_position: None,
+ #[cfg(windows)]
+ shell_program,
+ activation_script: activation_script.clone(),
+ template: CopyTemplate {
+ shell,
+ env,
+ cursor_shape,
+ alternate_scroll,
+ max_scroll_history_lines,
+ window_id,
+ },
+ child_exited: None,
+ };
- if !activation_script.is_empty() && no_task {
- for activation_script in activation_script {
- terminal.write_to_pty(activation_script.into_bytes());
- // Simulate enter key press
- // NOTE(PowerShell): using `\r\n` will put PowerShell in a continuation mode (infamous >> character)
- // and generally mess up the rendering.
- terminal.write_to_pty(b"\x0d");
- }
- // In order to clear the screen at this point, we have two options:
- // 1. We can send a shell-specific command such as "clear" or "cls"
- // 2. We can "echo" a marker message that we will then catch when handling a Wakeup event
- // and clear the screen using `terminal.clear()` method
- // We cannot issue a `terminal.clear()` command at this point as alacritty is evented
- // and while we have sent the activation script to the pty, it will be executed asynchronously.
- // Therefore, we somehow need to wait for the activation script to finish executing before we
- // can proceed with clearing the screen.
- terminal.write_to_pty(shell_kind.clear_screen_command().as_bytes());
+ if !activation_script.is_empty() && no_task {
+ for activation_script in activation_script {
+ terminal.write_to_pty(activation_script.into_bytes());
// Simulate enter key press
+ // NOTE(PowerShell): using `\r\n` will put PowerShell in a continuation mode (infamous >> character)
+ // and generally mess up the rendering.
terminal.write_to_pty(b"\x0d");
}
+ // In order to clear the screen at this point, we have two options:
+ // 1. We can send a shell-specific command such as "clear" or "cls"
+ // 2. We can "echo" a marker message that we will then catch when handling a Wakeup event
+ // and clear the screen using `terminal.clear()` method
+ // We cannot issue a `terminal.clear()` command at this point as alacritty is evented
+ // and while we have sent the activation script to the pty, it will be executed asynchronously.
+ // Therefore, we somehow need to wait for the activation script to finish executing before we
+ // can proceed with clearing the screen.
+ terminal.write_to_pty(shell_kind.clear_screen_command().as_bytes());
+ // Simulate enter key press
+ terminal.write_to_pty(b"\x0d");
+ }
- Ok(TerminalBuilder {
- terminal,
- events_rx,
- })
+ Ok(TerminalBuilder {
+ terminal,
+ events_rx,
})
}
@@ -824,7 +821,7 @@ pub struct Terminal {
pub last_content: TerminalContent,
pub selection_head: Option<AlacPoint>,
pub breadcrumb_text: String,
- title_override: Option<SharedString>,
+ title_override: Option<String>,
scroll_px: Pixels,
next_link_id: usize,
selection_phase: SelectionPhase,
@@ -2154,7 +2151,7 @@ impl Terminal {
self.vi_mode_enabled
}
- pub fn clone_builder(&self, cx: &App, cwd: Option<PathBuf>) -> Task<Result<TerminalBuilder>> {
+ pub fn clone_builder(&self, cx: &App, cwd: Option<PathBuf>) -> Result<TerminalBuilder> {
let working_directory = self.working_directory().or_else(|| cwd);
TerminalBuilder::new(
working_directory,
@@ -2390,30 +2387,28 @@ mod tests {
let (completion_tx, completion_rx) = smol::channel::unbounded();
let (program, args) = ShellBuilder::new(&Shell::System, false)
.build(Some("echo".to_owned()), &["hello".to_owned()]);
- let builder = cx
- .update(|cx| {
- TerminalBuilder::new(
- None,
- None,
- task::Shell::WithArguments {
- program,
- args,
- title_override: None,
- },
- HashMap::default(),
- CursorShape::default(),
- AlternateScroll::On,
- None,
- false,
- 0,
- Some(completion_tx),
- cx,
- vec![],
- )
- })
- .await
- .unwrap();
- let terminal = cx.new(|cx| builder.subscribe(cx));
+ let terminal = cx.new(|cx| {
+ TerminalBuilder::new(
+ None,
+ None,
+ task::Shell::WithArguments {
+ program,
+ args,
+ title_override: None,
+ },
+ HashMap::default(),
+ CursorShape::default(),
+ AlternateScroll::On,
+ None,
+ false,
+ 0,
+ Some(completion_tx),
+ cx,
+ vec![],
+ )
+ .unwrap()
+ .subscribe(cx)
+ });
assert_eq!(
completion_rx.recv().await.unwrap(),
Some(ExitStatus::default())
@@ -2442,27 +2437,25 @@ mod tests {
cx.executor().allow_parking();
let (completion_tx, completion_rx) = smol::channel::unbounded();
- let builder = cx
- .update(|cx| {
- TerminalBuilder::new(
- None,
- None,
- task::Shell::System,
- HashMap::default(),
- CursorShape::default(),
- AlternateScroll::On,
- None,
- false,
- 0,
- Some(completion_tx),
- cx,
- Vec::new(),
- )
- })
- .await
- .unwrap();
// Build an empty command, which will result in a tty shell spawned.
- let terminal = cx.new(|cx| builder.subscribe(cx));
+ let terminal = cx.new(|cx| {
+ TerminalBuilder::new(
+ None,
+ None,
+ task::Shell::System,
+ HashMap::default(),
+ CursorShape::default(),
+ AlternateScroll::On,
+ None,
+ false,
+ 0,
+ Some(completion_tx),
+ cx,
+ Vec::new(),
+ )
+ .unwrap()
+ .subscribe(cx)
+ });
let (event_tx, event_rx) = smol::channel::unbounded::<Event>();
cx.update(|cx| {
@@ -2513,30 +2506,28 @@ mod tests {
let (completion_tx, completion_rx) = smol::channel::unbounded();
let (program, args) = ShellBuilder::new(&Shell::System, false)
.build(Some("asdasdasdasd".to_owned()), &["@@@@@".to_owned()]);
- let builder = cx
- .update(|cx| {
- TerminalBuilder::new(
- None,
- None,
- task::Shell::WithArguments {
- program,
- args,
- title_override: None,
- },
- HashMap::default(),
- CursorShape::default(),
- AlternateScroll::On,
- None,
- false,
- 0,
- Some(completion_tx),
- cx,
- Vec::new(),
- )
- })
- .await
- .unwrap();
- let terminal = cx.new(|cx| builder.subscribe(cx));
+ let terminal = cx.new(|cx| {
+ TerminalBuilder::new(
+ None,
+ None,
+ task::Shell::WithArguments {
+ program,
+ args,
+ title_override: None,
+ },
+ HashMap::default(),
+ CursorShape::default(),
+ AlternateScroll::On,
+ None,
+ false,
+ 0,
+ Some(completion_tx),
+ cx,
+ Vec::new(),
+ )
+ .unwrap()
+ .subscribe(cx)
+ });
let (event_tx, event_rx) = smol::channel::unbounded::<Event>();
cx.update(|cx| {
@@ -66,7 +66,7 @@ fn settings_shell_to_task_shell(shell: settings::Shell) -> Shell {
} => Shell::WithArguments {
program,
args,
- title_override,
+ title_override: title_override.map(Into::into),
},
}
}
@@ -214,6 +214,14 @@ async fn deserialize_pane_group(
}
SerializedPaneGroup::Pane(serialized_pane) => {
let active = serialized_pane.active;
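+ // Load the serialized terminal views first so the pane can be populated as soon as it is created.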
+ let new_items = deserialize_terminal_views(
+ workspace_id,
+ project.clone(),
+ workspace.clone(),
+ serialized_pane.children.as_slice(),
+ cx,
+ )
+ .await;
let pane = panel
.update_in(cx, |terminal_panel, window, cx| {
@@ -228,71 +236,56 @@ async fn deserialize_pane_group(
.log_err()?;
let active_item = serialized_pane.active_item;
let pinned_count = serialized_pane.pinned_count;
- let new_items = deserialize_terminal_views(
- workspace_id,
- project.clone(),
- workspace.clone(),
- serialized_pane.children.as_slice(),
- cx,
- );
- cx.spawn({
- let pane = pane.downgrade();
- async move |cx| {
- let new_items = new_items.await;
-
- let items = pane.update_in(cx, |pane, window, cx| {
- populate_pane_items(pane, new_items, active_item, window, cx);
- pane.set_pinned_count(pinned_count);
- pane.items_len()
- });
+ let terminal = pane
+ .update_in(cx, |pane, window, cx| {
+ populate_pane_items(pane, new_items, active_item, window, cx);
+ pane.set_pinned_count(pinned_count);
// Avoid blank panes in splits
- if items.is_ok_and(|items| items == 0) {
+ if pane.items_len() == 0 {
let working_directory = workspace
.update(cx, |workspace, cx| default_working_directory(workspace, cx))
.ok()
.flatten();
- let Some(terminal) = project
- .update(cx, |project, cx| {
- project.create_terminal_shell(working_directory, cx)
- })
- .log_err()
- else {
- return;
- };
-
- let terminal = terminal.await.log_err();
- pane.update_in(cx, |pane, window, cx| {
- if let Some(terminal) = terminal {
- let terminal_view = Box::new(cx.new(|cx| {
- TerminalView::new(
- terminal,
- workspace.clone(),
- Some(workspace_id),
- project.downgrade(),
- window,
- cx,
- )
- }));
- pane.add_item(terminal_view, true, false, None, window, cx);
- }
- })
- .ok();
+ let terminal = project.update(cx, |project, cx| {
+ project.create_terminal_shell(working_directory, cx)
+ });
+ Some(Some(terminal))
+ } else {
+ Some(None)
}
- }
- })
- .detach();
+ })
+ .ok()
+ .flatten()?;
+ if let Some(terminal) = terminal {
+ let terminal = terminal.await.ok()?;
+ pane.update_in(cx, |pane, window, cx| {
+ let terminal_view = Box::new(cx.new(|cx| {
+ TerminalView::new(
+ terminal,
+ workspace.clone(),
+ Some(workspace_id),
+ project.downgrade(),
+ window,
+ cx,
+ )
+ }));
+ pane.add_item(terminal_view, true, false, None, window, cx);
+ })
+ .ok()?;
+ }
Some((Member::Pane(pane.clone()), active.then_some(pane)))
}
}
}
-fn deserialize_terminal_views(
+async fn deserialize_terminal_views(
workspace_id: WorkspaceId,
project: Entity<Project>,
workspace: WeakEntity<Workspace>,
item_ids: &[u64],
cx: &mut AsyncWindowContext,
-) -> impl Future<Output = Vec<Entity<TerminalView>>> + use<> {
+) -> Vec<Entity<TerminalView>> {
+ let mut items = Vec::with_capacity(item_ids.len());
let mut deserialized_items = item_ids
.iter()
.map(|item_id| {
@@ -309,15 +302,12 @@ fn deserialize_terminal_views(
.unwrap_or_else(|e| Task::ready(Err(e.context("no window present"))))
})
.collect::<FuturesUnordered<_>>();
- async move {
- let mut items = Vec::with_capacity(deserialized_items.len());
- while let Some(item) = deserialized_items.next().await {
- if let Some(item) = item.log_err() {
- items.push(item);
- }
+ while let Some(item) = deserialized_items.next().await {
+ if let Some(item) = item.log_err() {
+ items.push(item);
}
- items
}
+ items
}
#[derive(Debug, Serialize, Deserialize)]
@@ -461,11 +461,11 @@ impl TerminalPanel {
cx.spawn_in(window, async move |panel, cx| {
let terminal = project
.update(cx, |project, cx| match terminal_view {
- Some(view) => project.clone_terminal(
+ Some(view) => Task::ready(project.clone_terminal(
&view.read(cx).terminal.clone(),
cx,
working_directory,
- ),
+ )),
None => project.create_terminal_shell(working_directory, cx),
})
.ok()?
@@ -1218,31 +1218,28 @@ impl Item for TerminalView {
workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>> {
- let Ok(terminal) = self.project.update(cx, |project, cx| {
- let cwd = project
- .active_project_directory(cx)
- .map(|it| it.to_path_buf());
- project.clone_terminal(self.terminal(), cx, cwd)
- }) else {
- return Task::ready(None);
- };
- cx.spawn_in(window, async move |this, cx| {
- let terminal = terminal.await.log_err()?;
- this.update_in(cx, |this, window, cx| {
- cx.new(|cx| {
- TerminalView::new(
- terminal,
- this.workspace.clone(),
- workspace_id,
- this.project.clone(),
- window,
- cx,
- )
- })
+ ) -> Option<Entity<Self>> {
+ let terminal = self
+ .project
+ .update(cx, |project, cx| {
+ let cwd = project
+ .active_project_directory(cx)
+ .map(|it| it.to_path_buf());
+ project.clone_terminal(self.terminal(), cx, cwd)
})
- .ok()
- })
+ .ok()?
+ .log_err()?;
+
+ Some(cx.new(|cx| {
+ TerminalView::new(
+ terminal,
+ self.workspace.clone(),
+ workspace_id,
+ self.project.clone(),
+ window,
+ cx,
+ )
+ }))
}
fn is_dirty(&self, cx: &gpui::App) -> bool {
@@ -23,7 +23,6 @@ log.workspace = true
parking_lot.workspace = true
postage.workspace = true
rand = { workspace = true, optional = true }
-regex.workspace = true
rope.workspace = true
smallvec.workspace = true
sum_tree.workspace = true
@@ -20,11 +20,9 @@ use operation_queue::OperationQueue;
pub use patch::Patch;
use postage::{oneshot, prelude::*};
-use regex::Regex;
pub use rope::*;
pub use selection::*;
use std::{
- borrow::Cow,
cmp::{self, Ordering, Reverse},
fmt::Display,
future::Future,
@@ -32,7 +30,7 @@ use std::{
num::NonZeroU64,
ops::{self, Deref, Range, Sub},
str,
- sync::{Arc, LazyLock},
+ sync::Arc,
time::{Duration, Instant},
};
pub use subscription::*;
@@ -43,9 +41,6 @@ use undo_map::UndoMap;
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
-static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
- LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
-
pub type TransactionId = clock::Lamport;
pub struct Buffer {
@@ -2019,10 +2014,24 @@ impl BufferSnapshot {
start..position
}
+ /// Returns the buffer's text as a String.
+ ///
+ /// Note: This always uses `\n` as the line separator, regardless of the buffer's
+ /// actual line ending setting. For LSP communication or other cases where you need
+ /// to preserve the original line endings, use [`Self::text_with_original_line_endings`] instead.
pub fn text(&self) -> String {
self.visible_text.to_string()
}
+ /// Returns the buffer's text using the same line endings as the buffer's file.
+ ///
+ /// Unlike [`Self::text`] which always uses `\n`, this method formats the text using
+ /// the buffer's actual line ending setting (Unix `\n` or Windows `\r\n`).
+ pub fn text_with_original_line_endings(&self) -> String {
+ self.visible_text
+ .to_string_with_line_ending(self.line_ending)
+ }
+
pub fn line_ending(&self) -> LineEnding {
self.line_ending
}
@@ -2126,6 +2135,10 @@ impl BufferSnapshot {
self.visible_text.reversed_bytes_in_range(start..end)
}
+ /// Returns the text in the given range.
+ ///
+ /// Note: This always uses `\n` as the line separator, regardless of the buffer's
+ /// actual line ending setting.
pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
let start = range.start.to_offset(self);
let end = range.end.to_offset(self);
@@ -3246,77 +3259,6 @@ impl FromAnchor for usize {
}
}
-#[derive(Clone, Copy, Debug, PartialEq)]
-pub enum LineEnding {
- Unix,
- Windows,
-}
-
-impl Default for LineEnding {
- fn default() -> Self {
- #[cfg(unix)]
- return Self::Unix;
-
- #[cfg(not(unix))]
- return Self::Windows;
- }
-}
-
-impl LineEnding {
- pub fn as_str(&self) -> &'static str {
- match self {
- LineEnding::Unix => "\n",
- LineEnding::Windows => "\r\n",
- }
- }
-
- pub fn label(&self) -> &'static str {
- match self {
- LineEnding::Unix => "LF",
- LineEnding::Windows => "CRLF",
- }
- }
-
- pub fn detect(text: &str) -> Self {
- let mut max_ix = cmp::min(text.len(), 1000);
- while !text.is_char_boundary(max_ix) {
- max_ix -= 1;
- }
-
- if let Some(ix) = text[..max_ix].find(['\n']) {
- if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
- Self::Windows
- } else {
- Self::Unix
- }
- } else {
- Self::default()
- }
- }
-
- pub fn normalize(text: &mut String) {
- if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
- *text = replaced;
- }
- }
-
- pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
- if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
- replaced.into()
- } else {
- text
- }
- }
-
- pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
- if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
- replaced.into()
- } else {
- text
- }
- }
-}
-
#[cfg(debug_assertions)]
pub mod debug {
use super::*;
@@ -640,6 +640,11 @@ impl RenderOnce for ButtonLike {
.filter(|_| self.selected)
.unwrap_or(self.style);
+ let is_outlined = matches!(
+ self.style,
+ ButtonStyle::Outlined | ButtonStyle::OutlinedGhost
+ );
+
self.base
.h_flex()
.id(self.id.clone())
@@ -654,13 +659,7 @@ impl RenderOnce for ButtonLike {
.when_some(self.width, |this, width| {
this.w(width).justify_center().text_center()
})
- .when(
- matches!(
- self.style,
- ButtonStyle::Outlined | ButtonStyle::OutlinedGhost
- ),
- |this| this.border_1(),
- )
+ .when(is_outlined, |this| this.border_1())
.when_some(self.rounding, |this, rounding| {
this.when(rounding.top_left, |this| this.rounded_tl_sm())
.when(rounding.top_right, |this| this.rounded_tr_sm())
@@ -688,13 +687,16 @@ impl RenderOnce for ButtonLike {
let hovered_style = style.hovered(self.layer, cx);
let focus_color =
|refinement: StyleRefinement| refinement.bg(hovered_style.background);
+
this.cursor(self.cursor_style)
.hover(focus_color)
.map(|this| {
- if matches!(self.style, ButtonStyle::Outlined) {
- this.focus(|s| s.border_color(cx.theme().colors().border_focused))
+ if is_outlined {
+ this.focus_visible(|s| {
+ s.border_color(cx.theme().colors().border_focused)
+ })
} else {
- this.focus(focus_color)
+ this.focus_visible(focus_color)
}
})
.active(|active| active.bg(style.active(cx).background))
@@ -47,6 +47,7 @@ pub struct ContextMenuEntry {
toggle: Option<(IconPosition, bool)>,
label: SharedString,
icon: Option<IconName>,
+ custom_icon_path: Option<SharedString>,
icon_position: IconPosition,
icon_size: IconSize,
icon_color: Option<Color>,
@@ -66,6 +67,7 @@ impl ContextMenuEntry {
toggle: None,
label: label.into(),
icon: None,
+ custom_icon_path: None,
icon_position: IconPosition::Start,
icon_size: IconSize::Small,
icon_color: None,
@@ -90,6 +92,12 @@ impl ContextMenuEntry {
self
}
+ pub fn custom_icon_path(mut self, path: impl Into<SharedString>) -> Self {
+ self.custom_icon_path = Some(path.into());
+ self.icon = None; // Clear IconName if custom path is set
+ self
+ }
+
pub fn icon_position(mut self, position: IconPosition) -> Self {
self.icon_position = position;
self
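// A small sketch of the new custom-icon entry builder; the label and SVG path
// are placeholders, and `ContextMenuEntry::new(label)` is assumed based on the
// constructor fields shown above.
fn custom_icon_entry() -> ContextMenuEntry {
    ContextMenuEntry::new("Open Preview")
        // Setting a custom path clears any previously configured IconName.
        .custom_icon_path("icons/preview.svg")
        .icon_position(IconPosition::Start)
}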
@@ -206,39 +214,46 @@ impl EventEmitter<DismissEvent> for ContextMenu {}
impl FluentBuilder for ContextMenu {}
impl ContextMenu {
+ pub fn new(
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ f: impl FnOnce(Self, &mut Window, &mut Context<Self>) -> Self,
+ ) -> Self {
+ let focus_handle = cx.focus_handle();
+ let _on_blur_subscription = cx.on_blur(
+ &focus_handle,
+ window,
+ |this: &mut ContextMenu, window, cx| this.cancel(&menu::Cancel, window, cx),
+ );
+ window.refresh();
+
+ f(
+ Self {
+ builder: None,
+ items: Default::default(),
+ focus_handle,
+ action_context: None,
+ selected_index: None,
+ delayed: false,
+ clicked: false,
+ key_context: "menu".into(),
+ _on_blur_subscription,
+ keep_open_on_confirm: false,
+ documentation_aside: None,
+ fixed_width: None,
+ end_slot_action: None,
+ },
+ window,
+ cx,
+ )
+ }
+
pub fn build(
window: &mut Window,
cx: &mut App,
f: impl FnOnce(Self, &mut Window, &mut Context<Self>) -> Self,
) -> Entity<Self> {
- cx.new(|cx| {
- let focus_handle = cx.focus_handle();
- let _on_blur_subscription = cx.on_blur(
- &focus_handle,
- window,
- |this: &mut ContextMenu, window, cx| this.cancel(&menu::Cancel, window, cx),
- );
- window.refresh();
- f(
- Self {
- builder: None,
- items: Default::default(),
- focus_handle,
- action_context: None,
- selected_index: None,
- delayed: false,
- clicked: false,
- key_context: "menu".into(),
- _on_blur_subscription,
- keep_open_on_confirm: false,
- documentation_aside: None,
- fixed_width: None,
- end_slot_action: None,
- },
- window,
- cx,
- )
- })
+ cx.new(|cx| Self::new(window, cx, f))
}
/// Builds a [`ContextMenu`] that will stay open when making changes instead of closing after each confirmation.
@@ -380,6 +395,7 @@ impl ContextMenu {
label: label.into(),
handler: Rc::new(move |_, window, cx| handler(window, cx)),
icon: None,
+ custom_icon_path: None,
icon_position: IconPosition::End,
icon_size: IconSize::Small,
icon_color: None,
@@ -408,6 +424,7 @@ impl ContextMenu {
label: label.into(),
handler: Rc::new(move |_, window, cx| handler(window, cx)),
icon: None,
+ custom_icon_path: None,
icon_position: IconPosition::End,
icon_size: IconSize::Small,
icon_color: None,
@@ -436,6 +453,7 @@ impl ContextMenu {
label: label.into(),
handler: Rc::new(move |_, window, cx| handler(window, cx)),
icon: None,
+ custom_icon_path: None,
icon_position: IconPosition::End,
icon_size: IconSize::Small,
icon_color: None,
@@ -463,6 +481,7 @@ impl ContextMenu {
label: label.into(),
handler: Rc::new(move |_, window, cx| handler(window, cx)),
icon: None,
+ custom_icon_path: None,
icon_position: position,
icon_size: IconSize::Small,
icon_color: None,
@@ -521,6 +540,7 @@ impl ContextMenu {
window.dispatch_action(action.boxed_clone(), cx);
}),
icon: None,
+ custom_icon_path: None,
icon_position: IconPosition::End,
icon_size: IconSize::Small,
icon_color: None,
@@ -551,6 +571,7 @@ impl ContextMenu {
window.dispatch_action(action.boxed_clone(), cx);
}),
icon: None,
+ custom_icon_path: None,
icon_size: IconSize::Small,
icon_position: IconPosition::End,
icon_color: None,
@@ -571,6 +592,7 @@ impl ContextMenu {
action: Some(action.boxed_clone()),
handler: Rc::new(move |_, window, cx| window.dispatch_action(action.boxed_clone(), cx)),
icon: Some(IconName::ArrowUpRight),
+ custom_icon_path: None,
icon_size: IconSize::XSmall,
icon_position: IconPosition::End,
icon_color: None,
@@ -890,6 +912,7 @@ impl ContextMenu {
label,
handler,
icon,
+ custom_icon_path,
icon_position,
icon_size,
icon_color,
@@ -920,7 +943,29 @@ impl ContextMenu {
Color::Default
};
- let label_element = if let Some(icon_name) = icon {
+ let label_element = if let Some(custom_path) = custom_icon_path {
+ h_flex()
+ .gap_1p5()
+ .when(
+ *icon_position == IconPosition::Start && toggle.is_none(),
+ |flex| {
+ flex.child(
+ Icon::from_path(custom_path.clone())
+ .size(*icon_size)
+ .color(icon_color),
+ )
+ },
+ )
+ .child(Label::new(label.clone()).color(label_color).truncate())
+ .when(*icon_position == IconPosition::End, |flex| {
+ flex.child(
+ Icon::from_path(custom_path.clone())
+ .size(*icon_size)
+ .color(icon_color),
+ )
+ })
+ .into_any_element()
+ } else if let Some(icon_name) = icon {
h_flex()
.gap_1p5()
.when(
@@ -514,7 +514,7 @@ impl RenderOnce for Switch {
self.tab_index.filter(|_| !self.disabled),
|this, tab_index| {
this.tab_index(tab_index)
- .focus(|mut style| {
+ .focus_visible(|mut style| {
style.border_color = Some(cx.theme().colors().border_focused);
style
})
@@ -159,7 +159,7 @@ impl RenderOnce for TreeViewItem {
.rounded_sm()
.border_1()
.border_color(transparent_border)
- .focus(|s| s.border_color(focused_border))
+ .focus_visible(|s| s.border_color(focused_border))
.when(self.selected, |this| {
this.border_color(selected_border).bg(selected_bg)
})
@@ -338,7 +338,7 @@ impl<T: NumberFieldType> RenderOnce for NumberField<T> {
.border_color(border_color)
.bg(bg_color)
.hover(|s| s.bg(hover_bg_color))
- .focus(|s| s.border_color(focus_border_color).bg(hover_bg_color))
+ .focus_visible(|s| s.border_color(focus_border_color).bg(hover_bg_color))
.child(Icon::new(icon).size(IconSize::Small))
};
@@ -369,7 +369,6 @@ impl<T: NumberFieldType> RenderOnce for NumberField<T> {
let new_value = value.saturating_sub(step);
let new_value = if new_value < min { min } else { new_value };
on_change(&new_value, window, cx);
- window.focus_prev();
}
};
@@ -15,7 +15,7 @@ use std::{
sync::LazyLock,
};
-use crate::rel_path::RelPath;
+use crate::{rel_path::RelPath, shell::ShellKind};
static HOME_DIR: OnceLock<PathBuf> = OnceLock::new();
@@ -84,9 +84,7 @@ pub trait PathExt {
fn multiple_extensions(&self) -> Option<String>;
/// Try to make a shell-safe representation of the path.
- ///
- /// For Unix, the path is escaped to be safe for POSIX shells
- fn try_shell_safe(&self) -> anyhow::Result<String>;
+ fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result<String>;
}
impl<T: AsRef<Path>> PathExt for T {
@@ -164,24 +162,16 @@ impl<T: AsRef<Path>> PathExt for T {
Some(parts.into_iter().join("."))
}
- fn try_shell_safe(&self) -> anyhow::Result<String> {
- #[cfg(target_os = "windows")]
- {
- Ok(self.as_ref().to_string_lossy().to_string())
- }
-
- #[cfg(not(target_os = "windows"))]
- {
- let path_str = self
- .as_ref()
- .to_str()
- .with_context(|| "Path contains invalid UTF-8")?;
-
- // As of writing, this can only be fail if the path contains a null byte, which shouldn't be possible
- // but shlex has annotated the error as #[non_exhaustive] so we can't make it a compile error if other
- // errors are introduced in the future :(
- Ok(shlex::try_quote(path_str)?.into_owned())
- }
+ fn try_shell_safe(&self, shell_kind: ShellKind) -> anyhow::Result<String> {
+ let path_str = self
+ .as_ref()
+ .to_str()
+ .with_context(|| "Path contains invalid UTF-8")?;
+ shell_kind
+ .try_quote(path_str)
+ .as_deref()
+ .map(ToOwned::to_owned)
+ .context("Failed to quote path")
}
}
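// A sketch of the reworked `try_shell_safe`, which now quotes for an explicit
// shell kind; the module paths and the sample executable path are assumptions.
use std::path::Path;
use util::paths::PathExt as _;
use util::shell::ShellKind;

fn quote_for_powershell() -> anyhow::Result<String> {
    // PowerShell escaping uses a backtick for embedded quotes rather than a backslash.
    Path::new(r"C:\Program Files\Zed\zed.exe").try_shell_safe(ShellKind::PowerShell)
}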
@@ -1,6 +1,53 @@
+use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::{borrow::Cow, fmt, path::Path, sync::LazyLock};
+/// Shell configuration to open the terminal with.
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema, Hash)]
+#[serde(rename_all = "snake_case")]
+pub enum Shell {
+ /// Use the system's default shell, as configured in /etc/passwd.
+ #[default]
+ System,
+ /// Use a specific program with no arguments.
+ Program(String),
+ /// Use a specific program with arguments.
+ WithArguments {
+ /// The program to run.
+ program: String,
+ /// The arguments to pass to the program.
+ args: Vec<String>,
+ /// An optional string to override the title of the terminal tab.
+ title_override: Option<String>,
+ },
+}
+
+impl Shell {
+ pub fn program(&self) -> String {
+ match self {
+ Shell::Program(program) => program.clone(),
+ Shell::WithArguments { program, .. } => program.clone(),
+ Shell::System => get_system_shell(),
+ }
+ }
+
+ pub fn program_and_args(&self) -> (String, &[String]) {
+ match self {
+ Shell::Program(program) => (program.clone(), &[]),
+ Shell::WithArguments { program, args, .. } => (program.clone(), args),
+ Shell::System => (get_system_shell(), &[]),
+ }
+ }
+
+ pub fn shell_kind(&self, is_windows: bool) -> ShellKind {
+ match self {
+ Shell::Program(program) => ShellKind::new(program, is_windows),
+ Shell::WithArguments { program, .. } => ShellKind::new(program, is_windows),
+ Shell::System => ShellKind::system(),
+ }
+ }
+}
+
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ShellKind {
#[default]
@@ -185,32 +232,20 @@ impl ShellKind {
.unwrap_or_else(|| program.as_os_str())
.to_string_lossy();
- if program == "powershell" || program == "pwsh" {
- ShellKind::PowerShell
- } else if program == "cmd" {
- ShellKind::Cmd
- } else if program == "nu" {
- ShellKind::Nushell
- } else if program == "fish" {
- ShellKind::Fish
- } else if program == "csh" {
- ShellKind::Csh
- } else if program == "tcsh" {
- ShellKind::Tcsh
- } else if program == "rc" {
- ShellKind::Rc
- } else if program == "xonsh" {
- ShellKind::Xonsh
- } else if program == "sh" || program == "bash" {
- ShellKind::Posix
- } else {
- if is_windows {
- ShellKind::PowerShell
- } else {
- // Some other shell detected, the user might install and use a
- // unix-like shell.
- ShellKind::Posix
- }
+ match &*program {
+ "powershell" | "pwsh" => ShellKind::PowerShell,
+ "cmd" => ShellKind::Cmd,
+ "nu" => ShellKind::Nushell,
+ "fish" => ShellKind::Fish,
+ "csh" => ShellKind::Csh,
+ "tcsh" => ShellKind::Tcsh,
+ "rc" => ShellKind::Rc,
+ "xonsh" => ShellKind::Xonsh,
+ "sh" | "bash" | "zsh" => ShellKind::Posix,
+ _ if is_windows => ShellKind::PowerShell,
+ // Some other shell was detected; the user might have installed and be
+ // using a unix-like shell.
+ _ => ShellKind::Posix,
}
}
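// A sketch of resolving a ShellKind from the new `Shell` settings enum above;
// the configured program and arguments are placeholder values.
fn kind_for_configured_shell() -> ShellKind {
    let shell = Shell::WithArguments {
        program: "nu".to_string(),
        args: vec!["--login".to_string()],
        title_override: None,
    };
    // "nu" maps to Nushell; unknown programs fall back to PowerShell on Windows
    // and Posix elsewhere.
    shell.shell_kind(cfg!(windows))
}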
@@ -363,14 +398,27 @@ impl ShellKind {
match self {
ShellKind::PowerShell => Some('&'),
ShellKind::Nushell => Some('^'),
- _ => None,
+ ShellKind::Posix
+ | ShellKind::Csh
+ | ShellKind::Tcsh
+ | ShellKind::Rc
+ | ShellKind::Fish
+ | ShellKind::Cmd
+ | ShellKind::Xonsh => None,
}
}
pub const fn sequential_commands_separator(&self) -> char {
match self {
ShellKind::Cmd => '&',
- _ => ';',
+ ShellKind::Posix
+ | ShellKind::Csh
+ | ShellKind::Tcsh
+ | ShellKind::Rc
+ | ShellKind::Fish
+ | ShellKind::PowerShell
+ | ShellKind::Nushell
+ | ShellKind::Xonsh => ';',
}
}
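// A sketch built on the separator accessor above: joining two commands with the
// shell-appropriate sequencing character; the command literals are placeholders.
fn chain_commands(kind: ShellKind, first: &str, second: &str) -> String {
    // Cmd sequences commands with '&'; every other supported shell uses ';'.
    let separator = kind.sequential_commands_separator();
    format!("{first}{separator} {second}")
}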
@@ -378,29 +426,103 @@ impl ShellKind {
shlex::try_quote(arg).ok().map(|arg| match self {
// If we are running in PowerShell, we want to take extra care when escaping strings.
// In particular, we want to escape strings with a backtick (`) rather than a backslash (\).
- // TODO double escaping backslashes is not necessary in PowerShell and probably CMD
- ShellKind::PowerShell => Cow::Owned(arg.replace("\\\"", "`\"")),
- _ => arg,
+ ShellKind::PowerShell => Cow::Owned(arg.replace("\\\"", "`\"").replace("\\\\", "\\")),
+ ShellKind::Cmd => Cow::Owned(arg.replace("\\\\", "\\")),
+ ShellKind::Posix
+ | ShellKind::Csh
+ | ShellKind::Tcsh
+ | ShellKind::Rc
+ | ShellKind::Fish
+ | ShellKind::Nushell
+ | ShellKind::Xonsh => arg,
})
}
+ pub fn split(&self, input: &str) -> Option<Vec<String>> {
+ shlex::split(input)
+ }
+
pub const fn activate_keyword(&self) -> &'static str {
match self {
ShellKind::Cmd => "",
ShellKind::Nushell => "overlay use",
ShellKind::PowerShell => ".",
- ShellKind::Fish => "source",
- ShellKind::Csh => "source",
- ShellKind::Tcsh => "source",
- ShellKind::Posix | ShellKind::Rc => "source",
- ShellKind::Xonsh => "source",
+ ShellKind::Fish
+ | ShellKind::Csh
+ | ShellKind::Tcsh
+ | ShellKind::Posix
+ | ShellKind::Rc
+ | ShellKind::Xonsh => "source",
}
}
pub const fn clear_screen_command(&self) -> &'static str {
match self {
ShellKind::Cmd => "cls",
- _ => "clear",
+ ShellKind::Posix
+ | ShellKind::Csh
+ | ShellKind::Tcsh
+ | ShellKind::Rc
+ | ShellKind::Fish
+ | ShellKind::PowerShell
+ | ShellKind::Nushell
+ | ShellKind::Xonsh => "clear",
+ }
+ }
+
+ #[cfg(windows)]
+ /// We do not want to escape arguments if we are using CMD as our shell.
+ /// If we do, we end up with too many quotes/escaped quotes for CMD to handle.
+ pub const fn tty_escape_args(&self) -> bool {
+ match self {
+ ShellKind::Cmd => false,
+ ShellKind::Posix
+ | ShellKind::Csh
+ | ShellKind::Tcsh
+ | ShellKind::Rc
+ | ShellKind::Fish
+ | ShellKind::PowerShell
+ | ShellKind::Nushell
+ | ShellKind::Xonsh => true,
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // Examples
+ // WSL
+ // wsl.exe --distribution NixOS --cd /home/user -- /usr/bin/zsh -c "echo hello"
+ // wsl.exe --distribution NixOS --cd /home/user -- /usr/bin/zsh -c "\"echo hello\"" | grep hello"
+ // wsl.exe --distribution NixOS --cd ~ env RUST_LOG=info,remote=debug .zed_wsl_server/zed-remote-server-dev-build proxy --identifier dev-workspace-53
+ // PowerShell from Nushell
+ // nu -c overlay use "C:\Users\kubko\dev\python\39007\tests\.venv\Scripts\activate.nu"; ^"C:\Program Files\PowerShell\7\pwsh.exe" -C "C:\Users\kubko\dev\python\39007\tests\.venv\Scripts\python.exe -m pytest \"test_foo.py::test_foo\""
+ // PowerShell from CMD
+ // cmd /C \" \"C:\\\\Users\\\\kubko\\\\dev\\\\python\\\\39007\\\\tests\\\\.venv\\\\Scripts\\\\activate.bat\"& \"C:\\\\Program Files\\\\PowerShell\\\\7\\\\pwsh.exe\" -C \"C:\\\\Users\\\\kubko\\\\dev\\\\python\\\\39007\\\\tests\\\\.venv\\\\Scripts\\\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"\"
+
+ #[test]
+ fn test_try_quote_powershell() {
+ let shell_kind = ShellKind::PowerShell;
+ assert_eq!(
+ shell_kind
+ .try_quote("C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \"test_foo.py::test_foo\"")
+ .unwrap()
+ .into_owned(),
+ "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest `\"test_foo.py::test_foo`\"\"".to_string()
+ );
+ }
+
+ #[test]
+ fn test_try_quote_cmd() {
+ let shell_kind = ShellKind::Cmd;
+ assert_eq!(
+ shell_kind
+ .try_quote("C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \"test_foo.py::test_foo\"")
+ .unwrap()
+ .into_owned(),
+ "\"C:\\Users\\johndoe\\dev\\python\\39007\\tests\\.venv\\Scripts\\python.exe -m pytest \\\"test_foo.py::test_foo\\\"\"".to_string()
+ );
+ }
+}
@@ -1,8 +1,5 @@
-use util::shell::get_system_shell;
-
-use crate::Shell;
-
-pub use util::shell::ShellKind;
+use crate::shell::get_system_shell;
+use crate::shell::{Shell, ShellKind};
/// ShellBuilder is used to turn a user-requested task into a
/// program that can be executed by the shell.
@@ -35,8 +35,8 @@ async fn capture_unix(
use std::os::unix::process::CommandExt;
use std::process::Stdio;
- let zed_path = super::get_shell_safe_zed_path()?;
let shell_kind = ShellKind::new(shell_path, false);
+ let zed_path = super::get_shell_safe_zed_path(shell_kind)?;
let mut command_string = String::new();
let mut command = std::process::Command::new(shell_path);
@@ -9,6 +9,7 @@ pub mod rel_path;
pub mod schemars;
pub mod serde;
pub mod shell;
+pub mod shell_builder;
pub mod shell_env;
pub mod size;
#[cfg(any(test, feature = "test-support"))]
@@ -295,12 +296,12 @@ fn load_shell_from_passwd() -> Result<()> {
}
/// Returns a shell escaped path for the current zed executable
-pub fn get_shell_safe_zed_path() -> anyhow::Result<String> {
+pub fn get_shell_safe_zed_path(shell_kind: shell::ShellKind) -> anyhow::Result<String> {
let zed_path =
std::env::current_exe().context("Failed to determine current zed executable path.")?;
zed_path
- .try_shell_safe()
+ .try_shell_safe(shell_kind)
.context("Failed to shell-escape Zed executable path.")
}
@@ -11,9 +11,8 @@ use anyhow::Result;
use client::{Client, proto};
use futures::{StreamExt, channel::mpsc};
use gpui::{
- Action, AnyElement, AnyView, App, AppContext, Context, Entity, EntityId, EventEmitter,
- FocusHandle, Focusable, Font, HighlightStyle, Pixels, Point, Render, SharedString, Task,
- WeakEntity, Window,
+ Action, AnyElement, AnyView, App, Context, Entity, EntityId, EventEmitter, FocusHandle,
+ Focusable, Font, HighlightStyle, Pixels, Point, Render, SharedString, Task, WeakEntity, Window,
};
use project::{Project, ProjectEntryId, ProjectPath};
pub use settings::{
@@ -218,11 +217,11 @@ pub trait Item: Focusable + EventEmitter<Self::Event> + Render + Sized {
_workspace_id: Option<WorkspaceId>,
_window: &mut Window,
_: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(None)
+ None
}
fn is_dirty(&self, _: &App) -> bool {
false
@@ -423,7 +422,7 @@ pub trait ItemHandle: 'static + Send {
workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut App,
- ) -> Task<Option<Box<dyn ItemHandle>>>;
+ ) -> Option<Box<dyn ItemHandle>>;
fn added_to_pane(
&self,
workspace: &mut Workspace,
@@ -636,12 +635,9 @@ impl<T: Item> ItemHandle for Entity<T> {
workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut App,
- ) -> Task<Option<Box<dyn ItemHandle>>> {
- let task = self.update(cx, |item, cx| item.clone_on_split(workspace_id, window, cx));
- cx.background_spawn(async move {
- task.await
- .map(|handle| Box::new(handle) as Box<dyn ItemHandle>)
- })
+ ) -> Option<Box<dyn ItemHandle>> {
+ self.update(cx, |item, cx| item.clone_on_split(workspace_id, window, cx))
+ .map(|handle| Box::new(handle) as Box<dyn ItemHandle>)
}
fn added_to_pane(
@@ -1508,11 +1504,11 @@ pub mod test {
_workspace_id: Option<WorkspaceId>,
_: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| Self {
+ Some(cx.new(|cx| Self {
state: self.state.clone(),
label: self.label.clone(),
save_count: self.save_count,
@@ -1529,7 +1525,7 @@ pub mod test {
workspace_id: self.workspace_id,
focus_handle: cx.focus_handle(),
serialize: None,
- })))
+ }))
}
fn is_dirty(&self, _: &App) -> bool {
@@ -3292,18 +3292,11 @@ impl Pane {
else {
return;
};
- let task = item.clone_on_split(database_id, window, cx);
- let to_pane = to_pane.downgrade();
- cx.spawn_in(window, async move |_, cx| {
- if let Some(item) = task.await {
- to_pane
- .update_in(cx, |pane, window, cx| {
- pane.add_item(item, true, true, None, window, cx)
- })
- .ok();
- }
- })
- .detach();
+ if let Some(item) = item.clone_on_split(database_id, window, cx) {
+ to_pane.update(cx, |pane, cx| {
+ pane.add_item(item, true, true, None, window, cx);
+ })
+ }
} else {
move_item(&from_pane, &to_pane, item_id, ix, true, window, cx);
}
@@ -6,7 +6,7 @@ use call::{RemoteVideoTrack, RemoteVideoTrackView, Room};
use client::{User, proto::PeerId};
use gpui::{
AppContext as _, Entity, EventEmitter, FocusHandle, Focusable, InteractiveElement,
- ParentElement, Render, SharedString, Styled, Task, div,
+ ParentElement, Render, SharedString, Styled, div,
};
use std::sync::Arc;
use ui::{Icon, IconName, prelude::*};
@@ -114,14 +114,14 @@ impl Item for SharedScreen {
_workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>> {
- Task::ready(Some(cx.new(|cx| Self {
+ ) -> Option<Entity<Self>> {
+ Some(cx.new(|cx| Self {
view: self.view.update(cx, |view, cx| view.clone(window, cx)),
peer_id: self.peer_id,
user: self.user.clone(),
nav_history: Default::default(),
focus: cx.focus_handle(),
- })))
+ }))
}
fn to_item_events(event: &Self::Event, mut f: impl FnMut(ItemEvent)) {
@@ -1,7 +1,5 @@
#![allow(unused, dead_code)]
-use gpui::{
- AnyElement, App, Entity, EventEmitter, FocusHandle, Focusable, Hsla, Task, actions, hsla,
-};
+use gpui::{AnyElement, App, Entity, EventEmitter, FocusHandle, Focusable, Hsla, actions, hsla};
use strum::IntoEnumIterator;
use theme::all_theme_colors;
use ui::{
@@ -102,11 +100,11 @@ impl Item for ThemePreview {
_workspace_id: Option<crate::WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Self>>>
+ ) -> Option<Entity<Self>>
where
Self: Sized,
{
- Task::ready(Some(cx.new(|cx| Self::new(window, cx))))
+ Some(cx.new(|cx| Self::new(window, cx)))
}
}
@@ -3627,8 +3627,7 @@ impl Workspace {
if let Some(pane) = panes.get(action.0).map(|p| (*p).clone()) {
window.focus(&pane.focus_handle(cx));
} else {
- self.split_and_clone(self.active_pane.clone(), SplitDirection::Right, window, cx)
- .detach();
+ self.split_and_clone(self.active_pane.clone(), SplitDirection::Right, window, cx);
}
}
@@ -3995,8 +3994,7 @@ impl Workspace {
clone_active_item,
} => {
if *clone_active_item {
- self.split_and_clone(pane.clone(), *direction, window, cx)
- .detach();
+ self.split_and_clone(pane.clone(), *direction, window, cx);
} else {
self.split_and_move(pane.clone(), *direction, window, cx);
}
@@ -4137,27 +4135,21 @@ impl Workspace {
direction: SplitDirection,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<Entity<Pane>>> {
- let Some(item) = pane.read(cx).active_item() else {
- return Task::ready(None);
- };
- let task = item.clone_on_split(self.database_id(), window, cx);
- cx.spawn_in(window, async move |this, cx| {
- if let Some(clone) = task.await {
- this.update_in(cx, |this, window, cx| {
- let new_pane = this.add_pane(window, cx);
- new_pane.update(cx, |pane, cx| {
- pane.add_item(clone, true, true, None, window, cx)
- });
- this.center.split(&pane, &new_pane, direction).unwrap();
- cx.notify();
- new_pane
- })
- .ok()
+ ) -> Option<Entity<Pane>> {
+ let item = pane.read(cx).active_item()?;
+ let maybe_pane_handle =
+ if let Some(clone) = item.clone_on_split(self.database_id(), window, cx) {
+ let new_pane = self.add_pane(window, cx);
+ new_pane.update(cx, |pane, cx| {
+ pane.add_item(clone, true, true, None, window, cx)
+ });
+ self.center.split(&pane, &new_pane, direction).unwrap();
+ cx.notify();
+ Some(new_pane)
} else {
None
- }
- })
+ };
+ maybe_pane_handle
}
pub fn join_all_panes(&mut self, window: &mut Window, cx: &mut Context<Self>) {
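// A sketch of the now-synchronous `split_and_clone` introduced above: the new
// pane comes back directly instead of through a Task; the helper name is
// illustrative and assumes the usual Workspace/Window/Context parameters.
fn split_active_pane_right(
    workspace: &mut Workspace,
    window: &mut Window,
    cx: &mut Context<Workspace>,
) {
    let pane = workspace.active_pane().clone();
    if let Some(new_pane) = workspace.split_and_clone(pane, SplitDirection::Right, window, cx) {
        // The cloned active item has already been added to the new pane.
        window.focus(&new_pane.focus_handle(cx));
    }
}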
@@ -8191,27 +8183,19 @@ pub fn clone_active_item(
let Some(active_item) = source.read(cx).active_item() else {
return;
};
- let destination = destination.downgrade();
- let task = active_item.clone_on_split(workspace_id, window, cx);
- window
- .spawn(cx, async move |cx| {
- let Some(clone) = task.await else {
- return;
- };
- destination
- .update_in(cx, |target_pane, window, cx| {
- target_pane.add_item(
- clone,
- focus_destination,
- focus_destination,
- Some(target_pane.items_len()),
- window,
- cx,
- );
- })
- .log_err();
- })
- .detach();
+ destination.update(cx, |target_pane, cx| {
+ let Some(clone) = active_item.clone_on_split(workspace_id, window, cx) else {
+ return;
+ };
+ target_pane.add_item(
+ clone,
+ focus_destination,
+ focus_destination,
+ Some(target_pane.items_len()),
+ window,
+ cx,
+ );
+ });
}
#[derive(Debug)]
@@ -8718,24 +8702,25 @@ mod tests {
cx,
);
- let right_pane =
- workspace.split_and_clone(left_pane.clone(), SplitDirection::Right, window, cx);
+ let right_pane = workspace
+ .split_and_clone(left_pane.clone(), SplitDirection::Right, window, cx)
+ .unwrap();
- let boxed_clone = single_entry_items[1].boxed_clone();
- let right_pane = window.spawn(cx, async move |cx| {
- right_pane.await.inspect(|right_pane| {
- right_pane
- .update_in(cx, |pane, window, cx| {
- pane.add_item(boxed_clone, true, true, None, window, cx);
- pane.add_item(Box::new(item_3_4.clone()), true, true, None, window, cx);
- })
- .unwrap();
- })
+ right_pane.update(cx, |pane, cx| {
+ pane.add_item(
+ single_entry_items[1].boxed_clone(),
+ true,
+ true,
+ None,
+ window,
+ cx,
+ );
+ pane.add_item(Box::new(item_3_4.clone()), true, true, None, window, cx);
});
(left_pane, right_pane)
});
- let right_pane = right_pane.await.unwrap();
+
cx.focus(&right_pane);
let mut close = right_pane.update_in(cx, |pane, window, cx| {
@@ -10552,10 +10537,7 @@ mod tests {
window,
cx,
);
- });
- cx.run_until_parked();
- workspace.update(cx, |workspace, cx| {
assert_eq!(workspace.panes.len(), 3, "Two new panes were created");
for pane in workspace.panes() {
assert_eq!(
@@ -18,7 +18,6 @@ use breadcrumbs::Breadcrumbs;
use client::zed_urls;
use collections::VecDeque;
use debugger_ui::debugger_panel::DebugPanel;
-use editor::ProposedChangesEditorToolbar;
use editor::{Editor, MultiBuffer};
use extension_host::ExtensionStore;
use feature_flags::{FeatureFlagAppExt, PanicFeatureFlag};
@@ -1035,8 +1034,6 @@ fn initialize_pane(
)
});
toolbar.add_item(buffer_search_bar.clone(), window, cx);
- let proposed_change_bar = cx.new(|_| ProposedChangesEditorToolbar::new());
- toolbar.add_item(proposed_change_bar, window, cx);
let quick_action_bar =
cx.new(|cx| QuickActionBar::new(buffer_search_bar, workspace, cx));
toolbar.add_item(quick_action_bar, window, cx);
@@ -2839,16 +2836,14 @@ mod tests {
});
// Split the pane with the first entry, then open the second entry again.
- let (task1, task2) = window
+ window
.update(cx, |w, window, cx| {
- (
- w.split_and_clone(w.active_pane().clone(), SplitDirection::Right, window, cx),
- w.open_path(file2.clone(), None, true, window, cx),
- )
+ w.split_and_clone(w.active_pane().clone(), SplitDirection::Right, window, cx);
+ w.open_path(file2.clone(), None, true, window, cx)
})
+ .unwrap()
+ .await
.unwrap();
- task1.await.unwrap();
- task2.await.unwrap();
window
.read_with(cx, |w, cx| {
@@ -3471,13 +3466,7 @@ mod tests {
SplitDirection::Right,
window,
cx,
- )
- })
- .unwrap()
- .await
- .unwrap();
- window
- .update(cx, |workspace, window, cx| {
+ );
workspace.open_path(
(worktree.read(cx).id(), rel_path("the-new-name.rs")),
None,
@@ -720,7 +720,7 @@ impl Item for ComponentPreview {
_workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut Context<Self>,
- ) -> Task<Option<gpui::Entity<Self>>>
+ ) -> Option<gpui::Entity<Self>>
where
Self: Sized,
{
@@ -742,13 +742,13 @@ impl Item for ComponentPreview {
cx,
);
- Task::ready(match self_result {
+ match self_result {
Ok(preview) => Some(cx.new(|_cx| preview)),
Err(e) => {
log::error!("Failed to clone component preview: {}", e);
None
}
- })
+ }
}
fn to_item_events(event: &Self::Event, mut f: impl FnMut(workspace::item::ItemEvent)) {
@@ -70,6 +70,7 @@ pub enum ExtensionCategoryFilter {
Grammars,
LanguageServers,
ContextServers,
+ AgentServers,
SlashCommands,
IndexedDocsProviders,
Snippets,
@@ -34,7 +34,7 @@ For example, if you have Prettier installed and on your `PATH`, you can use it t
Zed supports JSX syntax highlighting out of the box.
-In JSX strings, the [`tailwindcss-language-server`](./tailwindcss.md) is used provide autocompletion for Tailwind CSS classes.
+In JSX strings, the [`tailwindcss-language-server`](./tailwindcss.md) is used to provide autocompletion for Tailwind CSS classes.
## JSDoc
@@ -19,3 +19,33 @@
--------------------------------------------------------------------------------
{{/each}}
+
+#### MIT License
+
+##### Used by:
+
+* [Windows Terminal]( https://github.com/microsoft/terminal )
+
+Copyright (c) Microsoft Corporation. All rights reserved.
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
+--------------------------------------------------------------------------------