Detailed changes
@@ -39,3 +39,21 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true
+
+ - type: textarea
+ attributes:
+ label: If applicable, attach your `Zed.log` file to this issue.
+ description: |
+ From the command palette, run `zed: open log` to see the last 1000 lines.
+ Or run `zed: reveal log in file manager` to reveal the log file itself.
+ value: |
+ <details><summary>Zed.log</summary>
+
+ <!-- Paste your log inside the code block. -->
+ ```log
+
+ ```
+
+ </details>
+ validations:
+ required: false
@@ -33,3 +33,21 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true
+
+ - type: textarea
+ attributes:
+ label: If applicable, attach your `Zed.log` file to this issue.
+ description: |
+ From the command palette, run `zed: open log` to see the last 1000 lines.
+ Or run `zed: reveal log in file manager` to reveal the log file itself.
+ value: |
+ <details><summary>Zed.log</summary>
+
+ <!-- Paste your log inside the code block. -->
+ ```log
+
+ ```
+
+ </details>
+ validations:
+ required: false
@@ -33,3 +33,21 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true
+
+ - type: textarea
+ attributes:
+ label: If applicable, attach your `Zed.log` file to this issue.
+ description: |
+ From the command palette, run `zed: open log` to see the last 1000 lines.
+ Or run `zed: reveal log in file manager` to reveal the log file itself.
+ value: |
+ <details><summary>Zed.log</summary>
+
+ <!-- Paste your log inside the code block. -->
+ ```log
+
+ ```
+
+ </details>
+ validations:
+ required: false
@@ -33,3 +33,21 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true
+
+ - type: textarea
+ attributes:
+ label: If applicable, attach your `Zed.log` file to this issue.
+ description: |
+ From the command palette, run `zed: open log` to see the last 1000 lines.
+ Or run `zed: reveal log in file manager` to reveal the log file itself.
+ value: |
+ <details><summary>Zed.log</summary>
+
+ <!-- Paste your log inside the code block. -->
+ ```log
+
+ ```
+
+ </details>
+ validations:
+ required: false
@@ -56,3 +56,20 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true
+ - type: textarea
+ attributes:
+ label: If applicable, attach your `Zed.log` file to this issue.
+ description: |
+ From the command palette, run `zed: open log` to see the last 1000 lines.
+ Or run `zed: reveal log in file manager` to reveal the log file itself.
+ value: |
+ <details><summary>Zed.log</summary>
+
+ <!-- Paste your log inside the code block. -->
+ ```log
+
+ ```
+
+ </details>
+ validations:
+ required: false
@@ -10,11 +10,14 @@ jobs:
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- - name: after_release::rebuild_releases_page
- run: 'curl https://zed.dev/api/revalidate-releases -H "Authorization: Bearer ${RELEASE_NOTES_API_TOKEN}"'
+ - name: after_release::rebuild_releases_page::refresh_cloud_releases
+ run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name }}
+ shell: bash -euxo pipefail {0}
+ - name: after_release::rebuild_releases_page::redeploy_zed_dev
+ run: npm exec --yes -- vercel@37 --token="$VERCEL_TOKEN" --scope zed-industries redeploy https://zed.dev
shell: bash -euxo pipefail {0}
env:
- RELEASE_NOTES_API_TOKEN: ${{ secrets.RELEASE_NOTES_API_TOKEN }}
+ VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }}
post_to_discord:
needs:
- rebuild_releases_page
@@ -48,7 +51,7 @@ jobs:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_RELEASE_NOTES }}
content: ${{ steps.get-content.outputs.string }}
publish_winget:
- runs-on: namespace-profile-2x4-ubuntu-2404
+ runs-on: self-32vcpu-windows-2022
steps:
- id: set-package-name
name: after_release::publish_winget::set_package_name
@@ -67,3 +70,19 @@ jobs:
identifier: ${{ steps.set-package-name.outputs.PACKAGE_NAME }}
max-versions-to-keep: 5
token: ${{ secrets.WINGET_TOKEN }}
+ create_sentry_release:
+ if: github.repository_owner == 'zed-industries'
+ runs-on: namespace-profile-2x4-ubuntu-2404
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: release::create_sentry_release
+ uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
+ with:
+ environment: production
+ env:
+ SENTRY_ORG: zed-dev
+ SENTRY_PROJECT: zed
+ SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
@@ -475,14 +475,6 @@ jobs:
shell: bash -euxo pipefail {0}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- - name: release::create_sentry_release
- uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
- with:
- environment: production
- env:
- SENTRY_ORG: zed-dev
- SENTRY_PROJECT: zed
- SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
@@ -32,6 +32,7 @@ dependencies = [
"settings",
"smol",
"task",
+ "telemetry",
"tempfile",
"terminal",
"ui",
@@ -39,6 +40,7 @@ dependencies = [
"util",
"uuid",
"watch",
+ "zlog",
]
[[package]]
@@ -79,6 +81,7 @@ dependencies = [
"rand 0.9.2",
"serde_json",
"settings",
+ "telemetry",
"text",
"util",
"watch",
@@ -247,7 +250,6 @@ dependencies = [
"acp_tools",
"action_log",
"agent-client-protocol",
- "agent_settings",
"anyhow",
"async-trait",
"client",
@@ -3198,7 +3200,9 @@ dependencies = [
"indoc",
"ordered-float 2.10.1",
"rustc-hash 2.1.1",
+ "schemars 1.0.4",
"serde",
+ "serde_json",
"strum 0.27.2",
]
@@ -6404,7 +6408,7 @@ dependencies = [
"ignore",
"libc",
"log",
- "notify 8.0.0",
+ "notify 8.2.0",
"objc",
"parking_lot",
"paths",
@@ -9027,6 +9031,7 @@ dependencies = [
"settings",
"smol",
"task",
+ "terminal",
"text",
"theme",
"toml 0.8.23",
@@ -9674,6 +9679,7 @@ dependencies = [
"settings",
"theme",
"ui",
+ "urlencoding",
"util",
"workspace",
]
@@ -10408,11 +10414,10 @@ dependencies = [
[[package]]
name = "notify"
-version = "8.0.0"
-source = "git+https://github.com/zed-industries/notify.git?rev=bbb9ea5ae52b253e095737847e367c30653a2e96#bbb9ea5ae52b253e095737847e367c30653a2e96"
+version = "8.2.0"
+source = "git+https://github.com/zed-industries/notify.git?rev=b4588b2e5aee68f4c0e100f140e808cbce7b1419#b4588b2e5aee68f4c0e100f140e808cbce7b1419"
dependencies = [
"bitflags 2.9.4",
- "filetime",
"fsevent-sys 4.1.0",
"inotify 0.11.0",
"kqueue",
@@ -10421,7 +10426,7 @@ dependencies = [
"mio 1.1.0",
"notify-types",
"walkdir",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
]
[[package]]
@@ -10438,7 +10443,7 @@ dependencies = [
[[package]]
name = "notify-types"
version = "2.0.0"
-source = "git+https://github.com/zed-industries/notify.git?rev=bbb9ea5ae52b253e095737847e367c30653a2e96#bbb9ea5ae52b253e095737847e367c30653a2e96"
+source = "git+https://github.com/zed-industries/notify.git?rev=b4588b2e5aee68f4c0e100f140e808cbce7b1419#b4588b2e5aee68f4c0e100f140e808cbce7b1419"
[[package]]
name = "now"
@@ -13970,6 +13975,7 @@ dependencies = [
"gpui",
"gpui_tokio",
"http_client",
+ "image",
"json_schema_store",
"language",
"language_extension",
@@ -16211,7 +16217,6 @@ dependencies = [
"log",
"menu",
"picker",
- "project",
"reqwest_client",
"rust-embed",
"settings",
@@ -16221,7 +16226,6 @@ dependencies = [
"theme",
"title_bar",
"ui",
- "workspace",
]
[[package]]
@@ -18807,7 +18811,6 @@ dependencies = [
name = "vim_mode_setting"
version = "0.1.0"
dependencies = [
- "gpui",
"settings",
]
@@ -21675,10 +21678,10 @@ dependencies = [
"language_model",
"log",
"lsp",
+ "open_ai",
"pretty_assertions",
"project",
"release_channel",
- "schemars 1.0.4",
"serde",
"serde_json",
"settings",
@@ -21687,6 +21690,7 @@ dependencies = [
"uuid",
"workspace",
"worktree",
+ "zlog",
]
[[package]]
@@ -773,8 +773,8 @@ features = [
]
[patch.crates-io]
-notify = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
-notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
+notify = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
+notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" }
[profile.dev]
@@ -735,6 +735,20 @@
"tab": "editor::ComposeCompletion"
}
},
+ {
+ "context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
+ "use_key_equivalents": true,
+ "bindings": {
+ "tab": "editor::NextSnippetTabstop"
+ }
+ },
+ {
+ "context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
+ "use_key_equivalents": true,
+ "bindings": {
+ "shift-tab": "editor::PreviousSnippetTabstop"
+ }
+ },
// Bindings for accepting edit predictions
//
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
@@ -805,6 +805,20 @@
"tab": "editor::ComposeCompletion"
}
},
+ {
+ "context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
+ "use_key_equivalents": true,
+ "bindings": {
+ "tab": "editor::NextSnippetTabstop"
+ }
+ },
+ {
+ "context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
+ "use_key_equivalents": true,
+ "bindings": {
+ "shift-tab": "editor::PreviousSnippetTabstop"
+ }
+ },
{
"context": "Editor && edit_prediction",
"bindings": {
@@ -739,6 +739,20 @@
"tab": "editor::ComposeCompletion"
}
},
+ {
+ "context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
+ "use_key_equivalents": true,
+ "bindings": {
+ "tab": "editor::NextSnippetTabstop"
+ }
+ },
+ {
+ "context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
+ "use_key_equivalents": true,
+ "bindings": {
+ "shift-tab": "editor::PreviousSnippetTabstop"
+ }
+ },
// Bindings for accepting edit predictions
//
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
@@ -1493,7 +1493,11 @@
// in your project's settings, rather than globally.
"directories": [".env", "env", ".venv", "venv"],
// Can also be `csh`, `fish`, `nushell` and `power_shell`
- "activate_script": "default"
+ "activate_script": "default",
+ // Preferred Conda manager to use when activating Conda environments.
+ // Values: "auto", "conda", "mamba", "micromamba"
+ // Default: "auto"
+ "conda_manager": "auto"
}
},
"toolbar": {
@@ -39,6 +39,7 @@ serde_json.workspace = true
settings.workspace = true
smol.workspace = true
task.workspace = true
+telemetry.workspace = true
terminal.workspace = true
ui.workspace = true
url.workspace = true
@@ -56,3 +57,4 @@ rand.workspace = true
tempfile.workspace = true
util.workspace = true
settings.workspace = true
+zlog.workspace = true
@@ -15,7 +15,7 @@ use settings::Settings as _;
use task::{Shell, ShellBuilder};
pub use terminal::*;
-use action_log::ActionLog;
+use action_log::{ActionLog, ActionLogTelemetry};
use agent_client_protocol::{self as acp};
use anyhow::{Context as _, Result, anyhow};
use editor::Bias;
@@ -820,6 +820,15 @@ pub struct AcpThread {
pending_terminal_exit: HashMap<acp::TerminalId, acp::TerminalExitStatus>,
}
+impl From<&AcpThread> for ActionLogTelemetry {
+ fn from(value: &AcpThread) -> Self {
+ Self {
+ agent_telemetry_id: value.connection().telemetry_id(),
+ session_id: value.session_id.0.clone(),
+ }
+ }
+}
+
#[derive(Debug)]
pub enum AcpThreadEvent {
NewEntry,
@@ -1346,6 +1355,17 @@ impl AcpThread {
let path_style = self.project.read(cx).path_style(cx);
let id = update.id.clone();
+ let agent = self.connection().telemetry_id();
+ let session = self.session_id();
+ if let ToolCallStatus::Completed | ToolCallStatus::Failed = status {
+ let status = if matches!(status, ToolCallStatus::Completed) {
+ "completed"
+ } else {
+ "failed"
+ };
+ telemetry::event!("Agent Tool Call Completed", agent, session, status);
+ }
+
if let Some(ix) = self.index_for_tool_call(&id) {
let AgentThreadEntry::ToolCall(call) = &mut self.entries[ix] else {
unreachable!()
@@ -1869,6 +1889,7 @@ impl AcpThread {
return Task::ready(Err(anyhow!("not supported")));
};
+ let telemetry = ActionLogTelemetry::from(&*self);
cx.spawn(async move |this, cx| {
cx.update(|cx| truncate.run(id.clone(), cx))?.await?;
this.update(cx, |this, cx| {
@@ -1877,8 +1898,9 @@ impl AcpThread {
this.entries.truncate(ix);
cx.emit(AcpThreadEvent::EntriesRemoved(range));
}
- this.action_log()
- .update(cx, |action_log, cx| action_log.reject_all_edits(cx))
+ this.action_log().update(cx, |action_log, cx| {
+ action_log.reject_all_edits(Some(telemetry), cx)
+ })
})?
.await;
Ok(())
@@ -2355,8 +2377,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- Project::init_settings(cx);
- language::init(cx);
});
}
@@ -3614,6 +3634,10 @@ mod tests {
}
impl AgentConnection for FakeAgentConnection {
+ fn telemetry_id(&self) -> &'static str {
+ "fake"
+ }
+
fn auth_methods(&self) -> &[acp::AuthMethod] {
&self.auth_methods
}
@@ -20,6 +20,8 @@ impl UserMessageId {
}
pub trait AgentConnection {
+ fn telemetry_id(&self) -> &'static str;
+
fn new_thread(
self: Rc<Self>,
project: Entity<Project>,
@@ -106,9 +108,6 @@ pub trait AgentSessionSetTitle {
}
pub trait AgentTelemetry {
- /// The name of the agent used for telemetry.
- fn agent_name(&self) -> String;
-
/// A representation of the current thread state that can be serialized for
/// storage with telemetry events.
fn thread_data(
@@ -318,6 +317,10 @@ mod test_support {
}
impl AgentConnection for StubAgentConnection {
+ fn telemetry_id(&self) -> &'static str {
+ "stub"
+ }
+
fn auth_methods(&self) -> &[acp::AuthMethod] {
&[]
}
@@ -20,6 +20,7 @@ futures.workspace = true
gpui.workspace = true
language.workspace = true
project.workspace = true
+telemetry.workspace = true
text.workspace = true
util.workspace = true
watch.workspace = true
@@ -3,7 +3,9 @@ use buffer_diff::BufferDiff;
use clock;
use collections::BTreeMap;
use futures::{FutureExt, StreamExt, channel::mpsc};
-use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
+use gpui::{
+ App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
+};
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
use std::{cmp, ops::Range, sync::Arc};
@@ -31,71 +33,6 @@ impl ActionLog {
&self.project
}
- pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
- Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
- }
-
- /// Return a unified diff patch with user edits made since last read or notification
- pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
- let diffs = self
- .tracked_buffers
- .values()
- .filter_map(|tracked| {
- if !tracked.may_have_unnotified_user_edits {
- return None;
- }
-
- let text_with_latest_user_edits = tracked.diff_base.to_string();
- let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
- if text_with_latest_user_edits == text_with_last_seen_user_edits {
- return None;
- }
- let patch = language::unified_diff(
- &text_with_last_seen_user_edits,
- &text_with_latest_user_edits,
- );
-
- let buffer = tracked.buffer.clone();
- let file_path = buffer
- .read(cx)
- .file()
- .map(|file| {
- let mut path = file.full_path(cx).to_string_lossy().into_owned();
- if file.path_style(cx).is_windows() {
- path = path.replace('\\', "/");
- }
- path
- })
- .unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
-
- let mut result = String::new();
- result.push_str(&format!("--- a/{}\n", file_path));
- result.push_str(&format!("+++ b/{}\n", file_path));
- result.push_str(&patch);
-
- Some(result)
- })
- .collect::<Vec<_>>();
-
- if diffs.is_empty() {
- return None;
- }
-
- let unified_diff = diffs.join("\n\n");
- Some(unified_diff)
- }
-
- /// Return a unified diff patch with user edits made since last read/notification
- /// and mark them as notified
- pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
- let patch = self.unnotified_user_edits(cx);
- self.tracked_buffers.values_mut().for_each(|tracked| {
- tracked.may_have_unnotified_user_edits = false;
- tracked.last_seen_base = tracked.diff_base.clone();
- });
- patch
- }
-
fn track_buffer_internal(
&mut self,
buffer: Entity<Buffer>,
@@ -145,31 +82,26 @@ impl ActionLog {
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
let diff_base;
- let last_seen_base;
let unreviewed_edits;
if is_created {
diff_base = Rope::default();
- last_seen_base = Rope::default();
unreviewed_edits = Patch::new(vec![Edit {
old: 0..1,
new: 0..text_snapshot.max_point().row + 1,
}])
} else {
diff_base = buffer.read(cx).as_rope().clone();
- last_seen_base = diff_base.clone();
unreviewed_edits = Patch::default();
}
TrackedBuffer {
buffer: buffer.clone(),
diff_base,
- last_seen_base,
unreviewed_edits,
snapshot: text_snapshot,
status,
version: buffer.read(cx).version(),
diff,
diff_update: diff_update_tx,
- may_have_unnotified_user_edits: false,
_open_lsp_handle: open_lsp_handle,
_maintain_diff: cx.spawn({
let buffer = buffer.clone();
@@ -320,10 +252,9 @@ impl ActionLog {
let new_snapshot = buffer_snapshot.clone();
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
- let mut has_user_changes = false;
async move {
if let ChangeAuthor::User = author {
- has_user_changes = apply_non_conflicting_edits(
+ apply_non_conflicting_edits(
&unreviewed_edits,
edits,
&mut base_text,
@@ -331,22 +262,13 @@ impl ActionLog {
);
}
- (Arc::new(base_text.to_string()), base_text, has_user_changes)
+ (Arc::new(base_text.to_string()), base_text)
}
});
anyhow::Ok(rebase)
})??;
- let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
-
- this.update(cx, |this, _| {
- let tracked_buffer = this
- .tracked_buffers
- .get_mut(buffer)
- .context("buffer not tracked")
- .unwrap();
- tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
- })?;
+ let (new_base_text, new_diff_base) = rebase.await;
Self::update_diff(
this,
@@ -565,14 +487,17 @@ impl ActionLog {
&mut self,
buffer: Entity<Buffer>,
buffer_range: Range<impl language::ToPoint>,
+ telemetry: Option<ActionLogTelemetry>,
cx: &mut Context<Self>,
) {
let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
return;
};
+ let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
match tracked_buffer.status {
TrackedBufferStatus::Deleted => {
+ metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
self.tracked_buffers.remove(&buffer);
cx.notify();
}
@@ -581,7 +506,6 @@ impl ActionLog {
let buffer_range =
buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
let mut delta = 0i32;
-
tracked_buffer.unreviewed_edits.retain_mut(|edit| {
edit.old.start = (edit.old.start as i32 + delta) as u32;
edit.old.end = (edit.old.end as i32 + delta) as u32;
@@ -613,6 +537,7 @@ impl ActionLog {
.collect::<String>(),
);
delta += edit.new_len() as i32 - edit.old_len() as i32;
+ metrics.add_edit(edit);
false
}
});
@@ -624,19 +549,24 @@ impl ActionLog {
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
}
}
+ if let Some(telemetry) = telemetry {
+ telemetry_report_accepted_edits(&telemetry, metrics);
+ }
}
pub fn reject_edits_in_ranges(
&mut self,
buffer: Entity<Buffer>,
buffer_ranges: Vec<Range<impl language::ToPoint>>,
+ telemetry: Option<ActionLogTelemetry>,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
return Task::ready(Ok(()));
};
- match &tracked_buffer.status {
+ let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
+ let task = match &tracked_buffer.status {
TrackedBufferStatus::Created {
existing_file_content,
} => {
@@ -686,6 +616,7 @@ impl ActionLog {
}
};
+ metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
self.tracked_buffers.remove(&buffer);
cx.notify();
task
@@ -699,6 +630,7 @@ impl ActionLog {
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
// Clear all tracked edits for this buffer and start over as if we just read it.
+ metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
self.tracked_buffers.remove(&buffer);
self.buffer_read(buffer.clone(), cx);
cx.notify();
@@ -738,6 +670,7 @@ impl ActionLog {
}
if revert {
+ metrics.add_edit(edit);
let old_range = tracked_buffer
.diff_base
.point_to_offset(Point::new(edit.old.start, 0))
@@ -758,12 +691,25 @@ impl ActionLog {
self.project
.update(cx, |project, cx| project.save_buffer(buffer, cx))
}
+ };
+ if let Some(telemetry) = telemetry {
+ telemetry_report_rejected_edits(&telemetry, metrics);
}
+ task
}
- pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
- self.tracked_buffers
- .retain(|_buffer, tracked_buffer| match tracked_buffer.status {
+ pub fn keep_all_edits(
+ &mut self,
+ telemetry: Option<ActionLogTelemetry>,
+ cx: &mut Context<Self>,
+ ) {
+ self.tracked_buffers.retain(|buffer, tracked_buffer| {
+ let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
+ metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
+ if let Some(telemetry) = telemetry.as_ref() {
+ telemetry_report_accepted_edits(telemetry, metrics);
+ }
+ match tracked_buffer.status {
TrackedBufferStatus::Deleted => false,
_ => {
if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
@@ -774,13 +720,24 @@ impl ActionLog {
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
true
}
- });
+ }
+ });
+
cx.notify();
}
- pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
+ pub fn reject_all_edits(
+ &mut self,
+ telemetry: Option<ActionLogTelemetry>,
+ cx: &mut Context<Self>,
+ ) -> Task<()> {
let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
- let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
+ let reject = self.reject_edits_in_ranges(
+ buffer,
+ vec![Anchor::MIN..Anchor::MAX],
+ telemetry.clone(),
+ cx,
+ );
async move {
reject.await.log_err();
@@ -788,8 +745,7 @@ impl ActionLog {
});
let task = futures::future::join_all(futures);
-
- cx.spawn(async move |_, _| {
+ cx.background_spawn(async move {
task.await;
})
}
@@ -819,6 +775,61 @@ impl ActionLog {
}
}
+#[derive(Clone)]
+pub struct ActionLogTelemetry {
+ pub agent_telemetry_id: &'static str,
+ pub session_id: Arc<str>,
+}
+
+struct ActionLogMetrics {
+ lines_removed: u32,
+ lines_added: u32,
+ language: Option<SharedString>,
+}
+
+impl ActionLogMetrics {
+ fn for_buffer(buffer: &Buffer) -> Self {
+ Self {
+ language: buffer.language().map(|l| l.name().0),
+ lines_removed: 0,
+ lines_added: 0,
+ }
+ }
+
+ fn add_edits(&mut self, edits: &[Edit<u32>]) {
+ for edit in edits {
+ self.add_edit(edit);
+ }
+ }
+
+ fn add_edit(&mut self, edit: &Edit<u32>) {
+ self.lines_added += edit.new_len();
+ self.lines_removed += edit.old_len();
+ }
+}
+
+fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
+ telemetry::event!(
+ "Agent Edits Accepted",
+ agent = telemetry.agent_telemetry_id,
+ session = telemetry.session_id,
+ language = metrics.language,
+ lines_added = metrics.lines_added,
+ lines_removed = metrics.lines_removed
+ );
+}
+
+fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
+ telemetry::event!(
+ "Agent Edits Rejected",
+ agent = telemetry.agent_telemetry_id,
+ session = telemetry.session_id,
+ language = metrics.language,
+ lines_added = metrics.lines_added,
+ lines_removed = metrics.lines_removed
+ );
+}
+
fn apply_non_conflicting_edits(
patch: &Patch<u32>,
edits: Vec<Edit<u32>>,
@@ -949,14 +960,12 @@ enum TrackedBufferStatus {
struct TrackedBuffer {
buffer: Entity<Buffer>,
diff_base: Rope,
- last_seen_base: Rope,
unreviewed_edits: Patch<u32>,
status: TrackedBufferStatus,
version: clock::Global,
diff: Entity<BufferDiff>,
snapshot: text::BufferSnapshot,
diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
- may_have_unnotified_user_edits: bool,
_open_lsp_handle: OpenLspBufferHandle,
_maintain_diff: Task<()>,
_subscription: Subscription,
@@ -987,7 +996,6 @@ mod tests {
use super::*;
use buffer_diff::DiffHunkStatusKind;
use gpui::TestAppContext;
- use indoc::indoc;
use language::Point;
use project::{FakeFs, Fs, Project, RemoveOptions};
use rand::prelude::*;
@@ -1005,8 +1013,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
}
@@ -1066,7 +1072,7 @@ mod tests {
);
action_log.update(cx, |log, cx| {
- log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
+ log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
});
cx.run_until_parked();
assert_eq!(
@@ -1082,7 +1088,7 @@ mod tests {
);
action_log.update(cx, |log, cx| {
- log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
+ log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
});
cx.run_until_parked();
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
@@ -1167,7 +1173,7 @@ mod tests {
);
action_log.update(cx, |log, cx| {
- log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
+ log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
});
cx.run_until_parked();
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
@@ -1264,111 +1270,7 @@ mod tests {
);
action_log.update(cx, |log, cx| {
- log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
- });
- cx.run_until_parked();
- assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
- }
-
- #[gpui::test(iterations = 10)]
- async fn test_user_edits_notifications(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- path!("/dir"),
- json!({"file": indoc! {"
- abc
- def
- ghi
- jkl
- mno"}}),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let file_path = project
- .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
- .unwrap();
- let buffer = project
- .update(cx, |project, cx| project.open_buffer(file_path, cx))
- .await
- .unwrap();
-
- // Agent edits
- cx.update(|cx| {
- action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
- buffer.update(cx, |buffer, cx| {
- buffer
- .edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
- .unwrap()
- });
- action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
- });
- cx.run_until_parked();
- assert_eq!(
- buffer.read_with(cx, |buffer, _| buffer.text()),
- indoc! {"
- abc
- deF
- GHI
- jkl
- mno"}
- );
- assert_eq!(
- unreviewed_hunks(&action_log, cx),
- vec![(
- buffer.clone(),
- vec![HunkStatus {
- range: Point::new(1, 0)..Point::new(3, 0),
- diff_status: DiffHunkStatusKind::Modified,
- old_text: "def\nghi\n".into(),
- }],
- )]
- );
-
- // User edits
- buffer.update(cx, |buffer, cx| {
- buffer.edit(
- [
- (Point::new(0, 2)..Point::new(0, 2), "X"),
- (Point::new(3, 0)..Point::new(3, 0), "Y"),
- ],
- None,
- cx,
- )
- });
- cx.run_until_parked();
- assert_eq!(
- buffer.read_with(cx, |buffer, _| buffer.text()),
- indoc! {"
- abXc
- deF
- GHI
- Yjkl
- mno"}
- );
-
- // User edits should be stored separately from agent's
- let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
- assert_eq!(
- user_edits.expect("should have some user edits"),
- indoc! {"
- --- a/dir/file
- +++ b/dir/file
- @@ -1,5 +1,5 @@
- -abc
- +abXc
- def
- ghi
- -jkl
- +Yjkl
- mno
- "}
- );
-
- action_log.update(cx, |log, cx| {
- log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
+ log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
});
cx.run_until_parked();
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
@@ -1427,7 +1329,7 @@ mod tests {
);
action_log.update(cx, |log, cx| {
- log.keep_edits_in_range(buffer.clone(), 0..5, cx)
+ log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
});
cx.run_until_parked();
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
@@ -1479,7 +1381,7 @@ mod tests {
action_log
.update(cx, |log, cx| {
- log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
+ log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
})
.await
.unwrap();
@@ -1559,7 +1461,7 @@ mod tests {
action_log
.update(cx, |log, cx| {
- log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
+ log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
})
.await
.unwrap();
@@ -1742,6 +1644,7 @@ mod tests {
log.reject_edits_in_ranges(
buffer.clone(),
vec![Point::new(4, 0)..Point::new(4, 0)],
+ None,
cx,
)
})
@@ -1776,6 +1679,7 @@ mod tests {
log.reject_edits_in_ranges(
buffer.clone(),
vec![Point::new(0, 0)..Point::new(1, 0)],
+ None,
cx,
)
})
@@ -1803,6 +1707,7 @@ mod tests {
log.reject_edits_in_ranges(
buffer.clone(),
vec![Point::new(4, 0)..Point::new(4, 0)],
+ None,
cx,
)
})
@@ -1877,7 +1782,7 @@ mod tests {
let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
..buffer.read(cx).anchor_before(Point::new(5, 3));
- log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
+ log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
.detach();
assert_eq!(
buffer.read_with(cx, |buffer, _| buffer.text()),
@@ -1938,6 +1843,7 @@ mod tests {
log.reject_edits_in_ranges(
buffer.clone(),
vec![Point::new(0, 0)..Point::new(0, 0)],
+ None,
cx,
)
})
@@ -1993,6 +1899,7 @@ mod tests {
log.reject_edits_in_ranges(
buffer.clone(),
vec![Point::new(0, 0)..Point::new(0, 11)],
+ None,
cx,
)
})
@@ -2055,6 +1962,7 @@ mod tests {
log.reject_edits_in_ranges(
buffer.clone(),
vec![Point::new(0, 0)..Point::new(100, 0)],
+ None,
cx,
)
})
@@ -2102,7 +2010,7 @@ mod tests {
// User accepts the single hunk
action_log.update(cx, |log, cx| {
- log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, cx)
+ log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, None, cx)
});
cx.run_until_parked();
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
@@ -2123,7 +2031,7 @@ mod tests {
// User rejects the hunk
action_log
.update(cx, |log, cx| {
- log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], cx)
+ log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], None, cx)
})
.await
.unwrap();
@@ -2167,7 +2075,7 @@ mod tests {
cx.run_until_parked();
// User clicks "Accept All"
- action_log.update(cx, |log, cx| log.keep_all_edits(cx));
+ action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
cx.run_until_parked();
assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
@@ -2186,7 +2094,7 @@ mod tests {
// User clicks "Reject All"
action_log
- .update(cx, |log, cx| log.reject_all_edits(cx))
+ .update(cx, |log, cx| log.reject_all_edits(None, cx))
.await;
cx.run_until_parked();
assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
@@ -2226,7 +2134,7 @@ mod tests {
action_log.update(cx, |log, cx| {
let range = buffer.read(cx).random_byte_range(0, &mut rng);
log::info!("keeping edits in range {:?}", range);
- log.keep_edits_in_range(buffer.clone(), range, cx)
+ log.keep_edits_in_range(buffer.clone(), range, None, cx)
});
}
25..50 => {
@@ -2234,7 +2142,7 @@ mod tests {
.update(cx, |log, cx| {
let range = buffer.read(cx).random_byte_range(0, &mut rng);
log::info!("rejecting edits in range {:?}", range);
- log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
+ log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
})
.await
.unwrap();
@@ -2488,61 +2396,4 @@ mod tests {
.collect()
})
}
-
- #[gpui::test]
- async fn test_format_patch(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- path!("/dir"),
- json!({"test.txt": "line 1\nline 2\nline 3\n"}),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
-
- let file_path = project
- .read_with(cx, |project, cx| {
- project.find_project_path("dir/test.txt", cx)
- })
- .unwrap();
- let buffer = project
- .update(cx, |project, cx| project.open_buffer(file_path, cx))
- .await
- .unwrap();
-
- cx.update(|cx| {
- // Track the buffer and mark it as read first
- action_log.update(cx, |log, cx| {
- log.buffer_read(buffer.clone(), cx);
- });
-
- // Make some edits to create a patch
- buffer.update(cx, |buffer, cx| {
- buffer
- .edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
- .unwrap(); // Replace "line2" with "CHANGED"
- });
- });
-
- cx.run_until_parked();
-
- // Get the patch
- let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
-
- // Verify the patch format contains expected unified diff elements
- assert_eq!(
- patch.unwrap(),
- indoc! {"
- --- a/dir/test.txt
- +++ b/dir/test.txt
- @@ -1,3 +1,3 @@
- line 1
- -line 2
- +CHANGED
- line 3
- "}
- );
- }
}
@@ -63,7 +63,6 @@ streaming_diff.workspace = true
strsim.workspace = true
task.workspace = true
telemetry.workspace = true
-terminal.workspace = true
text.workspace = true
thiserror.workspace = true
ui.workspace = true
@@ -218,7 +218,7 @@ impl LanguageModels {
}
_ => {
log::error!(
- "Failed to authenticate provider: {}: {err}",
+ "Failed to authenticate provider: {}: {err:#}",
provider_name.0
);
}
@@ -967,6 +967,10 @@ impl acp_thread::AgentModelSelector for NativeAgentModelSelector {
}
impl acp_thread::AgentConnection for NativeAgentConnection {
+ fn telemetry_id(&self) -> &'static str {
+ "zed"
+ }
+
fn new_thread(
self: Rc<Self>,
project: Entity<Project>,
@@ -1107,10 +1111,6 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
}
impl acp_thread::AgentTelemetry for NativeAgentConnection {
- fn agent_name(&self) -> String {
- "Zed".into()
- }
-
fn thread_data(
&self,
session_id: &acp::SessionId,
@@ -1627,9 +1627,7 @@ mod internal_tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- Project::init_settings(cx);
- agent_settings::init(cx);
- language::init(cx);
+
LanguageModelRegistry::test(cx);
});
}
@@ -1394,7 +1394,7 @@ mod tests {
async fn init_test(cx: &mut TestAppContext) -> EditAgent {
cx.update(settings::init);
- cx.update(Project::init_settings);
+
let project = Project::test(FakeFs::new(cx.executor()), [], cx).await;
let model = Arc::new(FakeLanguageModel::default());
let action_log = cx.new(|_| ActionLog::new(project.clone()));
@@ -1468,14 +1468,9 @@ impl EditAgentTest {
gpui_tokio::init(cx);
let http_client = Arc::new(ReqwestClient::user_agent("agent tests").unwrap());
cx.set_http_client(http_client);
-
- client::init_settings(cx);
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
-
settings::init(cx);
- Project::init_settings(cx);
- language::init(cx);
language_model::init(client.clone(), cx);
language_models::init(user_store, client.clone(), cx);
});
@@ -88,8 +88,6 @@ mod tests {
async |fs, project, cx| {
let auth = cx.update(|cx| {
prompt_store::init(cx);
- terminal::init(cx);
-
let registry = language_model::LanguageModelRegistry::read_global(cx);
let auth = registry
.provider(&language_model::ANTHROPIC_PROVIDER_ID)
@@ -1851,7 +1851,6 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
// Initialize language model system with test provider
cx.update(|cx| {
gpui_tokio::init(cx);
- client::init_settings(cx);
let http_client = FakeHttpClient::with_404_response();
let clock = Arc::new(clock::FakeSystemClock::new());
@@ -1859,9 +1858,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
language_model::init(client.clone(), cx);
language_models::init(user_store, client.clone(), cx);
- Project::init_settings(cx);
LanguageModelRegistry::test(cx);
- agent_settings::init(cx);
});
cx.executor().forbid_parking();
@@ -2395,8 +2392,6 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
cx.update(|cx| {
settings::init(cx);
- Project::init_settings(cx);
- agent_settings::init(cx);
match model {
TestModel::Fake => {}
@@ -2404,7 +2399,6 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
gpui_tokio::init(cx);
let http_client = ReqwestClient::user_agent("agent tests").unwrap();
cx.set_http_client(Arc::new(http_client));
- client::init_settings(cx);
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
language_model::init(client.clone(), cx);
@@ -562,7 +562,6 @@ fn resolve_path(
mod tests {
use super::*;
use crate::{ContextServerRegistry, Templates};
- use client::TelemetrySettings;
use fs::Fs;
use gpui::{TestAppContext, UpdateGlobal};
use language_model::fake_provider::FakeLanguageModel;
@@ -1753,10 +1752,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- TelemetrySettings::register(cx);
- agent_settings::AgentSettings::register(cx);
- Project::init_settings(cx);
});
}
}
@@ -246,8 +246,6 @@ mod test {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
}
}
@@ -778,8 +778,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
}
@@ -223,8 +223,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
}
@@ -163,8 +163,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
}
}
@@ -509,8 +509,6 @@ mod test {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
}
@@ -21,7 +21,6 @@ acp_tools.workspace = true
acp_thread.workspace = true
action_log.workspace = true
agent-client-protocol.workspace = true
-agent_settings.workspace = true
anyhow.workspace = true
async-trait.workspace = true
client.workspace = true
@@ -33,7 +32,6 @@ gpui.workspace = true
gpui_tokio = { workspace = true, optional = true }
http_client.workspace = true
indoc.workspace = true
-language.workspace = true
language_model.workspace = true
language_models.workspace = true
log.workspace = true
@@ -29,6 +29,7 @@ pub struct UnsupportedVersion;
pub struct AcpConnection {
server_name: SharedString,
+ telemetry_id: &'static str,
connection: Rc<acp::ClientSideConnection>,
sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
auth_methods: Vec<acp::AuthMethod>,
@@ -52,6 +53,7 @@ pub struct AcpSession {
pub async fn connect(
server_name: SharedString,
+ telemetry_id: &'static str,
command: AgentServerCommand,
root_dir: &Path,
default_mode: Option<acp::SessionModeId>,
@@ -60,6 +62,7 @@ pub async fn connect(
) -> Result<Rc<dyn AgentConnection>> {
let conn = AcpConnection::stdio(
server_name,
+ telemetry_id,
command.clone(),
root_dir,
default_mode,
@@ -75,6 +78,7 @@ const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1;
impl AcpConnection {
pub async fn stdio(
server_name: SharedString,
+ telemetry_id: &'static str,
command: AgentServerCommand,
root_dir: &Path,
default_mode: Option<acp::SessionModeId>,
@@ -199,6 +203,7 @@ impl AcpConnection {
root_dir: root_dir.to_owned(),
connection,
server_name,
+ telemetry_id,
sessions,
agent_capabilities: response.agent_capabilities,
default_mode,
@@ -226,6 +231,10 @@ impl Drop for AcpConnection {
}
impl AgentConnection for AcpConnection {
+ fn telemetry_id(&self) -> &'static str {
+ self.telemetry_id
+ }
+
fn new_thread(
self: Rc<Self>,
project: Entity<Project>,
@@ -62,6 +62,7 @@ impl AgentServer for ClaudeCode {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
+ let telemetry_id = self.telemetry_id();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let store = delegate.store.downgrade();
@@ -85,6 +86,7 @@ impl AgentServer for ClaudeCode {
.await?;
let connection = crate::acp::connect(
name,
+ telemetry_id,
command,
root_dir.as_ref(),
default_mode,
@@ -63,6 +63,7 @@ impl AgentServer for Codex {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
+ let telemetry_id = self.telemetry_id();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let store = delegate.store.downgrade();
@@ -87,6 +88,7 @@ impl AgentServer for Codex {
let connection = crate::acp::connect(
name,
+ telemetry_id,
command,
root_dir.as_ref(),
default_mode,
@@ -67,6 +67,7 @@ impl crate::AgentServer for CustomAgentServer {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
+ let telemetry_id = self.telemetry_id();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let default_mode = self.default_mode(cx);
@@ -92,6 +93,7 @@ impl crate::AgentServer for CustomAgentServer {
.await?;
let connection = crate::acp::connect(
name,
+ telemetry_id,
command,
root_dir.as_ref(),
default_mode,
@@ -6,7 +6,9 @@ use gpui::{AppContext, Entity, TestAppContext};
use indoc::indoc;
#[cfg(test)]
use project::agent_server_store::BuiltinAgentServerSettings;
-use project::{FakeFs, Project, agent_server_store::AllAgentServersSettings};
+use project::{FakeFs, Project};
+#[cfg(test)]
+use settings::Settings;
use std::{
path::{Path, PathBuf},
sync::Arc,
@@ -452,29 +454,22 @@ pub use common_e2e_tests;
// Helpers
pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
- use settings::Settings;
-
env_logger::try_init().ok();
cx.update(|cx| {
let settings_store = settings::SettingsStore::test(cx);
cx.set_global(settings_store);
- Project::init_settings(cx);
- language::init(cx);
gpui_tokio::init(cx);
let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap();
cx.set_http_client(Arc::new(http_client));
- client::init_settings(cx);
let client = client::Client::production(cx);
let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
language_model::init(client.clone(), cx);
language_models::init(user_store, client, cx);
- agent_settings::init(cx);
- AllAgentServersSettings::register(cx);
#[cfg(test)]
- AllAgentServersSettings::override_global(
- AllAgentServersSettings {
+ project::agent_server_store::AllAgentServersSettings::override_global(
+ project::agent_server_store::AllAgentServersSettings {
claude: Some(BuiltinAgentServerSettings {
path: Some("claude-code-acp".into()),
args: None,
@@ -31,6 +31,7 @@ impl AgentServer for Gemini {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
+ let telemetry_id = self.telemetry_id();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let store = delegate.store.downgrade();
@@ -64,6 +65,7 @@ impl AgentServer for Gemini {
let connection = crate::acp::connect(
name,
+ telemetry_id,
command,
root_dir.as_ref(),
default_mode,
@@ -10,7 +10,7 @@ use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{
DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection,
- NotifyWhenAgentWaiting, Settings,
+ NotifyWhenAgentWaiting, RegisterSetting, Settings,
};
pub use crate::agent_profile::*;
@@ -19,11 +19,7 @@ pub const SUMMARIZE_THREAD_PROMPT: &str = include_str!("prompts/summarize_thread
pub const SUMMARIZE_THREAD_DETAILED_PROMPT: &str =
include_str!("prompts/summarize_thread_detailed_prompt.txt");
-pub fn init(cx: &mut App) {
- AgentSettings::register(cx);
-}
-
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, RegisterSetting)]
pub struct AgentSettings {
pub enabled: bool,
pub button: bool,
@@ -401,10 +401,9 @@ mod tests {
use acp_thread::{AgentConnection, StubAgentConnection};
use agent::HistoryStore;
use agent_client_protocol as acp;
- use agent_settings::AgentSettings;
use assistant_text_thread::TextThreadStore;
use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
- use editor::{EditorSettings, RowInfo};
+ use editor::RowInfo;
use fs::FakeFs;
use gpui::{AppContext as _, SemanticVersion, TestAppContext};
@@ -413,7 +412,7 @@ mod tests {
use pretty_assertions::assert_matches;
use project::Project;
use serde_json::json;
- use settings::{Settings as _, SettingsStore};
+ use settings::SettingsStore;
use util::path;
use workspace::Workspace;
@@ -539,13 +538,8 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
- AgentSettings::register(cx);
- workspace::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
release_channel::init(SemanticVersion::default(), cx);
- EditorSettings::register(cx);
});
}
}
@@ -1901,10 +1901,8 @@ mod tests {
let app_state = cx.update(AppState::test);
cx.update(|cx| {
- language::init(cx);
editor::init(cx);
workspace::init(app_state.clone(), cx);
- Project::init_settings(cx);
});
let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await;
@@ -2077,10 +2075,8 @@ mod tests {
let app_state = cx.update(AppState::test);
cx.update(|cx| {
- language::init(cx);
editor::init(cx);
workspace::init(app_state.clone(), cx);
- Project::init_settings(cx);
});
app_state
@@ -2907,10 +2903,8 @@ mod tests {
let app_state = cx.update(AppState::test);
cx.update(|cx| {
- language::init(cx);
editor::init(cx);
workspace::init(app_state.clone(), cx);
- Project::init_settings(cx);
});
app_state
@@ -1,6 +1,6 @@
use std::rc::Rc;
-use acp_thread::AgentModelSelector;
+use acp_thread::{AgentModelInfo, AgentModelSelector};
use gpui::{Entity, FocusHandle};
use picker::popover_menu::PickerPopoverMenu;
use ui::{
@@ -36,12 +36,8 @@ impl AcpModelSelectorPopover {
self.menu_handle.toggle(window, cx);
}
- pub fn active_model_name(&self, cx: &App) -> Option<SharedString> {
- self.selector
- .read(cx)
- .delegate
- .active_model()
- .map(|model| model.name.clone())
+ pub fn active_model<'a>(&self, cx: &'a App) -> Option<&'a AgentModelInfo> {
+ self.selector.read(cx).delegate.active_model()
}
}
@@ -4,12 +4,12 @@ use acp_thread::{
ToolCallStatus, UserMessageId,
};
use acp_thread::{AgentConnection, Plan};
-use action_log::ActionLog;
+use action_log::{ActionLog, ActionLogTelemetry};
use agent::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer};
use agent_client_protocol::{self as acp, PromptCapabilities};
use agent_servers::{AgentServer, AgentServerDelegate};
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
-use anyhow::{Result, anyhow, bail};
+use anyhow::{Result, anyhow};
use arrayvec::ArrayVec;
use audio::{Audio, Sound};
use buffer_diff::BufferDiff;
@@ -169,7 +169,7 @@ impl ThreadFeedbackState {
}
}
let session_id = thread.read(cx).session_id().clone();
- let agent_name = telemetry.agent_name();
+ let agent = thread.read(cx).connection().telemetry_id();
let task = telemetry.thread_data(&session_id, cx);
let rating = match feedback {
ThreadFeedback::Positive => "positive",
@@ -179,9 +179,9 @@ impl ThreadFeedbackState {
let thread = task.await?;
telemetry::event!(
"Agent Thread Rated",
+ agent = agent,
session_id = session_id,
rating = rating,
- agent = agent_name,
thread = thread
);
anyhow::Ok(())
@@ -206,15 +206,15 @@ impl ThreadFeedbackState {
self.comments_editor.take();
let session_id = thread.read(cx).session_id().clone();
- let agent_name = telemetry.agent_name();
+ let agent = thread.read(cx).connection().telemetry_id();
let task = telemetry.thread_data(&session_id, cx);
cx.background_spawn(async move {
let thread = task.await?;
telemetry::event!(
"Agent Thread Feedback Comments",
+ agent = agent,
session_id = session_id,
comments = comments,
- agent = agent_name,
thread = thread
);
anyhow::Ok(())
@@ -538,14 +538,7 @@ impl AcpThreadView {
})
.log_err()
} else {
- let root_dir = if let Some(acp_agent) = connection
- .clone()
- .downcast::<agent_servers::AcpConnection>()
- {
- acp_agent.root_dir().into()
- } else {
- root_dir.unwrap_or(paths::home_dir().as_path().into())
- };
+ let root_dir = root_dir.unwrap_or(paths::home_dir().as_path().into());
cx.update(|_, cx| {
connection
.clone()
@@ -1130,8 +1123,6 @@ impl AcpThreadView {
message_editor.contents(full_mention_content, cx)
});
- let agent_telemetry_id = self.agent.telemetry_id();
-
self.thread_error.take();
self.editing_message.take();
self.thread_feedback.clear();
@@ -1139,6 +1130,8 @@ impl AcpThreadView {
let Some(thread) = self.thread() else {
return;
};
+ let agent_telemetry_id = self.agent.telemetry_id();
+ let session_id = thread.read(cx).session_id().clone();
let thread = thread.downgrade();
if self.should_be_following {
self.workspace
@@ -1149,6 +1142,7 @@ impl AcpThreadView {
}
self.is_loading_contents = true;
+ let model_id = self.current_model_id(cx);
let guard = cx.new(|_| ());
cx.observe_release(&guard, |this, _guard, cx| {
this.is_loading_contents = false;
@@ -1170,6 +1164,7 @@ impl AcpThreadView {
message_editor.clear(window, cx);
});
})?;
+ let turn_start_time = Instant::now();
let send = thread.update(cx, |thread, cx| {
thread.action_log().update(cx, |action_log, cx| {
for buffer in tracked_buffers {
@@ -1178,11 +1173,27 @@ impl AcpThreadView {
});
drop(guard);
- telemetry::event!("Agent Message Sent", agent = agent_telemetry_id);
+ telemetry::event!(
+ "Agent Message Sent",
+ agent = agent_telemetry_id,
+ session = session_id,
+ model = model_id
+ );
thread.send(contents, cx)
})?;
- send.await
+ let res = send.await;
+ let turn_time_ms = turn_start_time.elapsed().as_millis();
+ let status = if res.is_ok() { "success" } else { "failure" };
+ telemetry::event!(
+ "Agent Turn Completed",
+ agent = agent_telemetry_id,
+ session = session_id,
+ model = model_id,
+ status,
+ turn_time_ms,
+ );
+ res
});
cx.spawn(async move |this, cx| {
@@ -1384,7 +1395,7 @@ impl AcpThreadView {
AcpThreadEvent::Refusal => {
self.thread_retry_status.take();
self.thread_error = Some(ThreadError::Refusal);
- let model_or_agent_name = self.get_current_model_name(cx);
+ let model_or_agent_name = self.current_model_name(cx);
let notification_message =
format!("{} refused to respond to this request", model_or_agent_name);
self.notify_with_sound(¬ification_message, IconName::Warning, window, cx);
@@ -1853,6 +1864,14 @@ impl AcpThreadView {
let Some(thread) = self.thread() else {
return;
};
+
+ telemetry::event!(
+ "Agent Tool Call Authorized",
+ agent = self.agent.telemetry_id(),
+ session = thread.read(cx).session_id(),
+ option = option_kind
+ );
+
thread.update(cx, |thread, cx| {
thread.authorize_tool_call(tool_call_id, option_id, option_kind, cx);
});
@@ -3585,6 +3604,7 @@ impl AcpThreadView {
) -> Option<AnyElement> {
let thread = thread_entity.read(cx);
let action_log = thread.action_log();
+ let telemetry = ActionLogTelemetry::from(thread);
let changed_buffers = action_log.read(cx).changed_buffers(cx);
let plan = thread.plan();
@@ -3632,6 +3652,7 @@ impl AcpThreadView {
.when(self.edits_expanded, |parent| {
parent.child(self.render_edited_files(
action_log,
+ telemetry,
&changed_buffers,
pending_edits,
cx,
@@ -3912,6 +3933,7 @@ impl AcpThreadView {
fn render_edited_files(
&self,
action_log: &Entity<ActionLog>,
+ telemetry: ActionLogTelemetry,
changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
pending_edits: bool,
cx: &Context<Self>,
@@ -4031,12 +4053,14 @@ impl AcpThreadView {
.on_click({
let buffer = buffer.clone();
let action_log = action_log.clone();
+ let telemetry = telemetry.clone();
move |_, _, cx| {
action_log.update(cx, |action_log, cx| {
action_log
.reject_edits_in_ranges(
buffer.clone(),
vec![Anchor::MIN..Anchor::MAX],
+ Some(telemetry.clone()),
cx,
)
.detach_and_log_err(cx);
@@ -4051,11 +4075,13 @@ impl AcpThreadView {
.on_click({
let buffer = buffer.clone();
let action_log = action_log.clone();
+ let telemetry = telemetry.clone();
move |_, _, cx| {
action_log.update(cx, |action_log, cx| {
action_log.keep_edits_in_range(
buffer.clone(),
Anchor::MIN..Anchor::MAX,
+ Some(telemetry.clone()),
cx,
);
})
@@ -4271,17 +4297,23 @@ impl AcpThreadView {
let Some(thread) = self.thread() else {
return;
};
+ let telemetry = ActionLogTelemetry::from(thread.read(cx));
let action_log = thread.read(cx).action_log().clone();
- action_log.update(cx, |action_log, cx| action_log.keep_all_edits(cx));
+ action_log.update(cx, |action_log, cx| {
+ action_log.keep_all_edits(Some(telemetry), cx)
+ });
}
fn reject_all(&mut self, _: &RejectAll, _window: &mut Window, cx: &mut Context<Self>) {
let Some(thread) = self.thread() else {
return;
};
+ let telemetry = ActionLogTelemetry::from(thread.read(cx));
let action_log = thread.read(cx).action_log().clone();
action_log
- .update(cx, |action_log, cx| action_log.reject_all_edits(cx))
+ .update(cx, |action_log, cx| {
+ action_log.reject_all_edits(Some(telemetry), cx)
+ })
.detach();
}
@@ -4677,35 +4709,36 @@ impl AcpThreadView {
.languages
.language_for_name("Markdown");
- let (thread_summary, markdown) = if let Some(thread) = self.thread() {
+ let (thread_title, markdown) = if let Some(thread) = self.thread() {
let thread = thread.read(cx);
(thread.title().to_string(), thread.to_markdown(cx))
} else {
return Task::ready(Ok(()));
};
+ let project = workspace.read(cx).project().clone();
window.spawn(cx, async move |cx| {
let markdown_language = markdown_language_task.await?;
- workspace.update_in(cx, |workspace, window, cx| {
- let project = workspace.project().clone();
+ let buffer = project
+ .update(cx, |project, cx| project.create_buffer(false, cx))?
+ .await?;
- if !project.read(cx).is_local() {
- bail!("failed to open active thread as markdown in remote project");
- }
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_text(markdown, cx);
+ buffer.set_language(Some(markdown_language), cx);
+ buffer.set_capability(language::Capability::ReadOnly, cx);
+ })?;
- let buffer = project.update(cx, |project, cx| {
- project.create_local_buffer(&markdown, Some(markdown_language), true, cx)
- });
- let buffer = cx.new(|cx| {
- MultiBuffer::singleton(buffer, cx).with_title(thread_summary.clone())
- });
+ workspace.update_in(cx, |workspace, window, cx| {
+ let buffer = cx
+ .new(|cx| MultiBuffer::singleton(buffer, cx).with_title(thread_title.clone()));
workspace.add_item_to_active_pane(
Box::new(cx.new(|cx| {
let mut editor =
Editor::for_multibuffer(buffer, Some(project.clone()), window, cx);
- editor.set_breadcrumb_header(thread_summary);
+ editor.set_breadcrumb_header(thread_title);
editor
})),
None,
@@ -4713,9 +4746,7 @@ impl AcpThreadView {
window,
cx,
);
-
- anyhow::Ok(())
- })??;
+ })?;
anyhow::Ok(())
})
}
@@ -5341,20 +5372,21 @@ impl AcpThreadView {
)
}
- fn get_current_model_name(&self, cx: &App) -> SharedString {
+ fn current_model_id(&self, cx: &App) -> Option<String> {
+ self.model_selector
+ .as_ref()
+ .and_then(|selector| selector.read(cx).active_model(cx).map(|m| m.id.to_string()))
+ }
+
+ fn current_model_name(&self, cx: &App) -> SharedString {
// For native agent (Zed Agent), use the specific model name (e.g., "Claude 3.5 Sonnet")
// For ACP agents, use the agent name (e.g., "Claude Code", "Gemini CLI")
// This provides better clarity about what refused the request
- if self
- .agent
- .clone()
- .downcast::<agent::NativeAgentServer>()
- .is_some()
- {
- // Native agent - use the model name
+ if self.as_native_connection(cx).is_some() {
self.model_selector
.as_ref()
- .and_then(|selector| selector.read(cx).active_model_name(cx))
+ .and_then(|selector| selector.read(cx).active_model(cx))
+ .map(|model| model.name.clone())
.unwrap_or_else(|| SharedString::from("The model"))
} else {
// ACP agent - use the agent name (e.g., "Claude Code", "Gemini CLI")
@@ -5363,7 +5395,7 @@ impl AcpThreadView {
}
fn render_refusal_error(&self, cx: &mut Context<'_, Self>) -> Callout {
- let model_or_agent_name = self.get_current_model_name(cx);
+ let model_or_agent_name = self.current_model_name(cx);
let refusal_message = format!(
"{} refused to respond to this prompt. This can happen when a model believes the prompt violates its content policy or safety guidelines, so rephrasing it can sometimes address the issue.",
model_or_agent_name
@@ -5961,7 +5993,6 @@ pub(crate) mod tests {
use acp_thread::StubAgentConnection;
use agent_client_protocol::SessionId;
use assistant_text_thread::TextThreadStore;
- use editor::EditorSettings;
use fs::FakeFs;
use gpui::{EventEmitter, SemanticVersion, TestAppContext, VisualTestContext};
use project::Project;
@@ -6349,6 +6380,10 @@ pub(crate) mod tests {
struct SaboteurAgentConnection;
impl AgentConnection for SaboteurAgentConnection {
+ fn telemetry_id(&self) -> &'static str {
+ "saboteur"
+ }
+
fn new_thread(
self: Rc<Self>,
project: Entity<Project>,
@@ -6409,6 +6444,10 @@ pub(crate) mod tests {
struct RefusalAgentConnection;
impl AgentConnection for RefusalAgentConnection {
+ fn telemetry_id(&self) -> &'static str {
+ "refusal"
+ }
+
fn new_thread(
self: Rc<Self>,
project: Entity<Project>,
@@ -6471,13 +6510,8 @@ pub(crate) mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
- AgentSettings::register(cx);
- workspace::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
release_channel::init(SemanticVersion::default(), cx);
- EditorSettings::register(cx);
prompt_store::init(cx)
});
}
@@ -515,16 +515,14 @@ impl Render for AddLlmProviderModal {
#[cfg(test)]
mod tests {
use super::*;
- use editor::EditorSettings;
use fs::FakeFs;
use gpui::{TestAppContext, VisualTestContext};
- use language::language_settings;
use language_model::{
LanguageModelProviderId, LanguageModelProviderName,
fake_provider::FakeLanguageModelProvider,
};
use project::Project;
- use settings::{Settings as _, SettingsStore};
+ use settings::SettingsStore;
use util::path;
#[gpui::test]
@@ -730,13 +728,9 @@ mod tests {
cx.update(|cx| {
let store = SettingsStore::test(cx);
cx.set_global(store);
- workspace::init_settings(cx);
- Project::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- language_settings::init(cx);
- EditorSettings::register(cx);
+
language_model::init_settings(cx);
- language_models::init_settings(cx);
});
let fs = FakeFs::new(cx.executor());
@@ -1,6 +1,6 @@
use crate::{Keep, KeepAll, OpenAgentDiff, Reject, RejectAll};
use acp_thread::{AcpThread, AcpThreadEvent};
-use action_log::ActionLog;
+use action_log::ActionLogTelemetry;
use agent_settings::AgentSettings;
use anyhow::Result;
use buffer_diff::DiffHunkStatus;
@@ -40,79 +40,16 @@ use zed_actions::assistant::ToggleFocus;
pub struct AgentDiffPane {
multibuffer: Entity<MultiBuffer>,
editor: Entity<Editor>,
- thread: AgentDiffThread,
+ thread: Entity<AcpThread>,
focus_handle: FocusHandle,
workspace: WeakEntity<Workspace>,
title: SharedString,
_subscriptions: Vec<Subscription>,
}
-#[derive(PartialEq, Eq, Clone)]
-pub enum AgentDiffThread {
- AcpThread(Entity<AcpThread>),
-}
-
-impl AgentDiffThread {
- fn project(&self, cx: &App) -> Entity<Project> {
- match self {
- AgentDiffThread::AcpThread(thread) => thread.read(cx).project().clone(),
- }
- }
- fn action_log(&self, cx: &App) -> Entity<ActionLog> {
- match self {
- AgentDiffThread::AcpThread(thread) => thread.read(cx).action_log().clone(),
- }
- }
-
- fn title(&self, cx: &App) -> SharedString {
- match self {
- AgentDiffThread::AcpThread(thread) => thread.read(cx).title(),
- }
- }
-
- fn has_pending_edit_tool_uses(&self, cx: &App) -> bool {
- match self {
- AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(),
- }
- }
-
- fn downgrade(&self) -> WeakAgentDiffThread {
- match self {
- AgentDiffThread::AcpThread(thread) => {
- WeakAgentDiffThread::AcpThread(thread.downgrade())
- }
- }
- }
-}
-
-impl From<Entity<AcpThread>> for AgentDiffThread {
- fn from(entity: Entity<AcpThread>) -> Self {
- AgentDiffThread::AcpThread(entity)
- }
-}
-
-#[derive(PartialEq, Eq, Clone)]
-pub enum WeakAgentDiffThread {
- AcpThread(WeakEntity<AcpThread>),
-}
-
-impl WeakAgentDiffThread {
- pub fn upgrade(&self) -> Option<AgentDiffThread> {
- match self {
- WeakAgentDiffThread::AcpThread(weak) => weak.upgrade().map(AgentDiffThread::AcpThread),
- }
- }
-}
-
-impl From<WeakEntity<AcpThread>> for WeakAgentDiffThread {
- fn from(entity: WeakEntity<AcpThread>) -> Self {
- WeakAgentDiffThread::AcpThread(entity)
- }
-}
-
impl AgentDiffPane {
pub fn deploy(
- thread: impl Into<AgentDiffThread>,
+ thread: Entity<AcpThread>,
workspace: WeakEntity<Workspace>,
window: &mut Window,
cx: &mut App,
@@ -123,12 +60,11 @@ impl AgentDiffPane {
}
pub fn deploy_in_workspace(
- thread: impl Into<AgentDiffThread>,
+ thread: Entity<AcpThread>,
workspace: &mut Workspace,
window: &mut Window,
cx: &mut Context<Workspace>,
) -> Entity<Self> {
- let thread = thread.into();
let existing_diff = workspace
.items_of_type::<AgentDiffPane>(cx)
.find(|diff| diff.read(cx).thread == thread);
@@ -145,7 +81,7 @@ impl AgentDiffPane {
}
pub fn new(
- thread: AgentDiffThread,
+ thread: Entity<AcpThread>,
workspace: WeakEntity<Workspace>,
window: &mut Window,
cx: &mut Context<Self>,
@@ -153,7 +89,7 @@ impl AgentDiffPane {
let focus_handle = cx.focus_handle();
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
- let project = thread.project(cx);
+ let project = thread.read(cx).project().clone();
let editor = cx.new(|cx| {
let mut editor =
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
@@ -164,19 +100,16 @@ impl AgentDiffPane {
editor
});
- let action_log = thread.action_log(cx);
+ let action_log = thread.read(cx).action_log().clone();
let mut this = Self {
_subscriptions: vec![
cx.observe_in(&action_log, window, |this, _action_log, window, cx| {
this.update_excerpts(window, cx)
}),
- match &thread {
- AgentDiffThread::AcpThread(thread) => cx
- .subscribe(thread, |this, _thread, event, cx| {
- this.handle_acp_thread_event(event, cx)
- }),
- },
+ cx.subscribe(&thread, |this, _thread, event, cx| {
+ this.handle_acp_thread_event(event, cx)
+ }),
],
title: SharedString::default(),
multibuffer,
@@ -191,7 +124,12 @@ impl AgentDiffPane {
}
fn update_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
- let changed_buffers = self.thread.action_log(cx).read(cx).changed_buffers(cx);
+ let changed_buffers = self
+ .thread
+ .read(cx)
+ .action_log()
+ .read(cx)
+ .changed_buffers(cx);
let mut paths_to_delete = self.multibuffer.read(cx).paths().collect::<HashSet<_>>();
for (buffer, diff_handle) in changed_buffers {
@@ -278,7 +216,7 @@ impl AgentDiffPane {
}
fn update_title(&mut self, cx: &mut Context<Self>) {
- let new_title = self.thread.title(cx);
+ let new_title = self.thread.read(cx).title();
if new_title != self.title {
self.title = new_title;
cx.emit(EditorEvent::TitleChanged);
@@ -340,16 +278,18 @@ impl AgentDiffPane {
}
fn keep_all(&mut self, _: &KeepAll, _window: &mut Window, cx: &mut Context<Self>) {
- self.thread
- .action_log(cx)
- .update(cx, |action_log, cx| action_log.keep_all_edits(cx))
+ let telemetry = ActionLogTelemetry::from(self.thread.read(cx));
+ let action_log = self.thread.read(cx).action_log().clone();
+ action_log.update(cx, |action_log, cx| {
+ action_log.keep_all_edits(Some(telemetry), cx)
+ });
}
}
fn keep_edits_in_selection(
editor: &mut Editor,
buffer_snapshot: &MultiBufferSnapshot,
- thread: &AgentDiffThread,
+ thread: &Entity<AcpThread>,
window: &mut Window,
cx: &mut Context<Editor>,
) {
@@ -364,7 +304,7 @@ fn keep_edits_in_selection(
fn reject_edits_in_selection(
editor: &mut Editor,
buffer_snapshot: &MultiBufferSnapshot,
- thread: &AgentDiffThread,
+ thread: &Entity<AcpThread>,
window: &mut Window,
cx: &mut Context<Editor>,
) {
@@ -378,7 +318,7 @@ fn reject_edits_in_selection(
fn keep_edits_in_ranges(
editor: &mut Editor,
buffer_snapshot: &MultiBufferSnapshot,
- thread: &AgentDiffThread,
+ thread: &Entity<AcpThread>,
ranges: Vec<Range<editor::Anchor>>,
window: &mut Window,
cx: &mut Context<Editor>,
@@ -393,8 +333,15 @@ fn keep_edits_in_ranges(
for hunk in &diff_hunks_in_ranges {
let buffer = multibuffer.read(cx).buffer(hunk.buffer_id);
if let Some(buffer) = buffer {
- thread.action_log(cx).update(cx, |action_log, cx| {
- action_log.keep_edits_in_range(buffer, hunk.buffer_range.clone(), cx)
+ let action_log = thread.read(cx).action_log().clone();
+ let telemetry = ActionLogTelemetry::from(thread.read(cx));
+ action_log.update(cx, |action_log, cx| {
+ action_log.keep_edits_in_range(
+ buffer,
+ hunk.buffer_range.clone(),
+ Some(telemetry),
+ cx,
+ )
});
}
}
@@ -403,7 +350,7 @@ fn keep_edits_in_ranges(
fn reject_edits_in_ranges(
editor: &mut Editor,
buffer_snapshot: &MultiBufferSnapshot,
- thread: &AgentDiffThread,
+ thread: &Entity<AcpThread>,
ranges: Vec<Range<editor::Anchor>>,
window: &mut Window,
cx: &mut Context<Editor>,
@@ -427,11 +374,12 @@ fn reject_edits_in_ranges(
}
}
+ let action_log = thread.read(cx).action_log().clone();
+ let telemetry = ActionLogTelemetry::from(thread.read(cx));
for (buffer, ranges) in ranges_by_buffer {
- thread
- .action_log(cx)
+ action_log
.update(cx, |action_log, cx| {
- action_log.reject_edits_in_ranges(buffer, ranges, cx)
+ action_log.reject_edits_in_ranges(buffer, ranges, Some(telemetry.clone()), cx)
})
.detach_and_log_err(cx);
}
@@ -531,7 +479,7 @@ impl Item for AgentDiffPane {
}
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
- let title = self.thread.title(cx);
+ let title = self.thread.read(cx).title();
Label::new(format!("Review: {}", title))
.color(if params.selected {
Color::Default
@@ -712,7 +660,7 @@ impl Render for AgentDiffPane {
}
}
-fn diff_hunk_controls(thread: &AgentDiffThread) -> editor::RenderDiffHunkControlsFn {
+fn diff_hunk_controls(thread: &Entity<AcpThread>) -> editor::RenderDiffHunkControlsFn {
let thread = thread.clone();
Arc::new(
@@ -739,7 +687,7 @@ fn render_diff_hunk_controls(
hunk_range: Range<editor::Anchor>,
is_created_file: bool,
line_height: Pixels,
- thread: &AgentDiffThread,
+ thread: &Entity<AcpThread>,
editor: &Entity<Editor>,
cx: &mut App,
) -> AnyElement {
@@ -1153,8 +1101,11 @@ impl Render for AgentDiffToolbar {
return Empty.into_any();
};
- let has_pending_edit_tool_use =
- agent_diff.read(cx).thread.has_pending_edit_tool_uses(cx);
+ let has_pending_edit_tool_use = agent_diff
+ .read(cx)
+ .thread
+ .read(cx)
+ .has_pending_edit_tool_calls();
if has_pending_edit_tool_use {
return div().px_2().child(spinner_icon).into_any();
@@ -1214,7 +1165,7 @@ pub enum EditorState {
}
struct WorkspaceThread {
- thread: WeakAgentDiffThread,
+ thread: WeakEntity<AcpThread>,
_thread_subscriptions: (Subscription, Subscription),
singleton_editors: HashMap<WeakEntity<Buffer>, HashMap<WeakEntity<Editor>, Subscription>>,
_settings_subscription: Subscription,
@@ -1239,23 +1190,23 @@ impl AgentDiff {
pub fn set_active_thread(
workspace: &WeakEntity<Workspace>,
- thread: impl Into<AgentDiffThread>,
+ thread: Entity<AcpThread>,
window: &mut Window,
cx: &mut App,
) {
Self::global(cx).update(cx, |this, cx| {
- this.register_active_thread_impl(workspace, thread.into(), window, cx);
+ this.register_active_thread_impl(workspace, thread, window, cx);
});
}
fn register_active_thread_impl(
&mut self,
workspace: &WeakEntity<Workspace>,
- thread: AgentDiffThread,
+ thread: Entity<AcpThread>,
window: &mut Window,
cx: &mut Context<Self>,
) {
- let action_log = thread.action_log(cx);
+ let action_log = thread.read(cx).action_log().clone();
let action_log_subscription = cx.observe_in(&action_log, window, {
let workspace = workspace.clone();
@@ -1264,14 +1215,12 @@ impl AgentDiff {
}
});
- let thread_subscription = match &thread {
- AgentDiffThread::AcpThread(thread) => cx.subscribe_in(thread, window, {
- let workspace = workspace.clone();
- move |this, thread, event, window, cx| {
- this.handle_acp_thread_event(&workspace, thread, event, window, cx)
- }
- }),
- };
+ let thread_subscription = cx.subscribe_in(&thread, window, {
+ let workspace = workspace.clone();
+ move |this, thread, event, window, cx| {
+ this.handle_acp_thread_event(&workspace, thread, event, window, cx)
+ }
+ });
if let Some(workspace_thread) = self.workspace_threads.get_mut(workspace) {
// replace thread and action log subscription, but keep editors
@@ -1348,7 +1297,7 @@ impl AgentDiff {
fn register_review_action<T: Action>(
workspace: &mut Workspace,
- review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState
+ review: impl Fn(&Entity<Editor>, &Entity<AcpThread>, &mut Window, &mut App) -> PostReviewState
+ 'static,
this: &Entity<AgentDiff>,
) {
@@ -1508,7 +1457,7 @@ impl AgentDiff {
return;
};
- let action_log = thread.action_log(cx);
+ let action_log = thread.read(cx).action_log();
let changed_buffers = action_log.read(cx).changed_buffers(cx);
let mut unaffected = self.reviewing_editors.clone();
@@ -1627,7 +1576,7 @@ impl AgentDiff {
fn keep_all(
editor: &Entity<Editor>,
- thread: &AgentDiffThread,
+ thread: &Entity<AcpThread>,
window: &mut Window,
cx: &mut App,
) -> PostReviewState {
@@ -1647,7 +1596,7 @@ impl AgentDiff {
fn reject_all(
editor: &Entity<Editor>,
- thread: &AgentDiffThread,
+ thread: &Entity<AcpThread>,
window: &mut Window,
cx: &mut App,
) -> PostReviewState {
@@ -1667,7 +1616,7 @@ impl AgentDiff {
fn keep(
editor: &Entity<Editor>,
- thread: &AgentDiffThread,
+ thread: &Entity<AcpThread>,
window: &mut Window,
cx: &mut App,
) -> PostReviewState {
@@ -1680,7 +1629,7 @@ impl AgentDiff {
fn reject(
editor: &Entity<Editor>,
- thread: &AgentDiffThread,
+ thread: &Entity<AcpThread>,
window: &mut Window,
cx: &mut App,
) -> PostReviewState {
@@ -1703,7 +1652,7 @@ impl AgentDiff {
fn review_in_active_editor(
&mut self,
workspace: &mut Workspace,
- review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState,
+ review: impl Fn(&Entity<Editor>, &Entity<AcpThread>, &mut Window, &mut App) -> PostReviewState,
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<Task<Result<()>>> {
@@ -1725,7 +1674,7 @@ impl AgentDiff {
if let PostReviewState::AllReviewed = review(&editor, &thread, window, cx)
&& let Some(curr_buffer) = editor.read(cx).buffer().read(cx).as_singleton()
{
- let changed_buffers = thread.action_log(cx).read(cx).changed_buffers(cx);
+ let changed_buffers = thread.read(cx).action_log().read(cx).changed_buffers(cx);
let mut keys = changed_buffers.keys().cycle();
keys.find(|k| *k == &curr_buffer);
@@ -1768,12 +1717,11 @@ mod tests {
use super::*;
use crate::Keep;
use acp_thread::AgentConnection as _;
- use agent_settings::AgentSettings;
use editor::EditorSettings;
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext};
use project::{FakeFs, Project};
use serde_json::json;
- use settings::{Settings, SettingsStore};
+ use settings::SettingsStore;
use std::{path::Path, rc::Rc};
use util::path;
@@ -1782,13 +1730,8 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
- AgentSettings::register(cx);
prompt_store::init(cx);
- workspace::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- EditorSettings::register(cx);
language_model::init_settings(cx);
});
@@ -1815,8 +1758,7 @@ mod tests {
.await
.unwrap();
- let thread = AgentDiffThread::AcpThread(thread);
- let action_log = cx.read(|cx| thread.action_log(cx));
+ let action_log = cx.read(|cx| thread.read(cx).action_log().clone());
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
@@ -1942,13 +1884,8 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
- AgentSettings::register(cx);
prompt_store::init(cx);
- workspace::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- EditorSettings::register(cx);
language_model::init_settings(cx);
workspace::register_project_item::<Editor>(cx);
});
@@ -2004,7 +1941,6 @@ mod tests {
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
// Set the active thread
- let thread = AgentDiffThread::AcpThread(thread);
cx.update(|window, cx| {
AgentDiff::set_active_thread(&workspace.downgrade(), thread.clone(), window, cx)
});
@@ -12,7 +12,6 @@ mod context_strip;
mod inline_assistant;
mod inline_prompt_editor;
mod language_model_selector;
-mod message_editor;
mod profile_selector;
mod slash_command;
mod slash_command_picker;
@@ -248,8 +247,6 @@ pub fn init(
is_eval: bool,
cx: &mut App,
) {
- AgentSettings::register(cx);
-
assistant_text_thread::init(client.clone(), cx);
rules_library::init(cx);
if !is_eval {
@@ -1082,10 +1082,7 @@ mod tests {
};
use gpui::TestAppContext;
use indoc::indoc;
- use language::{
- Buffer, Language, LanguageConfig, LanguageMatcher, Point, language_settings,
- tree_sitter_rust,
- };
+ use language::{Buffer, Language, LanguageConfig, LanguageMatcher, Point, tree_sitter_rust};
use language_model::{LanguageModelRegistry, TokenUsage};
use rand::prelude::*;
use settings::SettingsStore;
@@ -1465,8 +1462,6 @@ mod tests {
fn init_test(cx: &mut TestAppContext) {
cx.update(LanguageModelRegistry::test);
cx.set_global(cx.update(SettingsStore::test));
- cx.update(Project::init_settings);
- cx.update(language_settings::init);
}
fn simulate_response_stream(
@@ -1075,8 +1075,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
}
@@ -42,7 +42,7 @@ use super::{
ContextPickerAction, ContextPickerEntry, ContextPickerMode, MentionLink, RecentEntry,
available_context_picker_entries, recent_context_picker_entries_with_store, selection_ranges,
};
-use crate::message_editor::ContextCreasesAddon;
+use crate::inline_prompt_editor::ContextCreasesAddon;
pub(crate) enum Match {
File(FileMatch),
@@ -1182,10 +1182,8 @@ mod tests {
let app_state = cx.update(AppState::test);
cx.update(|cx| {
- language::init(cx);
editor::init(cx);
workspace::init(app_state.clone(), cx);
- Project::init_settings(cx);
});
app_state
@@ -1486,10 +1484,8 @@ mod tests {
let app_state = cx.update(AppState::test);
cx.update(|cx| {
- language::init(cx);
editor::init(cx);
workspace::init(app_state.clone(), cx);
- Project::init_settings(cx);
});
app_state
@@ -1686,11 +1682,6 @@ mod tests {
let store = SettingsStore::test(cx);
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
- client::init_settings(cx);
- language::init(cx);
- Project::init_settings(cx);
- workspace::init_settings(cx);
- editor::init_settings(cx);
});
}
}
@@ -1,8 +1,8 @@
-use crate::context_store::ContextStore;
use agent::HistoryStore;
-use collections::VecDeque;
+use collections::{HashMap, VecDeque};
use editor::actions::Paste;
-use editor::display_map::EditorMargins;
+use editor::display_map::{CreaseId, EditorMargins};
+use editor::{Addon, AnchorRangeExt as _};
use editor::{
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
actions::{MoveDown, MoveUp},
@@ -17,6 +17,7 @@ use parking_lot::Mutex;
use prompt_store::PromptStore;
use settings::Settings;
use std::cmp;
+use std::ops::Range;
use std::rc::Rc;
use std::sync::Arc;
use theme::ThemeSettings;
@@ -27,12 +28,15 @@ use zed_actions::agent::ToggleModelSelector;
use crate::agent_model_selector::AgentModelSelector;
use crate::buffer_codegen::BufferCodegen;
-use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider};
+use crate::context::{AgentContextHandle, AgentContextKey};
+use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider, crease_for_mention};
+use crate::context_store::{ContextStore, ContextStoreEvent};
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
-use crate::message_editor::{ContextCreasesAddon, extract_message_creases, insert_message_creases};
use crate::terminal_codegen::TerminalCodegen;
-use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext};
-use crate::{RemoveAllContext, ToggleContextPicker};
+use crate::{
+ CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext, RemoveAllContext,
+ ToggleContextPicker,
+};
pub struct PromptEditor<T> {
pub editor: Entity<Editor>,
@@ -1157,3 +1161,156 @@ impl GenerationMode {
}
}
}
+
+/// Stored information that can be used to resurrect a context crease when creating an editor for a past message.
+#[derive(Clone, Debug)]
+pub struct MessageCrease {
+ pub range: Range<usize>,
+ pub icon_path: SharedString,
+ pub label: SharedString,
+ /// None for a deserialized message, Some otherwise.
+ pub context: Option<AgentContextHandle>,
+}
+
+#[derive(Default)]
+pub struct ContextCreasesAddon {
+ creases: HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>>,
+ _subscription: Option<Subscription>,
+}
+
+impl Addon for ContextCreasesAddon {
+ fn to_any(&self) -> &dyn std::any::Any {
+ self
+ }
+
+ fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> {
+ Some(self)
+ }
+}
+
+impl ContextCreasesAddon {
+ pub fn new() -> Self {
+ Self {
+ creases: HashMap::default(),
+ _subscription: None,
+ }
+ }
+
+ pub fn add_creases(
+ &mut self,
+ context_store: &Entity<ContextStore>,
+ key: AgentContextKey,
+ creases: impl IntoIterator<Item = (CreaseId, SharedString)>,
+ cx: &mut Context<Editor>,
+ ) {
+ self.creases.entry(key).or_default().extend(creases);
+ self._subscription = Some(
+ cx.subscribe(context_store, |editor, _, event, cx| match event {
+ ContextStoreEvent::ContextRemoved(key) => {
+ let Some(this) = editor.addon_mut::<Self>() else {
+ return;
+ };
+ let (crease_ids, replacement_texts): (Vec<_>, Vec<_>) = this
+ .creases
+ .remove(key)
+ .unwrap_or_default()
+ .into_iter()
+ .unzip();
+ let ranges = editor
+ .remove_creases(crease_ids, cx)
+ .into_iter()
+ .map(|(_, range)| range)
+ .collect::<Vec<_>>();
+ editor.unfold_ranges(&ranges, false, false, cx);
+ editor.edit(ranges.into_iter().zip(replacement_texts), cx);
+ cx.notify();
+ }
+ }),
+ )
+ }
+
+ pub fn into_inner(self) -> HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>> {
+ self.creases
+ }
+}
+
+pub fn extract_message_creases(
+ editor: &mut Editor,
+ cx: &mut Context<'_, Editor>,
+) -> Vec<MessageCrease> {
+ let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
+ let mut contexts_by_crease_id = editor
+ .addon_mut::<ContextCreasesAddon>()
+ .map(std::mem::take)
+ .unwrap_or_default()
+ .into_inner()
+ .into_iter()
+ .flat_map(|(key, creases)| {
+ let context = key.0;
+ creases
+ .into_iter()
+ .map(move |(id, _)| (id, context.clone()))
+ })
+ .collect::<HashMap<_, _>>();
+ // Filter the addon's list of creases based on what the editor reports,
+ // since the addon might have removed creases in it.
+
+ editor.display_map.update(cx, |display_map, cx| {
+ display_map
+ .snapshot(cx)
+ .crease_snapshot
+ .creases()
+ .filter_map(|(id, crease)| {
+ Some((
+ id,
+ (
+ crease.range().to_offset(&buffer_snapshot),
+ crease.metadata()?.clone(),
+ ),
+ ))
+ })
+ .map(|(id, (range, metadata))| {
+ let context = contexts_by_crease_id.remove(&id);
+ MessageCrease {
+ range,
+ context,
+ label: metadata.label,
+ icon_path: metadata.icon_path,
+ }
+ })
+ .collect()
+ })
+}
+
+pub fn insert_message_creases(
+ editor: &mut Editor,
+ message_creases: &[MessageCrease],
+ context_store: &Entity<ContextStore>,
+ window: &mut Window,
+ cx: &mut Context<'_, Editor>,
+) {
+ let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
+ let creases = message_creases
+ .iter()
+ .map(|crease| {
+ let start = buffer_snapshot.anchor_after(crease.range.start);
+ let end = buffer_snapshot.anchor_before(crease.range.end);
+ crease_for_mention(
+ crease.label.clone(),
+ crease.icon_path.clone(),
+ start..end,
+ cx.weak_entity(),
+ )
+ })
+ .collect::<Vec<_>>();
+ let ids = editor.insert_creases(creases.clone(), cx);
+ editor.fold_creases(creases, false, window, cx);
+ if let Some(addon) = editor.addon_mut::<ContextCreasesAddon>() {
+ for (crease, id) in message_creases.iter().zip(ids) {
+ if let Some(context) = crease.context.as_ref() {
+ let key = AgentContextKey(context.clone());
+ addon.add_creases(context_store, key, vec![(id, crease.label.clone())], cx);
+ }
+ }
+ }
+}
@@ -177,7 +177,7 @@ impl LanguageModelPickerDelegate {
}
_ => {
log::error!(
- "Failed to authenticate provider: {}: {err}",
+ "Failed to authenticate provider: {}: {err:#}",
provider_name.0
);
}
@@ -1,166 +0,0 @@
-use std::ops::Range;
-
-use collections::HashMap;
-use editor::display_map::CreaseId;
-use editor::{Addon, AnchorRangeExt, Editor};
-use gpui::{Entity, Subscription};
-use ui::prelude::*;
-
-use crate::{
- context::{AgentContextHandle, AgentContextKey},
- context_picker::crease_for_mention,
- context_store::{ContextStore, ContextStoreEvent},
-};
-
-/// Stored information that can be used to resurrect a context crease when creating an editor for a past message.
-#[derive(Clone, Debug)]
-pub struct MessageCrease {
- pub range: Range<usize>,
- pub icon_path: SharedString,
- pub label: SharedString,
- /// None for a deserialized message, Some otherwise.
- pub context: Option<AgentContextHandle>,
-}
-
-#[derive(Default)]
-pub struct ContextCreasesAddon {
- creases: HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>>,
- _subscription: Option<Subscription>,
-}
-
-impl Addon for ContextCreasesAddon {
- fn to_any(&self) -> &dyn std::any::Any {
- self
- }
-
- fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> {
- Some(self)
- }
-}
-
-impl ContextCreasesAddon {
- pub fn new() -> Self {
- Self {
- creases: HashMap::default(),
- _subscription: None,
- }
- }
-
- pub fn add_creases(
- &mut self,
- context_store: &Entity<ContextStore>,
- key: AgentContextKey,
- creases: impl IntoIterator<Item = (CreaseId, SharedString)>,
- cx: &mut Context<Editor>,
- ) {
- self.creases.entry(key).or_default().extend(creases);
- self._subscription = Some(
- cx.subscribe(context_store, |editor, _, event, cx| match event {
- ContextStoreEvent::ContextRemoved(key) => {
- let Some(this) = editor.addon_mut::<Self>() else {
- return;
- };
- let (crease_ids, replacement_texts): (Vec<_>, Vec<_>) = this
- .creases
- .remove(key)
- .unwrap_or_default()
- .into_iter()
- .unzip();
- let ranges = editor
- .remove_creases(crease_ids, cx)
- .into_iter()
- .map(|(_, range)| range)
- .collect::<Vec<_>>();
- editor.unfold_ranges(&ranges, false, false, cx);
- editor.edit(ranges.into_iter().zip(replacement_texts), cx);
- cx.notify();
- }
- }),
- )
- }
-
- pub fn into_inner(self) -> HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>> {
- self.creases
- }
-}
-
-pub fn extract_message_creases(
- editor: &mut Editor,
- cx: &mut Context<'_, Editor>,
-) -> Vec<MessageCrease> {
- let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
- let mut contexts_by_crease_id = editor
- .addon_mut::<ContextCreasesAddon>()
- .map(std::mem::take)
- .unwrap_or_default()
- .into_inner()
- .into_iter()
- .flat_map(|(key, creases)| {
- let context = key.0;
- creases
- .into_iter()
- .map(move |(id, _)| (id, context.clone()))
- })
- .collect::<HashMap<_, _>>();
- // Filter the addon's list of creases based on what the editor reports,
- // since the addon might have removed creases in it.
-
- editor.display_map.update(cx, |display_map, cx| {
- display_map
- .snapshot(cx)
- .crease_snapshot
- .creases()
- .filter_map(|(id, crease)| {
- Some((
- id,
- (
- crease.range().to_offset(&buffer_snapshot),
- crease.metadata()?.clone(),
- ),
- ))
- })
- .map(|(id, (range, metadata))| {
- let context = contexts_by_crease_id.remove(&id);
- MessageCrease {
- range,
- context,
- label: metadata.label,
- icon_path: metadata.icon_path,
- }
- })
- .collect()
- })
-}
-
-pub fn insert_message_creases(
- editor: &mut Editor,
- message_creases: &[MessageCrease],
- context_store: &Entity<ContextStore>,
- window: &mut Window,
- cx: &mut Context<'_, Editor>,
-) {
- let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
- let creases = message_creases
- .iter()
- .map(|crease| {
- let start = buffer_snapshot.anchor_after(crease.range.start);
- let end = buffer_snapshot.anchor_before(crease.range.end);
- crease_for_mention(
- crease.label.clone(),
- crease.icon_path.clone(),
- start..end,
- cx.weak_entity(),
- )
- })
- .collect::<Vec<_>>();
- let ids = editor.insert_creases(creases.clone(), cx);
- editor.fold_creases(creases, false, window, cx);
- if let Some(addon) = editor.addon_mut::<ContextCreasesAddon>() {
- for (crease, id) in message_creases.iter().zip(ids) {
- if let Some(context) = crease.context.as_ref() {
- let key = AgentContextKey(context.clone());
- addon.add_creases(context_store, key, vec![(id, crease.label.clone())], cx);
- }
- }
- }
-}
@@ -3223,11 +3223,7 @@ mod tests {
prompt_store::init(cx);
LanguageModelRegistry::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- agent_settings::init(cx);
- Project::init_settings(cx);
+
theme::init(theme::LoadThemes::JustBase, cx);
- workspace::init_settings(cx);
- editor::init_settings(cx);
}
}
@@ -577,8 +577,6 @@ mod test {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
// release_channel::init(SemanticVersion::default(), cx);
- language::init(cx);
- Project::init_settings(cx);
});
}
@@ -22,7 +22,6 @@ use language_model::{
};
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
-use project::Project;
use prompt_store::PromptBuilder;
use rand::prelude::*;
use serde_json::json;
@@ -1411,9 +1410,6 @@ fn init_test(cx: &mut App) {
prompt_store::init(cx);
LanguageModelRegistry::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- agent_settings::init(cx);
- Project::init_settings(cx);
}
#[derive(Clone)]
@@ -48,7 +48,6 @@ pub const LEGACY_CHANNEL_COUNT: NonZero<u16> = nz!(2);
pub const REPLAY_DURATION: Duration = Duration::from_secs(30);
pub fn init(cx: &mut App) {
- AudioSettings::register(cx);
LIVE_SETTINGS.initialize(cx);
}
@@ -1,9 +1,9 @@
use std::sync::atomic::{AtomicBool, Ordering};
use gpui::App;
-use settings::{Settings, SettingsStore};
+use settings::{RegisterSetting, Settings, SettingsStore};
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, RegisterSetting)]
pub struct AudioSettings {
/// Opt into the new audio system.
///
@@ -10,7 +10,7 @@ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
use paths::remote_servers_dir;
use release_channel::{AppCommitSha, ReleaseChannel};
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsStore};
+use settings::{RegisterSetting, Settings, SettingsStore};
use smol::{fs, io::AsyncReadExt};
use smol::{fs::File, process::Command};
use std::mem;
@@ -120,7 +120,7 @@ impl Drop for MacOsUnmounter<'_> {
}
}
-#[derive(Clone, Copy, Debug)]
+#[derive(Clone, Copy, Debug, RegisterSetting)]
struct AutoUpdateSetting(bool);
/// Whether or not to automatically check for updates.
@@ -138,8 +138,6 @@ struct GlobalAutoUpdate(Option<Entity<AutoUpdater>>);
impl Global for GlobalAutoUpdate {}
pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
- AutoUpdateSetting::register(cx);
-
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
workspace.register_action(|_, action, window, cx| check(action, window, cx));
@@ -1028,7 +1026,6 @@ mod tests {
.set_user_settings("{}", cx)
.expect("Unable to set user settings");
cx.set_global(store);
- AutoUpdateSetting::register(cx);
assert!(AutoUpdateSetting::get_global(cx).0);
});
}
@@ -1,7 +1,6 @@
pub mod participant;
pub mod room;
-use crate::call_settings::CallSettings;
use anyhow::{Context as _, Result, anyhow};
use audio::Audio;
use client::{ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE, proto};
@@ -14,7 +13,6 @@ use gpui::{
use postage::watch;
use project::Project;
use room::Event;
-use settings::Settings;
use std::sync::Arc;
pub use livekit_client::{RemoteVideoTrack, RemoteVideoTrackView, RemoteVideoTrackViewEvent};
@@ -26,8 +24,6 @@ struct GlobalActiveCall(Entity<ActiveCall>);
impl Global for GlobalActiveCall {}
pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
- CallSettings::register(cx);
-
let active_call = cx.new(|cx| ActiveCall::new(client, user_store, cx));
cx.set_global(GlobalActiveCall(active_call));
}
@@ -1,6 +1,6 @@
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
-#[derive(Debug)]
+#[derive(Debug, RegisterSetting)]
pub struct CallSettings {
pub mute_on_join: bool,
pub share_on_join: bool,
@@ -237,7 +237,6 @@ fn init_test(cx: &mut App) -> Entity<ChannelStore> {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
release_channel::init(SemanticVersion::default(), cx);
- client::init_settings(cx);
let clock = Arc::new(FakeSystemClock::new());
let http = FakeHttpClient::with_404_response();
@@ -30,7 +30,7 @@ use rand::prelude::*;
use release_channel::{AppVersion, ReleaseChannel};
use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsContent};
+use settings::{RegisterSetting, Settings, SettingsContent};
use std::{
any::TypeId,
convert::TryFrom,
@@ -95,7 +95,7 @@ actions!(
]
);
-#[derive(Deserialize)]
+#[derive(Deserialize, RegisterSetting)]
pub struct ClientSettings {
pub server_url: String,
}
@@ -113,7 +113,7 @@ impl Settings for ClientSettings {
}
}
-#[derive(Deserialize, Default)]
+#[derive(Deserialize, Default, RegisterSetting)]
pub struct ProxySettings {
pub proxy: Option<String>,
}
@@ -140,12 +140,6 @@ impl Settings for ProxySettings {
}
}
-pub fn init_settings(cx: &mut App) {
- TelemetrySettings::register(cx);
- ClientSettings::register(cx);
- ProxySettings::register(cx);
-}
-
pub fn init(client: &Arc<Client>, cx: &mut App) {
let client = Arc::downgrade(client);
cx.on_action({
@@ -508,7 +502,7 @@ impl<T: 'static> Drop for PendingEntitySubscription<T> {
}
}
-#[derive(Copy, Clone, Deserialize, Debug)]
+#[derive(Copy, Clone, Deserialize, Debug, RegisterSetting)]
pub struct TelemetrySettings {
pub diagnostics: bool,
pub metrics: bool,
@@ -2177,7 +2171,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- init_settings(cx);
});
}
}
@@ -179,8 +179,6 @@ impl Telemetry {
let release_channel =
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
- TelemetrySettings::register(cx);
-
let state = Arc::new(Mutex::new(TelemetryState {
settings: *TelemetrySettings::get_global(cx),
architecture: env::consts::ARCH,
@@ -1,5 +1,4 @@
pub mod predict_edits_v3;
-pub mod udiff;
use std::str::FromStr;
use std::sync::Arc;
@@ -1,7 +1,7 @@
use chrono::Duration;
use serde::{Deserialize, Serialize};
use std::{
- fmt::Display,
+ fmt::{Display, Write as _},
ops::{Add, Range, Sub},
path::{Path, PathBuf},
sync::Arc,
@@ -11,7 +11,14 @@ use uuid::Uuid;
use crate::PredictEditsGitInfo;
-// TODO: snippet ordering within file / relative to excerpt
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct PlanContextRetrievalRequest {
+ pub excerpt: String,
+ pub excerpt_path: Arc<Path>,
+ pub excerpt_line_range: Range<Line>,
+ pub cursor_file_max_row: Line,
+ pub events: Vec<Event>,
+}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PredictEditsRequest {
@@ -125,15 +132,15 @@ impl Display for Event {
write!(
f,
"// User accepted prediction:\n--- a/{}\n+++ b/{}\n{diff}",
- old_path.display(),
- new_path.display()
+ DiffPathFmt(old_path),
+ DiffPathFmt(new_path)
)
} else {
write!(
f,
"--- a/{}\n+++ b/{}\n{diff}",
- old_path.display(),
- new_path.display()
+ DiffPathFmt(old_path),
+ DiffPathFmt(new_path)
)
}
}
@@ -141,6 +148,24 @@ impl Display for Event {
}
}
+/// always format the Path as a unix path with `/` as the path sep in Diffs
+pub struct DiffPathFmt<'a>(pub &'a Path);
+
+impl<'a> std::fmt::Display for DiffPathFmt<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let mut is_first = true;
+ for component in self.0.components() {
+ if !is_first {
+ f.write_char('/')?;
+ } else {
+ is_first = false;
+ }
+ write!(f, "{}", component.as_os_str().display())?;
+ }
+ Ok(())
+ }
+}
+
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Signature {
pub text: String,
@@ -1,294 +0,0 @@
-use std::{borrow::Cow, fmt::Display};
-
-#[derive(Debug, PartialEq)]
-pub enum DiffLine<'a> {
- OldPath { path: Cow<'a, str> },
- NewPath { path: Cow<'a, str> },
- HunkHeader(Option<HunkLocation>),
- Context(&'a str),
- Deletion(&'a str),
- Addition(&'a str),
- Garbage(&'a str),
-}
-
-#[derive(Debug, PartialEq)]
-pub struct HunkLocation {
- start_line_old: u32,
- count_old: u32,
- start_line_new: u32,
- count_new: u32,
-}
-
-impl<'a> DiffLine<'a> {
- pub fn parse(line: &'a str) -> Self {
- Self::try_parse(line).unwrap_or(Self::Garbage(line))
- }
-
- fn try_parse(line: &'a str) -> Option<Self> {
- if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) {
- let path = parse_header_path("a/", header);
- Some(Self::OldPath { path })
- } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) {
- Some(Self::NewPath {
- path: parse_header_path("b/", header),
- })
- } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) {
- if header.starts_with("...") {
- return Some(Self::HunkHeader(None));
- }
-
- let (start_line_old, header) = header.strip_prefix('-')?.split_once(',')?;
- let mut parts = header.split_ascii_whitespace();
- let count_old = parts.next()?;
- let (start_line_new, count_new) = parts.next()?.strip_prefix('+')?.split_once(',')?;
-
- Some(Self::HunkHeader(Some(HunkLocation {
- start_line_old: start_line_old.parse::<u32>().ok()?.saturating_sub(1),
- count_old: count_old.parse().ok()?,
- start_line_new: start_line_new.parse::<u32>().ok()?.saturating_sub(1),
- count_new: count_new.parse().ok()?,
- })))
- } else if let Some(deleted_header) = line.strip_prefix("-") {
- Some(Self::Deletion(deleted_header))
- } else if line.is_empty() {
- Some(Self::Context(""))
- } else if let Some(context) = line.strip_prefix(" ") {
- Some(Self::Context(context))
- } else {
- Some(Self::Addition(line.strip_prefix("+")?))
- }
- }
-}
-
-impl<'a> Display for DiffLine<'a> {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- DiffLine::OldPath { path } => write!(f, "--- {path}"),
- DiffLine::NewPath { path } => write!(f, "+++ {path}"),
- DiffLine::HunkHeader(Some(hunk_location)) => {
- write!(
- f,
- "@@ -{},{} +{},{} @@",
- hunk_location.start_line_old + 1,
- hunk_location.count_old,
- hunk_location.start_line_new + 1,
- hunk_location.count_new
- )
- }
- DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"),
- DiffLine::Context(content) => write!(f, " {content}"),
- DiffLine::Deletion(content) => write!(f, "-{content}"),
- DiffLine::Addition(content) => write!(f, "+{content}"),
- DiffLine::Garbage(line) => write!(f, "{line}"),
- }
- }
-}
-
-fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> {
- if !header.contains(['"', '\\']) {
- let path = header.split_ascii_whitespace().next().unwrap_or(header);
- return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path));
- }
-
- let mut path = String::with_capacity(header.len());
- let mut in_quote = false;
- let mut chars = header.chars().peekable();
- let mut strip_prefix = Some(strip_prefix);
-
- while let Some(char) = chars.next() {
- if char == '"' {
- in_quote = !in_quote;
- } else if char == '\\' {
- let Some(&next_char) = chars.peek() else {
- break;
- };
- chars.next();
- path.push(next_char);
- } else if char.is_ascii_whitespace() && !in_quote {
- break;
- } else {
- path.push(char);
- }
-
- if let Some(prefix) = strip_prefix
- && path == prefix
- {
- strip_prefix.take();
- path.clear();
- }
- }
-
- Cow::Owned(path)
-}
-
-fn eat_required_whitespace(header: &str) -> Option<&str> {
- let trimmed = header.trim_ascii_start();
-
- if trimmed.len() == header.len() {
- None
- } else {
- Some(trimmed)
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use indoc::indoc;
-
- #[test]
- fn parse_lines_simple() {
- let input = indoc! {"
- diff --git a/text.txt b/text.txt
- index 86c770d..a1fd855 100644
- --- a/file.txt
- +++ b/file.txt
- @@ -1,2 +1,3 @@
- context
- -deleted
- +inserted
- garbage
-
- --- b/file.txt
- +++ a/file.txt
- "};
-
- let lines = input.lines().map(DiffLine::parse).collect::<Vec<_>>();
-
- pretty_assertions::assert_eq!(
- lines,
- &[
- DiffLine::Garbage("diff --git a/text.txt b/text.txt"),
- DiffLine::Garbage("index 86c770d..a1fd855 100644"),
- DiffLine::OldPath {
- path: "file.txt".into()
- },
- DiffLine::NewPath {
- path: "file.txt".into()
- },
- DiffLine::HunkHeader(Some(HunkLocation {
- start_line_old: 0,
- count_old: 2,
- start_line_new: 0,
- count_new: 3
- })),
- DiffLine::Context("context"),
- DiffLine::Deletion("deleted"),
- DiffLine::Addition("inserted"),
- DiffLine::Garbage("garbage"),
- DiffLine::Context(""),
- DiffLine::OldPath {
- path: "b/file.txt".into()
- },
- DiffLine::NewPath {
- path: "a/file.txt".into()
- },
- ]
- );
- }
-
- #[test]
- fn file_header_extra_space() {
- let options = ["--- file", "--- file", "---\tfile"];
-
- for option in options {
- pretty_assertions::assert_eq!(
- DiffLine::parse(option),
- DiffLine::OldPath {
- path: "file".into()
- },
- "{option}",
- );
- }
- }
-
- #[test]
- fn hunk_header_extra_space() {
- let options = [
- "@@ -1,2 +1,3 @@",
- "@@ -1,2 +1,3 @@",
- "@@\t-1,2\t+1,3\t@@",
- "@@ -1,2 +1,3 @@",
- "@@ -1,2 +1,3 @@",
- "@@ -1,2 +1,3 @@",
- "@@ -1,2 +1,3 @@ garbage",
- ];
-
- for option in options {
- pretty_assertions::assert_eq!(
- DiffLine::parse(option),
- DiffLine::HunkHeader(Some(HunkLocation {
- start_line_old: 0,
- count_old: 2,
- start_line_new: 0,
- count_new: 3
- })),
- "{option}",
- );
- }
- }
-
- #[test]
- fn hunk_header_without_location() {
- pretty_assertions::assert_eq!(DiffLine::parse("@@ ... @@"), DiffLine::HunkHeader(None));
- }
-
- #[test]
- fn test_parse_path() {
- assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt");
- assert_eq!(
- parse_header_path("a/", "foo/bar/baz.txt"),
- "foo/bar/baz.txt"
- );
- assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt");
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/baz.txt"),
- "foo/bar/baz.txt"
- );
-
- // Extra
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/baz.txt 2025"),
- "foo/bar/baz.txt"
- );
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/baz.txt\t2025"),
- "foo/bar/baz.txt"
- );
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/baz.txt \""),
- "foo/bar/baz.txt"
- );
-
- // Quoted
- assert_eq!(
- parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""),
- "foo/bar/baz quox.txt"
- );
- assert_eq!(
- parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""),
- "foo/bar/baz quox.txt"
- );
- assert_eq!(
- parse_header_path("a/", "\"foo/bar/baz quox.txt\""),
- "foo/bar/baz quox.txt"
- );
- assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷");
- assert_eq!(
- parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"),
- "foo/bar/baz quox.txt"
- );
- // unescaped quotes are dropped
- assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar");
-
- // Escaped
- assert_eq!(
- parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""),
- "foo/\"bar\"/baz.txt"
- );
- assert_eq!(
- parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""),
- "C:\\Projects\\My App\\old file.txt"
- );
- }
-}
@@ -17,5 +17,7 @@ cloud_llm_client.workspace = true
indoc.workspace = true
ordered-float.workspace = true
rustc-hash.workspace = true
+schemars.workspace = true
serde.workspace = true
+serde_json.workspace = true
strum.workspace = true
@@ -1,8 +1,9 @@
//! Zeta2 prompt planning and generation code shared with cloud.
+pub mod retrieval_prompt;
use anyhow::{Context as _, Result, anyhow};
use cloud_llm_client::predict_edits_v3::{
- self, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration,
+ self, DiffPathFmt, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration,
};
use indoc::indoc;
use ordered_float::OrderedFloat;
@@ -212,7 +213,7 @@ pub fn write_codeblock<'a>(
include_line_numbers: bool,
output: &'a mut String,
) {
- writeln!(output, "`````{}", path.display()).unwrap();
+ writeln!(output, "`````{}", DiffPathFmt(path)).unwrap();
write_excerpts(
excerpts,
sorted_insertions,
@@ -275,7 +276,7 @@ pub fn write_excerpts<'a>(
}
}
-fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) {
+pub fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) {
if events.is_empty() {
return;
};
@@ -0,0 +1,92 @@
+use anyhow::Result;
+use cloud_llm_client::predict_edits_v3::{self, Excerpt};
+use indoc::indoc;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use std::{fmt::Write, sync::LazyLock};
+
+use crate::{push_events, write_codeblock};
+
+pub fn build_prompt(request: predict_edits_v3::PlanContextRetrievalRequest) -> Result<String> {
+ let mut prompt = SEARCH_INSTRUCTIONS.to_string();
+
+ if !request.events.is_empty() {
+ writeln!(&mut prompt, "## User Edits\n")?;
+ push_events(&mut prompt, &request.events);
+ }
+
+ writeln!(&mut prompt, "## Excerpt around the cursor\n")?;
+ write_codeblock(
+ &request.excerpt_path,
+ &[Excerpt {
+ start_line: request.excerpt_line_range.start,
+ text: request.excerpt.into(),
+ }],
+ &[],
+ request.cursor_file_max_row,
+ true,
+ &mut prompt,
+ );
+
+ writeln!(&mut prompt, "{TOOL_USE_REMINDER}")?;
+
+ Ok(prompt)
+}
+
+/// Search for relevant code
+///
+/// For the best results, run multiple queries at once with a single invocation of this tool.
+#[derive(Clone, Deserialize, Serialize, JsonSchema)]
+pub struct SearchToolInput {
+ /// An array of queries to run for gathering context relevant to the next prediction
+ #[schemars(length(max = 5))]
+ pub queries: Box<[SearchToolQuery]>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
+pub struct SearchToolQuery {
+ /// A glob pattern to match file paths in the codebase
+ pub glob: String,
+ /// A regular expression to match content within the files matched by the glob pattern
+ pub regex: String,
+}
+
+pub static TOOL_SCHEMA: LazyLock<(serde_json::Value, String)> = LazyLock::new(|| {
+ let schema = schemars::schema_for!(SearchToolInput);
+
+ let description = schema
+ .get("description")
+ .and_then(|description| description.as_str())
+ .unwrap()
+ .to_string();
+
+ (schema.into(), description)
+});
+
+pub const TOOL_NAME: &str = "search";
+
+const SEARCH_INSTRUCTIONS: &str = indoc! {r#"
+ ## Task
+
+ You are part of an edit prediction system in a code editor. Your role is to identify relevant code locations
+ that will serve as context for predicting the next required edit.
+
+ **Your task:**
+ - Analyze the user's recent edits and current cursor context
+ - Use the `search` tool to find code that may be relevant for predicting the next edit
+ - Focus on finding:
+ - Code patterns that might need similar changes based on the recent edits
+ - Functions, variables, types, and constants referenced in the current cursor context
+ - Related implementations, usages, or dependencies that may require consistent updates
+
+ **Important constraints:**
+ - This conversation has exactly 2 turns
+ - You must make ALL search queries in your first response via the `search` tool
+ - All queries will be executed in parallel and results returned together
+ - In the second turn, you will select the most relevant results via the `select` tool.
+"#};
+
+const TOOL_USE_REMINDER: &str = indoc! {"
+ --
+ Use the `search` tool now
+"};
@@ -34,7 +34,7 @@ struct CurrentCompletion {
snapshot: BufferSnapshot,
/// The edits that should be applied to transform the original text into the predicted text.
/// Each edit is a range in the buffer and the text to replace it with.
- edits: Arc<[(Range<Anchor>, String)]>,
+ edits: Arc<[(Range<Anchor>, Arc<str>)]>,
/// Preview of how the buffer will look after applying the edits.
edit_preview: EditPreview,
}
@@ -42,7 +42,7 @@ struct CurrentCompletion {
impl CurrentCompletion {
/// Attempts to adjust the edits based on changes made to the buffer since the completion was generated.
/// Returns None if the user's edits conflict with the predicted edits.
- fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, String)>> {
+ fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, Arc<str>)>> {
edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
}
}
@@ -281,8 +281,8 @@ impl EditPredictionProvider for CodestralCompletionProvider {
return Ok(());
}
- let edits: Arc<[(Range<Anchor>, String)]> =
- vec![(cursor_position..cursor_position, completion_text)].into();
+ let edits: Arc<[(Range<Anchor>, Arc<str>)]> =
+ vec![(cursor_position..cursor_position, completion_text.into())].into();
let edit_preview = buffer
.read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))?
.await;
@@ -346,6 +346,7 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
.add_request_handler(forward_read_only_project_request::<proto::GetColorPresentation>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
+ .add_request_handler(forward_read_only_project_request::<proto::OpenImageByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitGetBranches>)
.add_request_handler(forward_read_only_project_request::<proto::GetDefaultBranch>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedDiff>)
@@ -395,6 +396,7 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::StopLanguageServers>)
.add_request_handler(forward_mutating_project_request::<proto::LinkedEditingRange>)
.add_message_handler(create_buffer_for_peer)
+ .add_message_handler(create_image_for_peer)
.add_request_handler(update_buffer)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshInlayHints>)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshCodeLens>)
@@ -2389,6 +2391,26 @@ async fn create_buffer_for_peer(
Ok(())
}
+/// Notify other participants that a new image has been created
+async fn create_image_for_peer(
+ request: proto::CreateImageForPeer,
+ session: MessageContext,
+) -> Result<()> {
+ session
+ .db()
+ .await
+ .check_user_is_project_host(
+ ProjectId::from_proto(request.project_id),
+ session.connection_id,
+ )
+ .await?;
+ let peer_id = request.peer_id.context("invalid peer id")?;
+ session
+ .peer
+ .forward_send(session.connection_id, peer_id.into(), request)?;
+ Ok(())
+}
+
/// Notify other participants that a buffer has been updated. This is
/// allowed for guests as long as the update is limited to selections.
async fn update_buffer(
@@ -23,9 +23,6 @@ pub fn init_test(cx: &mut gpui::TestAppContext) {
cx.update(|cx| {
theme::init(theme::LoadThemes::JustBase, cx);
command_palette_hooks::init(cx);
- language::init(cx);
- workspace::init_settings(cx);
- project::Project::init_settings(cx);
debugger_ui::init(cx);
editor::init(cx);
});
@@ -84,7 +84,6 @@ async fn test_sharing_an_ssh_remote_project(
let node = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let _headless_project = server_cx.new(|cx| {
- client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,
@@ -245,7 +244,6 @@ async fn test_ssh_collaboration_git_branches(
let node = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let headless_project = server_cx.new(|cx| {
- client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,
@@ -450,7 +448,6 @@ async fn test_ssh_collaboration_formatting_with_prettier(
server_cx.update(HeadlessProject::init);
let remote_http_client = Arc::new(BlockedHttpClient);
let _headless_project = server_cx.new(|cx| {
- client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,
@@ -612,7 +609,6 @@ async fn test_remote_server_debugger(
let node = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let _headless_project = server_cx.new(|cx| {
- client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,
@@ -721,7 +717,6 @@ async fn test_slow_adapter_startup_retries(
let node = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let _headless_project = server_cx.new(|cx| {
- client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,
@@ -174,7 +174,6 @@ impl TestServer {
cx.set_global(settings);
theme::init(theme::LoadThemes::JustBase, cx);
release_channel::init(SemanticVersion::default(), cx);
- client::init_settings(cx);
});
let clock = Arc::new(FakeSystemClock::new());
@@ -345,7 +344,6 @@ impl TestServer {
theme::init(theme::LoadThemes::JustBase, cx);
Project::init(&client, cx);
client::init(&client, cx);
- language::init(cx);
editor::init(cx);
workspace::init(app_state.clone(), cx);
call::init(client.clone(), user_store.clone(), cx);
@@ -359,7 +357,6 @@ impl TestServer {
);
language_model::LanguageModelRegistry::test(cx);
assistant_text_thread::init(client.clone(), cx);
- agent_settings::init(cx);
});
client
@@ -13,14 +13,10 @@ use gpui::{
};
pub use panel_settings::{CollaborationPanelSettings, NotificationPanelSettings};
use release_channel::ReleaseChannel;
-use settings::Settings;
use ui::px;
use workspace::AppState;
pub fn init(app_state: &Arc<AppState>, cx: &mut App) {
- CollaborationPanelSettings::register(cx);
- NotificationPanelSettings::register(cx);
-
channel_view::init(cx);
collab_panel::init(cx);
notification_panel::init(cx);
@@ -1,16 +1,16 @@
use gpui::Pixels;
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
use ui::px;
use workspace::dock::DockPosition;
-#[derive(Debug)]
+#[derive(Debug, RegisterSetting)]
pub struct CollaborationPanelSettings {
pub button: bool,
pub dock: DockPosition,
pub default_width: Pixels,
}
-#[derive(Debug)]
+#[derive(Debug, RegisterSetting)]
pub struct NotificationPanelSettings {
pub button: bool,
pub dock: DockPosition,
@@ -28,7 +28,6 @@ use workspace::{ModalView, Workspace, WorkspaceSettings};
use zed_actions::{OpenZedUrl, command_palette::Toggle};
pub fn init(cx: &mut App) {
- client::init_settings(cx);
command_palette_hooks::init(cx);
cx.observe_new(CommandPalette::register).detach();
}
@@ -789,13 +788,11 @@ mod tests {
cx.update(|cx| {
let app_state = AppState::test(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
editor::init(cx);
menu::init();
go_to_line::init(cx);
workspace::init(app_state.clone(), cx);
init(cx);
- Project::init_settings(cx);
cx.bind_keys(KeymapFile::load_panic_on_failure(
r#"[
{
@@ -26,7 +26,6 @@ test-support = [
[dependencies]
anyhow.workspace = true
chrono.workspace = true
-client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
dirs.workspace = true
@@ -1115,11 +1115,6 @@ mod tests {
let store = SettingsStore::test(cx);
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
- client::init_settings(cx);
- language::init(cx);
- editor::init_settings(cx);
- Project::init_settings(cx);
- workspace::init_settings(cx);
SettingsStore::update_global(cx, |store: &mut SettingsStore, cx| {
store.update_user_settings(cx, |settings| f(&mut settings.project.all_languages));
});
@@ -256,7 +256,7 @@ impl DebugAdapterClient {
#[cfg(test)]
mod tests {
use super::*;
- use crate::{client::DebugAdapterClient, debugger_settings::DebuggerSettings};
+ use crate::client::DebugAdapterClient;
use dap_types::{
Capabilities, InitializeRequestArguments, InitializeRequestArgumentsPathFormat,
RunInTerminalRequestArguments, StartDebuggingRequestArguments,
@@ -265,7 +265,7 @@ mod tests {
};
use gpui::TestAppContext;
use serde_json::json;
- use settings::{Settings, SettingsStore};
+ use settings::SettingsStore;
use std::sync::{
Arc,
atomic::{AtomicBool, Ordering},
@@ -277,7 +277,6 @@ mod tests {
cx.update(|cx| {
let settings = SettingsStore::test(cx);
cx.set_global(settings);
- DebuggerSettings::register(cx);
});
}
@@ -1,6 +1,7 @@
use dap_types::SteppingGranularity;
-use settings::{Settings, SettingsContent};
+use settings::{RegisterSetting, Settings, SettingsContent};
+#[derive(Debug, RegisterSetting)]
pub struct DebuggerSettings {
/// Determines the stepping granularity.
///
@@ -1,10 +1,9 @@
-use std::ffi::OsStr;
-
use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use collections::HashMap;
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
use gpui::AsyncApp;
+use std::ffi::OsStr;
use task::{DebugScenario, ZedDebugConfig};
use crate::*;
@@ -16,6 +15,14 @@ impl GdbDebugAdapter {
const ADAPTER_NAME: &'static str = "GDB";
}
+/// Ensures that "-i=dap" is present in the GDB argument list.
+fn ensure_dap_interface(mut gdb_args: Vec<String>) -> Vec<String> {
+ if !gdb_args.iter().any(|arg| arg.trim() == "-i=dap") {
+ gdb_args.insert(0, "-i=dap".to_string());
+ }
+ gdb_args
+}
+
#[async_trait(?Send)]
impl DebugAdapter for GdbDebugAdapter {
fn name(&self) -> DebugAdapterName {
@@ -99,6 +106,18 @@ impl DebugAdapter for GdbDebugAdapter {
"type": "string",
"description": "Working directory for the debugged program. GDB will change its working directory to this directory."
},
+ "gdb_path": {
+ "type": "string",
+ "description": "Alternative path to the GDB executable, if the one in standard path is not desirable"
+ },
+ "gdb_args": {
+ "type": "array",
+ "items": {
+ "type":"string"
+ },
+ "description": "additional arguments given to GDB at startup, not the program debugged",
+ "default": []
+ },
"env": {
"type": "object",
"description": "Environment variables for the debugged program. Each key is the name of an environment variable; each value is the value of that variable."
@@ -164,21 +183,49 @@ impl DebugAdapter for GdbDebugAdapter {
user_env: Option<HashMap<String, String>>,
_: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
- let user_setting_path = user_installed_path
- .filter(|p| p.exists())
- .and_then(|p| p.to_str().map(|s| s.to_string()));
-
- let gdb_path = delegate
- .which(OsStr::new("gdb"))
- .await
- .and_then(|p| p.to_str().map(|s| s.to_string()))
- .context("Could not find gdb in path");
-
- if gdb_path.is_err() && user_setting_path.is_none() {
- bail!("Could not find gdb path or it's not installed");
- }
+ // Try to get gdb_path from config
+ let gdb_path_from_config = config
+ .config
+ .get("gdb_path")
+ .and_then(|v| v.as_str())
+ .map(|s| s.to_string());
- let gdb_path = user_setting_path.unwrap_or(gdb_path?);
+ let gdb_path = if let Some(path) = gdb_path_from_config {
+ path
+ } else {
+ // Original logic: use user_installed_path or search in system path
+ let user_setting_path = user_installed_path
+ .filter(|p| p.exists())
+ .and_then(|p| p.to_str().map(|s| s.to_string()));
+
+ let gdb_path_result = delegate
+ .which(OsStr::new("gdb"))
+ .await
+ .and_then(|p| p.to_str().map(|s| s.to_string()))
+ .context("Could not find gdb in path");
+
+ if gdb_path_result.is_err() && user_setting_path.is_none() {
+ bail!("Could not find gdb path or it's not installed");
+ }
+
+ user_setting_path.unwrap_or_else(|| gdb_path_result.unwrap())
+ };
+
+ // Arguments: use gdb_args from config if present, else user_args, else default
+ let gdb_args = {
+ let args = config
+ .config
+ .get("gdb_args")
+ .and_then(|v| v.as_array())
+ .map(|arr| {
+ arr.iter()
+ .filter_map(|v| v.as_str().map(|s| s.to_string()))
+ .collect::<Vec<_>>()
+ })
+ .or(user_args.clone())
+ .unwrap_or_else(|| vec!["-i=dap".into()]);
+ ensure_dap_interface(args)
+ };
let mut configuration = config.config.clone();
if let Some(configuration) = configuration.as_object_mut() {
@@ -187,10 +234,26 @@ impl DebugAdapter for GdbDebugAdapter {
.or_insert_with(|| delegate.worktree_root_path().to_string_lossy().into());
}
+ let mut base_env = delegate.shell_env().await;
+ base_env.extend(user_env.unwrap_or_default());
+
+ let config_env: HashMap<String, String> = config
+ .config
+ .get("env")
+ .and_then(|v| v.as_object())
+ .map(|obj| {
+ obj.iter()
+ .filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
+ .collect::<HashMap<String, String>>()
+ })
+ .unwrap_or_else(HashMap::default);
+
+ base_env.extend(config_env);
+
Ok(DebugAdapterBinary {
command: Some(gdb_path),
- arguments: user_args.unwrap_or_else(|| vec!["-i=dap".into()]),
- envs: user_env.unwrap_or_default(),
+ arguments: gdb_args,
+ envs: base_env,
cwd: Some(delegate.worktree_root_path().to_path_buf()),
connection: None,
request_args: StartDebuggingRequestArguments {
@@ -1,6 +1,5 @@
use std::any::TypeId;
-use dap::debugger_settings::DebuggerSettings;
use debugger_panel::DebugPanel;
use editor::Editor;
use gpui::{Action, App, DispatchPhase, EntityInputHandler, actions};
@@ -10,7 +9,6 @@ use project::debugger::{self, breakpoint_store::SourceBreakpoint, session::Threa
use schemars::JsonSchema;
use serde::Deserialize;
use session::DebugSession;
-use settings::Settings;
use stack_trace_view::StackTraceView;
use tasks_ui::{Spawn, TaskOverrides};
use ui::{FluentBuilder, InteractiveElement};
@@ -115,7 +113,6 @@ actions!(
);
pub fn init(cx: &mut App) {
- DebuggerSettings::register(cx);
workspace::FollowableViewRegistry::register::<DebugSession>(cx);
cx.observe_new(|workspace: &mut Workspace, _, _| {
@@ -1404,6 +1404,7 @@ impl VariableList {
div()
.text_ui(cx)
.w_full()
+ .truncate()
.when(self.disabled, |this| {
this.text_color(Color::Disabled.color(cx))
})
@@ -43,9 +43,6 @@ pub fn init_test(cx: &mut gpui::TestAppContext) {
terminal_view::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
command_palette_hooks::init(cx);
- language::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
editor::init(cx);
crate::init(cx);
dap_adapters::init(cx);
@@ -152,8 +152,8 @@ impl Render for ProjectDiagnosticsEditor {
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
enum RetainExcerpts {
- Yes,
- No,
+ All,
+ Dirty,
}
impl ProjectDiagnosticsEditor {
@@ -207,17 +207,7 @@ impl ProjectDiagnosticsEditor {
"diagnostics updated for server {language_server_id}, \
paths {paths:?}. updating excerpts"
);
- let focused = this.editor.focus_handle(cx).contains_focused(window, cx)
- || this.focus_handle.contains_focused(window, cx);
- this.update_stale_excerpts(
- if focused {
- RetainExcerpts::Yes
- } else {
- RetainExcerpts::No
- },
- window,
- cx,
- );
+ this.update_stale_excerpts(window, cx);
}
_ => {}
},
@@ -280,8 +270,7 @@ impl ProjectDiagnosticsEditor {
cx,
)
});
- this.diagnostics.clear();
- this.update_all_excerpts(window, cx);
+ this.refresh(window, cx);
})
.detach();
@@ -301,25 +290,29 @@ impl ProjectDiagnosticsEditor {
diagnostic_summary_update: Task::ready(()),
_subscription: project_event_subscription,
};
- this.update_all_excerpts(window, cx);
+ this.refresh(window, cx);
this
}
/// Closes all excerpts of buffers that:
/// - have no diagnostics anymore
/// - are saved (not dirty)
- /// - and, if `reatin_selections` is true, do not have selections within them
+ /// - and, if `retain_selections` is true, do not have selections within them
fn close_diagnosticless_buffers(
&mut self,
_window: &mut Window,
cx: &mut Context<Self>,
retain_selections: bool,
) {
- let buffer_ids = self.multibuffer.read(cx).all_buffer_ids();
- let selected_buffers = self.editor.update(cx, |editor, cx| {
+ let snapshot = self
+ .editor
+ .update(cx, |editor, cx| editor.display_snapshot(cx));
+ let buffer = self.multibuffer.read(cx);
+ let buffer_ids = buffer.all_buffer_ids();
+ let selected_buffers = self.editor.update(cx, |editor, _| {
editor
.selections
- .all_anchors(cx)
+ .all_anchors(&snapshot)
.iter()
.filter_map(|anchor| anchor.start.buffer_id)
.collect::<HashSet<_>>()
@@ -328,19 +321,19 @@ impl ProjectDiagnosticsEditor {
if retain_selections && selected_buffers.contains(&buffer_id) {
continue;
}
- let has_blocks = self
+ let has_no_blocks = self
.blocks
.get(&buffer_id)
.is_none_or(|blocks| blocks.is_empty());
- if !has_blocks {
+ if !has_no_blocks {
continue;
}
let is_dirty = self
.multibuffer
.read(cx)
.buffer(buffer_id)
- .is_some_and(|buffer| buffer.read(cx).is_dirty());
- if !is_dirty {
+ .is_none_or(|buffer| buffer.read(cx).is_dirty());
+ if is_dirty {
continue;
}
self.multibuffer.update(cx, |b, cx| {
@@ -349,18 +342,10 @@ impl ProjectDiagnosticsEditor {
}
}
- fn update_stale_excerpts(
- &mut self,
- mut retain_excerpts: RetainExcerpts,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
+ fn update_stale_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if self.update_excerpts_task.is_some() {
return;
}
- if self.multibuffer.read(cx).is_dirty(cx) {
- retain_excerpts = RetainExcerpts::Yes;
- }
let project_handle = self.project.clone();
self.update_excerpts_task = Some(cx.spawn_in(window, async move |this, cx| {
@@ -386,6 +371,13 @@ impl ProjectDiagnosticsEditor {
.log_err()
{
this.update_in(cx, |this, window, cx| {
+ let focused = this.editor.focus_handle(cx).contains_focused(window, cx)
+ || this.focus_handle.contains_focused(window, cx);
+ let retain_excerpts = if focused {
+ RetainExcerpts::All
+ } else {
+ RetainExcerpts::Dirty
+ };
this.update_excerpts(buffer, retain_excerpts, window, cx)
})?
.await?;
@@ -441,7 +433,7 @@ impl ProjectDiagnosticsEditor {
if self.update_excerpts_task.is_some() {
self.update_excerpts_task = None;
} else {
- self.update_all_excerpts(window, cx);
+ self.refresh(window, cx);
}
cx.notify();
}
@@ -459,31 +451,26 @@ impl ProjectDiagnosticsEditor {
}
}
- /// Enqueue an update of all excerpts. Updates all paths that either
- /// currently have diagnostics or are currently present in this view.
- fn update_all_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ /// Clears all diagnostics in this view, and refetches them from the project.
+ fn refresh(&mut self, window: &mut Window, cx: &mut Context<Self>) {
+ self.diagnostics.clear();
+ self.editor.update(cx, |editor, cx| {
+ for (_, block_ids) in self.blocks.drain() {
+ editor.display_map.update(cx, |display_map, cx| {
+ display_map.remove_blocks(block_ids.into_iter().collect(), cx)
+ });
+ }
+ });
+ self.multibuffer
+ .update(cx, |multibuffer, cx| multibuffer.clear(cx));
self.project.update(cx, |project, cx| {
- let mut project_paths = project
+ self.paths_to_update = project
.diagnostic_summaries(false, cx)
.map(|(project_path, _, _)| project_path)
.collect::<BTreeSet<_>>();
-
- self.multibuffer.update(cx, |multibuffer, cx| {
- for buffer in multibuffer.all_buffers() {
- if let Some(file) = buffer.read(cx).file() {
- project_paths.insert(ProjectPath {
- path: file.path().clone(),
- worktree_id: file.worktree_id(cx),
- });
- }
- }
- multibuffer.clear(cx);
- });
-
- self.paths_to_update = project_paths;
});
- self.update_stale_excerpts(RetainExcerpts::No, window, cx);
+ self.update_stale_excerpts(window, cx);
}
fn diagnostics_are_unchanged(
@@ -576,21 +563,24 @@ impl ProjectDiagnosticsEditor {
blocks.extend(more);
}
- let mut excerpt_ranges: Vec<ExcerptRange<Point>> = match retain_excerpts {
- RetainExcerpts::No => Vec::new(),
- RetainExcerpts::Yes => this.update(cx, |this, cx| {
- this.multibuffer.update(cx, |multi_buffer, cx| {
- multi_buffer
+ let mut excerpt_ranges: Vec<ExcerptRange<Point>> = this.update(cx, |this, cx| {
+ this.multibuffer.update(cx, |multi_buffer, cx| {
+ let is_dirty = multi_buffer
+ .buffer(buffer_id)
+ .is_none_or(|buffer| buffer.read(cx).is_dirty());
+ match retain_excerpts {
+ RetainExcerpts::Dirty if !is_dirty => Vec::new(),
+ RetainExcerpts::All | RetainExcerpts::Dirty => multi_buffer
.excerpts_for_buffer(buffer_id, cx)
.into_iter()
.map(|(_, range)| ExcerptRange {
context: range.context.to_point(&buffer_snapshot),
primary: range.primary.to_point(&buffer_snapshot),
})
- .collect()
- })
- })?,
- };
+ .collect(),
+ }
+ })
+ })?;
let mut result_blocks = vec![None; excerpt_ranges.len()];
let context_lines = cx.update(|_, cx| multibuffer_context_lines(cx))?;
for b in blocks {
@@ -946,7 +936,7 @@ impl DiagnosticsToolbarEditor for WeakEntity<ProjectDiagnosticsEditor> {
fn refresh_diagnostics(&self, window: &mut Window, cx: &mut App) {
let _ = self.update(cx, |project_diagnostics_editor, cx| {
- project_diagnostics_editor.update_all_excerpts(window, cx);
+ project_diagnostics_editor.refresh(window, cx);
});
}
@@ -156,7 +156,9 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
// Cursor is at the first diagnostic
editor.update(cx, |editor, cx| {
assert_eq!(
- editor.selections.display_ranges(cx),
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx)),
[DisplayPoint::new(DisplayRow(3), 8)..DisplayPoint::new(DisplayRow(3), 8)]
);
});
@@ -232,7 +234,9 @@ async fn test_diagnostics(cx: &mut TestAppContext) {
// Cursor keeps its position.
editor.update(cx, |editor, cx| {
assert_eq!(
- editor.selections.display_ranges(cx),
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx)),
[DisplayPoint::new(DisplayRow(8), 8)..DisplayPoint::new(DisplayRow(8), 8)]
);
});
@@ -769,7 +773,7 @@ async fn test_random_diagnostics_blocks(cx: &mut TestAppContext, mut rng: StdRng
log::info!("updating mutated diagnostics view");
mutated_diagnostics.update_in(cx, |diagnostics, window, cx| {
- diagnostics.update_stale_excerpts(RetainExcerpts::No, window, cx)
+ diagnostics.update_stale_excerpts(window, cx)
});
log::info!("constructing reference diagnostics view");
@@ -968,7 +972,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
log::info!("updating mutated diagnostics view");
mutated_diagnostics.update_in(cx, |diagnostics, window, cx| {
- diagnostics.update_stale_excerpts(RetainExcerpts::No, window, cx)
+ diagnostics.update_stale_excerpts(window, cx)
});
cx.executor()
@@ -2017,10 +2021,6 @@ fn init_test(cx: &mut TestAppContext) {
let settings = SettingsStore::test(cx);
cx.set_global(settings);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
- client::init_settings(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
crate::init(cx);
editor::init(cx);
});
@@ -1,4 +1,4 @@
-use std::ops::Range;
+use std::{ops::Range, sync::Arc};
use client::EditPredictionUsage;
use gpui::{App, Context, Entity, SharedString};
@@ -19,7 +19,7 @@ pub enum EditPrediction {
/// Edits within the buffer that requested the prediction
Local {
id: Option<SharedString>,
- edits: Vec<(Range<language::Anchor>, String)>,
+ edits: Vec<(Range<language::Anchor>, Arc<str>)>,
edit_preview: Option<language::EditPreview>,
},
/// Jump to a different file from the one that requested the prediction
@@ -248,8 +248,8 @@ where
pub fn interpolate_edits(
old_snapshot: &BufferSnapshot,
new_snapshot: &BufferSnapshot,
- current_edits: &[(Range<Anchor>, String)],
-) -> Option<Vec<(Range<Anchor>, String)>> {
+ current_edits: &[(Range<Anchor>, Arc<str>)],
+) -> Option<Vec<(Range<Anchor>, Arc<str>)>> {
let mut edits = Vec::new();
let mut model_edits = current_edits.iter().peekable();
@@ -274,7 +274,7 @@ pub fn interpolate_edits(
if let Some(model_suffix) = model_new_text.strip_prefix(&user_new_text) {
if !model_suffix.is_empty() {
let anchor = old_snapshot.anchor_after(user_edit.old.end);
- edits.push((anchor..anchor, model_suffix.to_string()));
+ edits.push((anchor..anchor, model_suffix.into()));
}
model_edits.next();
@@ -236,8 +236,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
let fs = FakeFs::new(cx.executor());
@@ -972,8 +972,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
let fs = FakeFs::new(cx.executor());
@@ -4,7 +4,6 @@ use editor::{
actions::{DeleteToPreviousWordStart, SelectAll, SplitSelectionIntoLines},
};
use gpui::{AppContext, Focusable as _, TestAppContext, TestDispatcher};
-use project::Project;
use rand::{Rng as _, SeedableRng as _, rngs::StdRng};
use settings::SettingsStore;
use ui::IntoElement;
@@ -125,10 +124,6 @@ pub fn benches() {
assets::Assets.load_test_fonts(cx);
theme::init(theme::LoadThemes::JustBase, cx);
// release_channel::init(SemanticVersion::default(), cx);
- client::init_settings(cx);
- language::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
editor::init(cx);
});
@@ -621,6 +621,8 @@ actions!(
NextEditPrediction,
/// Scrolls to the next screen.
NextScreen,
+ /// Goes to the next snippet tabstop if one exists.
+ NextSnippetTabstop,
/// Opens the context menu at cursor position.
OpenContextMenu,
/// Opens excerpts from the current file.
@@ -654,6 +656,8 @@ actions!(
Paste,
/// Navigates to the previous edit prediction.
PreviousEditPrediction,
+ /// Goes to the previous snippet tabstop if one exists.
+ PreviousSnippetTabstop,
/// Redoes the last undone edit.
Redo,
/// Redoes the last selection change.
@@ -88,10 +88,14 @@ pub fn switch_source_header(
)
})?;
- let path = PathBuf::from(goto);
-
workspace
.update_in(cx, |workspace, window, cx| {
+ let goto = if workspace.path_style(cx).is_windows() {
+ goto.strip_prefix('/').unwrap_or(goto)
+ } else {
+ goto
+ };
+ let path = PathBuf::from(goto);
workspace.open_abs_path(
path,
OpenOptions {
@@ -25,29 +25,25 @@ mod crease_map;
mod custom_highlights;
mod fold_map;
mod inlay_map;
-pub(crate) mod invisibles;
+mod invisibles;
mod tab_map;
mod wrap_map;
-use crate::{
- EditorStyle, RowExt, hover_links::InlayHighlight, inlays::Inlay, movement::TextLayoutDetails,
-};
+pub use crate::display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap};
pub use block_map::{
Block, BlockChunks as DisplayChunks, BlockContext, BlockId, BlockMap, BlockPlacement,
BlockPoint, BlockProperties, BlockRows, BlockStyle, CustomBlockId, EditorMargins, RenderBlock,
StickyHeaderExcerpt,
};
-use block_map::{BlockRow, BlockSnapshot};
-use collections::{HashMap, HashSet};
pub use crease_map::*;
-use fold_map::FoldSnapshot;
pub use fold_map::{
ChunkRenderer, ChunkRendererContext, ChunkRendererId, Fold, FoldId, FoldPlaceholder, FoldPoint,
};
-use gpui::{App, Context, Entity, Font, HighlightStyle, LineLayout, Pixels, UnderlineStyle};
-use inlay_map::InlaySnapshot;
pub use inlay_map::{InlayOffset, InlayPoint};
pub use invisibles::{is_invisible, replacement};
+
+use collections::{HashMap, HashSet};
+use gpui::{App, Context, Entity, Font, HighlightStyle, LineLayout, Pixels, UnderlineStyle};
use language::{
OffsetUtf16, Point, Subscription as BufferSubscription, language_settings::language_settings,
};
@@ -58,6 +54,10 @@ use multi_buffer::{
use project::InlayId;
use project::project_settings::DiagnosticSeverity;
use serde::Deserialize;
+use sum_tree::{Bias, TreeMap};
+use text::{BufferId, LineIndent};
+use ui::{SharedString, px};
+use unicode_segmentation::UnicodeSegmentation;
use std::{
any::TypeId,
@@ -68,15 +68,16 @@ use std::{
ops::{Add, Range, Sub},
sync::Arc,
};
-use sum_tree::{Bias, TreeMap};
+
+use crate::{
+ EditorStyle, RowExt, hover_links::InlayHighlight, inlays::Inlay, movement::TextLayoutDetails,
+};
+use block_map::{BlockRow, BlockSnapshot};
+use fold_map::FoldSnapshot;
+use inlay_map::InlaySnapshot;
use tab_map::TabSnapshot;
-use text::{BufferId, LineIndent};
-use ui::{SharedString, px};
-use unicode_segmentation::UnicodeSegmentation;
use wrap_map::{WrapMap, WrapSnapshot};
-pub use crate::display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap};
-
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum FoldStatus {
Folded,
@@ -1103,7 +1104,7 @@ impl DisplaySnapshot {
details: &TextLayoutDetails,
) -> u32 {
let layout_line = self.layout_row(display_row, details);
- layout_line.closest_index_for_x(x) as u32
+ layout_line.index_for_x(x) as u32
}
pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option<SharedString> {
@@ -1598,7 +1599,7 @@ pub mod tests {
LanguageMatcher,
};
use lsp::LanguageServerId;
- use project::Project;
+
use rand::{Rng, prelude::*};
use settings::{SettingsContent, SettingsStore};
use smol::stream::StreamExt;
@@ -2987,10 +2988,7 @@ pub mod tests {
fn init_test(cx: &mut App, f: impl Fn(&mut SettingsContent)) {
let settings = SettingsStore::test(cx);
cx.set_global(settings);
- workspace::init_settings(cx);
- language::init(cx);
crate::init(cx);
- Project::init_settings(cx);
theme::init(LoadThemes::JustBase, cx);
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings(cx, f);
@@ -63,6 +63,14 @@ pub struct BlockSnapshot {
pub(super) excerpt_header_height: u32,
}
+impl Deref for BlockSnapshot {
+ type Target = WrapSnapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.wrap_snapshot
+ }
+}
+
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CustomBlockId(pub usize);
@@ -453,6 +461,7 @@ pub struct BlockChunks<'a> {
input_chunk: Chunk<'a>,
output_row: BlockRow,
max_output_row: BlockRow,
+ line_count_overflow: RowDelta,
masked: bool,
}
@@ -1352,6 +1361,7 @@ impl BlockSnapshot {
input_chunk: Default::default(),
transforms: cursor,
output_row: rows.start,
+ line_count_overflow: RowDelta(0),
max_output_row,
masked,
}
@@ -1743,6 +1753,17 @@ impl<'a> Iterator for BlockChunks<'a> {
return None;
}
+ if self.line_count_overflow > RowDelta(0) {
+ let lines = self.line_count_overflow.0.min(u128::BITS);
+ self.line_count_overflow.0 -= lines;
+ self.output_row += RowDelta(lines);
+ return Some(Chunk {
+ text: unsafe { std::str::from_utf8_unchecked(&NEWLINES[..lines as usize]) },
+ chars: 1u128.unbounded_shl(lines).wrapping_sub(1),
+ ..Default::default()
+ });
+ }
+
let transform = self.transforms.item()?;
if transform.block.is_some() {
let block_start = self.transforms.start().0;
@@ -1754,13 +1775,14 @@ impl<'a> Iterator for BlockChunks<'a> {
let start_in_block = self.output_row - block_start;
let end_in_block = cmp::min(self.max_output_row, block_end) - block_start;
- // todo: We need to split the chunk here instead of taking min
- let line_count = cmp::min(end_in_block - start_in_block, RowDelta(u128::BITS));
- self.output_row += line_count;
+ let line_count = end_in_block - start_in_block;
+ let lines = RowDelta(line_count.0.min(u128::BITS));
+ self.line_count_overflow = line_count - lines;
+ self.output_row += lines;
return Some(Chunk {
- text: unsafe { std::str::from_utf8_unchecked(&NEWLINES[..line_count.0 as usize]) },
- chars: 1u128.unbounded_shl(line_count.0) - 1,
+ text: unsafe { std::str::from_utf8_unchecked(&NEWLINES[..lines.0 as usize]) },
+ chars: 1u128.unbounded_shl(lines.0).wrapping_sub(1),
..Default::default()
});
}
@@ -3042,7 +3064,7 @@ mod tests {
_ => BlockPlacement::Below(buffer.anchor_after(offset)),
};
- let height = rng.random_range(min_height..5);
+ let height = rng.random_range(min_height..512);
BlockProperties {
style: BlockStyle::Fixed,
placement,
@@ -630,6 +630,14 @@ pub struct FoldSnapshot {
pub version: usize,
}
+impl Deref for FoldSnapshot {
+ type Target = InlaySnapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inlay_snapshot
+ }
+}
+
impl FoldSnapshot {
pub fn buffer(&self) -> &MultiBufferSnapshot {
&self.inlay_snapshot.buffer
@@ -32,6 +32,14 @@ pub struct InlaySnapshot {
pub version: usize,
}
+impl std::ops::Deref for InlaySnapshot {
+ type Target = MultiBufferSnapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.buffer
+ }
+}
+
#[derive(Clone, Debug)]
enum Transform {
Isomorphic(TextSummary),
@@ -167,6 +167,14 @@ pub struct TabSnapshot {
pub version: usize,
}
+impl std::ops::Deref for TabSnapshot {
+ type Target = FoldSnapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.fold_snapshot
+ }
+}
+
impl TabSnapshot {
pub fn buffer_snapshot(&self) -> &MultiBufferSnapshot {
&self.fold_snapshot.inlay_snapshot.buffer
@@ -43,6 +43,14 @@ pub struct WrapSnapshot {
interpolated: bool,
}
+impl std::ops::Deref for WrapSnapshot {
+ type Target = TabSnapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.tab_snapshot
+ }
+}
+
#[derive(Clone, Debug, Default, Eq, PartialEq)]
struct Transform {
summary: TransformSummary,
@@ -2,7 +2,7 @@ use edit_prediction::EditPredictionProvider;
use gpui::{Entity, KeyBinding, Modifiers, prelude::*};
use indoc::indoc;
use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint};
-use std::ops::Range;
+use std::{ops::Range, sync::Arc};
use text::{Point, ToOffset};
use crate::{
@@ -24,7 +24,7 @@ async fn test_edit_prediction_insert(cx: &mut gpui::TestAppContext) {
assert_editor_active_edit_completion(&mut cx, |_, edits| {
assert_eq!(edits.len(), 1);
- assert_eq!(edits[0].1.as_str(), "-273.15");
+ assert_eq!(edits[0].1.as_ref(), "-273.15");
});
accept_completion(&mut cx);
@@ -46,7 +46,7 @@ async fn test_edit_prediction_modification(cx: &mut gpui::TestAppContext) {
assert_editor_active_edit_completion(&mut cx, |_, edits| {
assert_eq!(edits.len(), 1);
- assert_eq!(edits[0].1.as_str(), "3.14159");
+ assert_eq!(edits[0].1.as_ref(), "3.14159");
});
accept_completion(&mut cx);
@@ -330,7 +330,7 @@ async fn test_edit_prediction_preview_cleanup_on_toggle_off(cx: &mut gpui::TestA
fn assert_editor_active_edit_completion(
cx: &mut EditorTestContext,
- assert: impl FnOnce(MultiBufferSnapshot, &Vec<(Range<Anchor>, String)>),
+ assert: impl FnOnce(MultiBufferSnapshot, &Vec<(Range<Anchor>, Arc<str>)>),
) {
cx.editor(|editor, _, cx| {
let completion_state = editor
@@ -312,13 +312,7 @@ pub enum HideMouseCursorOrigin {
MovementAction,
}
-pub fn init_settings(cx: &mut App) {
- EditorSettings::register(cx);
-}
-
pub fn init(cx: &mut App) {
- init_settings(cx);
-
cx.set_global(GlobalBlameRenderer(Arc::new(())));
workspace::register_project_item::<Editor>(cx);
@@ -618,7 +612,7 @@ pub(crate) enum EditDisplayMode {
enum EditPrediction {
Edit {
- edits: Vec<(Range<Anchor>, String)>,
+ edits: Vec<(Range<Anchor>, Arc<str>)>,
edit_preview: Option<EditPreview>,
display_mode: EditDisplayMode,
snapshot: BufferSnapshot,
@@ -1847,7 +1841,7 @@ impl Editor {
})
});
- let selections = SelectionsCollection::new(display_map.clone(), multi_buffer.clone());
+ let selections = SelectionsCollection::new();
let blink_manager = cx.new(|cx| {
let mut blink_manager = BlinkManager::new(CURSOR_BLINK_INTERVAL, cx);
@@ -2456,7 +2450,7 @@ impl Editor {
}
pub fn display_snapshot(&self, cx: &mut App) -> DisplaySnapshot {
- self.selections.display_map(cx)
+ self.display_map.update(cx, |map, cx| map.snapshot(cx))
}
pub fn deploy_mouse_context_menu(
@@ -2533,6 +2527,18 @@ impl Editor {
key_context.add("renaming");
}
+ if let Some(snippet_stack) = self.snippet_stack.last() {
+ key_context.add("in_snippet");
+
+ if snippet_stack.active_index > 0 {
+ key_context.add("has_previous_tabstop");
+ }
+
+ if snippet_stack.active_index < snippet_stack.ranges.len().saturating_sub(1) {
+ key_context.add("has_next_tabstop");
+ }
+ }
+
match self.context_menu.borrow().as_ref() {
Some(CodeContextMenu::Completions(menu)) => {
if menu.visible() {
@@ -3378,9 +3384,10 @@ impl Editor {
) -> gpui::Subscription {
let other_selections = other.read(cx).selections.disjoint_anchors().to_vec();
if !other_selections.is_empty() {
- self.selections.change_with(cx, |selections| {
- selections.select_anchors(other_selections);
- });
+ self.selections
+ .change_with(&self.display_snapshot(cx), |selections| {
+ selections.select_anchors(other_selections);
+ });
}
let other_subscription = cx.subscribe(&other, |this, other, other_evt, cx| {
@@ -3389,7 +3396,8 @@ impl Editor {
if other_selections.is_empty() {
return;
}
- this.selections.change_with(cx, |selections| {
+ let snapshot = this.display_snapshot(cx);
+ this.selections.change_with(&snapshot, |selections| {
selections.select_anchors(other_selections);
});
}
@@ -3402,9 +3410,12 @@ impl Editor {
return;
}
other.update(cx, |other_editor, cx| {
- other_editor.selections.change_with(cx, |selections| {
- selections.select_anchors(these_selections);
- })
+ let snapshot = other_editor.display_snapshot(cx);
+ other_editor
+ .selections
+ .change_with(&snapshot, |selections| {
+ selections.select_anchors(these_selections);
+ })
});
}
});
@@ -3420,13 +3431,14 @@ impl Editor {
effects: SelectionEffects,
window: &mut Window,
cx: &mut Context<Self>,
- change: impl FnOnce(&mut MutableSelectionsCollection<'_>) -> R,
+ change: impl FnOnce(&mut MutableSelectionsCollection<'_, '_>) -> R,
) -> R {
+ let snapshot = self.display_snapshot(cx);
if let Some(state) = &mut self.deferred_selection_effects_state {
state.effects.scroll = effects.scroll.or(state.effects.scroll);
state.effects.completions = effects.completions;
state.effects.nav_history = effects.nav_history.or(state.effects.nav_history);
- let (changed, result) = self.selections.change_with(cx, change);
+ let (changed, result) = self.selections.change_with(&snapshot, change);
state.changed |= changed;
return result;
}
@@ -3441,7 +3453,7 @@ impl Editor {
add_selections_state: self.add_selections_state.clone(),
},
};
- let (changed, result) = self.selections.change_with(cx, change);
+ let (changed, result) = self.selections.change_with(&snapshot, change);
state.changed = state.changed || changed;
if self.defer_selection_effects {
self.deferred_selection_effects_state = Some(state);
@@ -7970,7 +7982,7 @@ impl Editor {
let inlay = Inlay::edit_prediction(
post_inc(&mut self.next_inlay_id),
range.start,
- new_text.as_str(),
+ new_text.as_ref(),
);
inlay_ids.push(inlay.id);
inlays.push(inlay);
@@ -8992,7 +9004,7 @@ impl Editor {
newest_selection_head: Option<DisplayPoint>,
editor_width: Pixels,
style: &EditorStyle,
- edits: &Vec<(Range<Anchor>, String)>,
+ edits: &Vec<(Range<Anchor>, Arc<str>)>,
edit_preview: &Option<language::EditPreview>,
snapshot: &language::BufferSnapshot,
window: &mut Window,
@@ -10068,6 +10080,42 @@ impl Editor {
self.outdent(&Outdent, window, cx);
}
+ pub fn next_snippet_tabstop(
+ &mut self,
+ _: &NextSnippetTabstop,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if self.mode.is_single_line() || self.snippet_stack.is_empty() {
+ cx.propagate();
+ return;
+ }
+
+ if self.move_to_next_snippet_tabstop(window, cx) {
+ self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
+ return;
+ }
+ cx.propagate();
+ }
+
+ pub fn previous_snippet_tabstop(
+ &mut self,
+ _: &PreviousSnippetTabstop,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if self.mode.is_single_line() || self.snippet_stack.is_empty() {
+ cx.propagate();
+ return;
+ }
+
+ if self.move_to_prev_snippet_tabstop(window, cx) {
+ self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
+ return;
+ }
+ cx.propagate();
+ }
+
pub fn tab(&mut self, _: &Tab, window: &mut Window, cx: &mut Context<Self>) {
if self.mode.is_single_line() {
cx.propagate();
@@ -16250,7 +16298,7 @@ impl Editor {
.map(|s| s.to_vec())
{
self.change_selections(Default::default(), window, cx, |s| {
- let map = s.display_map();
+ let map = s.display_snapshot();
s.select_display_ranges(selections.iter().map(|a| {
let point = a.to_display_point(&map);
point..point
@@ -16271,7 +16319,7 @@ impl Editor {
.map(|s| s.to_vec())
{
self.change_selections(Default::default(), window, cx, |s| {
- let map = s.display_map();
+ let map = s.display_snapshot();
s.select_display_ranges(selections.iter().map(|a| {
let point = a.to_display_point(&map);
point..point
@@ -18066,14 +18114,15 @@ impl Editor {
cx: &mut Context<Self>,
) {
let old_cursor_position = self.selections.newest_anchor().head();
- self.selections.change_with(cx, |s| {
- s.select_anchors(selections);
- if let Some(pending_selection) = pending_selection {
- s.set_pending(pending_selection, SelectMode::Character);
- } else {
- s.clear_pending();
- }
- });
+ self.selections
+ .change_with(&self.display_snapshot(cx), |s| {
+ s.select_anchors(selections);
+ if let Some(pending_selection) = pending_selection {
+ s.set_pending(pending_selection, SelectMode::Character);
+ } else {
+ s.clear_pending();
+ }
+ });
self.selections_did_change(
false,
&old_cursor_position,
@@ -20281,7 +20330,7 @@ impl Editor {
let locations = self
.selections
- .all_anchors(cx)
+ .all_anchors(&self.display_snapshot(cx))
.iter()
.map(|selection| {
(
@@ -22182,6 +22231,10 @@ impl Editor {
}
fn register_buffer(&mut self, buffer_id: BufferId, cx: &mut Context<Self>) {
+ if self.ignore_lsp_data() {
+ return;
+ }
+
if !self.registered_buffers.contains_key(&buffer_id)
&& let Some(project) = self.project.as_ref()
{
@@ -24395,25 +24448,20 @@ impl InvalidationRegion for SnippetState {
fn edit_prediction_edit_text(
current_snapshot: &BufferSnapshot,
- edits: &[(Range<Anchor>, String)],
+ edits: &[(Range<Anchor>, impl AsRef<str>)],
edit_preview: &EditPreview,
include_deletions: bool,
cx: &App,
) -> HighlightedText {
let edits = edits
.iter()
- .map(|(anchor, text)| {
- (
- anchor.start.text_anchor..anchor.end.text_anchor,
- text.clone(),
- )
- })
+ .map(|(anchor, text)| (anchor.start.text_anchor..anchor.end.text_anchor, text))
.collect::<Vec<_>>();
edit_preview.highlight_edits(current_snapshot, &edits, include_deletions, cx)
}
-fn edit_prediction_fallback_text(edits: &[(Range<Anchor>, String)], cx: &App) -> HighlightedText {
+fn edit_prediction_fallback_text(edits: &[(Range<Anchor>, Arc<str>)], cx: &App) -> HighlightedText {
// Fallback for providers that don't provide edit_preview (like Copilot/Supermaven)
// Just show the raw edit text with basic styling
let mut text = String::new();
@@ -24806,7 +24854,7 @@ impl Focusable for BreakpointPromptEditor {
}
fn all_edits_insertions_or_deletions(
- edits: &Vec<(Range<Anchor>, String)>,
+ edits: &Vec<(Range<Anchor>, Arc<str>)>,
snapshot: &MultiBufferSnapshot,
) -> bool {
let mut all_insertions = true;
@@ -8,12 +8,12 @@ pub use settings::{
GoToDefinitionFallback, HideMouseMode, MinimapThumb, MinimapThumbBorder, MultiCursorModifier,
ScrollBeyondLastLine, ScrollbarDiagnostics, SeedQuerySetting, ShowMinimap, SnippetSortOrder,
};
-use settings::{RelativeLineNumbers, Settings};
+use settings::{RegisterSetting, RelativeLineNumbers, Settings};
use ui::scrollbars::{ScrollbarVisibility, ShowScrollbar};
/// Imports from the VSCode settings at
/// https://code.visualstudio.com/docs/reference/default-settings
-#[derive(Clone)]
+#[derive(Clone, RegisterSetting)]
pub struct EditorSettings {
pub cursor_blink: bool,
pub cursor_shape: Option<CursorShape>,
@@ -68,6 +68,12 @@ use workspace::{
register_project_item,
};
+fn display_ranges(editor: &Editor, cx: &mut Context<'_, Editor>) -> Vec<Range<DisplayPoint>> {
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))
+}
+
#[gpui::test]
fn test_edit_events(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -416,7 +422,7 @@ fn test_selection_with_mouse(cx: &mut TestAppContext) {
});
assert_eq!(
editor
- .update(cx, |editor, _, cx| editor.selections.display_ranges(cx))
+ .update(cx, |editor, _, cx| display_ranges(editor, cx))
.unwrap(),
[DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(2), 2)]
);
@@ -433,7 +439,7 @@ fn test_selection_with_mouse(cx: &mut TestAppContext) {
assert_eq!(
editor
- .update(cx, |editor, _, cx| editor.selections.display_ranges(cx))
+ .update(cx, |editor, _, cx| display_ranges(editor, cx))
.unwrap(),
[DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(3), 3)]
);
@@ -450,7 +456,7 @@ fn test_selection_with_mouse(cx: &mut TestAppContext) {
assert_eq!(
editor
- .update(cx, |editor, _, cx| editor.selections.display_ranges(cx))
+ .update(cx, |editor, _, cx| display_ranges(editor, cx))
.unwrap(),
[DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(1), 1)]
);
@@ -468,7 +474,7 @@ fn test_selection_with_mouse(cx: &mut TestAppContext) {
assert_eq!(
editor
- .update(cx, |editor, _, cx| editor.selections.display_ranges(cx))
+ .update(cx, |editor, _, cx| display_ranges(editor, cx))
.unwrap(),
[DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(1), 1)]
);
@@ -486,7 +492,7 @@ fn test_selection_with_mouse(cx: &mut TestAppContext) {
assert_eq!(
editor
- .update(cx, |editor, _, cx| editor.selections.display_ranges(cx))
+ .update(cx, |editor, _, cx| display_ranges(editor, cx))
.unwrap(),
[
DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(1), 1),
@@ -500,7 +506,7 @@ fn test_selection_with_mouse(cx: &mut TestAppContext) {
assert_eq!(
editor
- .update(cx, |editor, _, cx| editor.selections.display_ranges(cx))
+ .update(cx, |editor, _, cx| display_ranges(editor, cx))
.unwrap(),
[DisplayPoint::new(DisplayRow(3), 3)..DisplayPoint::new(DisplayRow(0), 0)]
);
@@ -533,7 +539,7 @@ fn test_multiple_cursor_removal(cx: &mut TestAppContext) {
assert_eq!(
editor
- .update(cx, |editor, _, cx| editor.selections.display_ranges(cx))
+ .update(cx, |editor, _, cx| display_ranges(editor, cx))
.unwrap(),
[
DisplayPoint::new(DisplayRow(2), 1)..DisplayPoint::new(DisplayRow(2), 1),
@@ -551,7 +557,7 @@ fn test_multiple_cursor_removal(cx: &mut TestAppContext) {
assert_eq!(
editor
- .update(cx, |editor, _, cx| editor.selections.display_ranges(cx))
+ .update(cx, |editor, _, cx| display_ranges(editor, cx))
.unwrap(),
[DisplayPoint::new(DisplayRow(3), 2)..DisplayPoint::new(DisplayRow(3), 2)]
);
@@ -569,7 +575,7 @@ fn test_canceling_pending_selection(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.begin_selection(DisplayPoint::new(DisplayRow(2), 2), false, 1, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(2), 2)]
);
});
@@ -583,7 +589,7 @@ fn test_canceling_pending_selection(cx: &mut TestAppContext) {
cx,
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(3), 3)]
);
});
@@ -598,7 +604,7 @@ fn test_canceling_pending_selection(cx: &mut TestAppContext) {
cx,
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(3), 3)]
);
});
@@ -616,25 +622,25 @@ fn test_movement_actions_with_pending_selection(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.begin_selection(DisplayPoint::new(DisplayRow(2), 2), false, 1, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(2), 2)]
);
editor.move_down(&Default::default(), window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(3), 2)..DisplayPoint::new(DisplayRow(3), 2)]
);
editor.begin_selection(DisplayPoint::new(DisplayRow(2), 2), false, 1, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(2), 2)]
);
editor.move_up(&Default::default(), window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2)]
);
});
@@ -653,14 +659,14 @@ fn test_extending_selection(cx: &mut TestAppContext) {
editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), false, 1, window, cx);
editor.end_selection(window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 5)]
);
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx);
editor.end_selection(window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 10)]
);
@@ -668,7 +674,7 @@ fn test_extending_selection(cx: &mut TestAppContext) {
editor.end_selection(window, cx);
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 2, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 11)]
);
@@ -681,7 +687,7 @@ fn test_extending_selection(cx: &mut TestAppContext) {
);
editor.end_selection(window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 0)]
);
@@ -690,13 +696,13 @@ fn test_extending_selection(cx: &mut TestAppContext) {
editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), true, 2, window, cx);
editor.end_selection(window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 7)]
);
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 11)]
);
@@ -708,7 +714,7 @@ fn test_extending_selection(cx: &mut TestAppContext) {
cx,
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 7)]
);
@@ -721,7 +727,7 @@ fn test_extending_selection(cx: &mut TestAppContext) {
);
editor.end_selection(window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(0), 7)..DisplayPoint::new(DisplayRow(0), 0)]
);
});
@@ -804,10 +810,14 @@ fn test_clone(cx: &mut TestAppContext) {
);
assert_set_eq!(
cloned_editor
- .update(cx, |e, _window, cx| e.selections.display_ranges(cx))
+ .update(cx, |e, _window, cx| e
+ .selections
+ .display_ranges(&e.display_snapshot(cx)))
.unwrap(),
editor
- .update(cx, |e, _, cx| e.selections.display_ranges(cx))
+ .update(cx, |e, _, cx| e
+ .selections
+ .display_ranges(&e.display_snapshot(cx)))
.unwrap()
);
}
@@ -861,7 +871,9 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
editor.navigate(nav_entry.data.unwrap(), window, cx);
assert_eq!(nav_entry.item.id(), cx.entity_id());
assert_eq!(
- editor.selections.display_ranges(cx),
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx)),
&[DisplayPoint::new(DisplayRow(3), 0)..DisplayPoint::new(DisplayRow(3), 0)]
);
assert!(pop_history(&mut editor, cx).is_none());
@@ -871,7 +883,9 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
editor.begin_selection(DisplayPoint::new(DisplayRow(5), 0), false, 1, window, cx);
editor.end_selection(window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx)),
&[DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(5), 0)]
);
assert!(pop_history(&mut editor, cx).is_none());
@@ -881,14 +895,18 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
editor.begin_selection(DisplayPoint::new(DisplayRow(15), 0), false, 1, window, cx);
editor.end_selection(window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx)),
&[DisplayPoint::new(DisplayRow(15), 0)..DisplayPoint::new(DisplayRow(15), 0)]
);
let nav_entry = pop_history(&mut editor, cx).unwrap();
editor.navigate(nav_entry.data.unwrap(), window, cx);
assert_eq!(nav_entry.item.id(), cx.entity_id());
assert_eq!(
- editor.selections.display_ranges(cx),
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx)),
&[DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(5), 0)]
);
assert!(pop_history(&mut editor, cx).is_none());
@@ -924,7 +942,9 @@ async fn test_navigation_history(cx: &mut TestAppContext) {
cx,
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx)),
&[editor.max_point(cx)..editor.max_point(cx)]
);
assert_eq!(
@@ -967,7 +987,7 @@ fn test_cancel(cx: &mut TestAppContext) {
);
editor.end_selection(window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[
DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 3),
DisplayPoint::new(DisplayRow(3), 4)..DisplayPoint::new(DisplayRow(1), 1),
@@ -978,7 +998,7 @@ fn test_cancel(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.cancel(&Cancel, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(3), 4)..DisplayPoint::new(DisplayRow(1), 1)]
);
});
@@ -986,7 +1006,7 @@ fn test_cancel(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.cancel(&Cancel, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[DisplayPoint::new(DisplayRow(1), 1)..DisplayPoint::new(DisplayRow(1), 1)]
);
});
@@ -1447,43 +1467,43 @@ fn test_move_cursor(cx: &mut TestAppContext) {
});
_ = editor.update(cx, |editor, window, cx| {
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)]
);
editor.move_down(&MoveDown, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)]
);
editor.move_right(&MoveRight, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 4)]
);
editor.move_left(&MoveLeft, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)]
);
editor.move_up(&MoveUp, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)]
);
editor.move_to_end(&MoveToEnd, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(5), 6)..DisplayPoint::new(DisplayRow(5), 6)]
);
editor.move_to_beginning(&MoveToBeginning, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)]
);
@@ -1494,13 +1514,13 @@ fn test_move_cursor(cx: &mut TestAppContext) {
});
editor.select_to_beginning(&SelectToBeginning, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 0)]
);
editor.select_to_end(&SelectToEnd, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(5), 6)]
);
});
@@ -1532,94 +1552,43 @@ fn test_move_cursor_multibyte(cx: &mut TestAppContext) {
assert_eq!(editor.display_text(cx), "🟥🟧⋯🟦🟪\nab⋯e\nαβ⋯ε");
editor.move_right(&MoveRight, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(0, "🟥".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(0, "🟥".len())]);
editor.move_right(&MoveRight, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(0, "🟥🟧".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(0, "🟥🟧".len())]);
editor.move_right(&MoveRight, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(0, "🟥🟧⋯".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(0, "🟥🟧⋯".len())]);
editor.move_down(&MoveDown, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(1, "ab⋯e".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(1, "ab⋯e".len())]);
editor.move_left(&MoveLeft, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(1, "ab⋯".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(1, "ab⋯".len())]);
editor.move_left(&MoveLeft, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(1, "ab".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(1, "ab".len())]);
editor.move_left(&MoveLeft, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(1, "a".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(1, "a".len())]);
editor.move_down(&MoveDown, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(2, "α".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(2, "α".len())]);
editor.move_right(&MoveRight, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(2, "αβ".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(2, "αβ".len())]);
editor.move_right(&MoveRight, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(2, "αβ⋯".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(2, "αβ⋯".len())]);
editor.move_right(&MoveRight, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(2, "αβ⋯ε".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(2, "αβ⋯ε".len())]);
editor.move_up(&MoveUp, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(1, "ab⋯e".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(1, "ab⋯e".len())]);
editor.move_down(&MoveDown, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(2, "αβ⋯ε".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(2, "αβ⋯ε".len())]);
editor.move_up(&MoveUp, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(1, "ab⋯e".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(1, "ab⋯e".len())]);
editor.move_up(&MoveUp, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(0, "🟥🟧".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(0, "🟥🟧".len())]);
editor.move_left(&MoveLeft, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(0, "🟥".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(0, "🟥".len())]);
editor.move_left(&MoveLeft, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(0, "".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(0, "".len())]);
});
}
@@ -1639,65 +1608,35 @@ fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) {
// moving above start of document should move selection to start of document,
// but the next move down should still be at the original goal_x
editor.move_up(&MoveUp, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(0, "".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(0, "".len())]);
editor.move_down(&MoveDown, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(1, "abcd".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(1, "abcd".len())]);
editor.move_down(&MoveDown, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(2, "αβγ".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(2, "αβγ".len())]);
editor.move_down(&MoveDown, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(3, "abcd".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(3, "abcd".len())]);
editor.move_down(&MoveDown, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]);
// moving past end of document should not change goal_x
editor.move_down(&MoveDown, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(5, "".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(5, "".len())]);
editor.move_down(&MoveDown, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(5, "".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(5, "".len())]);
editor.move_up(&MoveUp, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]);
editor.move_up(&MoveUp, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(3, "abcd".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(3, "abcd".len())]);
editor.move_up(&MoveUp, window, cx);
- assert_eq!(
- editor.selections.display_ranges(cx),
- &[empty_range(2, "αβγ".len())]
- );
+ assert_eq!(display_ranges(editor, cx), &[empty_range(2, "αβγ".len())]);
});
}
@@ -1733,7 +1672,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2),
@@ -1744,7 +1683,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0),
@@ -1755,7 +1694,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2),
@@ -1766,7 +1705,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.move_to_end_of_line(&move_to_end, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3),
DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(1), 5),
@@ -1778,7 +1717,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.move_to_end_of_line(&move_to_end, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3),
DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(1), 5),
@@ -1797,7 +1736,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
cx,
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 2),
@@ -1815,7 +1754,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
cx,
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 0),
@@ -1833,7 +1772,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
cx,
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 2),
@@ -1850,7 +1789,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
cx,
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 3),
DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 5),
@@ -1862,7 +1801,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
editor.delete_to_end_of_line(&DeleteToEndOfLine, window, cx);
assert_eq!(editor.display_text(cx), "ab\n de");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2),
DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 4),
@@ -1874,7 +1813,7 @@ fn test_beginning_end_of_line(cx: &mut TestAppContext) {
editor.delete_to_beginning_of_line(&delete_to_beg, window, cx);
assert_eq!(editor.display_text(cx), "\n");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0),
@@ -1927,14 +1866,14 @@ fn test_beginning_end_of_line_ignore_soft_wrap(cx: &mut TestAppContext) {
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),],
- editor.selections.display_ranges(cx)
+ display_ranges(editor, cx)
);
// Moving to the end of the line should put us at the end of the line.
editor.move_to_end_of_line(&move_to_end, window, cx);
assert_eq!(
vec![DisplayPoint::new(DisplayRow(0), 16)..DisplayPoint::new(DisplayRow(0), 16),],
- editor.selections.display_ranges(cx)
+ display_ranges(editor, cx)
);
// Now, let's assert behavior on the second line, that ended up being soft-wrapped.
@@ -1950,14 +1889,14 @@ fn test_beginning_end_of_line_ignore_soft_wrap(cx: &mut TestAppContext) {
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
vec![DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0),],
- editor.selections.display_ranges(cx)
+ display_ranges(editor, cx)
);
// Moving to the beginning of the line again should be a no-op.
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
vec![DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0),],
- editor.selections.display_ranges(cx)
+ display_ranges(editor, cx)
);
// Moving to the end of the line should put us right after the `s` that was soft-wrapped to the
@@ -1965,14 +1904,14 @@ fn test_beginning_end_of_line_ignore_soft_wrap(cx: &mut TestAppContext) {
editor.move_to_end_of_line(&move_to_end, window, cx);
assert_eq!(
vec![DisplayPoint::new(DisplayRow(2), 5)..DisplayPoint::new(DisplayRow(2), 5),],
- editor.selections.display_ranges(cx)
+ display_ranges(editor, cx)
);
// Moving to the end of the line again should be a no-op.
editor.move_to_end_of_line(&move_to_end, window, cx);
assert_eq!(
vec![DisplayPoint::new(DisplayRow(2), 5)..DisplayPoint::new(DisplayRow(2), 5),],
- editor.selections.display_ranges(cx)
+ display_ranges(editor, cx)
);
});
}
@@ -2016,7 +1955,7 @@ fn test_beginning_of_line_stop_at_indent(cx: &mut TestAppContext) {
// and the second cursor at the first non-whitespace character in the line.
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2),
@@ -2027,7 +1966,7 @@ fn test_beginning_of_line_stop_at_indent(cx: &mut TestAppContext) {
// and should move the second cursor to the beginning of the line.
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0),
@@ -2038,7 +1977,7 @@ fn test_beginning_of_line_stop_at_indent(cx: &mut TestAppContext) {
// and should move the second cursor back to the first non-whitespace character in the line.
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2),
@@ -2051,7 +1990,7 @@ fn test_beginning_of_line_stop_at_indent(cx: &mut TestAppContext) {
editor.move_left(&MoveLeft, window, cx);
editor.select_to_beginning_of_line(&select_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 2),
@@ -2062,7 +2001,7 @@ fn test_beginning_of_line_stop_at_indent(cx: &mut TestAppContext) {
// and should select to the beginning of the line for the second cursor.
editor.select_to_beginning_of_line(&select_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(1), 4)..DisplayPoint::new(DisplayRow(1), 0),
@@ -2103,21 +2042,21 @@ fn test_beginning_of_line_with_cursor_between_line_start_and_indent(cx: &mut Tes
// cursor should move to line_start
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)]
);
// cursor should move to indent_start
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 4)]
);
// cursor should move to back to line_start
editor.move_to_beginning_of_line(&move_to_beg, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)]
);
});
@@ -2210,37 +2149,37 @@ fn test_prev_next_word_bounds_with_soft_wrap(cx: &mut TestAppContext) {
editor.move_to_next_word_end(&MoveToNextWordEnd, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(1), 9)..DisplayPoint::new(DisplayRow(1), 9)]
);
editor.move_to_next_word_end(&MoveToNextWordEnd, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(1), 14)..DisplayPoint::new(DisplayRow(1), 14)]
);
editor.move_to_next_word_end(&MoveToNextWordEnd, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(2), 4)..DisplayPoint::new(DisplayRow(2), 4)]
);
editor.move_to_next_word_end(&MoveToNextWordEnd, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(2), 8)..DisplayPoint::new(DisplayRow(2), 8)]
);
editor.move_to_previous_word_start(&MoveToPreviousWordStart, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(2), 4)..DisplayPoint::new(DisplayRow(2), 4)]
);
editor.move_to_previous_word_start(&MoveToPreviousWordStart, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(1), 14)..DisplayPoint::new(DisplayRow(1), 14)]
);
});
@@ -4487,7 +4426,7 @@ fn test_delete_line(cx: &mut TestAppContext) {
editor.delete_line(&DeleteLine, window, cx);
assert_eq!(editor.display_text(cx), "ghi");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1),
@@ -4508,7 +4447,7 @@ fn test_delete_line(cx: &mut TestAppContext) {
editor.delete_line(&DeleteLine, window, cx);
assert_eq!(editor.display_text(cx), "ghi\n");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1)]
);
});
@@ -4526,7 +4465,7 @@ fn test_delete_line(cx: &mut TestAppContext) {
editor.delete_line(&DeleteLine, window, cx);
assert_eq!(editor.display_text(cx), "\njkl\nmno");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)]
);
});
@@ -5685,7 +5624,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
editor.duplicate_line_down(&DuplicateLineDown, window, cx);
assert_eq!(editor.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 1),
DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(1), 2),
@@ -5709,7 +5648,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
editor.duplicate_line_down(&DuplicateLineDown, window, cx);
assert_eq!(editor.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(3), 1)..DisplayPoint::new(DisplayRow(4), 1),
DisplayPoint::new(DisplayRow(4), 2)..DisplayPoint::new(DisplayRow(5), 1),
@@ -5735,7 +5674,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
editor.duplicate_line_up(&DuplicateLineUp, window, cx);
assert_eq!(editor.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 1),
DisplayPoint::new(DisplayRow(0), 2)..DisplayPoint::new(DisplayRow(0), 2),
@@ -5759,7 +5698,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
editor.duplicate_line_up(&DuplicateLineUp, window, cx);
assert_eq!(editor.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(1), 1),
DisplayPoint::new(DisplayRow(1), 2)..DisplayPoint::new(DisplayRow(2), 1),
@@ -5781,7 +5720,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
editor.duplicate_selection(&DuplicateSelection, window, cx);
assert_eq!(editor.display_text(cx), "abc\ndbc\ndef\ngf\nghi\n");
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(1), 1),
DisplayPoint::new(DisplayRow(2), 2)..DisplayPoint::new(DisplayRow(3), 1),
@@ -5828,7 +5767,7 @@ fn test_move_line_up_down(cx: &mut TestAppContext) {
"aa⋯bbb\nccc⋯eeee\nggggg\n⋯i\njjjjj\nfffff"
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1),
DisplayPoint::new(DisplayRow(2), 1)..DisplayPoint::new(DisplayRow(2), 1),
@@ -5845,7 +5784,7 @@ fn test_move_line_up_down(cx: &mut TestAppContext) {
"ccc⋯eeee\naa⋯bbb\nfffff\nggggg\n⋯i\njjjjj"
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(1), 1)..DisplayPoint::new(DisplayRow(1), 1),
DisplayPoint::new(DisplayRow(3), 1)..DisplayPoint::new(DisplayRow(3), 1),
@@ -5862,7 +5801,7 @@ fn test_move_line_up_down(cx: &mut TestAppContext) {
"ccc⋯eeee\nfffff\naa⋯bbb\nggggg\n⋯i\njjjjj"
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(2), 1)..DisplayPoint::new(DisplayRow(2), 1),
DisplayPoint::new(DisplayRow(3), 1)..DisplayPoint::new(DisplayRow(3), 1),
@@ -5879,7 +5818,7 @@ fn test_move_line_up_down(cx: &mut TestAppContext) {
"ccc⋯eeee\naa⋯bbb\nggggg\n⋯i\njjjjj\nfffff"
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(1), 1)..DisplayPoint::new(DisplayRow(1), 1),
DisplayPoint::new(DisplayRow(2), 1)..DisplayPoint::new(DisplayRow(2), 1),
@@ -7612,7 +7551,7 @@ fn test_select_all(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.select_all(&SelectAll, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
&[DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(2), 3)]
);
});
@@ -7637,7 +7576,7 @@ fn test_select_line(cx: &mut TestAppContext) {
});
editor.select_line(&SelectLine, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(2), 0),
DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(5), 0),
@@ -7648,7 +7587,7 @@ fn test_select_line(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.select_line(&SelectLine, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(3), 0),
DisplayPoint::new(DisplayRow(4), 0)..DisplayPoint::new(DisplayRow(5), 5),
@@ -7659,7 +7598,7 @@ fn test_select_line(cx: &mut TestAppContext) {
_ = editor.update(cx, |editor, window, cx| {
editor.select_line(&SelectLine, window, cx);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(5), 5)]
);
});
@@ -7784,7 +7723,7 @@ async fn test_split_selection_into_lines_interacting_with_creases(cx: &mut TestA
"aaaaa\nbbbbb\nccccc\nddddd\neeeee\nfffff\nggggg\nhhhhh\niiiii"
);
assert_eq!(
- editor.selections.display_ranges(cx),
+ display_ranges(editor, cx),
[
DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 5),
DisplayPoint::new(DisplayRow(1), 5)..DisplayPoint::new(DisplayRow(1), 5),
@@ -8928,7 +8867,9 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut TestAppContext) {
editor.select_larger_syntax_node(&SelectLargerSyntaxNode, window, cx);
});
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
&[DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(0), 0)]
);
@@ -8937,7 +8878,9 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut TestAppContext) {
editor.select_larger_syntax_node(&SelectLargerSyntaxNode, window, cx);
});
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
&[DisplayPoint::new(DisplayRow(5), 0)..DisplayPoint::new(DisplayRow(0), 0)]
);
@@ -232,6 +232,8 @@ impl EditorElement {
register_action(editor, window, Editor::blame_hover);
register_action(editor, window, Editor::delete);
register_action(editor, window, Editor::tab);
+ register_action(editor, window, Editor::next_snippet_tabstop);
+ register_action(editor, window, Editor::previous_snippet_tabstop);
register_action(editor, window, Editor::backtab);
register_action(editor, window, Editor::indent);
register_action(editor, window, Editor::outdent);
@@ -4091,7 +4093,17 @@ impl EditorElement {
.on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
.on_click(window.listener_for(&self.editor, {
let buffer_id = for_excerpt.buffer_id;
- move |editor, _e: &ClickEvent, _window, cx| {
+ move |editor, e: &ClickEvent, window, cx| {
+ if e.modifiers().alt {
+ editor.open_excerpts_common(
+ Some(jump_data.clone()),
+ e.modifiers().secondary(),
+ window,
+ cx,
+ );
+ return;
+ }
+
if is_folded {
editor.unfold_buffer(buffer_id, cx);
} else {
@@ -8332,7 +8344,7 @@ impl LineWithInvisibles {
let fragment_end_x = fragment_start_x + shaped_line.width;
if x < fragment_end_x {
return Some(
- fragment_start_index + shaped_line.index_for_x(x - fragment_start_x)?,
+ fragment_start_index + shaped_line.index_for_x(x - fragment_start_x),
);
}
fragment_start_x = fragment_end_x;
@@ -764,11 +764,6 @@ mod tests {
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
- client::init_settings(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
-
crate::init(cx);
});
}
@@ -1,7 +1,7 @@
use crate::{
ActiveDiagnostic, Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings,
EditorSnapshot, GlobalDiagnosticRenderer, Hover,
- display_map::{InlayOffset, ToDisplayPoint, invisibles::is_invisible},
+ display_map::{InlayOffset, ToDisplayPoint, is_invisible},
hover_links::{InlayHighlight, RangeInEditor},
movement::TextLayoutDetails,
scroll::ScrollAmount,
@@ -266,7 +266,7 @@ impl Editor {
reason: InlayHintRefreshReason,
cx: &mut Context<Self>,
) {
- if !self.mode.is_full() || self.inlay_hints.is_none() {
+ if self.ignore_lsp_data() || self.inlay_hints.is_none() {
return;
}
let Some(semantics_provider) = self.semantics_provider() else {
@@ -3951,10 +3951,6 @@ let c = 3;"#
cx.set_global(settings_store);
theme::init(theme::LoadThemes::JustBase, cx);
release_channel::init(SemanticVersion::default(), cx);
- client::init_settings(cx);
- language::init(cx);
- Project::init_settings(cx);
- workspace::init_settings(cx);
crate::init(cx);
});
@@ -877,7 +877,7 @@ mod tests {
};
use gpui::{AppContext as _, font, px};
use language::Capability;
- use project::{Project, project_settings::DiagnosticSeverity};
+ use project::project_settings::DiagnosticSeverity;
use settings::SettingsStore;
use util::post_inc;
@@ -1346,10 +1346,7 @@ mod tests {
fn init_test(cx: &mut gpui::App) {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- workspace::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
crate::init(cx);
- Project::init_settings(cx);
}
}
@@ -1,33 +1,30 @@
use std::{
- cell::Ref,
cmp, fmt, iter, mem,
ops::{Deref, DerefMut, Range, Sub},
sync::Arc,
};
use collections::HashMap;
-use gpui::{App, Entity, Pixels};
-use itertools::Itertools;
+use gpui::Pixels;
+use itertools::Itertools as _;
use language::{Bias, Point, Selection, SelectionGoal, TextDimension};
use util::post_inc;
use crate::{
- Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBuffer, MultiBufferSnapshot, SelectMode,
- ToOffset, ToPoint,
- display_map::{DisplayMap, DisplaySnapshot, ToDisplayPoint},
+ Anchor, DisplayPoint, DisplayRow, ExcerptId, MultiBufferSnapshot, SelectMode, ToOffset,
+ ToPoint,
+ display_map::{DisplaySnapshot, ToDisplayPoint},
movement::TextLayoutDetails,
};
#[derive(Debug, Clone)]
pub struct PendingSelection {
- pub selection: Selection<Anchor>,
- pub mode: SelectMode,
+ selection: Selection<Anchor>,
+ mode: SelectMode,
}
#[derive(Debug, Clone)]
pub struct SelectionsCollection {
- display_map: Entity<DisplayMap>,
- buffer: Entity<MultiBuffer>,
next_selection_id: usize,
line_mode: bool,
/// The non-pending, non-overlapping selections.
@@ -40,10 +37,8 @@ pub struct SelectionsCollection {
}
impl SelectionsCollection {
- pub fn new(display_map: Entity<DisplayMap>, buffer: Entity<MultiBuffer>) -> Self {
+ pub fn new() -> Self {
Self {
- display_map,
- buffer,
next_selection_id: 1,
line_mode: false,
disjoint: Arc::default(),
@@ -62,14 +57,6 @@ impl SelectionsCollection {
}
}
- pub fn display_map(&self, cx: &mut App) -> DisplaySnapshot {
- self.display_map.update(cx, |map, cx| map.snapshot(cx))
- }
-
- fn buffer<'a>(&self, cx: &'a App) -> Ref<'a, MultiBufferSnapshot> {
- self.buffer.read(cx).read(cx)
- }
-
pub fn clone_state(&mut self, other: &SelectionsCollection) {
self.next_selection_id = other.next_selection_id;
self.line_mode = other.line_mode;
@@ -106,15 +93,14 @@ impl SelectionsCollection {
}
/// Non-overlapping selections using anchors, including the pending selection.
- pub fn all_anchors(&self, cx: &mut App) -> Arc<[Selection<Anchor>]> {
+ pub fn all_anchors(&self, snapshot: &DisplaySnapshot) -> Arc<[Selection<Anchor>]> {
if self.pending.is_none() {
self.disjoint_anchors_arc()
} else {
- let all_offset_selections = self.all::<usize>(&self.display_map(cx));
- let buffer = self.buffer(cx);
+ let all_offset_selections = self.all::<usize>(snapshot);
all_offset_selections
.into_iter()
- .map(|selection| selection_to_anchor_selection(selection, &buffer))
+ .map(|selection| selection_to_anchor_selection(selection, snapshot))
.collect()
}
}
@@ -354,16 +340,17 @@ impl SelectionsCollection {
}
#[cfg(any(test, feature = "test-support"))]
- pub fn display_ranges(&self, cx: &mut App) -> Vec<Range<DisplayPoint>> {
- let display_map = self.display_map(cx);
+ pub fn display_ranges(&self, display_snapshot: &DisplaySnapshot) -> Vec<Range<DisplayPoint>> {
self.disjoint_anchors_arc()
.iter()
.chain(self.pending_anchor())
.map(|s| {
if s.reversed {
- s.end.to_display_point(&display_map)..s.start.to_display_point(&display_map)
+ s.end.to_display_point(display_snapshot)
+ ..s.start.to_display_point(display_snapshot)
} else {
- s.start.to_display_point(&display_map)..s.end.to_display_point(&display_map)
+ s.start.to_display_point(display_snapshot)
+ ..s.end.to_display_point(display_snapshot)
}
})
.collect()
@@ -385,7 +372,7 @@ impl SelectionsCollection {
let is_empty = positions.start == positions.end;
let line_len = display_map.line_len(row);
let line = display_map.layout_row(row, text_layout_details);
- let start_col = line.closest_index_for_x(positions.start) as u32;
+ let start_col = line.index_for_x(positions.start) as u32;
let (start, end) = if is_empty {
let point = DisplayPoint::new(row, std::cmp::min(start_col, line_len));
@@ -395,7 +382,7 @@ impl SelectionsCollection {
return None;
}
let start = DisplayPoint::new(row, start_col);
- let end_col = line.closest_index_for_x(positions.end) as u32;
+ let end_col = line.index_for_x(positions.end) as u32;
let end = DisplayPoint::new(row, end_col);
(start, end)
};
@@ -414,13 +401,13 @@ impl SelectionsCollection {
pub fn change_with<R>(
&mut self,
- cx: &mut App,
- change: impl FnOnce(&mut MutableSelectionsCollection) -> R,
+ snapshot: &DisplaySnapshot,
+ change: impl FnOnce(&mut MutableSelectionsCollection<'_, '_>) -> R,
) -> (bool, R) {
let mut mutable_collection = MutableSelectionsCollection {
+ snapshot,
collection: self,
selections_changed: false,
- cx,
};
let result = change(&mut mutable_collection);
@@ -460,13 +447,13 @@ impl SelectionsCollection {
}
}
-pub struct MutableSelectionsCollection<'a> {
+pub struct MutableSelectionsCollection<'snap, 'a> {
collection: &'a mut SelectionsCollection,
+ snapshot: &'snap DisplaySnapshot,
selections_changed: bool,
- cx: &'a mut App,
}
-impl<'a> fmt::Debug for MutableSelectionsCollection<'a> {
+impl<'snap, 'a> fmt::Debug for MutableSelectionsCollection<'snap, 'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("MutableSelectionsCollection")
.field("collection", &self.collection)
@@ -475,13 +462,9 @@ impl<'a> fmt::Debug for MutableSelectionsCollection<'a> {
}
}
-impl<'a> MutableSelectionsCollection<'a> {
- pub fn display_map(&mut self) -> DisplaySnapshot {
- self.collection.display_map(self.cx)
- }
-
- pub fn buffer(&self) -> Ref<'_, MultiBufferSnapshot> {
- self.collection.buffer(self.cx)
+impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
+ pub fn display_snapshot(&self) -> DisplaySnapshot {
+ self.snapshot.clone()
}
pub fn clear_disjoint(&mut self) {
@@ -512,12 +495,11 @@ impl<'a> MutableSelectionsCollection<'a> {
}
pub(crate) fn set_pending_anchor_range(&mut self, range: Range<Anchor>, mode: SelectMode) {
- let buffer = self.buffer.read(self.cx).snapshot(self.cx);
self.collection.pending = Some(PendingSelection {
selection: {
let mut start = range.start;
let mut end = range.end;
- let reversed = if start.cmp(&end, &buffer).is_gt() {
+ let reversed = if start.cmp(&end, self.snapshot).is_gt() {
mem::swap(&mut start, &mut end);
true
} else {
@@ -557,7 +539,7 @@ impl<'a> MutableSelectionsCollection<'a> {
return true;
}
- if !oldest.start.cmp(&oldest.end, &self.buffer()).is_eq() {
+ if !oldest.start.cmp(&oldest.end, self.snapshot).is_eq() {
let head = oldest.head();
oldest.start = head;
oldest.end = head;
@@ -573,10 +555,10 @@ impl<'a> MutableSelectionsCollection<'a> {
where
T: 'a + ToOffset + ToPoint + TextDimension + Ord + Sub<T, Output = T> + std::marker::Copy,
{
- let display_map = self.display_map();
+ let display_map = self.display_snapshot();
let mut selections = self.collection.all(&display_map);
- let mut start = range.start.to_offset(&self.buffer());
- let mut end = range.end.to_offset(&self.buffer());
+ let mut start = range.start.to_offset(self.snapshot);
+ let mut end = range.end.to_offset(self.snapshot);
let reversed = if start > end {
mem::swap(&mut start, &mut end);
true
@@ -597,10 +579,9 @@ impl<'a> MutableSelectionsCollection<'a> {
where
T: ToOffset + std::marker::Copy + std::fmt::Debug,
{
- let buffer = self.buffer.read(self.cx).snapshot(self.cx);
let mut selections = selections
.into_iter()
- .map(|selection| selection.map(|it| it.to_offset(&buffer)))
+ .map(|selection| selection.map(|it| it.to_offset(self.snapshot)))
.map(|mut selection| {
if selection.start > selection.end {
mem::swap(&mut selection.start, &mut selection.end);
@@ -629,14 +610,14 @@ impl<'a> MutableSelectionsCollection<'a> {
self.collection.disjoint = Arc::from_iter(
selections
.into_iter()
- .map(|selection| selection_to_anchor_selection(selection, &buffer)),
+ .map(|selection| selection_to_anchor_selection(selection, self.snapshot)),
);
self.collection.pending = None;
self.selections_changed = true;
}
pub fn select_anchors(&mut self, selections: Vec<Selection<Anchor>>) {
- let map = self.display_map();
+ let map = self.display_snapshot();
let resolved_selections =
resolve_selections_wrapping_blocks::<usize, _>(&selections, &map).collect::<Vec<_>>();
self.select(resolved_selections);
@@ -647,10 +628,9 @@ impl<'a> MutableSelectionsCollection<'a> {
I: IntoIterator<Item = Range<T>>,
T: ToOffset,
{
- let buffer = self.buffer.read(self.cx).snapshot(self.cx);
let ranges = ranges
.into_iter()
- .map(|range| range.start.to_offset(&buffer)..range.end.to_offset(&buffer));
+ .map(|range| range.start.to_offset(self.snapshot)..range.end.to_offset(self.snapshot));
self.select_offset_ranges(ranges);
}
@@ -686,13 +666,12 @@ impl<'a> MutableSelectionsCollection<'a> {
where
I: IntoIterator<Item = Range<Anchor>>,
{
- let buffer = self.buffer.read(self.cx).snapshot(self.cx);
let selections = ranges
.into_iter()
.map(|range| {
let mut start = range.start;
let mut end = range.end;
- let reversed = if start.cmp(&end, &buffer).is_gt() {
+ let reversed = if start.cmp(&end, self.snapshot).is_gt() {
mem::swap(&mut start, &mut end);
true
} else {
@@ -718,7 +697,6 @@ impl<'a> MutableSelectionsCollection<'a> {
where
T: IntoIterator<Item = Range<DisplayPoint>>,
{
- let display_map = self.display_map();
let selections = ranges
.into_iter()
.map(|range| {
@@ -732,8 +710,8 @@ impl<'a> MutableSelectionsCollection<'a> {
};
Selection {
id: post_inc(&mut self.collection.next_selection_id),
- start: start.to_point(&display_map),
- end: end.to_point(&display_map),
+ start: start.to_point(self.snapshot),
+ end: end.to_point(self.snapshot),
reversed,
goal: SelectionGoal::None,
}
@@ -743,7 +721,6 @@ impl<'a> MutableSelectionsCollection<'a> {
}
pub fn reverse_selections(&mut self) {
- let map = &self.display_map();
let mut new_selections: Vec<Selection<Point>> = Vec::new();
let disjoint = self.disjoint.clone();
for selection in disjoint
@@ -753,8 +730,14 @@ impl<'a> MutableSelectionsCollection<'a> {
{
new_selections.push(Selection {
id: self.new_selection_id(),
- start: selection.start.to_display_point(map).to_point(map),
- end: selection.end.to_display_point(map).to_point(map),
+ start: selection
+ .start
+ .to_display_point(self.snapshot)
+ .to_point(self.snapshot),
+ end: selection
+ .end
+ .to_display_point(self.snapshot)
+ .to_point(self.snapshot),
reversed: selection.reversed,
goal: selection.goal,
});
@@ -767,7 +750,7 @@ impl<'a> MutableSelectionsCollection<'a> {
mut move_selection: impl FnMut(&DisplaySnapshot, &mut Selection<DisplayPoint>),
) {
let mut changed = false;
- let display_map = self.display_map();
+ let display_map = self.display_snapshot();
let selections = self.collection.all_display(&display_map);
let selections = selections
.into_iter()
@@ -791,22 +774,20 @@ impl<'a> MutableSelectionsCollection<'a> {
mut move_selection: impl FnMut(&MultiBufferSnapshot, &mut Selection<usize>),
) {
let mut changed = false;
- let snapshot = self.buffer().clone();
- let display_map = self.display_map();
+ let display_map = self.display_snapshot();
let selections = self
.collection
.all::<usize>(&display_map)
.into_iter()
.map(|selection| {
let mut moved_selection = selection.clone();
- move_selection(&snapshot, &mut moved_selection);
+ move_selection(self.snapshot, &mut moved_selection);
if selection != moved_selection {
changed = true;
}
moved_selection
})
.collect();
- drop(snapshot);
if changed {
self.select(selections)
@@ -858,11 +839,10 @@ impl<'a> MutableSelectionsCollection<'a> {
&mut self,
find_replacement_cursors: impl FnOnce(&DisplaySnapshot) -> Vec<DisplayPoint>,
) {
- let display_map = self.display_map();
- let new_selections = find_replacement_cursors(&display_map)
+ let new_selections = find_replacement_cursors(self.snapshot)
.into_iter()
.map(|cursor| {
- let cursor_point = cursor.to_point(&display_map);
+ let cursor_point = cursor.to_point(self.snapshot);
Selection {
id: post_inc(&mut self.collection.next_selection_id),
start: cursor_point,
@@ -886,12 +866,11 @@ impl<'a> MutableSelectionsCollection<'a> {
let mut selections_with_lost_position = HashMap::default();
let anchors_with_status = {
- let buffer = self.buffer();
let disjoint_anchors = self
.disjoint
.iter()
.flat_map(|selection| [&selection.start, &selection.end]);
- buffer.refresh_anchors(disjoint_anchors)
+ self.snapshot.refresh_anchors(disjoint_anchors)
};
let adjusted_disjoint: Vec<_> = anchors_with_status
.chunks(2)
@@ -919,16 +898,16 @@ impl<'a> MutableSelectionsCollection<'a> {
.collect();
if !adjusted_disjoint.is_empty() {
- let map = self.display_map();
+ let map = self.display_snapshot();
let resolved_selections =
resolve_selections_wrapping_blocks(adjusted_disjoint.iter(), &map).collect();
self.select::<usize>(resolved_selections);
}
if let Some(pending) = pending.as_mut() {
- let buffer = self.buffer();
- let anchors =
- buffer.refresh_anchors([&pending.selection.start, &pending.selection.end]);
+ let anchors = self
+ .snapshot
+ .refresh_anchors([&pending.selection.start, &pending.selection.end]);
let (_, start, kept_start) = anchors[0];
let (_, end, kept_end) = anchors[1];
let kept_head = if pending.selection.reversed {
@@ -951,14 +930,14 @@ impl<'a> MutableSelectionsCollection<'a> {
}
}
-impl Deref for MutableSelectionsCollection<'_> {
+impl Deref for MutableSelectionsCollection<'_, '_> {
type Target = SelectionsCollection;
fn deref(&self) -> &Self::Target {
self.collection
}
}
-impl DerefMut for MutableSelectionsCollection<'_> {
+impl DerefMut for MutableSelectionsCollection<'_, '_> {
fn deref_mut(&mut self) -> &mut Self::Target {
self.collection
}
@@ -54,10 +54,8 @@ impl EditorLspTestContext {
cx.update(|cx| {
assets::Assets.load_test_fonts(cx);
- language::init(cx);
crate::init(cx);
workspace::init(app_state.clone(), cx);
- Project::init_settings(cx);
});
let file_name = format!(
@@ -23,7 +23,6 @@ use gpui_tokio::Tokio;
use language::LanguageRegistry;
use language_model::{ConfiguredModel, LanguageModel, LanguageModelRegistry, SelectedModel};
use node_runtime::{NodeBinaryOptions, NodeRuntime};
-use project::Project;
use project::project_settings::ProjectSettings;
use prompt_store::PromptBuilder;
use release_channel::AppVersion;
@@ -354,7 +353,6 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
let settings_store = SettingsStore::new(cx, &settings::default_settings());
cx.set_global(settings_store);
- client::init_settings(cx);
// Set User-Agent so we can download language servers from GitHub
let user_agent = format!(
@@ -376,8 +374,6 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
};
cx.set_http_client(Arc::new(http));
- Project::init_settings(cx);
-
let client = Client::production(cx);
cx.set_http_client(client.http_client());
@@ -422,8 +418,6 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
let node_runtime = NodeRuntime::new(client.http_client(), None, rx);
let extension_host_proxy = ExtensionHostProxy::global(cx);
-
- language::init(cx);
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone());
language_model::init(client.clone(), cx);
@@ -200,8 +200,6 @@ pub fn init(
node_runtime: NodeRuntime,
cx: &mut App,
) {
- ExtensionSettings::register(cx);
-
let store = cx.new(move |cx| {
ExtensionStore::new(
paths::extensions_dir().clone(),
@@ -2,10 +2,10 @@ use collections::HashMap;
use extension::{
DownloadFileCapability, ExtensionCapability, NpmInstallPackageCapability, ProcessExecCapability,
};
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
use std::sync::Arc;
-#[derive(Debug, Default, Clone)]
+#[derive(Debug, Default, Clone, RegisterSetting)]
pub struct ExtensionSettings {
/// The extensions that should be automatically installed by Zed.
///
@@ -1,7 +1,7 @@
use crate::{
Event, ExtensionIndex, ExtensionIndexEntry, ExtensionIndexLanguageEntry,
- ExtensionIndexThemeEntry, ExtensionManifest, ExtensionSettings, ExtensionStore,
- GrammarManifestEntry, RELOAD_DEBOUNCE_DURATION, SchemaVersion,
+ ExtensionIndexThemeEntry, ExtensionManifest, ExtensionStore, GrammarManifestEntry,
+ RELOAD_DEBOUNCE_DURATION, SchemaVersion,
};
use async_compression::futures::bufread::GzipEncoder;
use collections::{BTreeMap, HashSet};
@@ -19,7 +19,7 @@ use project::{DEFAULT_COMPLETION_CONTEXT, Project};
use release_channel::AppVersion;
use reqwest_client::ReqwestClient;
use serde_json::json;
-use settings::{Settings as _, SettingsStore};
+use settings::SettingsStore;
use std::{
ffi::OsString,
path::{Path, PathBuf},
@@ -865,9 +865,6 @@ fn init_test(cx: &mut TestAppContext) {
release_channel::init(SemanticVersion::default(), cx);
extension::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- Project::init_settings(cx);
- ExtensionSettings::register(cx);
- language::init(cx);
gpui_tokio::init(cx);
});
}
@@ -90,12 +90,7 @@ pub struct FileFinder {
init_modifiers: Option<Modifiers>,
}
-pub fn init_settings(cx: &mut App) {
- FileFinderSettings::register(cx);
-}
-
pub fn init(cx: &mut App) {
- init_settings(cx);
cx.observe_new(FileFinder::register).detach();
cx.observe_new(OpenPathPrompt::register).detach();
cx.observe_new(OpenPathPrompt::register_new_path).detach();
@@ -1,8 +1,8 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
-#[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
+#[derive(Deserialize, Debug, Clone, Copy, PartialEq, RegisterSetting)]
pub struct FileFinderSettings {
pub file_icons: bool,
pub modal_max_width: FileFinderWidth,
@@ -3206,11 +3206,8 @@ fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
cx.update(|cx| {
let state = AppState::test(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
super::init(cx);
editor::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
state
})
}
@@ -397,11 +397,8 @@ fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
cx.update(|cx| {
let state = AppState::test(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
super::init(cx);
editor::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
state
})
}
@@ -40,7 +40,7 @@ objc.workspace = true
cocoa = "0.26"
[target.'cfg(not(target_os = "macos"))'.dependencies]
-notify = "8.0.0"
+notify = "8.2.0"
[target.'cfg(target_os = "windows")'.dependencies]
windows.workspace = true
@@ -64,23 +64,23 @@ pub enum StageStatus {
}
impl StageStatus {
- pub fn is_fully_staged(&self) -> bool {
+ pub const fn is_fully_staged(&self) -> bool {
matches!(self, StageStatus::Staged)
}
- pub fn is_fully_unstaged(&self) -> bool {
+ pub const fn is_fully_unstaged(&self) -> bool {
matches!(self, StageStatus::Unstaged)
}
- pub fn has_staged(&self) -> bool {
+ pub const fn has_staged(&self) -> bool {
matches!(self, StageStatus::Staged | StageStatus::PartiallyStaged)
}
- pub fn has_unstaged(&self) -> bool {
+ pub const fn has_unstaged(&self) -> bool {
matches!(self, StageStatus::Unstaged | StageStatus::PartiallyStaged)
}
- pub fn as_bool(self) -> Option<bool> {
+ pub const fn as_bool(self) -> Option<bool> {
match self {
StageStatus::Staged => Some(true),
StageStatus::Unstaged => Some(false),
@@ -2,15 +2,15 @@ use std::sync::Arc;
use git::GitHostingProviderRegistry;
use gpui::App;
-use settings::{GitHostingProviderConfig, GitHostingProviderKind, Settings, SettingsStore};
+use settings::{
+ GitHostingProviderConfig, GitHostingProviderKind, RegisterSetting, Settings, SettingsStore,
+};
use url::Url;
use util::ResultExt as _;
use crate::{Bitbucket, Github, Gitlab};
pub(crate) fn init(cx: &mut App) {
- GitHostingProviderSettings::register(cx);
-
init_git_hosting_provider_settings(cx);
}
@@ -52,7 +52,7 @@ fn update_git_hosting_providers_from_settings(cx: &mut App) {
provider_registry.set_setting_providers(iter);
}
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, RegisterSetting)]
pub struct GitHostingProviderSettings {
pub git_hosting_providers: Vec<GitHostingProviderConfig>,
}
@@ -370,10 +370,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
- workspace::init_settings(cx);
- editor::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
});
}
@@ -47,7 +47,7 @@ use panel::{
};
use project::{
Fs, Project, ProjectPath,
- git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId},
+ git_store::{GitStoreEvent, Repository, RepositoryEvent, RepositoryId, pending_op},
};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsStore, StatusStyle};
@@ -271,27 +271,6 @@ impl GitStatusEntry {
}
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-enum TargetStatus {
- Staged,
- Unstaged,
- Reverted,
- Unchanged,
-}
-
-struct PendingOperation {
- finished: bool,
- target_status: TargetStatus,
- entries: Vec<GitStatusEntry>,
- op_id: usize,
-}
-
-impl PendingOperation {
- fn contains_path(&self, path: &RepoPath) -> bool {
- self.entries.iter().any(|p| &p.repo_path == path)
- }
-}
-
pub struct GitPanel {
pub(crate) active_repository: Option<Entity<Repository>>,
pub(crate) commit_editor: Entity<Editor>,
@@ -307,7 +286,6 @@ pub struct GitPanel {
new_count: usize,
entry_count: usize,
new_staged_count: usize,
- pending: Vec<PendingOperation>,
pending_commit: Option<Task<()>>,
amend_pending: bool,
original_commit_message: Option<String>,
@@ -427,7 +405,7 @@ impl GitPanel {
move |this, _git_store, event, window, cx| match event {
GitStoreEvent::ActiveRepositoryChanged(_) => {
this.active_repository = this.project.read(cx).active_repository(cx);
- this.schedule_update(true, window, cx);
+ this.schedule_update(window, cx);
}
GitStoreEvent::RepositoryUpdated(
_,
@@ -436,7 +414,7 @@ impl GitPanel {
| RepositoryEvent::MergeHeadsChanged,
true,
) => {
- this.schedule_update(true, window, cx);
+ this.schedule_update(window, cx);
}
GitStoreEvent::RepositoryUpdated(
_,
@@ -445,7 +423,7 @@ impl GitPanel {
)
| GitStoreEvent::RepositoryAdded
| GitStoreEvent::RepositoryRemoved(_) => {
- this.schedule_update(false, window, cx);
+ this.schedule_update(window, cx);
}
GitStoreEvent::IndexWriteError(error) => {
this.workspace
@@ -472,7 +450,6 @@ impl GitPanel {
fs,
new_count: 0,
new_staged_count: 0,
- pending: Vec::new(),
pending_commit: None,
amend_pending: false,
original_commit_message: None,
@@ -501,7 +478,7 @@ impl GitPanel {
_settings_subscription,
};
- this.schedule_update(false, window, cx);
+ this.schedule_update(window, cx);
this
})
}
@@ -1013,15 +990,7 @@ impl GitPanel {
return;
};
- let op_id = self.pending.iter().map(|p| p.op_id).max().unwrap_or(0) + 1;
- self.pending.push(PendingOperation {
- op_id,
- target_status: TargetStatus::Reverted,
- entries: entries.clone(),
- finished: false,
- });
- self.update_visible_entries(window, cx);
- let task = cx.spawn(async move |_, cx| {
+ let task = cx.spawn_in(window, async move |this, cx| {
let tasks: Vec<_> = workspace.update(cx, |workspace, cx| {
workspace.project().update(cx, |project, cx| {
entries
@@ -1038,8 +1007,8 @@ impl GitPanel {
let buffers = futures::future::join_all(tasks).await;
- active_repository
- .update(cx, |repo, cx| {
+ this.update_in(cx, |this, window, cx| {
+ let task = active_repository.update(cx, |repo, cx| {
repo.checkout_files(
"HEAD",
entries
@@ -1048,10 +1017,14 @@ impl GitPanel {
.collect(),
cx,
)
- })?
- .await??;
+ });
+ this.update_visible_entries(window, cx);
+ cx.notify();
+ task
+ })?
+ .await?;
- let tasks: Vec<_> = cx.update(|cx| {
+ let tasks: Vec<_> = cx.update(|_, cx| {
buffers
.iter()
.filter_map(|buffer| {
@@ -1071,21 +1044,10 @@ impl GitPanel {
let result = task.await;
this.update_in(cx, |this, window, cx| {
- for pending in this.pending.iter_mut() {
- if pending.op_id == op_id {
- pending.finished = true;
- if result.is_err() {
- pending.target_status = TargetStatus::Unchanged;
- this.update_visible_entries(window, cx);
- }
- break;
- }
+ if let Err(err) = result {
+ this.update_visible_entries(window, cx);
+ this.show_error_toast("checkout", err, cx);
}
- result
- .map_err(|e| {
- this.show_error_toast("checkout", e, cx);
- })
- .ok();
})
.ok();
})
@@ -1213,26 +1175,44 @@ impl GitPanel {
});
}
+ fn change_all_files_stage(&mut self, stage: bool, cx: &mut Context<Self>) {
+ let Some(active_repository) = self.active_repository.clone() else {
+ return;
+ };
+ cx.spawn({
+ async move |this, cx| {
+ let result = this
+ .update(cx, |this, cx| {
+ let task = active_repository.update(cx, |repo, cx| {
+ if stage {
+ repo.stage_all(cx)
+ } else {
+ repo.unstage_all(cx)
+ }
+ });
+ this.update_counts(active_repository.read(cx));
+ cx.notify();
+ task
+ })?
+ .await;
+
+ this.update(cx, |this, cx| {
+ if let Err(err) = result {
+ this.show_error_toast(if stage { "add" } else { "reset" }, err, cx);
+ }
+ cx.notify()
+ })
+ }
+ })
+ .detach();
+ }
+
pub fn stage_all(&mut self, _: &StageAll, _window: &mut Window, cx: &mut Context<Self>) {
- let entries = self
- .entries
- .iter()
- .filter_map(|entry| entry.status_entry())
- .filter(|status_entry| status_entry.staging.has_unstaged())
- .cloned()
- .collect::<Vec<_>>();
- self.change_file_stage(true, entries, cx);
+ self.change_all_files_stage(true, cx);
}
pub fn unstage_all(&mut self, _: &UnstageAll, _window: &mut Window, cx: &mut Context<Self>) {
- let entries = self
- .entries
- .iter()
- .filter_map(|entry| entry.status_entry())
- .filter(|status_entry| status_entry.staging.has_staged())
- .cloned()
- .collect::<Vec<_>>();
- self.change_file_stage(false, entries, cx);
+ self.change_all_files_stage(false, cx);
}
fn toggle_staged_for_entry(
@@ -1247,9 +1227,12 @@ impl GitPanel {
let (stage, repo_paths) = match entry {
GitListEntry::Status(status_entry) => {
let repo_paths = vec![status_entry.clone()];
- let stage = if let Some(status) = self.entry_staging(&status_entry) {
- !status.is_fully_staged()
- } else if status_entry.status.staging().is_fully_staged() {
+ let stage = if active_repository
+ .read(cx)
+ .pending_ops_for_path(&status_entry.repo_path)
+ .map(|ops| ops.staging() || ops.staged())
+ .unwrap_or(status_entry.status.staging().has_staged())
+ {
if let Some(op) = self.bulk_staging.clone()
&& op.anchor == status_entry.repo_path
{
@@ -1291,56 +1274,31 @@ impl GitPanel {
let Some(active_repository) = self.active_repository.clone() else {
return;
};
- let op_id = self.pending.iter().map(|p| p.op_id).max().unwrap_or(0) + 1;
- self.pending.push(PendingOperation {
- op_id,
- target_status: if stage {
- TargetStatus::Staged
- } else {
- TargetStatus::Unstaged
- },
- entries: entries.clone(),
- finished: false,
- });
- let repository = active_repository.read(cx);
- self.update_counts(repository);
- cx.notify();
-
cx.spawn({
async move |this, cx| {
- let result = cx
- .update(|cx| {
- if stage {
- active_repository.update(cx, |repo, cx| {
- let repo_paths = entries
- .iter()
- .map(|entry| entry.repo_path.clone())
- .collect();
+ let result = this
+ .update(cx, |this, cx| {
+ let task = active_repository.update(cx, |repo, cx| {
+ let repo_paths = entries
+ .iter()
+ .map(|entry| entry.repo_path.clone())
+ .collect();
+ if stage {
repo.stage_entries(repo_paths, cx)
- })
- } else {
- active_repository.update(cx, |repo, cx| {
- let repo_paths = entries
- .iter()
- .map(|entry| entry.repo_path.clone())
- .collect();
+ } else {
repo.unstage_entries(repo_paths, cx)
- })
- }
+ }
+ });
+ this.update_counts(active_repository.read(cx));
+ cx.notify();
+ task
})?
.await;
this.update(cx, |this, cx| {
- for pending in this.pending.iter_mut() {
- if pending.op_id == op_id {
- pending.finished = true
- }
+ if let Err(err) = result {
+ this.show_error_toast(if stage { "add" } else { "reset" }, err, cx);
}
- result
- .map_err(|e| {
- this.show_error_toast(if stage { "add" } else { "reset" }, e, cx);
- })
- .ok();
cx.notify();
})
}
@@ -2572,12 +2530,7 @@ impl GitPanel {
message.push('\n');
}
- fn schedule_update(
- &mut self,
- clear_pending: bool,
- window: &mut Window,
- cx: &mut Context<Self>,
- ) {
+ fn schedule_update(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let handle = cx.entity().downgrade();
self.reopen_commit_buffer(window, cx);
self.update_visible_entries_task = cx.spawn_in(window, async move |_, cx| {
@@ -2585,9 +2538,6 @@ impl GitPanel {
if let Some(git_panel) = handle.upgrade() {
git_panel
.update_in(cx, |git_panel, window, cx| {
- if clear_pending {
- git_panel.clear_pending();
- }
git_panel.update_visible_entries(window, cx);
})
.ok();
@@ -2636,10 +2586,6 @@ impl GitPanel {
.detach_and_log_err(cx);
}
- fn clear_pending(&mut self) {
- self.pending.retain(|v| !v.finished)
- }
-
fn update_visible_entries(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let path_style = self.project.read(cx).path_style(cx);
let bulk_staging = self.bulk_staging.take();
@@ -2682,11 +2628,12 @@ impl GitPanel {
let is_new = entry.status.is_created();
let staging = entry.status.staging();
- if self.pending.iter().any(|pending| {
- pending.target_status == TargetStatus::Reverted
- && !pending.finished
- && pending.contains_path(&entry.repo_path)
- }) {
+ if let Some(pending) = repo.pending_ops_for_path(&entry.repo_path)
+ && pending
+ .ops
+ .iter()
+ .any(|op| op.git_status == pending_op::GitStatus::Reverted && op.finished())
+ {
continue;
}
@@ -2727,30 +2674,35 @@ impl GitPanel {
}
}
- let mut pending_staged_count = 0;
- let mut last_pending_staged = None;
- let mut pending_status_for_single_staged = None;
- for pending in self.pending.iter() {
- if pending.target_status == TargetStatus::Staged {
- pending_staged_count += pending.entries.len();
- last_pending_staged = pending.entries.first().cloned();
- }
- if let Some(single_staged) = &single_staged_entry
- && pending.contains_path(&single_staged.repo_path)
+ if conflict_entries.is_empty() {
+ if staged_count == 1
+ && let Some(entry) = single_staged_entry.as_ref()
{
- pending_status_for_single_staged = Some(pending.target_status);
- }
- }
-
- if conflict_entries.is_empty() && staged_count == 1 && pending_staged_count == 0 {
- match pending_status_for_single_staged {
- Some(TargetStatus::Staged) | None => {
- self.single_staged_entry = single_staged_entry;
+ if let Some(ops) = repo.pending_ops_for_path(&entry.repo_path) {
+ if ops.staged() {
+ self.single_staged_entry = single_staged_entry;
+ }
}
- _ => {}
+ } else if repo
+ .pending_ops_by_path
+ .summary()
+ .item_summary
+ .staging_count
+ == 1
+ {
+ self.single_staged_entry = repo.pending_ops_by_path.iter().find_map(|ops| {
+ if ops.staging() {
+ repo.status_for_path(&ops.repo_path)
+ .map(|status| GitStatusEntry {
+ repo_path: ops.repo_path.clone(),
+ status: status.status,
+ staging: StageStatus::Staged,
+ })
+ } else {
+ None
+ }
+ });
}
- } else if conflict_entries.is_empty() && pending_staged_count == 1 {
- self.single_staged_entry = last_pending_staged;
}
if conflict_entries.is_empty() && changed_entries.len() == 1 {
@@ -2799,7 +2751,10 @@ impl GitPanel {
&& let Some(index) = bulk_staging_anchor_new_index
&& let Some(entry) = self.entries.get(index)
&& let Some(entry) = entry.status_entry()
- && self.entry_staging(entry).unwrap_or(entry.staging) == StageStatus::Staged
+ && repo
+ .pending_ops_for_path(&entry.repo_path)
+ .map(|ops| ops.staging() || ops.staged())
+ .unwrap_or(entry.staging.has_staged())
{
self.bulk_staging = bulk_staging;
}
@@ -2845,51 +2800,29 @@ impl GitPanel {
continue;
};
self.entry_count += 1;
+ let is_staging_or_staged = repo
+ .pending_ops_for_path(&status_entry.repo_path)
+ .map(|ops| ops.staging() || ops.staged())
+ .unwrap_or(status_entry.staging.has_staged());
if repo.had_conflict_on_last_merge_head_change(&status_entry.repo_path) {
self.conflicted_count += 1;
- if self
- .entry_staging(status_entry)
- .unwrap_or(status_entry.staging)
- .has_staged()
- {
+ if is_staging_or_staged {
self.conflicted_staged_count += 1;
}
} else if status_entry.status.is_created() {
self.new_count += 1;
- if self
- .entry_staging(status_entry)
- .unwrap_or(status_entry.staging)
- .has_staged()
- {
+ if is_staging_or_staged {
self.new_staged_count += 1;
}
} else {
self.tracked_count += 1;
- if self
- .entry_staging(status_entry)
- .unwrap_or(status_entry.staging)
- .has_staged()
- {
+ if is_staging_or_staged {
self.tracked_staged_count += 1;
}
}
}
}
- fn entry_staging(&self, entry: &GitStatusEntry) -> Option<StageStatus> {
- for pending in self.pending.iter().rev() {
- if pending.contains_path(&entry.repo_path) {
- match pending.target_status {
- TargetStatus::Staged => return Some(StageStatus::Staged),
- TargetStatus::Unstaged => return Some(StageStatus::Unstaged),
- TargetStatus::Reverted => continue,
- TargetStatus::Unchanged => continue,
- }
- }
- }
- None
- }
-
pub(crate) fn has_staged_changes(&self) -> bool {
self.tracked_staged_count > 0
|| self.new_staged_count > 0
@@ -3727,10 +3660,15 @@ impl GitPanel {
let ix = self.entry_by_path(&repo_path, cx)?;
let entry = self.entries.get(ix)?;
- let status = entry.status_entry()?;
- let entry_staging = self.entry_staging(status).unwrap_or(status.staging);
+ let is_staging_or_staged = if let Some(status_entry) = entry.status_entry() {
+ repo.pending_ops_for_path(&repo_path)
+ .map(|ops| ops.staging() || ops.staged())
+ .unwrap_or(status_entry.staging.has_staged())
+ } else {
+ false
+ };
- let checkbox = Checkbox::new("stage-file", entry_staging.as_bool().into())
+ let checkbox = Checkbox::new("stage-file", is_staging_or_staged.into())
.disabled(!self.has_write_access(cx))
.fill()
.elevation(ElevationIndex::Surface)
@@ -4022,8 +3960,29 @@ impl GitPanel {
let checkbox_id: ElementId =
ElementId::Name(format!("entry_{}_{}_checkbox", display_name, ix).into());
- let entry_staging = self.entry_staging(entry).unwrap_or(entry.staging);
- let mut is_staged: ToggleState = entry_staging.as_bool().into();
+ let active_repo = self
+ .project
+ .read(cx)
+ .active_repository(cx)
+ .expect("active repository must be set");
+ let repo = active_repo.read(cx);
+ // Checking for current staged/unstaged file status is a chained operation:
+ // 1. first, we check for any pending operation recorded in repository
+ // 2. if there are no pending ops either running or finished, we then ask the repository
+ // for the most up-to-date file status read from disk - we do this since `entry` arg to this function `render_entry`
+ // is likely to be staled, and may lead to weird artifacts in the form of subsecond auto-uncheck/check on
+ // the checkbox's state (or flickering) which is undesirable.
+ // 3. finally, if there is no info about this `entry` in the repo, we fall back to whatever status is encoded
+ // in `entry` arg.
+ let is_staging_or_staged = repo
+ .pending_ops_for_path(&entry.repo_path)
+ .map(|ops| ops.staging() || ops.staged())
+ .or_else(|| {
+ repo.status_for_path(&entry.repo_path)
+ .map(|status| status.status.staging().has_staged())
+ })
+ .unwrap_or(entry.staging.has_staged());
+ let mut is_staged: ToggleState = is_staging_or_staged.into();
if self.show_placeholders && !self.has_staged_changes() && !entry.status.is_created() {
is_staged = ToggleState::Selected;
}
@@ -4142,9 +4101,11 @@ impl GitPanel {
}
})
.tooltip(move |_window, cx| {
- let is_staged = entry_staging.is_fully_staged();
-
- let action = if is_staged { "Unstage" } else { "Stage" };
+ let action = if is_staging_or_staged {
+ "Unstage"
+ } else {
+ "Stage"
+ };
let tooltip_name = action.to_string();
Tooltip::for_action(tooltip_name, &ToggleStaged, cx)
@@ -4994,7 +4955,7 @@ mod tests {
status::{StatusCode, UnmergedStatus, UnmergedStatusCode},
};
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext};
- use project::{FakeFs, WorktreeSettings};
+ use project::FakeFs;
use serde_json::json;
use settings::SettingsStore;
use theme::LoadThemes;
@@ -5009,13 +4970,8 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- AgentSettings::register(cx);
- WorktreeSettings::register(cx);
- workspace::init_settings(cx);
theme::init(LoadThemes::JustBase, cx);
- language::init(cx);
editor::init(cx);
- Project::init_settings(cx);
crate::init(cx);
});
}
@@ -2,7 +2,7 @@ use editor::EditorSettings;
use gpui::Pixels;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{Settings, StatusStyle};
+use settings::{RegisterSetting, Settings, StatusStyle};
use ui::{
px,
scrollbars::{ScrollbarVisibility, ShowScrollbar},
@@ -14,7 +14,7 @@ pub struct ScrollbarSettings {
pub show: Option<ShowScrollbar>,
}
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, RegisterSetting)]
pub struct GitPanelSettings {
pub button: bool,
pub dock: DockPosition,
@@ -1,6 +1,5 @@
use std::any::Any;
-use ::settings::Settings;
use command_palette_hooks::CommandPaletteFilter;
use commit_modal::CommitModal;
use editor::{Editor, actions::DiffClipboardWithSelectionData};
@@ -15,7 +14,6 @@ use git::{
repository::{Branch, Upstream, UpstreamTracking, UpstreamTrackingStatus},
status::{FileStatus, StatusCode, UnmergedStatus, UnmergedStatusCode},
};
-use git_panel_settings::GitPanelSettings;
use gpui::{
Action, App, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, SharedString,
Window, actions,
@@ -57,8 +55,6 @@ actions!(
);
pub fn init(cx: &mut App) {
- GitPanelSettings::register(cx);
-
editor::set_blame_renderer(blame_ui::GitBlameRenderer, cx);
commit_view::init(cx);
@@ -1587,9 +1587,6 @@ mod tests {
let store = SettingsStore::test(cx);
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
- Project::init_settings(cx);
- workspace::init_settings(cx);
editor::init(cx);
crate::init(cx);
});
@@ -458,10 +458,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
- workspace::init_settings(cx);
- editor::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
});
}
@@ -1,6 +1,6 @@
use editor::{Editor, EditorEvent, MultiBufferSnapshot};
use gpui::{App, Entity, FocusHandle, Focusable, Styled, Subscription, Task, WeakEntity};
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
use std::{fmt::Write, num::NonZeroU32, time::Duration};
use text::{Point, Selection};
use ui::{
@@ -293,7 +293,7 @@ impl StatusItemView for CursorPosition {
}
}
-#[derive(Clone, Copy, PartialEq, Eq)]
+#[derive(Clone, Copy, PartialEq, Eq, RegisterSetting)]
pub enum LineIndicatorFormat {
Short,
Long,
@@ -1,6 +1,6 @@
pub mod cursor_position;
-use cursor_position::{LineIndicatorFormat, UserCaretPosition};
+use cursor_position::UserCaretPosition;
use editor::{
Anchor, Editor, MultiBufferSnapshot, RowHighlightOptions, SelectionEffects, ToOffset, ToPoint,
actions::Tab,
@@ -11,7 +11,6 @@ use gpui::{
Subscription, div, prelude::*,
};
use language::Buffer;
-use settings::Settings;
use text::{Bias, Point};
use theme::ActiveTheme;
use ui::prelude::*;
@@ -19,7 +18,6 @@ use util::paths::FILE_ROW_COLUMN_DELIMITER;
use workspace::{DismissDecision, ModalView};
pub fn init(cx: &mut App) {
- LineIndicatorFormat::register(cx);
cx.observe_new(GoToLine::register).detach();
}
@@ -770,11 +768,8 @@ mod tests {
fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
cx.update(|cx| {
let state = AppState::test(cx);
- language::init(cx);
crate::init(cx);
editor::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
state
})
}
@@ -178,7 +178,7 @@ impl TextInput {
if position.y > bounds.bottom() {
return self.content.len();
}
- line.closest_index_for_x(position.x - bounds.left())
+ line.index_for_x(position.x - bounds.left())
}
fn select_to(&mut self, offset: usize, cx: &mut Context<Self>) {
@@ -380,7 +380,7 @@ impl EntityInputHandler for TextInput {
let last_layout = self.last_layout.as_ref()?;
assert_eq!(last_layout.text, self.content);
- let utf8_index = last_layout.index_for_x(point.x - line_point.x)?;
+ let utf8_index = last_layout.index_for_x(point.x - line_point.x);
Some(self.offset_to_utf16(utf8_index))
}
}
@@ -1,6 +1,6 @@
use gpui::{
- App, Application, Context, Menu, MenuItem, SystemMenuType, Window, WindowOptions, actions, div,
- prelude::*, rgb,
+ App, Application, Context, Global, Menu, MenuItem, SharedString, SystemMenuType, Window,
+ WindowOptions, actions, div, prelude::*, rgb,
};
struct SetMenus;
@@ -21,29 +21,87 @@ impl Render for SetMenus {
fn main() {
Application::new().run(|cx: &mut App| {
+ cx.set_global(AppState::new());
+
// Bring the menu bar to the foreground (so you can see the menu bar)
cx.activate(true);
// Register the `quit` function so it can be referenced by the `MenuItem::action` in the menu bar
cx.on_action(quit);
+ cx.on_action(toggle_check);
// Add menu items
- cx.set_menus(vec![Menu {
- name: "set_menus".into(),
- items: vec![
- MenuItem::os_submenu("Services", SystemMenuType::Services),
- MenuItem::separator(),
- MenuItem::action("Quit", Quit),
- ],
- }]);
+ set_app_menus(cx);
cx.open_window(WindowOptions::default(), |_, cx| cx.new(|_| SetMenus {}))
.unwrap();
});
}
+#[derive(PartialEq)]
+enum ViewMode {
+ List,
+ Grid,
+}
+
+impl ViewMode {
+ fn toggle(&mut self) {
+ *self = match self {
+ ViewMode::List => ViewMode::Grid,
+ ViewMode::Grid => ViewMode::List,
+ }
+ }
+}
+
+impl Into<SharedString> for ViewMode {
+ fn into(self) -> SharedString {
+ match self {
+ ViewMode::List => "List",
+ ViewMode::Grid => "Grid",
+ }
+ .into()
+ }
+}
+
+struct AppState {
+ view_mode: ViewMode,
+}
+
+impl AppState {
+ fn new() -> Self {
+ Self {
+ view_mode: ViewMode::List,
+ }
+ }
+}
+
+impl Global for AppState {}
+
+fn set_app_menus(cx: &mut App) {
+ let app_state = cx.global::<AppState>();
+ cx.set_menus(vec![Menu {
+ name: "set_menus".into(),
+ items: vec![
+ MenuItem::os_submenu("Services", SystemMenuType::Services),
+ MenuItem::separator(),
+ MenuItem::action(ViewMode::List, ToggleCheck)
+ .checked(app_state.view_mode == ViewMode::List),
+ MenuItem::action(ViewMode::Grid, ToggleCheck)
+ .checked(app_state.view_mode == ViewMode::Grid),
+ MenuItem::separator(),
+ MenuItem::action("Quit", Quit),
+ ],
+ }]);
+}
+
// Associate actions using the `actions!` macro (or `Action` derive macro)
-actions!(set_menus, [Quit]);
+actions!(set_menus, [Quit, ToggleCheck]);
// Define the quit function that is registered with the App
fn quit(_: &Quit, cx: &mut App) {
println!("Gracefully quitting the application . . .");
cx.quit();
}
+
+fn toggle_check(_: &ToggleCheck, cx: &mut App) {
+ let app_state = cx.global_mut::<AppState>();
+ app_state.view_mode.toggle();
+ set_app_menus(cx);
+}
@@ -281,7 +281,7 @@ impl BackgroundExecutor {
});
let mut cx = std::task::Context::from_waker(&waker);
- let duration = Duration::from_secs(500);
+ let duration = Duration::from_secs(180);
let mut test_should_end_by = Instant::now() + duration;
loop {
@@ -107,7 +107,7 @@ pub use util::{FutureExt, Timeout, arc_cow::ArcCow};
pub use view::*;
pub use window::*;
-use std::{any::Any, borrow::BorrowMut, future::Future};
+use std::{any::Any, future::Future};
use taffy::TaffyLayoutEngine;
/// The context trait, allows the different contexts in GPUI to be used
@@ -253,7 +253,7 @@ pub trait BorrowAppContext {
impl<C> BorrowAppContext for C
where
- C: BorrowMut<App>,
+ C: std::borrow::BorrowMut<App>,
{
fn set_global<G: Global>(&mut self, global: G) {
self.borrow_mut().set_global(global)
@@ -64,12 +64,15 @@ pub enum MenuItem {
/// The name of this menu item
name: SharedString,
- /// the action to perform when this menu item is selected
+ /// The action to perform when this menu item is selected
action: Box<dyn Action>,
/// The OS Action that corresponds to this action, if any
/// See [`OsAction`] for more information
os_action: Option<OsAction>,
+
+ /// Whether this action is checked
+ checked: bool,
},
}
@@ -98,6 +101,7 @@ impl MenuItem {
name: name.into(),
action: Box::new(action),
os_action: None,
+ checked: false,
}
}
@@ -111,6 +115,7 @@ impl MenuItem {
name: name.into(),
action: Box::new(action),
os_action: Some(os_action),
+ checked: false,
}
}
@@ -123,14 +128,36 @@ impl MenuItem {
name,
action,
os_action,
+ checked,
} => OwnedMenuItem::Action {
name: name.into(),
action,
os_action,
+ checked,
},
MenuItem::SystemMenu(os_menu) => OwnedMenuItem::SystemMenu(os_menu.owned()),
}
}
+
+ /// Set whether this menu item is checked
+ ///
+ /// Only for [`MenuItem::Action`], otherwise, will be ignored
+ pub fn checked(mut self, checked: bool) -> Self {
+ match self {
+ MenuItem::Action {
+ action,
+ os_action,
+ name,
+ ..
+ } => MenuItem::Action {
+ name,
+ action,
+ os_action,
+ checked,
+ },
+ _ => self,
+ }
+ }
}
/// OS menus are menus that are recognized by the operating system
@@ -171,12 +198,15 @@ pub enum OwnedMenuItem {
/// The name of this menu item
name: String,
- /// the action to perform when this menu item is selected
+ /// The action to perform when this menu item is selected
action: Box<dyn Action>,
/// The OS Action that corresponds to this action, if any
/// See [`OsAction`] for more information
os_action: Option<OsAction>,
+
+ /// Whether this action is checked
+ checked: bool,
},
}
@@ -189,10 +219,12 @@ impl Clone for OwnedMenuItem {
name,
action,
os_action,
+ checked,
} => OwnedMenuItem::Action {
name: name.clone(),
action: action.boxed_clone(),
os_action: *os_action,
+ checked: *checked,
},
OwnedMenuItem::SystemMenu(os_menu) => OwnedMenuItem::SystemMenu(os_menu.clone()),
}
@@ -19,7 +19,7 @@ use cocoa::{
NSApplication, NSApplicationActivationPolicy::NSApplicationActivationPolicyRegular,
NSEventModifierFlags, NSMenu, NSMenuItem, NSModalResponse, NSOpenPanel, NSPasteboard,
NSPasteboardTypePNG, NSPasteboardTypeRTF, NSPasteboardTypeRTFD, NSPasteboardTypeString,
- NSPasteboardTypeTIFF, NSSavePanel, NSWindow,
+ NSPasteboardTypeTIFF, NSSavePanel, NSVisualEffectState, NSVisualEffectView, NSWindow,
},
base::{BOOL, NO, YES, id, nil, selector},
foundation::{
@@ -315,6 +315,7 @@ impl MacPlatform {
name,
action,
os_action,
+ checked,
} => {
// Note that this is intentionally using earlier bindings, whereas typically
// later ones take display precedence. See the discussion on
@@ -409,6 +410,10 @@ impl MacPlatform {
.autorelease();
}
+ if *checked {
+ item.setState_(NSVisualEffectState::Active);
+ }
+
let tag = actions.len() as NSInteger;
let _: () = msg_send![item, setTag: tag];
actions.push(action.boxed_clone());
@@ -211,8 +211,8 @@ impl DirectWriteTextSystem {
})))
}
- pub(crate) fn handle_gpu_lost(&self, directx_devices: &DirectXDevices) {
- self.0.write().handle_gpu_lost(directx_devices);
+ pub(crate) fn handle_gpu_lost(&self, directx_devices: &DirectXDevices) -> Result<()> {
+ self.0.write().handle_gpu_lost(directx_devices)
}
}
@@ -231,7 +231,9 @@ impl PlatformTextSystem for DirectWriteTextSystem {
Ok(*font_id)
} else {
let mut lock = RwLockUpgradableReadGuard::upgrade(lock);
- let font_id = lock.select_font(font);
+ let font_id = lock
+ .select_font(font)
+ .with_context(|| format!("Failed to select font: {:?}", font))?;
lock.font_selections.insert(font.clone(), font_id);
Ok(font_id)
}
@@ -457,7 +459,7 @@ impl DirectWriteState {
}
}
- fn select_font(&mut self, target_font: &Font) -> FontId {
+ fn select_font(&mut self, target_font: &Font) -> Option<FontId> {
unsafe {
if target_font.family == ".SystemUIFont" {
let family = self.system_ui_font_name.clone();
@@ -468,7 +470,6 @@ impl DirectWriteState {
&target_font.features,
target_font.fallbacks.as_ref(),
)
- .unwrap()
} else {
let family = self.system_ui_font_name.clone();
self.find_font_id(
@@ -478,7 +479,7 @@ impl DirectWriteState {
&target_font.features,
target_font.fallbacks.as_ref(),
)
- .unwrap_or_else(|| {
+ .or_else(|| {
#[cfg(any(test, feature = "test-support"))]
{
panic!("ERROR: {} font not found!", target_font.family);
@@ -494,7 +495,6 @@ impl DirectWriteState {
target_font.fallbacks.as_ref(),
true,
)
- .unwrap()
}
})
}
@@ -1215,18 +1215,11 @@ impl DirectWriteState {
result
}
- fn handle_gpu_lost(&mut self, directx_devices: &DirectXDevices) {
- try_to_recover_from_device_lost(
- || GPUState::new(directx_devices).context("Recreating GPU state for DirectWrite"),
- |gpu_state| self.components.gpu_state = gpu_state,
- || {
- log::error!(
- "Failed to recreate GPU state for DirectWrite after multiple attempts."
- );
- // Do something here?
- // At this point, the device loss is considered unrecoverable.
- },
- );
+ fn handle_gpu_lost(&mut self, directx_devices: &DirectXDevices) -> Result<()> {
+ try_to_recover_from_device_lost(|| {
+ GPUState::new(directx_devices).context("Recreating GPU state for DirectWrite")
+ })
+ .map(|gpu_state| self.components.gpu_state = gpu_state)
}
}
@@ -1479,8 +1472,10 @@ impl IDWriteTextRenderer_Impl for TextRenderer_Impl {
.get(&font_identifier)
{
*id
+ } else if let Some(id) = context.text_system.select_font(&font_struct) {
+ id
} else {
- context.text_system.select_font(&font_struct)
+ return Err(Error::new(DWRITE_E_NOFONT, "Failed to select font"));
};
let glyph_ids = unsafe { std::slice::from_raw_parts(glyphrun.glyphIndices, glyph_count) };
@@ -1,4 +1,5 @@
use anyhow::{Context, Result};
+use itertools::Itertools;
use util::ResultExt;
use windows::Win32::{
Foundation::HMODULE,
@@ -20,24 +21,18 @@ use windows::Win32::{
};
use windows::core::Interface;
-pub(crate) fn try_to_recover_from_device_lost<T>(
- mut f: impl FnMut() -> Result<T>,
- on_success: impl FnOnce(T),
- on_error: impl FnOnce(),
-) {
- let result = (0..5).find_map(|i| {
- if i > 0 {
- // Add a small delay before retrying
- std::thread::sleep(std::time::Duration::from_millis(100));
- }
- f().log_err()
- });
-
- if let Some(result) = result {
- on_success(result);
- } else {
- on_error();
- }
+pub(crate) fn try_to_recover_from_device_lost<T>(mut f: impl FnMut() -> Result<T>) -> Result<T> {
+ (0..5)
+ .map(|i| {
+ if i > 0 {
+ // Add a small delay before retrying
+ std::thread::sleep(std::time::Duration::from_millis(100 + i * 10));
+ }
+ f()
+ })
+ .find_or_last(Result::is_ok)
+ .unwrap()
+ .context("DirectXRenderer failed to recover from lost device after multiple attempts")
}
#[derive(Clone)]
@@ -1,5 +1,5 @@
use std::{
- mem::ManuallyDrop,
+ slice,
sync::{Arc, OnceLock},
};
@@ -39,12 +39,15 @@ pub(crate) struct FontInfo {
pub(crate) struct DirectXRenderer {
hwnd: HWND,
atlas: Arc<DirectXAtlas>,
- devices: ManuallyDrop<DirectXRendererDevices>,
- resources: ManuallyDrop<DirectXResources>,
+ devices: Option<DirectXRendererDevices>,
+ resources: Option<DirectXResources>,
globals: DirectXGlobalElements,
pipelines: DirectXRenderPipelines,
direct_composition: Option<DirectComposition>,
font_info: &'static FontInfo,
+
+ width: u32,
+ height: u32,
}
/// Direct3D objects
@@ -60,19 +63,17 @@ pub(crate) struct DirectXRendererDevices {
struct DirectXResources {
// Direct3D rendering objects
swap_chain: IDXGISwapChain1,
- render_target: ManuallyDrop<ID3D11Texture2D>,
- render_target_view: [Option<ID3D11RenderTargetView>; 1],
+ render_target: Option<ID3D11Texture2D>,
+ render_target_view: Option<ID3D11RenderTargetView>,
// Path intermediate textures (with MSAA)
path_intermediate_texture: ID3D11Texture2D,
- path_intermediate_srv: [Option<ID3D11ShaderResourceView>; 1],
+ path_intermediate_srv: Option<ID3D11ShaderResourceView>,
path_intermediate_msaa_texture: ID3D11Texture2D,
- path_intermediate_msaa_view: [Option<ID3D11RenderTargetView>; 1],
+ path_intermediate_msaa_view: Option<ID3D11RenderTargetView>,
- // Cached window size and viewport
- width: u32,
- height: u32,
- viewport: [D3D11_VIEWPORT; 1],
+ // Cached viewport
+ viewport: D3D11_VIEWPORT,
}
struct DirectXRenderPipelines {
@@ -86,8 +87,8 @@ struct DirectXRenderPipelines {
}
struct DirectXGlobalElements {
- global_params_buffer: [Option<ID3D11Buffer>; 1],
- sampler: [Option<ID3D11SamplerState>; 1],
+ global_params_buffer: Option<ID3D11Buffer>,
+ sampler: Option<ID3D11SamplerState>,
}
struct DirectComposition {
@@ -100,7 +101,7 @@ impl DirectXRendererDevices {
pub(crate) fn new(
directx_devices: &DirectXDevices,
disable_direct_composition: bool,
- ) -> Result<ManuallyDrop<Self>> {
+ ) -> Result<Self> {
let DirectXDevices {
adapter,
dxgi_factory,
@@ -113,13 +114,13 @@ impl DirectXRendererDevices {
Some(device.cast().context("Creating DXGI device")?)
};
- Ok(ManuallyDrop::new(Self {
+ Ok(Self {
adapter: adapter.clone(),
dxgi_factory: dxgi_factory.clone(),
device: device.clone(),
device_context: device_context.clone(),
dxgi_device,
- }))
+ })
}
}
@@ -158,12 +159,14 @@ impl DirectXRenderer {
Ok(DirectXRenderer {
hwnd,
atlas,
- devices,
- resources,
+ devices: Some(devices),
+ resources: Some(resources),
globals,
pipelines,
direct_composition,
font_info: Self::get_font_info(),
+ width: 1,
+ height: 1,
})
}
@@ -172,55 +175,49 @@ impl DirectXRenderer {
}
fn pre_draw(&self) -> Result<()> {
+ let resources = self.resources.as_ref().expect("resources missing");
+ let device_context = &self
+ .devices
+ .as_ref()
+ .expect("devices missing")
+ .device_context;
update_buffer(
- &self.devices.device_context,
- self.globals.global_params_buffer[0].as_ref().unwrap(),
+ device_context,
+ self.globals.global_params_buffer.as_ref().unwrap(),
&[GlobalParams {
gamma_ratios: self.font_info.gamma_ratios,
- viewport_size: [
- self.resources.viewport[0].Width,
- self.resources.viewport[0].Height,
- ],
+ viewport_size: [resources.viewport.Width, resources.viewport.Height],
grayscale_enhanced_contrast: self.font_info.grayscale_enhanced_contrast,
_pad: 0,
}],
)?;
unsafe {
- self.devices.device_context.ClearRenderTargetView(
- self.resources.render_target_view[0].as_ref().unwrap(),
- &[0.0; 4],
- );
- self.devices
- .device_context
- .OMSetRenderTargets(Some(&self.resources.render_target_view), None);
- self.devices
- .device_context
- .RSSetViewports(Some(&self.resources.viewport));
+ device_context
+ .ClearRenderTargetView(resources.render_target_view.as_ref().unwrap(), &[0.0; 4]);
+ device_context
+ .OMSetRenderTargets(Some(slice::from_ref(&resources.render_target_view)), None);
+ device_context.RSSetViewports(Some(slice::from_ref(&resources.viewport)));
}
Ok(())
}
#[inline]
fn present(&mut self) -> Result<()> {
- let result = unsafe { self.resources.swap_chain.Present(0, DXGI_PRESENT(0)) };
+ let result = unsafe {
+ self.resources
+ .as_ref()
+ .expect("resources missing")
+ .swap_chain
+ .Present(0, DXGI_PRESENT(0))
+ };
result.ok().context("Presenting swap chain failed")
}
- pub(crate) fn handle_device_lost(&mut self, directx_devices: &DirectXDevices) {
- try_to_recover_from_device_lost(
- || {
- self.handle_device_lost_impl(directx_devices)
- .context("DirectXRenderer handling device lost")
- },
- |_| {},
- || {
- log::error!(
- "DirectXRenderer failed to recover from device lost after multiple attempts"
- );
- // Do something here?
- // At this point, the device loss is considered unrecoverable.
- },
- );
+ pub(crate) fn handle_device_lost(&mut self, directx_devices: &DirectXDevices) -> Result<()> {
+ try_to_recover_from_device_lost(|| {
+ self.handle_device_lost_impl(directx_devices)
+ .context("DirectXRenderer handling device lost")
+ })
}
fn handle_device_lost_impl(&mut self, directx_devices: &DirectXDevices) -> Result<()> {
@@ -228,35 +225,41 @@ impl DirectXRenderer {
unsafe {
#[cfg(debug_assertions)]
- report_live_objects(&self.devices.device)
- .context("Failed to report live objects after device lost")
- .log_err();
-
- ManuallyDrop::drop(&mut self.resources);
- self.devices.device_context.OMSetRenderTargets(None, None);
- self.devices.device_context.ClearState();
- self.devices.device_context.Flush();
+ if let Some(devices) = &self.devices {
+ report_live_objects(&devices.device)
+ .context("Failed to report live objects after device lost")
+ .log_err();
+ }
- #[cfg(debug_assertions)]
- report_live_objects(&self.devices.device)
- .context("Failed to report live objects after device lost")
- .log_err();
+ self.resources.take();
+ if let Some(devices) = &self.devices {
+ devices.device_context.OMSetRenderTargets(None, None);
+ devices.device_context.ClearState();
+ devices.device_context.Flush();
+ #[cfg(debug_assertions)]
+ report_live_objects(&devices.device)
+ .context("Failed to report live objects after device lost")
+ .log_err();
+ }
- drop(self.direct_composition.take());
- ManuallyDrop::drop(&mut self.devices);
+ self.direct_composition.take();
+ self.devices.take();
}
let devices = DirectXRendererDevices::new(directx_devices, disable_direct_composition)
.context("Recreating DirectX devices")?;
let resources = DirectXResources::new(
&devices,
- self.resources.width,
- self.resources.height,
+ self.width,
+ self.height,
self.hwnd,
disable_direct_composition,
- )?;
- let globals = DirectXGlobalElements::new(&devices.device)?;
- let pipelines = DirectXRenderPipelines::new(&devices.device)?;
+ )
+ .context("Creating DirectX resources")?;
+ let globals = DirectXGlobalElements::new(&devices.device)
+ .context("Creating DirectXGlobalElements")?;
+ let pipelines = DirectXRenderPipelines::new(&devices.device)
+ .context("Creating DirectXRenderPipelines")?;
let direct_composition = if disable_direct_composition {
None
@@ -269,17 +272,17 @@ impl DirectXRenderer {
self.atlas
.handle_device_lost(&devices.device, &devices.device_context);
- self.devices = devices;
- self.resources = resources;
- self.globals = globals;
- self.pipelines = pipelines;
- self.direct_composition = direct_composition;
unsafe {
- self.devices
+ devices
.device_context
- .OMSetRenderTargets(Some(&self.resources.render_target_view), None);
+ .OMSetRenderTargets(Some(slice::from_ref(&resources.render_target_view)), None);
}
+ self.devices = Some(devices);
+ self.resources = Some(resources);
+ self.globals = globals;
+ self.pipelines = pipelines;
+ self.direct_composition = direct_composition;
Ok(())
}
@@ -318,23 +321,25 @@ impl DirectXRenderer {
pub(crate) fn resize(&mut self, new_size: Size<DevicePixels>) -> Result<()> {
let width = new_size.width.0.max(1) as u32;
let height = new_size.height.0.max(1) as u32;
- if self.resources.width == width && self.resources.height == height {
+ if self.width == width && self.height == height {
return Ok(());
}
- self.resources.width = width;
- self.resources.height = height;
+ self.width = width;
+ self.height = height;
// Clear the render target before resizing
- unsafe { self.devices.device_context.OMSetRenderTargets(None, None) };
- unsafe { ManuallyDrop::drop(&mut self.resources.render_target) };
- drop(self.resources.render_target_view[0].take().unwrap());
+ let devices = self.devices.as_ref().context("devices missing")?;
+ unsafe { devices.device_context.OMSetRenderTargets(None, None) };
+ let resources = self.resources.as_mut().context("resources missing")?;
+ resources.render_target.take();
+ resources.render_target_view.take();
// Resizing the swap chain requires a call to the underlying DXGI adapter, which can return the device removed error.
// The app might have moved to a monitor that's attached to a different graphics device.
// When a graphics device is removed or reset, the desktop resolution often changes, resulting in a window size change.
// But here we just return the error, because we are handling device lost scenarios elsewhere.
unsafe {
- self.resources
+ resources
.swap_chain
.ResizeBuffers(
BUFFER_COUNT as u32,
@@ -346,12 +351,11 @@ impl DirectXRenderer {
.context("Failed to resize swap chain")?;
}
- self.resources
- .recreate_resources(&self.devices, width, height)?;
+ resources.recreate_resources(devices, width, height)?;
unsafe {
- self.devices
+ devices
.device_context
- .OMSetRenderTargets(Some(&self.resources.render_target_view), None);
+ .OMSetRenderTargets(Some(slice::from_ref(&resources.render_target_view)), None);
}
Ok(())
@@ -361,15 +365,22 @@ impl DirectXRenderer {
if shadows.is_empty() {
return Ok(());
}
+ let devices = self.devices.as_ref().context("devices missing")?;
self.pipelines.shadow_pipeline.update_buffer(
- &self.devices.device,
- &self.devices.device_context,
+ &devices.device,
+ &devices.device_context,
shadows,
)?;
self.pipelines.shadow_pipeline.draw(
- &self.devices.device_context,
- &self.resources.viewport,
- &self.globals.global_params_buffer,
+ &devices.device_context,
+ slice::from_ref(
+ &self
+ .resources
+ .as_ref()
+ .context("resources missing")?
+ .viewport,
+ ),
+ slice::from_ref(&self.globals.global_params_buffer),
D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP,
4,
shadows.len() as u32,
@@ -380,15 +391,22 @@ impl DirectXRenderer {
if quads.is_empty() {
return Ok(());
}
+ let devices = self.devices.as_ref().context("devices missing")?;
self.pipelines.quad_pipeline.update_buffer(
- &self.devices.device,
- &self.devices.device_context,
+ &devices.device,
+ &devices.device_context,
quads,
)?;
self.pipelines.quad_pipeline.draw(
- &self.devices.device_context,
- &self.resources.viewport,
- &self.globals.global_params_buffer,
+ &devices.device_context,
+ slice::from_ref(
+ &self
+ .resources
+ .as_ref()
+ .context("resources missing")?
+ .viewport,
+ ),
+ slice::from_ref(&self.globals.global_params_buffer),
D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP,
4,
quads.len() as u32,
@@ -400,18 +418,19 @@ impl DirectXRenderer {
return Ok(());
}
+ let devices = self.devices.as_ref().context("devices missing")?;
+ let resources = self.resources.as_ref().context("resources missing")?;
// Clear intermediate MSAA texture
unsafe {
- self.devices.device_context.ClearRenderTargetView(
- self.resources.path_intermediate_msaa_view[0]
- .as_ref()
- .unwrap(),
+ devices.device_context.ClearRenderTargetView(
+ resources.path_intermediate_msaa_view.as_ref().unwrap(),
&[0.0; 4],
);
// Set intermediate MSAA texture as render target
- self.devices
- .device_context
- .OMSetRenderTargets(Some(&self.resources.path_intermediate_msaa_view), None);
+ devices.device_context.OMSetRenderTargets(
+ Some(slice::from_ref(&resources.path_intermediate_msaa_view)),
+ None,
+ );
}
// Collect all vertices and sprites for a single draw call
@@ -427,14 +446,15 @@ impl DirectXRenderer {
}
self.pipelines.path_rasterization_pipeline.update_buffer(
- &self.devices.device,
- &self.devices.device_context,
+ &devices.device,
+ &devices.device_context,
&vertices,
)?;
+
self.pipelines.path_rasterization_pipeline.draw(
- &self.devices.device_context,
- &self.resources.viewport,
- &self.globals.global_params_buffer,
+ &devices.device_context,
+ slice::from_ref(&resources.viewport),
+ slice::from_ref(&self.globals.global_params_buffer),
D3D_PRIMITIVE_TOPOLOGY_TRIANGLELIST,
vertices.len() as u32,
1,
@@ -442,17 +462,17 @@ impl DirectXRenderer {
// Resolve MSAA to non-MSAA intermediate texture
unsafe {
- self.devices.device_context.ResolveSubresource(
- &self.resources.path_intermediate_texture,
+ devices.device_context.ResolveSubresource(
+ &resources.path_intermediate_texture,
0,
- &self.resources.path_intermediate_msaa_texture,
+ &resources.path_intermediate_msaa_texture,
0,
RENDER_TARGET_FORMAT,
);
// Restore main render target
- self.devices
+ devices
.device_context
- .OMSetRenderTargets(Some(&self.resources.render_target_view), None);
+ .OMSetRenderTargets(Some(slice::from_ref(&resources.render_target_view)), None);
}
Ok(())
@@ -485,19 +505,21 @@ impl DirectXRenderer {
vec![PathSprite { bounds }]
};
+ let devices = self.devices.as_ref().context("devices missing")?;
+ let resources = self.resources.as_ref().context("resources missing")?;
self.pipelines.path_sprite_pipeline.update_buffer(
- &self.devices.device,
- &self.devices.device_context,
+ &devices.device,
+ &devices.device_context,
&sprites,
)?;
// Draw the sprites with the path texture
self.pipelines.path_sprite_pipeline.draw_with_texture(
- &self.devices.device_context,
- &self.resources.path_intermediate_srv,
- &self.resources.viewport,
- &self.globals.global_params_buffer,
- &self.globals.sampler,
+ &devices.device_context,
+ slice::from_ref(&resources.path_intermediate_srv),
+ slice::from_ref(&resources.viewport),
+ slice::from_ref(&self.globals.global_params_buffer),
+ slice::from_ref(&self.globals.sampler),
sprites.len() as u32,
)
}
@@ -506,15 +528,17 @@ impl DirectXRenderer {
if underlines.is_empty() {
return Ok(());
}
+ let devices = self.devices.as_ref().context("devices missing")?;
+ let resources = self.resources.as_ref().context("resources missing")?;
self.pipelines.underline_pipeline.update_buffer(
- &self.devices.device,
- &self.devices.device_context,
+ &devices.device,
+ &devices.device_context,
underlines,
)?;
self.pipelines.underline_pipeline.draw(
- &self.devices.device_context,
- &self.resources.viewport,
- &self.globals.global_params_buffer,
+ &devices.device_context,
+ slice::from_ref(&resources.viewport),
+ slice::from_ref(&self.globals.global_params_buffer),
D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP,
4,
underlines.len() as u32,
@@ -529,18 +553,20 @@ impl DirectXRenderer {
if sprites.is_empty() {
return Ok(());
}
+ let devices = self.devices.as_ref().context("devices missing")?;
+ let resources = self.resources.as_ref().context("resources missing")?;
self.pipelines.mono_sprites.update_buffer(
- &self.devices.device,
- &self.devices.device_context,
+ &devices.device,
+ &devices.device_context,
sprites,
)?;
let texture_view = self.atlas.get_texture_view(texture_id);
self.pipelines.mono_sprites.draw_with_texture(
- &self.devices.device_context,
+ &devices.device_context,
&texture_view,
- &self.resources.viewport,
- &self.globals.global_params_buffer,
- &self.globals.sampler,
+ slice::from_ref(&resources.viewport),
+ slice::from_ref(&self.globals.global_params_buffer),
+ slice::from_ref(&self.globals.sampler),
sprites.len() as u32,
)
}
@@ -553,18 +579,21 @@ impl DirectXRenderer {
if sprites.is_empty() {
return Ok(());
}
+
+ let devices = self.devices.as_ref().context("devices missing")?;
+ let resources = self.resources.as_ref().context("resources missing")?;
self.pipelines.poly_sprites.update_buffer(
- &self.devices.device,
- &self.devices.device_context,
+ &devices.device,
+ &devices.device_context,
sprites,
)?;
let texture_view = self.atlas.get_texture_view(texture_id);
self.pipelines.poly_sprites.draw_with_texture(
- &self.devices.device_context,
+ &devices.device_context,
&texture_view,
- &self.resources.viewport,
- &self.globals.global_params_buffer,
- &self.globals.sampler,
+ slice::from_ref(&resources.viewport),
+ slice::from_ref(&self.globals.global_params_buffer),
+ slice::from_ref(&self.globals.sampler),
sprites.len() as u32,
)
}
@@ -577,7 +606,8 @@ impl DirectXRenderer {
}
pub(crate) fn gpu_specs(&self) -> Result<GpuSpecs> {
- let desc = unsafe { self.devices.adapter.GetDesc1() }?;
+ let devices = self.devices.as_ref().context("devices missing")?;
+ let desc = unsafe { devices.adapter.GetDesc1() }?;
let is_software_emulated = (desc.Flags & DXGI_ADAPTER_FLAG_SOFTWARE.0 as u32) != 0;
let device_name = String::from_utf16_lossy(&desc.Description)
.trim_matches(char::from(0))
@@ -592,7 +622,7 @@ impl DirectXRenderer {
0x10DE => nvidia::get_driver_version(),
0x1002 => amd::get_driver_version(),
// For Intel and other vendors, we use the DXGI API to get the driver version.
- _ => dxgi::get_driver_version(&self.devices.adapter),
+ _ => dxgi::get_driver_version(&devices.adapter),
}
.context("Failed to get gpu driver info")
.log_err()
@@ -626,7 +656,7 @@ impl DirectXResources {
height: u32,
hwnd: HWND,
disable_direct_composition: bool,
- ) -> Result<ManuallyDrop<Self>> {
+ ) -> Result<Self> {
let swap_chain = if disable_direct_composition {
create_swap_chain(&devices.dxgi_factory, &devices.device, hwnd, width, height)?
} else {
@@ -649,18 +679,16 @@ impl DirectXResources {
) = create_resources(devices, &swap_chain, width, height)?;
set_rasterizer_state(&devices.device, &devices.device_context)?;
- Ok(ManuallyDrop::new(Self {
+ Ok(Self {
swap_chain,
- render_target,
+ render_target: Some(render_target),
render_target_view,
path_intermediate_texture,
path_intermediate_msaa_texture,
path_intermediate_msaa_view,
path_intermediate_srv,
viewport,
- width,
- height,
- }))
+ })
}
#[inline]
@@ -679,7 +707,7 @@ impl DirectXResources {
path_intermediate_msaa_view,
viewport,
) = create_resources(devices, &self.swap_chain, width, height)?;
- self.render_target = render_target;
+ self.render_target = Some(render_target);
self.render_target_view = render_target_view;
self.path_intermediate_texture = path_intermediate_texture;
self.path_intermediate_msaa_texture = path_intermediate_msaa_texture;
@@ -789,7 +817,7 @@ impl DirectXGlobalElements {
};
let mut buffer = None;
device.CreateBuffer(&desc, None, Some(&mut buffer))?;
- [buffer]
+ buffer
};
let sampler = unsafe {
@@ -807,7 +835,7 @@ impl DirectXGlobalElements {
};
let mut output = None;
device.CreateSamplerState(&desc, Some(&mut output))?;
- [output]
+ output
};
Ok(Self {
@@ -832,7 +860,7 @@ struct PipelineState<T> {
fragment: ID3D11PixelShader,
buffer: ID3D11Buffer,
buffer_size: usize,
- view: [Option<ID3D11ShaderResourceView>; 1],
+ view: Option<ID3D11ShaderResourceView>,
blend_state: ID3D11BlendState,
_marker: std::marker::PhantomData<T>,
}
@@ -902,7 +930,7 @@ impl<T> PipelineState<T> {
) -> Result<()> {
set_pipeline_state(
device_context,
- &self.view,
+ slice::from_ref(&self.view),
topology,
viewport,
&self.vertex,
@@ -927,7 +955,7 @@ impl<T> PipelineState<T> {
) -> Result<()> {
set_pipeline_state(
device_context,
- &self.view,
+ slice::from_ref(&self.view),
D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP,
viewport,
&self.vertex,
@@ -964,18 +992,8 @@ struct PathSprite {
impl Drop for DirectXRenderer {
fn drop(&mut self) {
#[cfg(debug_assertions)]
- report_live_objects(&self.devices.device).ok();
- unsafe {
- ManuallyDrop::drop(&mut self.devices);
- ManuallyDrop::drop(&mut self.resources);
- }
- }
-}
-
-impl Drop for DirectXResources {
- fn drop(&mut self) {
- unsafe {
- ManuallyDrop::drop(&mut self.render_target);
+ if let Some(devices) = &self.devices {
+ report_live_objects(&devices.device).ok();
}
}
}
@@ -1049,13 +1067,13 @@ fn create_resources(
width: u32,
height: u32,
) -> Result<(
- ManuallyDrop<ID3D11Texture2D>,
- [Option<ID3D11RenderTargetView>; 1],
ID3D11Texture2D,
- [Option<ID3D11ShaderResourceView>; 1],
+ Option<ID3D11RenderTargetView>,
+ ID3D11Texture2D,
+ Option<ID3D11ShaderResourceView>,
ID3D11Texture2D,
- [Option<ID3D11RenderTargetView>; 1],
- [D3D11_VIEWPORT; 1],
+ Option<ID3D11RenderTargetView>,
+ D3D11_VIEWPORT,
)> {
let (render_target, render_target_view) =
create_render_target_and_its_view(swap_chain, &devices.device)?;
@@ -1079,17 +1097,11 @@ fn create_resources(
fn create_render_target_and_its_view(
swap_chain: &IDXGISwapChain1,
device: &ID3D11Device,
-) -> Result<(
- ManuallyDrop<ID3D11Texture2D>,
- [Option<ID3D11RenderTargetView>; 1],
-)> {
+) -> Result<(ID3D11Texture2D, Option<ID3D11RenderTargetView>)> {
let render_target: ID3D11Texture2D = unsafe { swap_chain.GetBuffer(0) }?;
let mut render_target_view = None;
unsafe { device.CreateRenderTargetView(&render_target, None, Some(&mut render_target_view))? };
- Ok((
- ManuallyDrop::new(render_target),
- [Some(render_target_view.unwrap())],
- ))
+ Ok((render_target, render_target_view))
}
#[inline]
@@ -1097,7 +1109,7 @@ fn create_path_intermediate_texture(
device: &ID3D11Device,
width: u32,
height: u32,
-) -> Result<(ID3D11Texture2D, [Option<ID3D11ShaderResourceView>; 1])> {
+) -> Result<(ID3D11Texture2D, Option<ID3D11ShaderResourceView>)> {
let texture = unsafe {
let mut output = None;
let desc = D3D11_TEXTURE2D_DESC {
@@ -1122,7 +1134,7 @@ fn create_path_intermediate_texture(
let mut shader_resource_view = None;
unsafe { device.CreateShaderResourceView(&texture, None, Some(&mut shader_resource_view))? };
- Ok((texture, [Some(shader_resource_view.unwrap())]))
+ Ok((texture, Some(shader_resource_view.unwrap())))
}
#[inline]
@@ -1130,7 +1142,7 @@ fn create_path_intermediate_msaa_texture_and_view(
device: &ID3D11Device,
width: u32,
height: u32,
-) -> Result<(ID3D11Texture2D, [Option<ID3D11RenderTargetView>; 1])> {
+) -> Result<(ID3D11Texture2D, Option<ID3D11RenderTargetView>)> {
let msaa_texture = unsafe {
let mut output = None;
let desc = D3D11_TEXTURE2D_DESC {
@@ -1153,15 +1165,11 @@ fn create_path_intermediate_msaa_texture_and_view(
};
let mut msaa_view = None;
unsafe { device.CreateRenderTargetView(&msaa_texture, None, Some(&mut msaa_view))? };
- Ok((msaa_texture, [Some(msaa_view.unwrap())]))
+ Ok((msaa_texture, Some(msaa_view.unwrap())))
}
#[inline]
-fn set_viewport(
- device_context: &ID3D11DeviceContext,
- width: f32,
- height: f32,
-) -> [D3D11_VIEWPORT; 1] {
+fn set_viewport(device_context: &ID3D11DeviceContext, width: f32, height: f32) -> D3D11_VIEWPORT {
let viewport = [D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
@@ -1171,7 +1179,7 @@ fn set_viewport(
MaxDepth: 1.0,
}];
unsafe { device_context.RSSetViewports(Some(&viewport)) };
- viewport
+ viewport[0]
}
#[inline]
@@ -1299,10 +1307,10 @@ fn create_buffer(
fn create_buffer_view(
device: &ID3D11Device,
buffer: &ID3D11Buffer,
-) -> Result<[Option<ID3D11ShaderResourceView>; 1]> {
+) -> Result<Option<ID3D11ShaderResourceView>> {
let mut view = None;
unsafe { device.CreateShaderResourceView(buffer, None, Some(&mut view)) }?;
- Ok([view])
+ Ok(view)
}
#[inline]
@@ -1133,7 +1133,9 @@ impl WindowsWindowInner {
let mut lock = self.state.borrow_mut();
let devices = lparam.0 as *const DirectXDevices;
let devices = unsafe { &*devices };
- lock.renderer.handle_device_lost(&devices);
+ if let Err(err) = lock.renderer.handle_device_lost(&devices) {
+ panic!("Device lost: {err}");
+ }
Some(0)
}
@@ -1,7 +1,6 @@
use std::{
cell::RefCell,
ffi::OsStr,
- mem::ManuallyDrop,
path::{Path, PathBuf},
rc::{Rc, Weak},
sync::{Arc, atomic::Ordering},
@@ -57,7 +56,7 @@ pub(crate) struct WindowsPlatformState {
jump_list: JumpList,
// NOTE: standard cursor handles don't need to close.
pub(crate) current_cursor: Option<HCURSOR>,
- directx_devices: ManuallyDrop<DirectXDevices>,
+ directx_devices: Option<DirectXDevices>,
}
#[derive(Default)]
@@ -76,7 +75,7 @@ impl WindowsPlatformState {
let callbacks = PlatformCallbacks::default();
let jump_list = JumpList::new();
let current_cursor = load_cursor(CursorStyle::Arrow);
- let directx_devices = ManuallyDrop::new(directx_devices);
+ let directx_devices = Some(directx_devices);
Self {
callbacks,
@@ -128,11 +127,17 @@ impl WindowsPlatform {
Some(HWND_MESSAGE),
None,
None,
- Some(&context as *const _ as *const _),
+ Some(&raw const context as *const _),
)
};
- let inner = context.inner.take().unwrap()?;
- let dispatcher = context.dispatcher.take().unwrap();
+ let inner = context
+ .inner
+ .take()
+ .context("CreateWindowExW did not run correctly")??;
+ let dispatcher = context
+ .dispatcher
+ .take()
+ .context("CreateWindowExW did not run correctly")?;
let handle = result?;
let disable_direct_composition = std::env::var(DISABLE_DIRECT_COMPOSITION)
@@ -190,7 +195,7 @@ impl WindowsPlatform {
main_receiver: self.inner.main_receiver.clone(),
platform_window_handle: self.handle,
disable_direct_composition: self.disable_direct_composition,
- directx_devices: (*self.inner.state.borrow().directx_devices).clone(),
+ directx_devices: self.inner.state.borrow().directx_devices.clone().unwrap(),
}
}
@@ -238,7 +243,7 @@ impl WindowsPlatform {
}
fn begin_vsync_thread(&self) {
- let mut directx_device = (*self.inner.state.borrow().directx_devices).clone();
+ let mut directx_device = self.inner.state.borrow().directx_devices.clone().unwrap();
let platform_window: SafeHwnd = self.handle.into();
let validation_number = self.inner.validation_number;
let all_windows = Arc::downgrade(&self.raw_window_handles);
@@ -250,13 +255,15 @@ impl WindowsPlatform {
loop {
vsync_provider.wait_for_vsync();
if check_device_lost(&directx_device.device) {
- handle_gpu_device_lost(
+ if let Err(err) = handle_gpu_device_lost(
&mut directx_device,
platform_window.as_raw(),
validation_number,
&all_windows,
&text_system,
- );
+ ) {
+ panic!("Device lost: {err}");
+ }
}
let Some(all_windows) = all_windows.upgrade() else {
break;
@@ -696,14 +703,24 @@ impl Platform for WindowsPlatform {
impl WindowsPlatformInner {
fn new(context: &mut PlatformWindowCreateContext) -> Result<Rc<Self>> {
let state = RefCell::new(WindowsPlatformState::new(
- context.directx_devices.take().unwrap(),
+ context
+ .directx_devices
+ .take()
+ .context("missing directx devices")?,
));
Ok(Rc::new(Self {
state,
raw_window_handles: context.raw_window_handles.clone(),
- dispatcher: context.dispatcher.as_ref().unwrap().clone(),
+ dispatcher: context
+ .dispatcher
+ .as_ref()
+ .context("missing dispatcher")?
+ .clone(),
validation_number: context.validation_number,
- main_receiver: context.main_receiver.take().unwrap(),
+ main_receiver: context
+ .main_receiver
+ .take()
+ .context("missing main receiver")?,
}))
}
@@ -826,10 +843,8 @@ impl WindowsPlatformInner {
let mut lock = self.state.borrow_mut();
let directx_devices = lparam.0 as *const DirectXDevices;
let directx_devices = unsafe { &*directx_devices };
- unsafe {
- ManuallyDrop::drop(&mut lock.directx_devices);
- }
- lock.directx_devices = ManuallyDrop::new(directx_devices.clone());
+ lock.directx_devices.take();
+ lock.directx_devices = Some(directx_devices.clone());
Some(0)
}
@@ -846,14 +861,6 @@ impl Drop for WindowsPlatform {
}
}
-impl Drop for WindowsPlatformState {
- fn drop(&mut self) {
- unsafe {
- ManuallyDrop::drop(&mut self.directx_devices);
- }
- }
-}
-
pub(crate) struct WindowCreationInfo {
pub(crate) icon: HICON,
pub(crate) executor: ForegroundExecutor,
@@ -1077,37 +1084,28 @@ fn handle_gpu_device_lost(
validation_number: usize,
all_windows: &std::sync::Weak<RwLock<SmallVec<[SafeHwnd; 4]>>>,
text_system: &std::sync::Weak<DirectWriteTextSystem>,
-) {
+) -> Result<()> {
// Here we wait a bit to ensure the system has time to recover from the device lost state.
// If we don't wait, the final drawing result will be blank.
std::thread::sleep(std::time::Duration::from_millis(350));
- try_to_recover_from_device_lost(
- || {
- DirectXDevices::new()
- .context("Failed to recreate new DirectX devices after device lost")
- },
- |new_devices| *directx_devices = new_devices,
- || {
- log::error!("Failed to recover DirectX devices after multiple attempts.");
- // Do something here?
- // At this point, the device loss is considered unrecoverable.
- // std::process::exit(1);
- },
- );
+ *directx_devices = try_to_recover_from_device_lost(|| {
+ DirectXDevices::new().context("Failed to recreate new DirectX devices after device lost")
+ })?;
log::info!("DirectX devices successfully recreated.");
+ let lparam = LPARAM(directx_devices as *const _ as _);
unsafe {
SendMessageW(
platform_window,
WM_GPUI_GPU_DEVICE_LOST,
Some(WPARAM(validation_number)),
- Some(LPARAM(directx_devices as *const _ as _)),
+ Some(lparam),
);
}
if let Some(text_system) = text_system.upgrade() {
- text_system.handle_gpu_lost(&directx_devices);
+ text_system.handle_gpu_lost(&directx_devices)?;
}
if let Some(all_windows) = all_windows.upgrade() {
for window in all_windows.read().iter() {
@@ -1116,7 +1114,7 @@ fn handle_gpu_device_lost(
window.as_raw(),
WM_GPUI_GPU_DEVICE_LOST,
Some(WPARAM(validation_number)),
- Some(LPARAM(directx_devices as *const _ as _)),
+ Some(lparam),
);
}
}
@@ -1132,6 +1130,7 @@ fn handle_gpu_device_lost(
}
}
}
+ Ok(())
}
const PLATFORM_WINDOW_CLASS_NAME: PCWSTR = w!("Zed::PlatformWindow");
@@ -1152,13 +1151,16 @@ unsafe extern "system" fn window_procedure(
lparam: LPARAM,
) -> LRESULT {
if msg == WM_NCCREATE {
- let params = lparam.0 as *const CREATESTRUCTW;
- let params = unsafe { &*params };
+ let params = unsafe { &*(lparam.0 as *const CREATESTRUCTW) };
let creation_context = params.lpCreateParams as *mut PlatformWindowCreateContext;
let creation_context = unsafe { &mut *creation_context };
+ let Some(main_sender) = creation_context.main_sender.take() else {
+ creation_context.inner = Some(Err(anyhow!("missing main sender")));
+ return LRESULT(0);
+ };
creation_context.dispatcher = Some(Arc::new(WindowsDispatcher::new(
- creation_context.main_sender.take().unwrap(),
+ main_sender,
hwnd,
creation_context.validation_number,
)));
@@ -61,7 +61,6 @@ pub struct WindowsWindowState {
pub(crate) struct WindowsWindowInner {
hwnd: HWND,
- pub(super) this: Weak<Self>,
drop_target_helper: IDropTargetHelper,
pub(crate) state: RefCell<WindowsWindowState>,
pub(crate) system_settings: RefCell<WindowsSystemSettings>,
@@ -215,9 +214,8 @@ impl WindowsWindowInner {
context.disable_direct_composition,
)?);
- Ok(Rc::new_cyclic(|this| Self {
+ Ok(Rc::new(Self {
hwnd,
- this: this.clone(),
drop_target_helper: context.drop_target_helper.clone(),
state,
handle: context.handle,
@@ -232,11 +230,8 @@ impl WindowsWindowInner {
}))
}
- fn toggle_fullscreen(&self) {
- let Some(this) = self.this.upgrade() else {
- log::error!("Unable to toggle fullscreen: window has been dropped");
- return;
- };
+ fn toggle_fullscreen(self: &Rc<Self>) {
+ let this = self.clone();
self.executor
.spawn(async move {
let mut lock = this.state.borrow_mut();
@@ -246,36 +241,42 @@ impl WindowsWindowInner {
y,
cx,
cy,
- } = if let Some(state) = lock.fullscreen.take() {
- state
- } else {
- let (window_bounds, _) = lock.calculate_window_bounds();
- lock.fullscreen_restore_bounds = window_bounds;
- let style = WINDOW_STYLE(unsafe { get_window_long(this.hwnd, GWL_STYLE) } as _);
- let mut rc = RECT::default();
- unsafe { GetWindowRect(this.hwnd, &mut rc) }
- .context("failed to get window rect")
- .log_err();
- let _ = lock.fullscreen.insert(StyleAndBounds {
- style,
- x: rc.left,
- y: rc.top,
- cx: rc.right - rc.left,
- cy: rc.bottom - rc.top,
- });
- let style = style
- & !(WS_THICKFRAME
- | WS_SYSMENU
- | WS_MAXIMIZEBOX
- | WS_MINIMIZEBOX
- | WS_CAPTION);
- let physical_bounds = lock.display.physical_bounds();
- StyleAndBounds {
- style,
- x: physical_bounds.left().0,
- y: physical_bounds.top().0,
- cx: physical_bounds.size.width.0,
- cy: physical_bounds.size.height.0,
+ } = match lock.fullscreen.take() {
+ Some(state) => state,
+ None => {
+ let (window_bounds, _) = lock.calculate_window_bounds();
+ lock.fullscreen_restore_bounds = window_bounds;
+ drop(lock);
+
+ let style =
+ WINDOW_STYLE(unsafe { get_window_long(this.hwnd, GWL_STYLE) } as _);
+ let mut rc = RECT::default();
+ unsafe { GetWindowRect(this.hwnd, &mut rc) }
+ .context("failed to get window rect")
+ .log_err();
+
+ lock = this.state.borrow_mut();
+ let _ = lock.fullscreen.insert(StyleAndBounds {
+ style,
+ x: rc.left,
+ y: rc.top,
+ cx: rc.right - rc.left,
+ cy: rc.bottom - rc.top,
+ });
+ let style = style
+ & !(WS_THICKFRAME
+ | WS_SYSMENU
+ | WS_MAXIMIZEBOX
+ | WS_MINIMIZEBOX
+ | WS_CAPTION);
+ let physical_bounds = lock.display.physical_bounds();
+ StyleAndBounds {
+ style,
+ x: physical_bounds.left().0,
+ y: physical_bounds.top().0,
+ cx: physical_bounds.size.width.0,
+ cy: physical_bounds.size.height.0,
+ }
}
};
drop(lock);
@@ -296,7 +297,7 @@ impl WindowsWindowInner {
.detach();
}
- fn set_window_placement(&self) -> Result<()> {
+ fn set_window_placement(self: &Rc<Self>) -> Result<()> {
let Some(open_status) = self.state.borrow_mut().initial_placement.take() else {
return Ok(());
};
@@ -900,9 +901,9 @@ impl IDropTarget_Impl for WindowsDragDropHandler_Impl {
if idata.u.hGlobal.is_invalid() {
return Ok(());
}
- let hdrop = idata.u.hGlobal.0 as *mut HDROP;
+ let hdrop = HDROP(idata.u.hGlobal.0);
let mut paths = SmallVec::<[PathBuf; 2]>::new();
- with_file_names(*hdrop, |file_name| {
+ with_file_names(hdrop, |file_name| {
if let Some(path) = PathBuf::from_str(&file_name).log_err() {
paths.push(path);
}
@@ -1166,8 +1167,7 @@ unsafe extern "system" fn window_procedure(
lparam: LPARAM,
) -> LRESULT {
if msg == WM_NCCREATE {
- let window_params = lparam.0 as *const CREATESTRUCTW;
- let window_params = unsafe { &*window_params };
+ let window_params = unsafe { &*(lparam.0 as *const CREATESTRUCTW) };
let window_creation_context = window_params.lpCreateParams as *mut WindowCreateContext;
let window_creation_context = unsafe { &mut *window_creation_context };
return match WindowsWindowInner::new(window_creation_context, hwnd, window_params) {
@@ -54,25 +54,9 @@ pub struct ShapedGlyph {
}
impl LineLayout {
- /// The index for the character at the given x coordinate
- pub fn index_for_x(&self, x: Pixels) -> Option<usize> {
- if x >= self.width {
- None
- } else {
- for run in self.runs.iter().rev() {
- for glyph in run.glyphs.iter().rev() {
- if glyph.position.x <= x {
- return Some(glyph.index);
- }
- }
- }
- Some(0)
- }
- }
-
/// closest_index_for_x returns the character boundary closest to the given x coordinate
/// (e.g. to handle aligning up/down arrow keys)
- pub fn closest_index_for_x(&self, x: Pixels) -> usize {
+ pub fn index_for_x(&self, x: Pixels) -> usize {
let mut prev_index = 0;
let mut prev_x = px(0.);
@@ -278,34 +262,10 @@ impl WrappedLineLayout {
}
/// The index corresponding to a given position in this layout for the given line height.
- ///
- /// See also [`Self::closest_index_for_position`].
pub fn index_for_position(
- &self,
- position: Point<Pixels>,
- line_height: Pixels,
- ) -> Result<usize, usize> {
- self._index_for_position(position, line_height, false)
- }
-
- /// The closest index to a given position in this layout for the given line height.
- ///
- /// Closest means the character boundary closest to the given position.
- ///
- /// See also [`LineLayout::closest_index_for_x`].
- pub fn closest_index_for_position(
- &self,
- position: Point<Pixels>,
- line_height: Pixels,
- ) -> Result<usize, usize> {
- self._index_for_position(position, line_height, true)
- }
-
- fn _index_for_position(
&self,
mut position: Point<Pixels>,
line_height: Pixels,
- closest: bool,
) -> Result<usize, usize> {
let wrapped_line_ix = (position.y / line_height) as usize;
@@ -345,16 +305,9 @@ impl WrappedLineLayout {
} else if position_in_unwrapped_line.x >= wrapped_line_end_x {
Err(wrapped_line_end_index)
} else {
- if closest {
- Ok(self
- .unwrapped_layout
- .closest_index_for_x(position_in_unwrapped_line.x))
- } else {
- Ok(self
- .unwrapped_layout
- .index_for_x(position_in_unwrapped_line.x)
- .unwrap())
- }
+ Ok(self
+ .unwrapped_layout
+ .index_for_x(position_in_unwrapped_line.x))
}
}
@@ -412,7 +412,6 @@ impl ProjectItem for ImageView {
}
pub fn init(cx: &mut App) {
- ImageViewerSettings::register(cx);
workspace::register_project_item::<ImageView>(cx);
workspace::register_serializable_item::<ImageView>(cx);
}
@@ -1,8 +1,8 @@
pub use settings::ImageFileSizeUnit;
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
/// The settings for the image viewer.
-#[derive(Clone, Debug, Default)]
+#[derive(Clone, Debug, Default, RegisterSetting)]
pub struct ImageViewerSettings {
/// The unit to use for displaying image file sizes.
///
@@ -3,7 +3,7 @@ use editor::scroll::Autoscroll;
use editor::{Editor, SelectionEffects};
use gpui::{App, AppContext as _, Context, Window, actions};
pub use settings::HourFormat;
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
use std::{
fs::OpenOptions,
path::{Path, PathBuf},
@@ -20,7 +20,7 @@ actions!(
);
/// Settings specific to journaling
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, RegisterSetting)]
pub struct JournalSettings {
/// The path of the directory where journal entries are stored.
///
@@ -44,8 +44,6 @@ impl settings::Settings for JournalSettings {
}
pub fn init(_: Arc<AppState>, cx: &mut App) {
- JournalSettings::register(cx);
-
cx.observe_new(
|workspace: &mut Workspace, _window, _cx: &mut Context<Workspace>| {
workspace.register_action(|workspace, _: &NewJournalEntry, window, cx| {
@@ -1102,9 +1102,6 @@ mod tests {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
- project::Project::init_settings(cx);
- workspace::init_settings(cx);
});
let fs = FakeFs::new(cx.executor());
@@ -747,7 +747,7 @@ impl EditPreview {
pub fn highlight_edits(
&self,
current_snapshot: &BufferSnapshot,
- edits: &[(Range<Anchor>, String)],
+ edits: &[(Range<Anchor>, impl AsRef<str>)],
include_deletions: bool,
cx: &App,
) -> HighlightedText {
@@ -774,7 +774,8 @@ impl EditPreview {
.end
.bias_right(&self.old_snapshot)
.to_offset(&self.applied_edits_snapshot);
- let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
+ let edit_start_in_preview_snapshot =
+ edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
let unchanged_range_in_preview_snapshot =
offset_in_preview_snapshot..edit_start_in_preview_snapshot;
@@ -799,7 +800,7 @@ impl EditPreview {
);
}
- if !edit_text.is_empty() {
+ if !edit_text.as_ref().is_empty() {
highlighted_text.add_text_from_buffer_range(
edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
&self.applied_edits_snapshot,
@@ -823,7 +824,7 @@ impl EditPreview {
highlighted_text.build()
}
- fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
+ fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
let (first, _) = edits.first()?;
let (last, _) = edits.last()?;
@@ -1173,7 +1174,7 @@ impl Buffer {
pub fn preview_edits(
&self,
- edits: Arc<[(Range<Anchor>, String)]>,
+ edits: Arc<[(Range<Anchor>, Arc<str>)]>,
cx: &App,
) -> Task<EditPreview> {
let registry = self.language_registry();
@@ -3120,15 +3120,13 @@ async fn test_preview_edits(cx: &mut TestAppContext) {
.map(|(range, text)| {
(
buffer.anchor_before(range.start)..buffer.anchor_after(range.end),
- text.to_string(),
+ text.into(),
)
})
- .collect::<Vec<_>>()
+ .collect::<Arc<[_]>>()
});
let edit_preview = buffer
- .read_with(cx, |buffer, cx| {
- buffer.preview_edits(edits.clone().into(), cx)
- })
+ .read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))
.await;
let highlighted_edits = cx.read(|cx| {
edit_preview.highlight_edits(&buffer.read(cx).snapshot(), &edits, include_deletions, cx)
@@ -3924,7 +3922,6 @@ fn assert_bracket_pairs(
fn init_settings(cx: &mut App, f: fn(&mut AllLanguageSettingsContent)) {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- crate::init(cx);
cx.update_global::<SettingsStore, _>(|settings, cx| {
settings.update_user_settings(cx, |content| f(&mut content.project.all_languages));
});
@@ -88,13 +88,6 @@ pub use syntax_map::{
pub use text::{AnchorRangeExt, LineEnding};
pub use tree_sitter::{Node, Parser, Tree, TreeCursor};
-/// Initializes the `language` crate.
-///
-/// This should be called before making use of items from the create.
-pub fn init(cx: &mut App) {
- language_settings::init(cx);
-}
-
static QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Mutex::new(vec![]);
static PARSERS: Mutex<Vec<Parser>> = Mutex::new(vec![]);
@@ -15,15 +15,10 @@ pub use settings::{
Formatter, FormatterList, InlayHintKind, LanguageSettingsContent, LspInsertMode,
RewrapBehavior, ShowWhitespaceSetting, SoftWrap, WordsCompletionMode,
};
-use settings::{Settings, SettingsLocation, SettingsStore};
+use settings::{RegisterSetting, Settings, SettingsLocation, SettingsStore};
use shellexpand;
use std::{borrow::Cow, num::NonZeroU32, path::Path, sync::Arc};
-/// Initializes the language settings.
-pub fn init(cx: &mut App) {
- AllLanguageSettings::register(cx);
-}
-
/// Returns the settings for the specified language from the provided file.
pub fn language_settings<'a>(
language: Option<LanguageName>,
@@ -50,7 +45,7 @@ pub fn all_language_settings<'a>(
}
/// The settings for all languages.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, RegisterSetting)]
pub struct AllLanguageSettings {
/// The edit prediction settings.
pub edit_predictions: EditPredictionSettings,
@@ -9,7 +9,7 @@ use std::{path::PathBuf, sync::Arc};
use async_trait::async_trait;
use collections::HashMap;
use fs::Fs;
-use gpui::{AsyncApp, SharedString};
+use gpui::{App, AsyncApp, SharedString};
use settings::WorktreeId;
use task::ShellKind;
use util::rel_path::RelPath;
@@ -110,7 +110,7 @@ pub trait ToolchainLister: Send + Sync + 'static {
fs: &dyn Fs,
) -> anyhow::Result<Toolchain>;
- fn activation_script(&self, toolchain: &Toolchain, shell: ShellKind) -> Vec<String>;
+ fn activation_script(&self, toolchain: &Toolchain, shell: ShellKind, cx: &App) -> Vec<String>;
/// Returns various "static" bits of information about this toolchain lister. This function should be pure.
fn meta(&self) -> ToolchainMetadata;
@@ -28,7 +28,6 @@ use crate::provider::x_ai::XAiLanguageModelProvider;
pub use crate::settings::*;
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
- crate::settings::init_settings(cx);
let registry = LanguageModelRegistry::global(cx);
registry.update(cx, |registry, cx| {
register_language_model_providers(registry, user_store, client.clone(), cx);
@@ -356,11 +356,13 @@ pub fn into_open_ai(
for content in message.content {
match content {
MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
- add_message_content_part(
- open_ai::MessagePart::Text { text },
- message.role,
- &mut messages,
- )
+ if !text.trim().is_empty() {
+ add_message_content_part(
+ open_ai::MessagePart::Text { text },
+ message.role,
+ &mut messages,
+ );
+ }
}
MessageContent::RedactedThinking(_) => {}
MessageContent::Image(image) => {
@@ -1,8 +1,7 @@
use std::sync::Arc;
use collections::HashMap;
-use gpui::App;
-use settings::Settings;
+use settings::RegisterSetting;
use crate::provider::{
anthropic::AnthropicSettings, bedrock::AmazonBedrockSettings, cloud::ZedDotDevSettings,
@@ -12,11 +11,7 @@ use crate::provider::{
vercel::VercelSettings, x_ai::XAiSettings,
};
-/// Initializes the language model settings.
-pub fn init_settings(cx: &mut App) {
- AllLanguageModelSettings::register(cx);
-}
-
+#[derive(Debug, RegisterSetting)]
pub struct AllLanguageModelSettings {
pub anthropic: AnthropicSettings,
pub bedrock: AmazonBedrockSettings,
@@ -109,12 +109,7 @@ fn init_test(cx: &mut gpui::TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- workspace::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
release_channel::init(SemanticVersion::default(), cx);
- language::init(cx);
- client::init_settings(cx);
- Project::init_settings(cx);
- editor::init_settings(cx);
});
}
@@ -69,6 +69,7 @@ settings.workspace = true
smol.workspace = true
url.workspace = true
task.workspace = true
+terminal.workspace = true
theme.workspace = true
toml.workspace = true
tree-sitter = { workspace = true, optional = true }
@@ -32,7 +32,6 @@ mod tests {
cx.update(|cx| {
let test_settings = SettingsStore::test(cx);
cx.set_global(test_settings);
- language::init(cx);
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings(cx, |s| {
s.project.all_languages.defaults.tab_size = NonZeroU32::new(2)
@@ -402,7 +402,6 @@ mod tests {
cx.update(|cx| {
let test_settings = SettingsStore::test(cx);
cx.set_global(test_settings);
- language::init(cx);
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings(cx, |s| {
s.project.all_languages.defaults.tab_size = NonZeroU32::new(2);
@@ -21,9 +21,11 @@ use project::Fs;
use project::lsp_store::language_server_settings;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
+use settings::Settings;
use smol::lock::OnceCell;
use std::cmp::Ordering;
use std::env::consts;
+use terminal::terminal_settings::TerminalSettings;
use util::command::new_smol_command;
use util::fs::{make_file_executable, remove_matching};
use util::rel_path::RelPath;
@@ -1171,7 +1173,7 @@ impl ToolchainLister for PythonToolchainProvider {
.context("Could not convert a venv into a toolchain")
}
- fn activation_script(&self, toolchain: &Toolchain, shell: ShellKind) -> Vec<String> {
+ fn activation_script(&self, toolchain: &Toolchain, shell: ShellKind, cx: &App) -> Vec<String> {
let Ok(toolchain) =
serde_json::from_value::<PythonToolchainData>(toolchain.as_json.clone())
else {
@@ -1184,10 +1186,34 @@ impl ToolchainLister for PythonToolchainProvider {
match toolchain.environment.kind {
Some(PythonEnvironmentKind::Conda) => {
+ let settings = TerminalSettings::get_global(cx);
+ let conda_manager = settings
+ .detect_venv
+ .as_option()
+ .map(|venv| venv.conda_manager)
+ .unwrap_or(settings::CondaManager::Auto);
+
+ let manager = match conda_manager {
+ settings::CondaManager::Conda => "conda",
+ settings::CondaManager::Mamba => "mamba",
+ settings::CondaManager::Micromamba => "micromamba",
+ settings::CondaManager::Auto => {
+ // When auto, prefer the detected manager or fall back to conda
+ toolchain
+ .environment
+ .manager
+ .as_ref()
+ .and_then(|m| m.executable.file_name())
+ .and_then(|name| name.to_str())
+ .filter(|name| matches!(*name, "conda" | "mamba" | "micromamba"))
+ .unwrap_or("conda")
+ }
+ };
+
if let Some(name) = &toolchain.environment.name {
- activation_script.push(format!("conda activate {name}"));
+ activation_script.push(format!("{manager} activate {name}"));
} else {
- activation_script.push("conda activate".to_string());
+ activation_script.push(format!("{manager} activate base"));
}
}
Some(PythonEnvironmentKind::Venv | PythonEnvironmentKind::VirtualEnv) => {
@@ -2245,7 +2271,6 @@ mod tests {
cx.update(|cx| {
let test_settings = SettingsStore::test(cx);
cx.set_global(test_settings);
- language::init(cx);
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings(cx, |s| {
s.project.all_languages.defaults.tab_size = NonZeroU32::new(2);
@@ -1445,7 +1445,6 @@ mod tests {
cx.update(|cx| {
let test_settings = SettingsStore::test(cx);
cx.set_global(test_settings);
- language::init(cx);
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings(cx, |s| {
s.project.all_languages.defaults.tab_size = NonZeroU32::new(2);
@@ -1091,8 +1091,7 @@ mod tests {
use std::path::Path;
use gpui::{AppContext as _, BackgroundExecutor, TestAppContext};
- use language::language_settings;
- use project::{FakeFs, Project};
+ use project::FakeFs;
use serde_json::json;
use task::TaskTemplates;
use unindent::Unindent;
@@ -1432,8 +1431,6 @@ mod tests {
async fn test_package_json_discovery(executor: BackgroundExecutor, cx: &mut TestAppContext) {
cx.update(|cx| {
settings::init(cx);
- Project::init_settings(cx);
- language_settings::init(cx);
});
let package_json_1 = json!({
@@ -1593,8 +1590,6 @@ mod tests {
) {
cx.update(|cx| {
settings::init(cx);
- Project::init_settings(cx);
- language_settings::init(cx);
});
// Test case with all test runners present
@@ -41,6 +41,7 @@ fn main() {
name: "Quit".into(),
action: Box::new(Quit),
os_action: None,
+ checked: false,
}],
}]);
@@ -38,7 +38,6 @@ pub fn main() {
Application::new().with_assets(Assets).run(|cx| {
let store = SettingsStore::test(cx);
cx.set_global(store);
- language::init(cx);
cx.bind_keys([KeyBinding::new("cmd-c", markdown::Copy, None)]);
let node_runtime = NodeRuntime::unavailable();
@@ -22,7 +22,6 @@ pub fn main() {
Application::new().with_assets(Assets).run(|cx| {
let store = SettingsStore::test(cx);
cx.set_global(store);
- language::init(cx);
cx.bind_keys([KeyBinding::new("cmd-c", markdown::Copy, None)]);
let node_runtime = NodeRuntime::unavailable();
@@ -22,8 +22,8 @@ use gpui::{
AnyElement, App, BorderStyle, Bounds, ClipboardItem, CursorStyle, DispatchPhase, Edges, Entity,
FocusHandle, Focusable, FontStyle, FontWeight, GlobalElementId, Hitbox, Hsla, Image,
ImageFormat, KeyContext, Length, MouseDownEvent, MouseEvent, MouseMoveEvent, MouseUpEvent,
- Point, Stateful, StrikethroughStyle, StyleRefinement, StyledText, Task, TextLayout, TextRun,
- TextStyle, TextStyleRefinement, actions, img, point, quad,
+ Point, ScrollHandle, Stateful, StrikethroughStyle, StyleRefinement, StyledText, Task,
+ TextLayout, TextRun, TextStyle, TextStyleRefinement, actions, img, point, quad,
};
use language::{Language, LanguageRegistry, Rope};
use parser::CodeBlockMetadata;
@@ -31,7 +31,7 @@ use parser::{MarkdownEvent, MarkdownTag, MarkdownTagEnd, parse_links_only, parse
use pulldown_cmark::Alignment;
use sum_tree::TreeMap;
use theme::SyntaxTheme;
-use ui::{Tooltip, prelude::*};
+use ui::{ScrollAxes, Scrollbars, Tooltip, WithScrollbar, prelude::*};
use util::ResultExt;
use crate::parser::CodeBlockKind;
@@ -108,6 +108,7 @@ pub struct Markdown {
fallback_code_block_language: Option<LanguageName>,
options: Options,
copied_code_blocks: HashSet<ElementId>,
+ code_block_scroll_handles: HashMap<usize, ScrollHandle>,
}
struct Options {
@@ -176,6 +177,7 @@ impl Markdown {
parse_links_only: false,
},
copied_code_blocks: HashSet::default(),
+ code_block_scroll_handles: HashMap::default(),
};
this.parse(cx);
this
@@ -199,11 +201,28 @@ impl Markdown {
parse_links_only: true,
},
copied_code_blocks: HashSet::default(),
+ code_block_scroll_handles: HashMap::default(),
};
this.parse(cx);
this
}
+ fn code_block_scroll_handle(&mut self, id: usize) -> ScrollHandle {
+ self.code_block_scroll_handles
+ .entry(id)
+ .or_insert_with(ScrollHandle::new)
+ .clone()
+ }
+
+ fn retain_code_block_scroll_handles(&mut self, ids: &HashSet<usize>) {
+ self.code_block_scroll_handles
+ .retain(|id, _| ids.contains(id));
+ }
+
+ fn clear_code_block_scroll_handles(&mut self) {
+ self.code_block_scroll_handles.clear();
+ }
+
pub fn is_parsing(&self) -> bool {
self.pending_parse.is_some()
}
@@ -754,14 +773,19 @@ impl Element for MarkdownElement {
self.style.base_text_style.clone(),
self.style.syntax.clone(),
);
- let markdown = self.markdown.read(cx);
- let parsed_markdown = &markdown.parsed_markdown;
- let images = &markdown.images_by_source_offset;
+ let (parsed_markdown, images) = {
+ let markdown = self.markdown.read(cx);
+ (
+ markdown.parsed_markdown.clone(),
+ markdown.images_by_source_offset.clone(),
+ )
+ };
let markdown_end = if let Some(last) = parsed_markdown.events.last() {
last.0.end
} else {
0
};
+ let mut code_block_ids = HashSet::default();
let mut current_code_block_metadata = None;
let mut current_img_block_range: Option<Range<usize>> = None;
@@ -841,39 +865,69 @@ impl Element for MarkdownElement {
current_code_block_metadata = Some(metadata.clone());
let is_indented = matches!(kind, CodeBlockKind::Indented);
+ let scroll_handle = if self.style.code_block_overflow_x_scroll {
+ code_block_ids.insert(range.start);
+ Some(self.markdown.update(cx, |markdown, _| {
+ markdown.code_block_scroll_handle(range.start)
+ }))
+ } else {
+ None
+ };
match (&self.code_block_renderer, is_indented) {
(CodeBlockRenderer::Default { .. }, _) | (_, true) => {
// This is a parent container that we can position the copy button inside.
- builder.push_div(
- div().group("code_block").relative().w_full(),
- range,
- markdown_end,
- );
+ let parent_container =
+ div().group("code_block").relative().w_full();
+
+ let mut parent_container: AnyDiv = if let Some(scroll_handle) =
+ scroll_handle.as_ref()
+ {
+ let scrollbars = Scrollbars::new(ScrollAxes::Horizontal)
+ .id(("markdown-code-block-scrollbar", range.start))
+ .tracked_scroll_handle(scroll_handle.clone())
+ .with_track_along(
+ ScrollAxes::Horizontal,
+ cx.theme().colors().editor_background,
+ )
+ .notify_content();
+
+ parent_container
+ .rounded_lg()
+ .custom_scrollbars(scrollbars, window, cx)
+ .into()
+ } else {
+ parent_container.into()
+ };
+
+ if let CodeBlockRenderer::Default { border: true, .. } =
+ &self.code_block_renderer
+ {
+ parent_container = parent_container
+ .rounded_md()
+ .border_1()
+ .border_color(cx.theme().colors().border_variant);
+ }
- let mut code_block = div()
+ parent_container.style().refine(&self.style.code_block);
+ builder.push_div(parent_container, range, markdown_end);
+
+ let code_block = div()
.id(("code-block", range.start))
.rounded_lg()
.map(|mut code_block| {
- if self.style.code_block_overflow_x_scroll {
+ if let Some(scroll_handle) = scroll_handle.as_ref() {
code_block.style().restrict_scroll_to_axis =
Some(true);
- code_block.flex().overflow_x_scroll()
+ code_block
+ .flex()
+ .overflow_x_scroll()
+ .track_scroll(scroll_handle)
} else {
code_block.w_full()
}
});
- if let CodeBlockRenderer::Default { border: true, .. } =
- &self.code_block_renderer
- {
- code_block = code_block
- .rounded_md()
- .border_1()
- .border_color(cx.theme().colors().border_variant);
- }
-
- code_block.style().refine(&self.style.code_block);
if let Some(code_block_text_style) = &self.style.code_block.text
{
builder.push_text_style(code_block_text_style.to_owned());
@@ -884,33 +938,53 @@ impl Element for MarkdownElement {
(CodeBlockRenderer::Custom { render, .. }, _) => {
let parent_container = render(
kind,
- parsed_markdown,
+ &parsed_markdown,
range.clone(),
metadata.clone(),
window,
cx,
);
+ let mut parent_container: AnyDiv = if let Some(scroll_handle) =
+ scroll_handle.as_ref()
+ {
+ let scrollbars = Scrollbars::new(ScrollAxes::Horizontal)
+ .id(("markdown-code-block-scrollbar", range.start))
+ .tracked_scroll_handle(scroll_handle.clone())
+ .with_track_along(
+ ScrollAxes::Horizontal,
+ cx.theme().colors().editor_background,
+ )
+ .notify_content();
+
+ parent_container
+ .rounded_b_lg()
+ .custom_scrollbars(scrollbars, window, cx)
+ .into()
+ } else {
+ parent_container.into()
+ };
+
+ parent_container.style().refine(&self.style.code_block);
builder.push_div(parent_container, range, markdown_end);
- let mut code_block = div()
+ let code_block = div()
.id(("code-block", range.start))
.rounded_b_lg()
.map(|mut code_block| {
- if self.style.code_block_overflow_x_scroll {
+ if let Some(scroll_handle) = scroll_handle.as_ref() {
code_block.style().restrict_scroll_to_axis =
Some(true);
code_block
.flex()
.overflow_x_scroll()
.overflow_y_hidden()
+ .track_scroll(scroll_handle)
} else {
code_block.w_full().overflow_hidden()
}
});
- code_block.style().refine(&self.style.code_block);
-
if let Some(code_block_text_style) = &self.style.code_block.text
{
builder.push_text_style(code_block_text_style.to_owned());
@@ -1218,6 +1292,15 @@ impl Element for MarkdownElement {
_ => log::debug!("unsupported markdown event {:?}", event),
}
}
+ if self.style.code_block_overflow_x_scroll {
+ let code_block_ids = code_block_ids;
+ self.markdown.update(cx, move |markdown, _| {
+ markdown.retain_code_block_scroll_handles(&code_block_ids);
+ });
+ } else {
+ self.markdown
+ .update(cx, |markdown, _| markdown.clear_code_block_scroll_handles());
+ }
let mut rendered_markdown = builder.build();
let child_layout_id = rendered_markdown.element.request_layout(window, cx);
let layout_id = window.request_layout(gpui::Style::default(), [child_layout_id], cx);
@@ -31,6 +31,7 @@ pulldown-cmark.workspace = true
settings.workspace = true
theme.workspace = true
ui.workspace = true
+urlencoding.workspace = true
util.workspace = true
workspace.workspace = true
@@ -4,6 +4,7 @@ use gpui::{
};
use language::HighlightId;
use std::{fmt::Display, ops::Range, path::PathBuf};
+use urlencoding;
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
@@ -108,6 +109,7 @@ pub struct ParsedMarkdownTable {
pub source_range: Range<usize>,
pub header: Vec<ParsedMarkdownTableRow>,
pub body: Vec<ParsedMarkdownTableRow>,
+ pub caption: Option<MarkdownParagraph>,
}
#[derive(Debug, Clone, Copy, Default)]
@@ -220,6 +222,10 @@ impl MarkdownHighlight {
});
}
+ if style.oblique {
+ highlight.font_style = Some(FontStyle::Oblique)
+ }
+
Some(highlight)
}
@@ -241,6 +247,8 @@ pub struct MarkdownHighlightStyle {
pub weight: FontWeight,
/// Whether the text should be stylized as link.
pub link: bool,
+ // Whether the text should be obliqued.
+ pub oblique: bool,
}
/// A parsed region in a Markdown document.
@@ -277,7 +285,12 @@ impl Link {
return Some(Link::Web { url: text });
}
- let path = PathBuf::from(&text);
+ // URL decode the text to handle spaces and other special characters
+ let decoded_text = urlencoding::decode(&text)
+ .map(|s| s.into_owned())
+ .unwrap_or(text);
+
+ let path = PathBuf::from(&decoded_text);
if path.is_absolute() && path.exists() {
return Some(Link::Path {
display_path: path.clone(),
@@ -287,7 +300,7 @@ impl Link {
if let Some(file_location_directory) = file_location_directory {
let display_path = path;
- let path = file_location_directory.join(text);
+ let path = file_location_directory.join(decoded_text);
if path.exists() {
return Some(Link::Path { display_path, path });
}
@@ -12,6 +12,7 @@ use pulldown_cmark::{Alignment, Event, Options, Parser, Tag, TagEnd};
use std::{
cell::RefCell, collections::HashMap, mem, ops::Range, path::PathBuf, rc::Rc, sync::Arc, vec,
};
+use ui::SharedString;
pub async fn parse_markdown(
markdown_input: &str,
@@ -533,6 +534,7 @@ impl<'a> MarkdownParser<'a> {
source_range,
header,
body,
+ caption: None,
}
}
@@ -875,13 +877,21 @@ impl<'a> MarkdownParser<'a> {
}
markup5ever_rcdom::NodeData::Comment { .. } => {}
markup5ever_rcdom::NodeData::Element { name, attrs, .. } => {
+ let mut styles = if let Some(styles) = Self::markdown_style_from_html_styles(
+ Self::extract_styles_from_attributes(attrs),
+ ) {
+ vec![MarkdownHighlight::Style(styles)]
+ } else {
+ Vec::default()
+ };
+
if local_name!("img") == name.local {
if let Some(image) = self.extract_image(source_range, attrs) {
elements.push(ParsedMarkdownElement::Image(image));
}
} else if local_name!("p") == name.local {
let mut paragraph = MarkdownParagraph::new();
- self.parse_paragraph(source_range, node, &mut paragraph);
+ self.parse_paragraph(source_range, node, &mut paragraph, &mut styles);
if !paragraph.is_empty() {
elements.push(ParsedMarkdownElement::Paragraph(paragraph));
@@ -896,7 +906,7 @@ impl<'a> MarkdownParser<'a> {
| local_name!("h6")
) {
let mut paragraph = MarkdownParagraph::new();
- self.consume_paragraph(source_range.clone(), node, &mut paragraph);
+ self.consume_paragraph(source_range.clone(), node, &mut paragraph, &mut styles);
if !paragraph.is_empty() {
elements.push(ParsedMarkdownElement::Heading(ParsedMarkdownHeading {
@@ -943,24 +953,90 @@ impl<'a> MarkdownParser<'a> {
source_range: Range<usize>,
node: &Rc<markup5ever_rcdom::Node>,
paragraph: &mut MarkdownParagraph,
+ highlights: &mut Vec<MarkdownHighlight>,
) {
+ fn add_highlight_range(
+ text: &String,
+ start: usize,
+ highlights: Vec<MarkdownHighlight>,
+ ) -> Vec<(Range<usize>, MarkdownHighlight)> {
+ highlights
+ .into_iter()
+ .map(|style| (start..text.len(), style))
+ .collect()
+ }
+
match &node.data {
markup5ever_rcdom::NodeData::Text { contents } => {
- paragraph.push(MarkdownParagraphChunk::Text(ParsedMarkdownText {
- source_range,
- regions: Vec::default(),
- region_ranges: Vec::default(),
- highlights: Vec::default(),
- contents: contents.borrow().to_string().into(),
- }));
+ // append the text to the last chunk, so we can have a hacky version
+ // of inline text with highlighting
+ if let Some(text) = paragraph.iter_mut().last().and_then(|p| match p {
+ MarkdownParagraphChunk::Text(text) => Some(text),
+ _ => None,
+ }) {
+ let mut new_text = text.contents.to_string();
+ new_text.push_str(&contents.borrow());
+ let highlights = add_highlight_range(
+ &new_text,
+ text.contents.len(),
+ std::mem::take(highlights),
+ );
+
+ text.contents = SharedString::from(new_text);
+ text.highlights.extend(highlights);
+ } else {
+ let contents = contents.borrow().to_string();
+ paragraph.push(MarkdownParagraphChunk::Text(ParsedMarkdownText {
+ source_range,
+ highlights: add_highlight_range(&contents, 0, std::mem::take(highlights)),
+ regions: Vec::default(),
+ contents: contents.into(),
+ region_ranges: Vec::default(),
+ }));
+ }
}
markup5ever_rcdom::NodeData::Element { name, attrs, .. } => {
if local_name!("img") == name.local {
if let Some(image) = self.extract_image(source_range, attrs) {
paragraph.push(MarkdownParagraphChunk::Image(image));
}
+ } else if local_name!("b") == name.local || local_name!("strong") == name.local {
+ highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle {
+ weight: FontWeight::BOLD,
+ ..Default::default()
+ }));
+
+ self.consume_paragraph(source_range, node, paragraph, highlights);
+ } else if local_name!("i") == name.local {
+ highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle {
+ italic: true,
+ ..Default::default()
+ }));
+
+ self.consume_paragraph(source_range, node, paragraph, highlights);
+ } else if local_name!("em") == name.local {
+ highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle {
+ oblique: true,
+ ..Default::default()
+ }));
+
+ self.consume_paragraph(source_range, node, paragraph, highlights);
+ } else if local_name!("del") == name.local {
+ highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle {
+ strikethrough: true,
+ ..Default::default()
+ }));
+
+ self.consume_paragraph(source_range, node, paragraph, highlights);
+ } else if local_name!("ins") == name.local {
+ highlights.push(MarkdownHighlight::Style(MarkdownHighlightStyle {
+ underline: true,
+ ..Default::default()
+ }));
+
+ self.consume_paragraph(source_range, node, paragraph, highlights);
} else {
- self.consume_paragraph(source_range, node, paragraph);
+ self.consume_paragraph(source_range, node, paragraph, highlights);
}
}
_ => {}
@@ -972,9 +1048,10 @@ impl<'a> MarkdownParser<'a> {
source_range: Range<usize>,
node: &Rc<markup5ever_rcdom::Node>,
paragraph: &mut MarkdownParagraph,
+ highlights: &mut Vec<MarkdownHighlight>,
) {
for node in node.children.borrow().iter() {
- self.parse_paragraph(source_range.clone(), node, paragraph);
+ self.parse_paragraph(source_range.clone(), node, paragraph, highlights);
}
}
@@ -1019,7 +1096,7 @@ impl<'a> MarkdownParser<'a> {
}
let mut children = MarkdownParagraph::new();
- self.consume_paragraph(source_range, node, &mut children);
+ self.consume_paragraph(source_range, node, &mut children, &mut Vec::new());
let is_header = matches!(name.local, local_name!("th"));
@@ -1083,6 +1160,58 @@ impl<'a> MarkdownParser<'a> {
})
}
+ fn markdown_style_from_html_styles(
+ styles: HashMap<String, String>,
+ ) -> Option<MarkdownHighlightStyle> {
+ let mut markdown_style = MarkdownHighlightStyle::default();
+
+ if let Some(text_decoration) = styles.get("text-decoration") {
+ match text_decoration.to_lowercase().as_str() {
+ "underline" => {
+ markdown_style.underline = true;
+ }
+ "line-through" => {
+ markdown_style.strikethrough = true;
+ }
+ _ => {}
+ }
+ }
+
+ if let Some(font_style) = styles.get("font-style") {
+ match font_style.to_lowercase().as_str() {
+ "italic" => {
+ markdown_style.italic = true;
+ }
+ "oblique" => {
+ markdown_style.oblique = true;
+ }
+ _ => {}
+ }
+ }
+
+ if let Some(font_weight) = styles.get("font-weight") {
+ match font_weight.to_lowercase().as_str() {
+ "bold" => {
+ markdown_style.weight = FontWeight::BOLD;
+ }
+ "lighter" => {
+ markdown_style.weight = FontWeight::THIN;
+ }
+ _ => {
+ if let Some(weight) = font_weight.parse::<f32>().ok() {
+ markdown_style.weight = FontWeight(weight);
+ }
+ }
+ }
+ }
+
+ if markdown_style != MarkdownHighlightStyle::default() {
+ Some(markdown_style)
+ } else {
+ None
+ }
+ }
+
fn extract_styles_from_attributes(
attrs: &RefCell<Vec<html5ever::Attribute>>,
) -> HashMap<String, String> {
@@ -1232,11 +1361,22 @@ impl<'a> MarkdownParser<'a> {
) -> Option<ParsedMarkdownTable> {
let mut header_rows = Vec::new();
let mut body_rows = Vec::new();
+ let mut caption = None;
- // node should be a thead or tbody element
+ // node should be a thead, tbody or caption element
for node in node.children.borrow().iter() {
match &node.data {
markup5ever_rcdom::NodeData::Element { name, .. } => {
+ if local_name!("caption") == name.local {
+ let mut paragraph = MarkdownParagraph::new();
+ self.parse_paragraph(
+ source_range.clone(),
+ node,
+ &mut paragraph,
+ &mut Vec::new(),
+ );
+ caption = Some(paragraph);
+ }
if local_name!("thead") == name.local {
// node should be a tr element
for node in node.children.borrow().iter() {
@@ -1262,6 +1402,7 @@ impl<'a> MarkdownParser<'a> {
source_range,
body: body_rows,
header: header_rows,
+ caption,
})
} else {
None
@@ -1400,6 +1541,83 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_html_inline_style_elements() {
+ let parsed =
+ parse("<p>Some text <strong>strong text</strong> more text <b>bold text</b> more text <i>italic text</i> more text <em>emphasized text</em> more text <del>deleted text</del> more text <ins>inserted text</ins></p>").await;
+
+ assert_eq!(1, parsed.children.len());
+ let chunks = if let ParsedMarkdownElement::Paragraph(chunks) = &parsed.children[0] {
+ chunks
+ } else {
+ panic!("Expected a paragraph");
+ };
+
+ assert_eq!(1, chunks.len());
+ let text = if let MarkdownParagraphChunk::Text(text) = &chunks[0] {
+ text
+ } else {
+ panic!("Expected a paragraph");
+ };
+
+ assert_eq!(0..205, text.source_range);
+ assert_eq!(
+ "Some text strong text more text bold text more text italic text more text emphasized text more text deleted text more text inserted text",
+ text.contents.as_str(),
+ );
+ assert_eq!(
+ vec![
+ (
+ 10..21,
+ MarkdownHighlight::Style(MarkdownHighlightStyle {
+ weight: FontWeight(700.0),
+ ..Default::default()
+ },),
+ ),
+ (
+ 32..41,
+ MarkdownHighlight::Style(MarkdownHighlightStyle {
+ weight: FontWeight(700.0),
+ ..Default::default()
+ },),
+ ),
+ (
+ 52..63,
+ MarkdownHighlight::Style(MarkdownHighlightStyle {
+ italic: true,
+ weight: FontWeight(400.0),
+ ..Default::default()
+ },),
+ ),
+ (
+ 74..89,
+ MarkdownHighlight::Style(MarkdownHighlightStyle {
+ weight: FontWeight(400.0),
+ oblique: true,
+ ..Default::default()
+ },),
+ ),
+ (
+ 100..112,
+ MarkdownHighlight::Style(MarkdownHighlightStyle {
+ strikethrough: true,
+ weight: FontWeight(400.0),
+ ..Default::default()
+ },),
+ ),
+ (
+ 123..136,
+ MarkdownHighlight::Style(MarkdownHighlightStyle {
+ underline: true,
+ weight: FontWeight(400.0,),
+ ..Default::default()
+ },),
+ ),
+ ],
+ text.highlights
+ );
+ }
+
#[gpui::test]
async fn test_text_with_inline_html() {
let parsed = parse("This is a paragraph with an inline HTML <sometag>tag</sometag>.").await;
@@ -1919,6 +2137,7 @@ mod tests {
ParsedMarkdown {
children: vec![ParsedMarkdownElement::Table(table(
0..366,
+ None,
vec![row(vec![
column(
1,
@@ -1975,6 +2194,77 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_html_table_with_caption() {
+ let parsed = parse(
+ "<table>
+ <caption>My Table</caption>
+ <tbody>
+ <tr>
+ <td>1</td>
+ <td>Chris</td>
+ </tr>
+ <tr>
+ <td>2</td>
+ <td>Dennis</td>
+ </tr>
+ </tbody>
+ </table>",
+ )
+ .await;
+
+ assert_eq!(
+ ParsedMarkdown {
+ children: vec![ParsedMarkdownElement::Table(table(
+ 0..280,
+ Some(vec![MarkdownParagraphChunk::Text(ParsedMarkdownText {
+ source_range: 0..280,
+ contents: "My Table".into(),
+ highlights: Default::default(),
+ region_ranges: Default::default(),
+ regions: Default::default()
+ })]),
+ vec![],
+ vec![
+ row(vec![
+ column(
+ 1,
+ 1,
+ false,
+ text("1", 0..280),
+ ParsedMarkdownTableAlignment::None
+ ),
+ column(
+ 1,
+ 1,
+ false,
+ text("Chris", 0..280),
+ ParsedMarkdownTableAlignment::None
+ )
+ ]),
+ row(vec![
+ column(
+ 1,
+ 1,
+ false,
+ text("2", 0..280),
+ ParsedMarkdownTableAlignment::None
+ ),
+ column(
+ 1,
+ 1,
+ false,
+ text("Dennis", 0..280),
+ ParsedMarkdownTableAlignment::None
+ )
+ ]),
+ ],
+ ))],
+ },
+ parsed
+ );
+ }
+
#[gpui::test]
async fn test_html_table_without_headings() {
let parsed = parse(
@@ -1997,6 +2287,7 @@ mod tests {
ParsedMarkdown {
children: vec![ParsedMarkdownElement::Table(table(
0..240,
+ None,
vec![],
vec![
row(vec![
@@ -2056,6 +2347,7 @@ mod tests {
ParsedMarkdown {
children: vec![ParsedMarkdownElement::Table(table(
0..150,
+ None,
vec![row(vec![
column(
1,
@@ -2253,6 +2545,7 @@ Some other content
let expected_table = table(
0..48,
+ None,
vec![row(vec![
column(
1,
@@ -2288,6 +2581,7 @@ Some other content
let expected_table = table(
0..95,
+ None,
vec![row(vec![
column(
1,
@@ -2809,6 +3103,7 @@ fn main() {
fn table(
source_range: Range<usize>,
+ caption: Option<MarkdownParagraph>,
header: Vec<ParsedMarkdownTableRow>,
body: Vec<ParsedMarkdownTableRow>,
) -> ParsedMarkdownTable {
@@ -2816,6 +3111,7 @@ fn main() {
source_range,
header,
body,
+ caption,
}
}
@@ -561,12 +561,17 @@ fn render_markdown_table(parsed: &ParsedMarkdownTable, cx: &mut RenderContext) -
}
cx.with_common_p(div())
- .grid()
- .size_full()
- .grid_cols(max_column_count as u16)
- .border_1()
- .border_color(cx.border_color)
- .children(cells)
+ .when_some(parsed.caption.as_ref(), |this, caption| {
+ this.children(render_markdown_text(caption, cx))
+ })
+ .child(
+ div()
+ .grid()
+ .grid_cols(max_column_count as u16)
+ .border_1()
+ .border_color(cx.border_color)
+ .children(cells),
+ )
.into_any()
}
@@ -293,7 +293,7 @@ pub struct FunctionDefinition {
pub parameters: Option<Value>,
}
-#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
+#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum RequestMessage {
Assistant {
@@ -366,25 +366,42 @@ pub struct ImageUrl {
pub detail: Option<String>,
}
-#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
+#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCall {
pub id: String,
#[serde(flatten)]
pub content: ToolCallContent,
}
-#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
+#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolCallContent {
Function { function: FunctionContent },
}
-#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
+#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionContent {
pub name: String,
pub arguments: String,
}
+#[derive(Clone, Serialize, Deserialize, Debug)]
+pub struct Response {
+ pub id: String,
+ pub object: String,
+ pub created: u64,
+ pub model: String,
+ pub choices: Vec<Choice>,
+ pub usage: Usage,
+}
+
+#[derive(Clone, Serialize, Deserialize, Debug)]
+pub struct Choice {
+ pub index: u32,
+ pub message: RequestMessage,
+ pub finish_reason: Option<String>,
+}
+
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ResponseMessageDelta {
pub role: Option<Role>,
@@ -410,7 +427,7 @@ pub struct FunctionChunk {
pub arguments: Option<String>,
}
-#[derive(Serialize, Deserialize, Debug)]
+#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Usage {
pub prompt_tokens: u64,
pub completion_tokens: u64,
@@ -575,11 +575,8 @@ mod tests {
fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
cx.update(|cx| {
let state = AppState::test(cx);
- language::init(cx);
crate::init(cx);
editor::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
state
})
}
@@ -656,13 +656,7 @@ struct SerializedOutlinePanel {
active: Option<bool>,
}
-pub fn init_settings(cx: &mut App) {
- OutlinePanelSettings::register(cx);
-}
-
pub fn init(cx: &mut App) {
- init_settings(cx);
-
cx.observe_new(|workspace: &mut Workspace, _, _| {
workspace.register_action(|workspace, _: &ToggleFocus, window, cx| {
workspace.toggle_panel_focus::<OutlinePanel>(window, cx);
@@ -6823,10 +6817,7 @@ outline: struct OutlineEntryExcerpt
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
editor::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
project_search::init(cx);
buffer_search::init(cx);
super::init(cx);
@@ -1,9 +1,10 @@
use editor::EditorSettings;
use gpui::{App, Pixels};
+use settings::RegisterSetting;
pub use settings::{DockSide, Settings, ShowIndentGuides};
use ui::scrollbars::{ScrollbarVisibility, ShowScrollbar};
-#[derive(Debug, Clone, Copy, PartialEq)]
+#[derive(Debug, Clone, Copy, PartialEq, RegisterSetting)]
pub struct OutlinePanelSettings {
pub button: bool,
pub default_width: Pixels,
@@ -460,8 +460,12 @@ pub fn user_ssh_config_file() -> PathBuf {
home_dir().join(".ssh/config")
}
-pub fn global_ssh_config_file() -> &'static Path {
- Path::new("/etc/ssh/ssh_config")
+pub fn global_ssh_config_file() -> Option<&'static Path> {
+ if cfg!(windows) {
+ None
+ } else {
+ Some(Path::new("/etc/ssh/ssh_config"))
+ }
}
/// Returns candidate paths for the vscode user settings file
@@ -20,7 +20,7 @@ use remote::RemoteClient;
use rpc::{AnyProtoClient, TypedEnvelope, proto};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::SettingsStore;
+use settings::{RegisterSetting, SettingsStore};
use task::Shell;
use util::{ResultExt as _, debug_panic};
@@ -1618,7 +1618,7 @@ pub const GEMINI_NAME: &'static str = "gemini";
pub const CLAUDE_CODE_NAME: &'static str = "claude";
pub const CODEX_NAME: &'static str = "codex";
-#[derive(Default, Clone, JsonSchema, Debug, PartialEq)]
+#[derive(Default, Clone, JsonSchema, Debug, PartialEq, RegisterSetting)]
pub struct AllAgentServersSettings {
pub gemini: Option<BuiltinAgentServerSettings>,
pub claude: Option<BuiltinAgentServerSettings>,
@@ -1307,7 +1307,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- Project::init_settings(cx);
let mut settings = ProjectSettings::get_global(cx).clone();
for (id, config) in context_server_configurations {
settings.context_servers.insert(id, config);
@@ -1,6 +1,7 @@
pub mod branch_diff;
mod conflict_set;
pub mod git_traversal;
+pub mod pending_op;
use crate::{
ProjectEnvironment, ProjectItem, ProjectPath,
@@ -16,7 +17,10 @@ pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, Conflic
use fs::Fs;
use futures::{
FutureExt, StreamExt,
- channel::{mpsc, oneshot},
+ channel::{
+ mpsc,
+ oneshot::{self, Canceled},
+ },
future::{self, Shared},
stream::FuturesOrdered,
};
@@ -44,6 +48,7 @@ use language::{
proto::{deserialize_version, serialize_version},
};
use parking_lot::Mutex;
+use pending_op::{PendingOp, PendingOpId, PendingOps};
use postage::stream::Stream as _;
use rpc::{
AnyProtoClient, TypedEnvelope,
@@ -248,6 +253,7 @@ pub struct MergeDetails {
pub struct RepositorySnapshot {
pub id: RepositoryId,
pub statuses_by_path: SumTree<StatusEntry>,
+ pub pending_ops_by_path: SumTree<PendingOps>,
pub work_directory_abs_path: Arc<Path>,
pub path_style: PathStyle,
pub branch: Option<Branch>,
@@ -311,6 +317,9 @@ pub enum RepositoryEvent {
MergeHeadsChanged,
BranchChanged,
StashEntriesChanged,
+ PendingOpsChanged {
+ pending_ops: SumTree<pending_op::PendingOps>,
+ },
}
#[derive(Clone, Debug)]
@@ -338,7 +347,7 @@ pub struct GitJob {
#[derive(PartialEq, Eq)]
enum GitJobKey {
- WriteIndex(RepoPath),
+ WriteIndex(Vec<RepoPath>),
ReloadBufferDiffBases,
RefreshStatuses,
ReloadGitState,
@@ -2161,7 +2170,7 @@ impl GitStore {
.update(&mut cx, |repository_handle, cx| {
repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
})?
- .await??;
+ .await?;
Ok(proto::Ack {})
}
@@ -2954,6 +2963,7 @@ impl RepositorySnapshot {
Self {
id,
statuses_by_path: Default::default(),
+ pending_ops_by_path: Default::default(),
work_directory_abs_path,
branch: None,
head_commit: None,
@@ -3081,6 +3091,12 @@ impl RepositorySnapshot {
.cloned()
}
+ pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
+ self.pending_ops_by_path
+ .get(&PathKey(path.0.clone()), ())
+ .cloned()
+ }
+
pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
}
@@ -3636,37 +3652,50 @@ impl Repository {
&mut self,
commit: &str,
paths: Vec<RepoPath>,
- _cx: &mut App,
- ) -> oneshot::Receiver<Result<()>> {
+ cx: &mut Context<Self>,
+ ) -> Task<Result<()>> {
let commit = commit.to_string();
let id = self.id;
- self.send_job(
- Some(format!("git checkout {}", commit).into()),
- move |git_repo, _| async move {
- match git_repo {
- RepositoryState::Local {
- backend,
- environment,
- ..
- } => {
- backend
- .checkout_files(commit, paths, environment.clone())
- .await
- }
- RepositoryState::Remote { project_id, client } => {
- client
- .request(proto::GitCheckoutFiles {
- project_id: project_id.0,
- repository_id: id.to_proto(),
- commit,
- paths: paths.into_iter().map(|p| p.to_proto()).collect(),
- })
- .await?;
-
- Ok(())
- }
- }
+ self.spawn_job_with_tracking(
+ paths.clone(),
+ pending_op::GitStatus::Reverted,
+ cx,
+ async move |this, cx| {
+ this.update(cx, |this, _cx| {
+ this.send_job(
+ Some(format!("git checkout {}", commit).into()),
+ move |git_repo, _| async move {
+ match git_repo {
+ RepositoryState::Local {
+ backend,
+ environment,
+ ..
+ } => {
+ backend
+ .checkout_files(commit, paths, environment.clone())
+ .await
+ }
+ RepositoryState::Remote { project_id, client } => {
+ client
+ .request(proto::GitCheckoutFiles {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ commit,
+ paths: paths
+ .into_iter()
+ .map(|p| p.to_proto())
+ .collect(),
+ })
+ .await?;
+
+ Ok(())
+ }
+ }
+ },
+ )
+ })?
+ .await?
},
)
}
@@ -3796,7 +3825,7 @@ impl Repository {
}
pub fn stage_entries(
- &self,
+ &mut self,
entries: Vec<RepoPath>,
cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> {
@@ -3811,54 +3840,54 @@ impl Repository {
.collect::<Vec<_>>()
.join(" ");
let status = format!("git add {paths}");
- let job_key = match entries.len() {
- 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
- _ => None,
- };
-
- cx.spawn(async move |this, cx| {
- for save_task in save_tasks {
- save_task.await?;
- }
+ let job_key = GitJobKey::WriteIndex(entries.clone());
- this.update(cx, |this, _| {
- this.send_keyed_job(
- job_key,
- Some(status.into()),
- move |git_repo, _cx| async move {
- match git_repo {
- RepositoryState::Local {
- backend,
- environment,
- ..
- } => backend.stage_paths(entries, environment.clone()).await,
- RepositoryState::Remote { project_id, client } => {
- client
- .request(proto::Stage {
- project_id: project_id.0,
- repository_id: id.to_proto(),
- paths: entries
- .into_iter()
- .map(|repo_path| repo_path.to_proto())
- .collect(),
- })
- .await
- .context("sending stage request")?;
+ self.spawn_job_with_tracking(
+ entries.clone(),
+ pending_op::GitStatus::Staged,
+ cx,
+ async move |this, cx| {
+ for save_task in save_tasks {
+ save_task.await?;
+ }
- Ok(())
+ this.update(cx, |this, _| {
+ this.send_keyed_job(
+ Some(job_key),
+ Some(status.into()),
+ move |git_repo, _cx| async move {
+ match git_repo {
+ RepositoryState::Local {
+ backend,
+ environment,
+ ..
+ } => backend.stage_paths(entries, environment.clone()).await,
+ RepositoryState::Remote { project_id, client } => {
+ client
+ .request(proto::Stage {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ paths: entries
+ .into_iter()
+ .map(|repo_path| repo_path.to_proto())
+ .collect(),
+ })
+ .await
+ .context("sending stage request")?;
+
+ Ok(())
+ }
}
- }
- },
- )
- })?
- .await??;
-
- Ok(())
- })
+ },
+ )
+ })?
+ .await?
+ },
+ )
}
pub fn unstage_entries(
- &self,
+ &mut self,
entries: Vec<RepoPath>,
cx: &mut Context<Self>,
) -> Task<anyhow::Result<()>> {
@@ -3873,66 +3902,88 @@ impl Repository {
.collect::<Vec<_>>()
.join(" ");
let status = format!("git reset {paths}");
- let job_key = match entries.len() {
- 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
- _ => None,
- };
+ let job_key = GitJobKey::WriteIndex(entries.clone());
- cx.spawn(async move |this, cx| {
- for save_task in save_tasks {
- save_task.await?;
- }
-
- this.update(cx, |this, _| {
- this.send_keyed_job(
- job_key,
- Some(status.into()),
- move |git_repo, _cx| async move {
- match git_repo {
- RepositoryState::Local {
- backend,
- environment,
- ..
- } => backend.unstage_paths(entries, environment).await,
- RepositoryState::Remote { project_id, client } => {
- client
- .request(proto::Unstage {
- project_id: project_id.0,
- repository_id: id.to_proto(),
- paths: entries
- .into_iter()
- .map(|repo_path| repo_path.to_proto())
- .collect(),
- })
- .await
- .context("sending unstage request")?;
+ self.spawn_job_with_tracking(
+ entries.clone(),
+ pending_op::GitStatus::Unstaged,
+ cx,
+ async move |this, cx| {
+ for save_task in save_tasks {
+ save_task.await?;
+ }
- Ok(())
+ this.update(cx, |this, _| {
+ this.send_keyed_job(
+ Some(job_key),
+ Some(status.into()),
+ move |git_repo, _cx| async move {
+ match git_repo {
+ RepositoryState::Local {
+ backend,
+ environment,
+ ..
+ } => backend.unstage_paths(entries, environment).await,
+ RepositoryState::Remote { project_id, client } => {
+ client
+ .request(proto::Unstage {
+ project_id: project_id.0,
+ repository_id: id.to_proto(),
+ paths: entries
+ .into_iter()
+ .map(|repo_path| repo_path.to_proto())
+ .collect(),
+ })
+ .await
+ .context("sending unstage request")?;
+
+ Ok(())
+ }
}
- }
- },
- )
- })?
- .await??;
-
- Ok(())
- })
+ },
+ )
+ })?
+ .await?
+ },
+ )
}
- pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
+ pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
let to_stage = self
.cached_status()
- .filter(|entry| !entry.status.staging().is_fully_staged())
- .map(|entry| entry.repo_path)
+ .filter_map(|entry| {
+ if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
+ if ops.staging() || ops.staged() {
+ None
+ } else {
+ Some(entry.repo_path)
+ }
+ } else if entry.status.staging().has_staged() {
+ None
+ } else {
+ Some(entry.repo_path)
+ }
+ })
.collect();
self.stage_entries(to_stage, cx)
}
- pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
+ pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
let to_unstage = self
.cached_status()
- .filter(|entry| entry.status.staging().has_staged())
- .map(|entry| entry.repo_path)
+ .filter_map(|entry| {
+ if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
+ if !ops.staging() && !ops.staged() {
+ None
+ } else {
+ Some(entry.repo_path)
+ }
+ } else if entry.status.staging().has_unstaged() {
+ None
+ } else {
+ Some(entry.repo_path)
+ }
+ })
.collect();
self.unstage_entries(to_unstage, cx)
}
@@ -4368,7 +4419,7 @@ impl Repository {
let this = cx.weak_entity();
let git_store = self.git_store.clone();
self.send_keyed_job(
- Some(GitJobKey::WriteIndex(path.clone())),
+ Some(GitJobKey::WriteIndex(vec![path.clone()])),
None,
move |git_repo, mut cx| async move {
log::debug!(
@@ -5199,6 +5250,67 @@ impl Repository {
pub fn barrier(&mut self) -> oneshot::Receiver<()> {
self.send_job(None, |_, _| async {})
}
+
+ fn spawn_job_with_tracking<AsyncFn>(
+ &mut self,
+ paths: Vec<RepoPath>,
+ git_status: pending_op::GitStatus,
+ cx: &mut Context<Self>,
+ f: AsyncFn,
+ ) -> Task<Result<()>>
+ where
+ AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
+ {
+ let ids = self.new_pending_ops_for_paths(paths, git_status);
+
+ cx.spawn(async move |this, cx| {
+ let (job_status, result) = match f(this.clone(), cx).await {
+ Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
+ Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
+ Err(err) => (pending_op::JobStatus::Error, Err(err)),
+ };
+
+ this.update(cx, |this, _| {
+ let mut edits = Vec::with_capacity(ids.len());
+ for (id, entry) in ids {
+ if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
+ if let Some(op) = ops.op_by_id_mut(id) {
+ op.job_status = job_status;
+ }
+ edits.push(sum_tree::Edit::Insert(ops));
+ }
+ }
+ this.snapshot.pending_ops_by_path.edit(edits, ());
+ })?;
+
+ result
+ })
+ }
+
+ fn new_pending_ops_for_paths(
+ &mut self,
+ paths: Vec<RepoPath>,
+ git_status: pending_op::GitStatus,
+ ) -> Vec<(PendingOpId, RepoPath)> {
+ let mut edits = Vec::with_capacity(paths.len());
+ let mut ids = Vec::with_capacity(paths.len());
+ for path in paths {
+ let mut ops = self
+ .snapshot
+ .pending_ops_for_path(&path)
+ .unwrap_or_else(|| PendingOps::new(&path));
+ let id = ops.max_id() + 1;
+ ops.ops.push(PendingOp {
+ id,
+ git_status,
+ job_status: pending_op::JobStatus::Running,
+ });
+ edits.push(sum_tree::Edit::Insert(ops));
+ ids.push((id, path));
+ }
+ self.snapshot.pending_ops_by_path.edit(edits, ());
+ ids
+ }
}
fn get_permalink_in_rust_registry_src(
@@ -5464,6 +5576,28 @@ async fn compute_snapshot(
MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
+ let pending_ops_by_path = SumTree::from_iter(
+ prev_snapshot.pending_ops_by_path.iter().filter_map(|ops| {
+ let inner_ops: Vec<PendingOp> =
+ ops.ops.iter().filter(|op| op.running()).cloned().collect();
+ if inner_ops.is_empty() {
+ None
+ } else {
+ Some(PendingOps {
+ repo_path: ops.repo_path.clone(),
+ ops: inner_ops,
+ })
+ }
+ }),
+ (),
+ );
+
+ if pending_ops_by_path != prev_snapshot.pending_ops_by_path {
+ events.push(RepositoryEvent::PendingOpsChanged {
+ pending_ops: prev_snapshot.pending_ops_by_path.clone(),
+ })
+ }
+
if merge_heads_changed {
events.push(RepositoryEvent::MergeHeadsChanged);
}
@@ -5489,6 +5623,7 @@ async fn compute_snapshot(
let snapshot = RepositorySnapshot {
id,
statuses_by_path,
+ pending_ops_by_path,
work_directory_abs_path,
path_style: prev_snapshot.path_style,
scan_id: prev_snapshot.scan_id + 1,
@@ -268,13 +268,10 @@ mod tests {
status::{UnmergedStatus, UnmergedStatusCode},
};
use gpui::{BackgroundExecutor, TestAppContext};
- use language::language_settings::AllLanguageSettings;
use serde_json::json;
- use settings::Settings as _;
use text::{Buffer, BufferId, Point, ReplicaId, ToOffset as _};
use unindent::Unindent as _;
use util::{path, rel_path::rel_path};
- use worktree::WorktreeSettings;
#[test]
fn test_parse_conflicts_in_buffer() {
@@ -488,9 +485,6 @@ mod tests {
zlog::init_test();
cx.update(|cx| {
settings::init(cx);
- WorktreeSettings::register(cx);
- Project::init_settings(cx);
- AllLanguageSettings::register(cx);
});
let initial_text = "
one
@@ -589,9 +583,6 @@ mod tests {
zlog::init_test();
cx.update(|cx| {
settings::init(cx);
- WorktreeSettings::register(cx);
- Project::init_settings(cx);
- AllLanguageSettings::register(cx);
});
let initial_text = "
@@ -688,7 +688,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- Project::init_settings(cx);
});
}
@@ -0,0 +1,147 @@
+use git::repository::RepoPath;
+use std::ops::Add;
+use sum_tree::{ContextLessSummary, Item, KeyedItem};
+use worktree::{PathKey, PathSummary};
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum GitStatus {
+ Staged,
+ Unstaged,
+ Reverted,
+ Unchanged,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum JobStatus {
+ Running,
+ Finished,
+ Skipped,
+ Error,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct PendingOps {
+ pub repo_path: RepoPath,
+ pub ops: Vec<PendingOp>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct PendingOp {
+ pub id: PendingOpId,
+ pub git_status: GitStatus,
+ pub job_status: JobStatus,
+}
+
+#[derive(Clone, Debug)]
+pub struct PendingOpsSummary {
+ pub staged_count: usize,
+ pub staging_count: usize,
+}
+
+#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub struct PendingOpId(pub u16);
+
+impl Item for PendingOps {
+ type Summary = PathSummary<PendingOpsSummary>;
+
+ fn summary(&self, _cx: ()) -> Self::Summary {
+ PathSummary {
+ max_path: self.repo_path.0.clone(),
+ item_summary: PendingOpsSummary {
+ staged_count: self.staged() as usize,
+ staging_count: self.staging() as usize,
+ },
+ }
+ }
+}
+
+impl ContextLessSummary for PendingOpsSummary {
+ fn zero() -> Self {
+ Self {
+ staged_count: 0,
+ staging_count: 0,
+ }
+ }
+
+ fn add_summary(&mut self, summary: &Self) {
+ self.staged_count += summary.staged_count;
+ self.staging_count += summary.staging_count;
+ }
+}
+
+impl KeyedItem for PendingOps {
+ type Key = PathKey;
+
+ fn key(&self) -> Self::Key {
+ PathKey(self.repo_path.0.clone())
+ }
+}
+
+impl Add<u16> for PendingOpId {
+ type Output = PendingOpId;
+
+ fn add(self, rhs: u16) -> Self::Output {
+ Self(self.0 + rhs)
+ }
+}
+
+impl From<u16> for PendingOpId {
+ fn from(id: u16) -> Self {
+ Self(id)
+ }
+}
+
+impl PendingOps {
+ pub fn new(path: &RepoPath) -> Self {
+ Self {
+ repo_path: path.clone(),
+ ops: Vec::new(),
+ }
+ }
+
+ pub fn max_id(&self) -> PendingOpId {
+ self.ops.last().map(|op| op.id).unwrap_or_default()
+ }
+
+ pub fn op_by_id(&self, id: PendingOpId) -> Option<&PendingOp> {
+ self.ops.iter().find(|op| op.id == id)
+ }
+
+ pub fn op_by_id_mut(&mut self, id: PendingOpId) -> Option<&mut PendingOp> {
+ self.ops.iter_mut().find(|op| op.id == id)
+ }
+
+ /// File is staged if the last job is finished and has status Staged.
+ pub fn staged(&self) -> bool {
+ if let Some(last) = self.ops.last() {
+ if last.git_status == GitStatus::Staged && last.job_status == JobStatus::Finished {
+ return true;
+ }
+ }
+ false
+ }
+
+    /// File is being staged if the last job has status Staged but is not yet finished.
+ pub fn staging(&self) -> bool {
+ if let Some(last) = self.ops.last() {
+ if last.git_status == GitStatus::Staged && last.job_status != JobStatus::Finished {
+ return true;
+ }
+ }
+ false
+ }
+}
+
+impl PendingOp {
+ pub fn running(&self) -> bool {
+ self.job_status == JobStatus::Running
+ }
+
+ pub fn finished(&self) -> bool {
+ matches!(self.job_status, JobStatus::Finished | JobStatus::Skipped)
+ }
+
+ pub fn error(&self) -> bool {
+ self.job_status == JobStatus::Error
+ }
+}
@@ -11,16 +11,22 @@ use gpui::{
pub use image::ImageFormat;
use image::{ExtendedColorType, GenericImageView, ImageReader};
use language::{DiskState, File};
-use rpc::{AnyProtoClient, ErrorExt as _};
+use rpc::{AnyProtoClient, ErrorExt as _, TypedEnvelope, proto};
use std::num::NonZeroU64;
use std::path::PathBuf;
use std::sync::Arc;
use util::{ResultExt, rel_path::RelPath};
-use worktree::{LoadedBinaryFile, PathChange, Worktree};
+use worktree::{LoadedBinaryFile, PathChange, Worktree, WorktreeId};
#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct ImageId(NonZeroU64);
+impl ImageId {
+ pub fn to_proto(&self) -> u64 {
+ self.0.get()
+ }
+}
+
impl std::fmt::Display for ImageId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
@@ -102,6 +108,24 @@ pub struct ImageItem {
}
impl ImageItem {
+ fn compute_metadata_from_bytes(image_bytes: &[u8]) -> Result<ImageMetadata> {
+ let image_format = image::guess_format(image_bytes)?;
+
+ let mut image_reader = ImageReader::new(std::io::Cursor::new(image_bytes));
+ image_reader.set_format(image_format);
+ let image = image_reader.decode()?;
+
+ let (width, height) = image.dimensions();
+
+ Ok(ImageMetadata {
+ width,
+ height,
+ file_size: image_bytes.len() as u64,
+ format: image_format,
+ colors: ImageColorInfo::from_color_type(image.color()),
+ })
+ }
+
pub async fn load_image_metadata(
image: Entity<ImageItem>,
project: Entity<Project>,
@@ -117,25 +141,7 @@ impl ImageItem {
})??;
let image_bytes = fs.load_bytes(&image_path).await?;
- let image_format = image::guess_format(&image_bytes)?;
-
- let mut image_reader = ImageReader::new(std::io::Cursor::new(image_bytes));
- image_reader.set_format(image_format);
- let image = image_reader.decode()?;
-
- let (width, height) = image.dimensions();
- let file_metadata = fs
- .metadata(image_path.as_path())
- .await?
- .context("failed to load image metadata")?;
-
- Ok(ImageMetadata {
- width,
- height,
- file_size: file_metadata.len,
- format: image_format,
- colors: ImageColorInfo::from_color_type(image.color()),
- })
+ Self::compute_metadata_from_bytes(&image_bytes)
}
pub fn project_path(&self, cx: &App) -> ProjectPath {
@@ -265,9 +271,23 @@ trait ImageStoreImpl {
) -> Task<Result<()>>;
fn as_local(&self) -> Option<Entity<LocalImageStore>>;
+ fn as_remote(&self) -> Option<Entity<RemoteImageStore>>;
}
-struct RemoteImageStore {}
+struct RemoteImageStore {
+ upstream_client: AnyProtoClient,
+ project_id: u64,
+ loading_remote_images_by_id: HashMap<ImageId, LoadingRemoteImage>,
+ remote_image_listeners:
+ HashMap<ImageId, Vec<oneshot::Sender<anyhow::Result<Entity<ImageItem>>>>>,
+ loaded_images: HashMap<ImageId, Entity<ImageItem>>,
+}
+
+struct LoadingRemoteImage {
+ state: proto::ImageState,
+ chunks: Vec<Vec<u8>>,
+ received_size: u64,
+}
struct LocalImageStore {
local_image_ids_by_path: HashMap<ProjectPath, ImageId>,
@@ -316,12 +336,18 @@ impl ImageStore {
pub fn remote(
worktree_store: Entity<WorktreeStore>,
- _upstream_client: AnyProtoClient,
- _remote_id: u64,
+ upstream_client: AnyProtoClient,
+ project_id: u64,
cx: &mut Context<Self>,
) -> Self {
Self {
- state: Box::new(cx.new(|_| RemoteImageStore {})),
+ state: Box::new(cx.new(|_| RemoteImageStore {
+ upstream_client,
+ project_id,
+ loading_remote_images_by_id: Default::default(),
+ remote_image_listeners: Default::default(),
+ loaded_images: Default::default(),
+ })),
opened_images: Default::default(),
loading_images_by_path: Default::default(),
worktree_store,
@@ -429,9 +455,7 @@ impl ImageStore {
fn add_image(&mut self, image: Entity<ImageItem>, cx: &mut Context<ImageStore>) -> Result<()> {
let image_id = image.read(cx).id;
-
self.opened_images.insert(image_id, image.downgrade());
-
cx.subscribe(&image, Self::on_image_event).detach();
cx.emit(ImageStoreEvent::ImageAdded(image));
Ok(())
@@ -451,6 +475,135 @@ impl ImageStore {
})
}
}
+
+ pub fn handle_create_image_for_peer(
+ &mut self,
+ envelope: TypedEnvelope<proto::CreateImageForPeer>,
+ cx: &mut Context<Self>,
+ ) -> Result<()> {
+ if let Some(remote) = self.state.as_remote() {
+ let worktree_store = self.worktree_store.clone();
+ let image = remote.update(cx, |remote, cx| {
+ remote.handle_create_image_for_peer(envelope, &worktree_store, cx)
+ })?;
+ if let Some(image) = image {
+ remote.update(cx, |this, cx| {
+ let image = image.clone();
+ let image_id = image.read(cx).id;
+ this.loaded_images.insert(image_id, image)
+ });
+
+ self.add_image(image, cx)?;
+ }
+ }
+
+ Ok(())
+ }
+}
+
+impl RemoteImageStore {
+ pub fn wait_for_remote_image(
+ &mut self,
+ id: ImageId,
+ cx: &mut Context<Self>,
+ ) -> Task<Result<Entity<ImageItem>>> {
+ if let Some(image) = self.loaded_images.remove(&id) {
+ return Task::ready(Ok(image));
+ }
+
+ let (tx, rx) = oneshot::channel();
+ self.remote_image_listeners.entry(id).or_default().push(tx);
+
+ cx.spawn(async move |_this, cx| {
+ let result = cx.background_spawn(async move { rx.await? }).await;
+ result
+ })
+ }
+
+ pub fn handle_create_image_for_peer(
+ &mut self,
+ envelope: TypedEnvelope<proto::CreateImageForPeer>,
+ worktree_store: &Entity<WorktreeStore>,
+ cx: &mut Context<Self>,
+ ) -> Result<Option<Entity<ImageItem>>> {
+ use proto::create_image_for_peer::Variant;
+ match envelope.payload.variant {
+ Some(Variant::State(state)) => {
+ let image_id =
+ ImageId::from(NonZeroU64::new(state.id).context("invalid image id")?);
+
+ self.loading_remote_images_by_id.insert(
+ image_id,
+ LoadingRemoteImage {
+ state,
+ chunks: Vec::new(),
+ received_size: 0,
+ },
+ );
+ Ok(None)
+ }
+ Some(Variant::Chunk(chunk)) => {
+ let image_id =
+ ImageId::from(NonZeroU64::new(chunk.image_id).context("invalid image id")?);
+
+ let loading = self
+ .loading_remote_images_by_id
+ .get_mut(&image_id)
+ .context("received chunk for unknown image")?;
+
+ loading.received_size += chunk.data.len() as u64;
+ loading.chunks.push(chunk.data);
+
+ if loading.received_size == loading.state.content_size {
+ let loading = self.loading_remote_images_by_id.remove(&image_id).unwrap();
+
+ let mut content = Vec::with_capacity(loading.received_size as usize);
+ for chunk_data in loading.chunks {
+ content.extend_from_slice(&chunk_data);
+ }
+
+ let image_metadata = ImageItem::compute_metadata_from_bytes(&content).log_err();
+ let image = create_gpui_image(content)?;
+
+ let proto_file = loading.state.file.context("missing file in image state")?;
+ let worktree_id = WorktreeId::from_proto(proto_file.worktree_id);
+ let worktree = worktree_store
+ .read(cx)
+ .worktree_for_id(worktree_id, cx)
+ .context("worktree not found")?;
+
+ let file = Arc::new(
+ worktree::File::from_proto(proto_file, worktree, cx)
+ .context("invalid file in image state")?,
+ );
+
+ let entity = cx.new(|_cx| ImageItem {
+ id: image_id,
+ file,
+ image,
+ image_metadata,
+ reload_task: None,
+ });
+
+ if let Some(listeners) = self.remote_image_listeners.remove(&image_id) {
+ for listener in listeners {
+ listener.send(Ok(entity.clone())).ok();
+ }
+ }
+
+ Ok(Some(entity))
+ } else {
+ Ok(None)
+ }
+ }
+ None => {
+ log::warn!("Received CreateImageForPeer with no variant");
+ Ok(None)
+ }
+ }
+ }
+
+ // TODO: subscribe to worktree and update image contents or at least mark as dirty on file changes
}
impl ImageStoreImpl for Entity<LocalImageStore> {
@@ -520,6 +673,64 @@ impl ImageStoreImpl for Entity<LocalImageStore> {
fn as_local(&self) -> Option<Entity<LocalImageStore>> {
Some(self.clone())
}
+
+ fn as_remote(&self) -> Option<Entity<RemoteImageStore>> {
+ None
+ }
+}
+
+impl ImageStoreImpl for Entity<RemoteImageStore> {
+ fn open_image(
+ &self,
+ path: Arc<RelPath>,
+ worktree: Entity<Worktree>,
+ cx: &mut Context<ImageStore>,
+ ) -> Task<Result<Entity<ImageItem>>> {
+ let worktree_id = worktree.read(cx).id().to_proto();
+ let (project_id, client) = {
+ let store = self.read(cx);
+ (store.project_id, store.upstream_client.clone())
+ };
+ let remote_store = self.clone();
+
+ cx.spawn(async move |_image_store, cx| {
+ let response = client
+ .request(rpc::proto::OpenImageByPath {
+ project_id,
+ worktree_id,
+ path: path.to_proto(),
+ })
+ .await?;
+
+ let image_id = ImageId::from(
+ NonZeroU64::new(response.image_id).context("invalid image_id in response")?,
+ );
+
+ remote_store
+ .update(cx, |remote_store, cx| {
+ remote_store.wait_for_remote_image(image_id, cx)
+ })?
+ .await
+ })
+ }
+
+ fn reload_images(
+ &self,
+ _images: HashSet<Entity<ImageItem>>,
+ _cx: &mut Context<ImageStore>,
+ ) -> Task<Result<()>> {
+ Task::ready(Err(anyhow::anyhow!(
+ "Reloading images from remote is not supported"
+ )))
+ }
+
+ fn as_local(&self) -> Option<Entity<LocalImageStore>> {
+ None
+ }
+
+ fn as_remote(&self) -> Option<Entity<RemoteImageStore>> {
+ Some(self.clone())
+ }
}
impl LocalImageStore {
@@ -694,33 +905,6 @@ fn create_gpui_image(content: Vec<u8>) -> anyhow::Result<Arc<gpui::Image>> {
)))
}
-impl ImageStoreImpl for Entity<RemoteImageStore> {
- fn open_image(
- &self,
- _path: Arc<RelPath>,
- _worktree: Entity<Worktree>,
- _cx: &mut Context<ImageStore>,
- ) -> Task<Result<Entity<ImageItem>>> {
- Task::ready(Err(anyhow::anyhow!(
- "Opening images from remote is not supported"
- )))
- }
-
- fn reload_images(
- &self,
- _images: HashSet<Entity<ImageItem>>,
- _cx: &mut Context<ImageStore>,
- ) -> Task<Result<()>> {
- Task::ready(Err(anyhow::anyhow!(
- "Reloading images from remote is not supported"
- )))
- }
-
- fn as_local(&self) -> Option<Entity<LocalImageStore>> {
- None
- }
-}
-
#[cfg(test)]
mod tests {
use super::*;
@@ -736,8 +920,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
});
}
@@ -782,4 +964,24 @@ mod tests {
assert_eq!(image1, image2);
}
+
+ #[gpui::test]
+ fn test_compute_metadata_from_bytes() {
+ // Single white pixel PNG
+ let png_bytes = vec![
+ 0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48,
+ 0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x06, 0x00, 0x00,
+ 0x00, 0x1F, 0x15, 0xC4, 0x89, 0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, 0x78,
+ 0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05, 0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, 0x00,
+ 0x00, 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82,
+ ];
+
+ let metadata = ImageItem::compute_metadata_from_bytes(&png_bytes).unwrap();
+
+ assert_eq!(metadata.width, 1);
+ assert_eq!(metadata.height, 1);
+ assert_eq!(metadata.file_size, png_bytes.len() as u64);
+ assert_eq!(metadata.format, image::ImageFormat::Png);
+ assert!(metadata.colors.is_some());
+ }
}
@@ -2035,7 +2035,7 @@ impl LocalLspStore {
cx: &mut AsyncApp,
) -> Result<Vec<(Range<Anchor>, Arc<str>)>> {
let logger = zlog::scoped!("lsp_format");
- zlog::info!(logger => "Formatting via LSP");
+ zlog::debug!(logger => "Formatting via LSP");
let uri = file_path_to_lsp_url(abs_path)?;
let text_document = lsp::TextDocumentIdentifier::new(uri);
@@ -4193,7 +4193,7 @@ impl LspStore {
})
.detach();
} else {
- panic!("oops!");
+ // Our remote connection got closed
}
handle
}
@@ -8425,6 +8425,8 @@ impl LspStore {
while let Some((server_id, response_result)) = response_results.next().await {
match response_result {
Ok(response) => responses.push((server_id, response)),
+ // rust-analyzer likes to error with this when its still loading up
+ Err(e) if e.to_string().ends_with("content modified") => (),
Err(e) => log::error!("Error handling response for request {request:?}: {e:#}"),
}
}
@@ -12418,6 +12420,8 @@ impl LspStore {
let mut responses = Vec::new();
match server_task.await {
Ok(response) => responses.push((server_id, response)),
+ // rust-analyzer likes to error with this when its still loading up
+ Err(e) if e.to_string().ends_with("content modified") => (),
Err(e) => log::error!(
"Error handling response for request {request:?}: {e:#}"
),
@@ -35,7 +35,6 @@ mod yarn;
use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope};
use crate::{
- agent_server_store::AllAgentServersSettings,
git_store::GitStore,
lsp_store::{SymbolLocation, log_store::LogKind},
};
@@ -101,7 +100,7 @@ use rpc::{
};
use search::{SearchInputKind, SearchQuery, SearchResult};
use search_history::SearchHistory;
-use settings::{InvalidSettingsError, Settings, SettingsLocation, SettingsStore};
+use settings::{InvalidSettingsError, RegisterSetting, Settings, SettingsLocation, SettingsStore};
use smol::channel::Receiver;
use snippet::Snippet;
use snippet_provider::SnippetProvider;
@@ -996,7 +995,7 @@ pub enum PulledDiagnostics {
/// Whether to disable all AI features in Zed.
///
/// Default: false
-#[derive(Copy, Clone, Debug)]
+#[derive(Copy, Clone, Debug, RegisterSetting)]
pub struct DisableAiSettings {
pub disable_ai: bool,
}
@@ -1010,16 +1009,8 @@ impl settings::Settings for DisableAiSettings {
}
impl Project {
- pub fn init_settings(cx: &mut App) {
- WorktreeSettings::register(cx);
- ProjectSettings::register(cx);
- DisableAiSettings::register(cx);
- AllAgentServersSettings::register(cx);
- }
-
pub fn init(client: &Arc<Client>, cx: &mut App) {
connection_manager::init(client.clone(), cx);
- Self::init_settings(cx);
let client: AnyProtoClient = client.clone().into();
client.add_entity_message_handler(Self::handle_add_collaborator);
@@ -1037,6 +1028,7 @@ impl Project {
client.add_entity_request_handler(Self::handle_open_new_buffer);
client.add_entity_message_handler(Self::handle_create_buffer_for_peer);
client.add_entity_message_handler(Self::handle_toggle_lsp_logs);
+ client.add_entity_message_handler(Self::handle_create_image_for_peer);
WorktreeStore::init(&client);
BufferStore::init(&client);
@@ -1433,6 +1425,7 @@ impl Project {
remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.agent_server_store);
remote_proto.add_entity_message_handler(Self::handle_create_buffer_for_peer);
+ remote_proto.add_entity_message_handler(Self::handle_create_image_for_peer);
remote_proto.add_entity_message_handler(Self::handle_update_worktree);
remote_proto.add_entity_message_handler(Self::handle_update_project);
remote_proto.add_entity_message_handler(Self::handle_toast);
@@ -2853,13 +2846,20 @@ impl Project {
let weak_project = cx.entity().downgrade();
cx.spawn(async move |_, cx| {
let image_item = open_image_task.await?;
- let project = weak_project.upgrade().context("Project dropped")?;
- let metadata = ImageItem::load_image_metadata(image_item.clone(), project, cx).await?;
- image_item.update(cx, |image_item, cx| {
- image_item.image_metadata = Some(metadata);
- cx.emit(ImageItemEvent::MetadataUpdated);
- })?;
+ // Check if metadata already exists (e.g., for remote images)
+ let needs_metadata =
+ cx.read_entity(&image_item, |item, _| item.image_metadata.is_none())?;
+
+ if needs_metadata {
+ let project = weak_project.upgrade().context("Project dropped")?;
+ let metadata =
+ ImageItem::load_image_metadata(image_item.clone(), project, cx).await?;
+ image_item.update(cx, |image_item, cx| {
+ image_item.image_metadata = Some(metadata);
+ cx.emit(ImageItemEvent::MetadataUpdated);
+ })?;
+ }
Ok(image_item)
})
@@ -3323,6 +3323,7 @@ impl Project {
event: &ImageItemEvent,
cx: &mut Context<Self>,
) -> Option<()> {
+ // TODO: handle image events from remote
if let ImageItemEvent::ReloadNeeded = event
&& !self.is_via_collab()
{
@@ -5060,6 +5061,20 @@ impl Project {
buffer.read(cx).remote_id()
}
+ async fn handle_create_image_for_peer(
+ this: Entity<Self>,
+ envelope: TypedEnvelope<proto::CreateImageForPeer>,
+ mut cx: AsyncApp,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ this.image_store.update(cx, |image_store, cx| {
+ image_store.handle_create_image_for_peer(envelope, cx)
+ })
+ })?
+ .log_err();
+ Ok(())
+ }
+
fn synchronize_remote_buffers(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
let project_id = match self.client_state {
ProjectClientState::Remote {
@@ -5735,7 +5750,6 @@ mod disable_ai_settings_tests {
async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
cx.update(|cx| {
settings::init(cx);
- Project::init_settings(cx);
// Test 1: Default is false (AI enabled)
assert!(
@@ -20,8 +20,8 @@ use serde::{Deserialize, Serialize};
pub use settings::DirenvSettings;
pub use settings::LspSettings;
use settings::{
- DapSettingsContent, InvalidSettingsError, LocalSettingsKind, Settings, SettingsLocation,
- SettingsStore, parse_json_with_comments, watch_config_file,
+ DapSettingsContent, InvalidSettingsError, LocalSettingsKind, RegisterSetting, Settings,
+ SettingsLocation, SettingsStore, parse_json_with_comments, watch_config_file,
};
use std::{path::PathBuf, sync::Arc, time::Duration};
use task::{DebugTaskFile, TaskTemplates, VsCodeDebugTaskFile, VsCodeTaskFile};
@@ -33,7 +33,7 @@ use crate::{
worktree_store::{WorktreeStore, WorktreeStoreEvent},
};
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, RegisterSetting)]
pub struct ProjectSettings {
/// Configuration for language servers.
///
@@ -2,7 +2,7 @@
use crate::{
Event,
- git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
+ git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
task_inventory::TaskContexts,
task_store::TaskSettingsLocation,
*,
@@ -20,7 +20,7 @@ use git::{
status::{StatusCode, TrackedStatus},
};
use git2::RepositoryInitOptions;
-use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
+use gpui::{App, BackgroundExecutor, FutureExt, SemanticVersion, UpdateGlobal};
use itertools::Itertools;
use language::{
Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
@@ -50,6 +50,7 @@ use std::{
sync::{Arc, OnceLock},
task::Poll,
};
+use sum_tree::SumTree;
use task::{ResolvedTask, ShellKind, TaskContext};
use unindent::Unindent as _;
use util::{
@@ -8369,6 +8370,443 @@ async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
});
}
+#[track_caller]
+/// We merge lhs into rhs.
+fn merge_pending_ops_snapshots(
+ source: Vec<pending_op::PendingOps>,
+ mut target: Vec<pending_op::PendingOps>,
+) -> Vec<pending_op::PendingOps> {
+ for s_ops in source {
+ if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
+ if ops.repo_path == s_ops.repo_path {
+ Some(idx)
+ } else {
+ None
+ }
+ }) {
+ let t_ops = &mut target[idx];
+ for s_op in s_ops.ops {
+ if let Some(op_idx) = t_ops
+ .ops
+ .iter()
+ .zip(0..)
+ .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
+ {
+ let t_op = &mut t_ops.ops[op_idx];
+ match (s_op.job_status, t_op.job_status) {
+ (pending_op::JobStatus::Running, _) => {}
+ (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
+ (s_st, t_st) if s_st == t_st => {}
+ _ => unreachable!(),
+ }
+ } else {
+ t_ops.ops.push(s_op);
+ }
+ }
+ t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
+ } else {
+ target.push(s_ops);
+ }
+ }
+ target
+}
+
+#[gpui::test]
+async fn test_repository_pending_ops_staging(
+ executor: gpui::BackgroundExecutor,
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx);
+
+ let fs = FakeFs::new(executor);
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "my-repo": {
+ ".git": {},
+ "a.txt": "a",
+ }
+
+ }),
+ )
+ .await;
+
+ fs.set_status_for_repo(
+ path!("/root/my-repo/.git").as_ref(),
+ &[("a.txt", FileStatus::Untracked)],
+ );
+
+ let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
+ let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
+ project.update(cx, |project, cx| {
+ let pending_ops_all = pending_ops_all.clone();
+ cx.subscribe(project.git_store(), move |_, _, e, _| {
+ if let GitStoreEvent::RepositoryUpdated(
+ _,
+ RepositoryEvent::PendingOpsChanged { pending_ops },
+ _,
+ ) = e
+ {
+ let merged = merge_pending_ops_snapshots(
+ pending_ops.items(()),
+ pending_ops_all.lock().items(()),
+ );
+ *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
+ }
+ })
+ .detach();
+ });
+ project
+ .update(cx, |project, cx| project.git_scans_complete(cx))
+ .await;
+
+ let repo = project.read_with(cx, |project, cx| {
+ project.repositories(cx).values().next().unwrap().clone()
+ });
+
+ // Ensure we have no pending ops for any of the untracked files
+ repo.read_with(cx, |repo, _cx| {
+ assert!(repo.pending_ops_by_path.is_empty());
+ });
+
+ let mut id = 1u16;
+
+ let mut assert_stage = async |path: RepoPath, stage| {
+ let git_status = if stage {
+ pending_op::GitStatus::Staged
+ } else {
+ pending_op::GitStatus::Unstaged
+ };
+ repo.update(cx, |repo, cx| {
+ let task = if stage {
+ repo.stage_entries(vec![path.clone()], cx)
+ } else {
+ repo.unstage_entries(vec![path.clone()], cx)
+ };
+ let ops = repo.pending_ops_for_path(&path).unwrap();
+ assert_eq!(
+ ops.ops.last(),
+ Some(&pending_op::PendingOp {
+ id: id.into(),
+ git_status,
+ job_status: pending_op::JobStatus::Running
+ })
+ );
+ task
+ })
+ .await
+ .unwrap();
+
+ repo.read_with(cx, |repo, _cx| {
+ let ops = repo.pending_ops_for_path(&path).unwrap();
+ assert_eq!(
+ ops.ops.last(),
+ Some(&pending_op::PendingOp {
+ id: id.into(),
+ git_status,
+ job_status: pending_op::JobStatus::Finished
+ })
+ );
+ });
+
+ id += 1;
+ };
+
+ assert_stage(repo_path("a.txt"), true).await;
+ assert_stage(repo_path("a.txt"), false).await;
+ assert_stage(repo_path("a.txt"), true).await;
+ assert_stage(repo_path("a.txt"), false).await;
+ assert_stage(repo_path("a.txt"), true).await;
+
+ cx.run_until_parked();
+
+ assert_eq!(
+ pending_ops_all
+ .lock()
+ .get(&worktree::PathKey(repo_path("a.txt").0), ())
+ .unwrap()
+ .ops,
+ vec![
+ pending_op::PendingOp {
+ id: 1u16.into(),
+ git_status: pending_op::GitStatus::Staged,
+ job_status: pending_op::JobStatus::Finished
+ },
+ pending_op::PendingOp {
+ id: 2u16.into(),
+ git_status: pending_op::GitStatus::Unstaged,
+ job_status: pending_op::JobStatus::Finished
+ },
+ pending_op::PendingOp {
+ id: 3u16.into(),
+ git_status: pending_op::GitStatus::Staged,
+ job_status: pending_op::JobStatus::Finished
+ },
+ pending_op::PendingOp {
+ id: 4u16.into(),
+ git_status: pending_op::GitStatus::Unstaged,
+ job_status: pending_op::JobStatus::Finished
+ },
+ pending_op::PendingOp {
+ id: 5u16.into(),
+ git_status: pending_op::GitStatus::Staged,
+ job_status: pending_op::JobStatus::Finished
+ }
+ ],
+ );
+
+ repo.update(cx, |repo, _cx| {
+ let git_statuses = repo.cached_status().collect::<Vec<_>>();
+
+ assert_eq!(
+ git_statuses,
+ [StatusEntry {
+ repo_path: repo_path("a.txt"),
+ status: TrackedStatus {
+ index_status: StatusCode::Added,
+ worktree_status: StatusCode::Unmodified
+ }
+ .into(),
+ }]
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_repository_pending_ops_long_running_staging(
+ executor: gpui::BackgroundExecutor,
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx);
+
+ let fs = FakeFs::new(executor);
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "my-repo": {
+ ".git": {},
+ "a.txt": "a",
+ }
+
+ }),
+ )
+ .await;
+
+ fs.set_status_for_repo(
+ path!("/root/my-repo/.git").as_ref(),
+ &[("a.txt", FileStatus::Untracked)],
+ );
+
+ let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
+ let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
+ project.update(cx, |project, cx| {
+ let pending_ops_all = pending_ops_all.clone();
+ cx.subscribe(project.git_store(), move |_, _, e, _| {
+ if let GitStoreEvent::RepositoryUpdated(
+ _,
+ RepositoryEvent::PendingOpsChanged { pending_ops },
+ _,
+ ) = e
+ {
+ let merged = merge_pending_ops_snapshots(
+ pending_ops.items(()),
+ pending_ops_all.lock().items(()),
+ );
+ *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
+ }
+ })
+ .detach();
+ });
+
+ project
+ .update(cx, |project, cx| project.git_scans_complete(cx))
+ .await;
+
+ let repo = project.read_with(cx, |project, cx| {
+ project.repositories(cx).values().next().unwrap().clone()
+ });
+
+ repo.update(cx, |repo, cx| {
+ repo.stage_entries(vec![repo_path("a.txt")], cx)
+ })
+ .detach();
+
+ repo.update(cx, |repo, cx| {
+ repo.stage_entries(vec![repo_path("a.txt")], cx)
+ })
+ .unwrap()
+ .with_timeout(Duration::from_secs(1), &cx.executor())
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ assert_eq!(
+ pending_ops_all
+ .lock()
+ .get(&worktree::PathKey(repo_path("a.txt").0), ())
+ .unwrap()
+ .ops,
+ vec![
+ pending_op::PendingOp {
+ id: 1u16.into(),
+ git_status: pending_op::GitStatus::Staged,
+ job_status: pending_op::JobStatus::Skipped
+ },
+ pending_op::PendingOp {
+ id: 2u16.into(),
+ git_status: pending_op::GitStatus::Staged,
+ job_status: pending_op::JobStatus::Finished
+ }
+ ],
+ );
+
+ repo.update(cx, |repo, _cx| {
+ let git_statuses = repo.cached_status().collect::<Vec<_>>();
+
+ assert_eq!(
+ git_statuses,
+ [StatusEntry {
+ repo_path: repo_path("a.txt"),
+ status: TrackedStatus {
+ index_status: StatusCode::Added,
+ worktree_status: StatusCode::Unmodified
+ }
+ .into(),
+ }]
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_repository_pending_ops_stage_all(
+ executor: gpui::BackgroundExecutor,
+ cx: &mut gpui::TestAppContext,
+) {
+ init_test(cx);
+
+ let fs = FakeFs::new(executor);
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "my-repo": {
+ ".git": {},
+ "a.txt": "a",
+ "b.txt": "b"
+ }
+
+ }),
+ )
+ .await;
+
+ fs.set_status_for_repo(
+ path!("/root/my-repo/.git").as_ref(),
+ &[
+ ("a.txt", FileStatus::Untracked),
+ ("b.txt", FileStatus::Untracked),
+ ],
+ );
+
+ let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
+ let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
+ project.update(cx, |project, cx| {
+ let pending_ops_all = pending_ops_all.clone();
+ cx.subscribe(project.git_store(), move |_, _, e, _| {
+ if let GitStoreEvent::RepositoryUpdated(
+ _,
+ RepositoryEvent::PendingOpsChanged { pending_ops },
+ _,
+ ) = e
+ {
+ let merged = merge_pending_ops_snapshots(
+ pending_ops.items(()),
+ pending_ops_all.lock().items(()),
+ );
+ *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
+ }
+ })
+ .detach();
+ });
+ project
+ .update(cx, |project, cx| project.git_scans_complete(cx))
+ .await;
+
+ let repo = project.read_with(cx, |project, cx| {
+ project.repositories(cx).values().next().unwrap().clone()
+ });
+
+ repo.update(cx, |repo, cx| {
+ repo.stage_entries(vec![repo_path("a.txt")], cx)
+ })
+ .await
+ .unwrap();
+ repo.update(cx, |repo, cx| repo.stage_all(cx))
+ .await
+ .unwrap();
+ repo.update(cx, |repo, cx| repo.unstage_all(cx))
+ .await
+ .unwrap();
+
+ cx.run_until_parked();
+
+ assert_eq!(
+ pending_ops_all
+ .lock()
+ .get(&worktree::PathKey(repo_path("a.txt").0), ())
+ .unwrap()
+ .ops,
+ vec![
+ pending_op::PendingOp {
+ id: 1u16.into(),
+ git_status: pending_op::GitStatus::Staged,
+ job_status: pending_op::JobStatus::Finished
+ },
+ pending_op::PendingOp {
+ id: 2u16.into(),
+ git_status: pending_op::GitStatus::Unstaged,
+ job_status: pending_op::JobStatus::Finished
+ },
+ ],
+ );
+ assert_eq!(
+ pending_ops_all
+ .lock()
+ .get(&worktree::PathKey(repo_path("b.txt").0), ())
+ .unwrap()
+ .ops,
+ vec![
+ pending_op::PendingOp {
+ id: 1u16.into(),
+ git_status: pending_op::GitStatus::Staged,
+ job_status: pending_op::JobStatus::Finished
+ },
+ pending_op::PendingOp {
+ id: 2u16.into(),
+ git_status: pending_op::GitStatus::Unstaged,
+ job_status: pending_op::JobStatus::Finished
+ },
+ ],
+ );
+
+ repo.update(cx, |repo, _cx| {
+ let git_statuses = repo.cached_status().collect::<Vec<_>>();
+
+ assert_eq!(
+ git_statuses,
+ [
+ StatusEntry {
+ repo_path: repo_path("a.txt"),
+ status: FileStatus::Untracked,
+ },
+ StatusEntry {
+ repo_path: repo_path("b.txt"),
+ status: FileStatus::Untracked,
+ },
+ ]
+ );
+ });
+}
+
#[gpui::test]
async fn test_repository_subfolder_git_status(
executor: gpui::BackgroundExecutor,
@@ -9653,8 +10091,6 @@ pub fn init_test(cx: &mut gpui::TestAppContext) {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
release_channel::init(SemanticVersion::default(), cx);
- language::init(cx);
- Project::init_settings(cx);
});
}
@@ -9747,7 +10183,7 @@ fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
}
}
- fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec<String> {
+ fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
vec![]
}
}
@@ -127,8 +127,10 @@ impl Project {
.language_for_name(&toolchain.language_name.0)
.await
.ok();
- let lister = language?.toolchain_lister();
- return Some(lister?.activation_script(&toolchain, shell_kind));
+ let lister = language?.toolchain_lister()?;
+ return cx
+ .update(|cx| lister.activation_script(&toolchain, shell_kind, cx))
+ .ok();
}
None
})
@@ -317,7 +319,8 @@ impl Project {
.unwrap_or_else(get_default_system_shell),
None => settings.shell.program(),
};
- let shell_kind = ShellKind::new(&shell, self.path_style(cx).is_windows());
+
+ let is_windows = self.path_style(cx).is_windows();
// Prepare a task for resolving the environment
let env_task =
@@ -325,6 +328,7 @@ impl Project {
let lang_registry = self.languages.clone();
cx.spawn(async move |project, cx| {
+ let shell_kind = ShellKind::new(&shell, is_windows);
let mut env = env_task.await.unwrap_or_default();
env.extend(settings.env);
@@ -337,13 +341,16 @@ impl Project {
.language_for_name(&toolchain.language_name.0)
.await
.ok();
- let lister = language?.toolchain_lister();
- return Some(lister?.activation_script(&toolchain, shell_kind));
+ let lister = language?.toolchain_lister()?;
+ return cx
+ .update(|cx| lister.activation_script(&toolchain, shell_kind, cx))
+ .ok();
}
None
})
.await
.unwrap_or_default();
+
let builder = project
.update(cx, move |_, cx| {
let (shell, env) = {
@@ -360,13 +360,7 @@ impl FoldedAncestors {
}
}
-pub fn init_settings(cx: &mut App) {
- ProjectPanelSettings::register(cx);
-}
-
pub fn init(cx: &mut App) {
- init_settings(cx);
-
cx.observe_new(|workspace: &mut Workspace, _, _| {
workspace.register_action(|workspace, _: &ToggleFocus, window, cx| {
workspace.toggle_panel_focus::<ProjectPanel>(window, cx);
@@ -2,13 +2,16 @@ use editor::EditorSettings;
use gpui::Pixels;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::{DockSide, ProjectPanelEntrySpacing, Settings, ShowDiagnostics, ShowIndentGuides};
+use settings::{
+ DockSide, ProjectPanelEntrySpacing, RegisterSetting, Settings, ShowDiagnostics,
+ ShowIndentGuides,
+};
use ui::{
px,
scrollbars::{ScrollbarVisibility, ShowScrollbar},
};
-#[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
+#[derive(Deserialize, Debug, Clone, Copy, PartialEq, RegisterSetting)]
pub struct ProjectPanelSettings {
pub button: bool,
pub hide_gitignore: bool,
@@ -7333,14 +7333,8 @@ fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
- editor::init_settings(cx);
crate::init(cx);
- workspace::init_settings(cx);
- client::init_settings(cx);
- Project::init_settings(cx);
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings(cx, |settings| {
@@ -7358,12 +7352,9 @@ fn init_test_with_editor(cx: &mut TestAppContext) {
cx.update(|cx| {
let app_state = AppState::test(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- init_settings(cx);
- language::init(cx);
editor::init(cx);
crate::init(cx);
workspace::init(app_state, cx);
- Project::init_settings(cx);
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings(cx, |settings| {
@@ -439,9 +439,6 @@ mod tests {
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
release_channel::init(SemanticVersion::default(), cx);
- language::init(cx);
- Project::init_settings(cx);
- workspace::init_settings(cx);
editor::init(cx);
});
}
@@ -0,0 +1,36 @@
+syntax = "proto3";
+package zed.messages;
+
+import "core.proto";
+import "worktree.proto";
+
+message OpenImageByPath {
+ uint64 project_id = 1;
+ uint64 worktree_id = 2;
+ string path = 3;
+}
+
+message OpenImageResponse {
+ uint64 image_id = 1;
+}
+
+message CreateImageForPeer {
+ uint64 project_id = 1;
+ PeerId peer_id = 2;
+ oneof variant {
+ ImageState state = 3;
+ ImageChunk chunk = 4;
+ }
+}
+
+message ImageState {
+ uint64 id = 1;
+ optional File file = 2;
+ uint64 content_size = 3;
+ string format = 4; // e.g., "png", "jpeg", "webp", etc.
+}
+
+message ImageChunk {
+ uint64 image_id = 1;
+ bytes data = 2;
+}
@@ -9,6 +9,7 @@ import "channel.proto";
import "core.proto";
import "debugger.proto";
import "git.proto";
+import "image.proto";
import "lsp.proto";
import "notification.proto";
import "task.proto";
@@ -431,7 +432,11 @@ message Envelope {
GitWorktreesResponse git_worktrees_response = 388;
GitGetWorktrees git_get_worktrees = 389;
- GitCreateWorktree git_create_worktree = 390; // current max
+ GitCreateWorktree git_create_worktree = 390;
+
+ OpenImageByPath open_image_by_path = 391;
+ OpenImageResponse open_image_response = 392;
+ CreateImageForPeer create_image_for_peer = 393; // current max
}
reserved 87 to 88;
@@ -51,6 +51,7 @@ messages!(
(Commit, Background),
(CopyProjectEntry, Foreground),
(CreateBufferForPeer, Foreground),
+ (CreateImageForPeer, Foreground),
(CreateChannel, Foreground),
(CreateChannelResponse, Foreground),
(CreateContext, Foreground),
@@ -179,9 +180,11 @@ messages!(
(OnTypeFormattingResponse, Background),
(OpenBufferById, Background),
(OpenBufferByPath, Background),
+ (OpenImageByPath, Background),
(OpenBufferForSymbol, Background),
(OpenBufferForSymbolResponse, Background),
(OpenBufferResponse, Background),
+ (OpenImageResponse, Background),
(OpenCommitMessageBuffer, Background),
(OpenContext, Foreground),
(OpenContextResponse, Foreground),
@@ -397,6 +400,7 @@ request_messages!(
(OnTypeFormatting, OnTypeFormattingResponse),
(OpenBufferById, OpenBufferResponse),
(OpenBufferByPath, OpenBufferResponse),
+ (OpenImageByPath, OpenImageResponse),
(OpenBufferForSymbol, OpenBufferForSymbolResponse),
(OpenCommitMessageBuffer, OpenBufferResponse),
(OpenNewBuffer, OpenBufferResponse),
@@ -545,6 +549,7 @@ entity_messages!(
GetColorPresentation,
CopyProjectEntry,
CreateBufferForPeer,
+ CreateImageForPeer,
CreateProjectEntry,
GetDocumentColor,
DeleteProjectEntry,
@@ -581,6 +586,7 @@ entity_messages!(
OpenNewBuffer,
OpenBufferById,
OpenBufferByPath,
+ OpenImageByPath,
OpenBufferForSymbol,
OpenCommitMessageBuffer,
PerformRename,
@@ -34,8 +34,6 @@ use workspace::{
use zed_actions::{OpenRecent, OpenRemote};
pub fn init(cx: &mut App) {
- SshSettings::register(cx);
-
#[cfg(target_os = "windows")]
cx.on_action(|open_wsl: &zed_actions::wsl_actions::OpenFolderInWsl, cx| {
let create_new_window = open_wsl.create_new_window;
@@ -762,10 +760,9 @@ impl Render for MatchTooltip {
mod tests {
use std::path::PathBuf;
- use dap::debugger_settings::DebuggerSettings;
use editor::Editor;
use gpui::{TestAppContext, UpdateGlobal, WindowHandle};
- use project::Project;
+
use serde_json::json;
use settings::SettingsStore;
use util::path;
@@ -911,12 +908,8 @@ mod tests {
fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
cx.update(|cx| {
let state = AppState::test(cx);
- language::init(cx);
crate::init(cx);
editor::init(cx);
- workspace::init_settings(cx);
- DebuggerSettings::register(cx);
- Project::init_settings(cx);
state
})
}
@@ -23,7 +23,7 @@ use remote::{
SshConnectionOptions,
};
pub use settings::SshConnection;
-use settings::{ExtendingVec, Settings, WslConnection};
+use settings::{ExtendingVec, RegisterSetting, Settings, WslConnection};
use theme::ThemeSettings;
use ui::{
ActiveTheme, Color, CommonAnimationExt, Context, Icon, IconName, IconSize, InteractiveElement,
@@ -32,6 +32,7 @@ use ui::{
use util::paths::PathWithPosition;
use workspace::{AppState, ModalView, Workspace};
+#[derive(RegisterSetting)]
pub struct SshSettings {
pub ssh_connections: ExtendingVec<SshConnection>,
pub wsl_connections: ExtendingVec<WslConnection>,
@@ -2199,11 +2199,9 @@ impl RemoteServerProjects {
fn spawn_ssh_config_watch(fs: Arc<dyn Fs>, cx: &Context<RemoteServerProjects>) -> Task<()> {
let mut user_ssh_config_watcher =
watch_config_file(cx.background_executor(), fs.clone(), user_ssh_config_file());
- let mut global_ssh_config_watcher = watch_config_file(
- cx.background_executor(),
- fs,
- global_ssh_config_file().to_owned(),
- );
+ let mut global_ssh_config_watcher = global_ssh_config_file()
+ .map(|it| watch_config_file(cx.background_executor(), fs, it.to_owned()))
+ .unwrap_or_else(|| futures::channel::mpsc::unbounded().1);
cx.spawn(async move |remote_server_projects, cx| {
let mut global_hosts = BTreeSet::default();
@@ -306,7 +306,7 @@ impl RemoteConnection for SshRemoteConnection {
use futures::AsyncWriteExt;
let sftp_batch = format!("put -r {src_path_display} {dest_path_str}\n");
stdin.write_all(sftp_batch.as_bytes()).await?;
- drop(stdin);
+ stdin.flush().await?;
}
let output = child.output().await?;
@@ -39,6 +39,7 @@ git2 = { workspace = true, features = ["vendored-libgit2"] }
gpui.workspace = true
gpui_tokio.workspace = true
http_client.workspace = true
+image.workspace = true
json_schema_store.workspace = true
language.workspace = true
language_extension.workspace = true
@@ -1,4 +1,5 @@
use anyhow::{Context as _, Result, anyhow};
+use language::File;
use lsp::LanguageServerId;
use extension::ExtensionHostProxy;
@@ -15,6 +16,7 @@ use project::{
buffer_store::{BufferStore, BufferStoreEvent},
debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore},
git_store::GitStore,
+ image_store::ImageId,
lsp_store::log_store::{self, GlobalLogStore, LanguageServerKind},
project_settings::SettingsObserver,
search::SearchQuery,
@@ -26,11 +28,15 @@ use rpc::{
proto::{self, REMOTE_SERVER_PEER_ID, REMOTE_SERVER_PROJECT_ID},
};
-use settings::{Settings as _, initial_server_settings_content};
+use settings::initial_server_settings_content;
use smol::stream::StreamExt;
use std::{
+ num::NonZeroU64,
path::{Path, PathBuf},
- sync::{Arc, atomic::AtomicUsize},
+ sync::{
+ Arc,
+ atomic::{AtomicU64, AtomicUsize, Ordering},
+ },
};
use sysinfo::{ProcessRefreshKind, RefreshKind, System, UpdateKind};
use util::{ResultExt, paths::PathStyle, rel_path::RelPath};
@@ -68,9 +74,6 @@ pub struct HeadlessAppState {
impl HeadlessProject {
pub fn init(cx: &mut App) {
settings::init(cx);
- language::init(cx);
- project::Project::init_settings(cx);
- extension_host::ExtensionSettings::register(cx);
log_store::init(true, cx);
}
@@ -260,6 +263,7 @@ impl HeadlessProject {
session.add_entity_request_handler(Self::handle_open_server_settings);
session.add_entity_request_handler(Self::handle_get_directory_environment);
session.add_entity_message_handler(Self::handle_toggle_lsp_logs);
+ session.add_entity_request_handler(Self::handle_open_image_by_path);
session.add_entity_request_handler(BufferStore::handle_update_buffer);
session.add_entity_message_handler(BufferStore::handle_close_buffer);
@@ -525,6 +529,71 @@ impl HeadlessProject {
})
}
+ pub async fn handle_open_image_by_path(
+ this: Entity<Self>,
+ message: TypedEnvelope<proto::OpenImageByPath>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::OpenImageResponse> {
+ static NEXT_ID: AtomicU64 = AtomicU64::new(1);
+ let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
+ let path = RelPath::from_proto(&message.payload.path)?;
+ let project_id = message.payload.project_id;
+ use proto::create_image_for_peer::Variant;
+
+ let (worktree_store, session) = this.read_with(&cx, |this, _| {
+ (this.worktree_store.clone(), this.session.clone())
+ })?;
+
+ let worktree = worktree_store
+ .read_with(&cx, |store, cx| store.worktree_for_id(worktree_id, cx))?
+ .context("worktree not found")?;
+
+ let load_task = worktree.update(&mut cx, |worktree, cx| {
+ worktree.load_binary_file(path.as_ref(), cx)
+ })?;
+
+ let loaded_file = load_task.await?;
+ let content = loaded_file.content;
+ let file = loaded_file.file;
+
+ let proto_file = worktree.read_with(&cx, |_worktree, cx| file.to_proto(cx))?;
+ let image_id =
+ ImageId::from(NonZeroU64::new(NEXT_ID.fetch_add(1, Ordering::Relaxed)).unwrap());
+
+ let format = image::guess_format(&content)
+ .map(|f| format!("{:?}", f).to_lowercase())
+ .unwrap_or_else(|_| "unknown".to_string());
+
+ let state = proto::ImageState {
+ id: image_id.to_proto(),
+ file: Some(proto_file),
+ content_size: content.len() as u64,
+ format,
+ };
+
+ session.send(proto::CreateImageForPeer {
+ project_id,
+ peer_id: Some(REMOTE_SERVER_PEER_ID),
+ variant: Some(Variant::State(state)),
+ })?;
+
+ const CHUNK_SIZE: usize = 1024 * 1024; // 1MB chunks
+ for chunk in content.chunks(CHUNK_SIZE) {
+ session.send(proto::CreateImageForPeer {
+ project_id,
+ peer_id: Some(REMOTE_SERVER_PEER_ID),
+ variant: Some(Variant::Chunk(proto::ImageChunk {
+ image_id: image_id.to_proto(),
+ data: chunk.to_vec(),
+ })),
+ })?;
+ }
+
+ Ok(proto::OpenImageResponse {
+ image_id: image_id.to_proto(),
+ })
+ }
+
pub async fn handle_open_new_buffer(
this: Entity<Self>,
_message: TypedEnvelope<proto::OpenNewBuffer>,
@@ -1493,10 +1493,7 @@ async fn test_remote_git_diffs_when_recv_update_repository_delay(
.await
.unwrap();
let buffer_id = cx.update(|cx| buffer.read(cx).remote_id());
- cx.update(|cx| {
- workspace::init_settings(cx);
- editor::init_settings(cx);
- });
+
let cx = cx.add_empty_window();
let editor = cx.new_window_entity(|window, cx| {
Editor::for_buffer(buffer, Some(project.clone()), window, cx)
@@ -1853,8 +1850,6 @@ pub async fn init_test(
let proxy = Arc::new(ExtensionHostProxy::new());
server_cx.update(HeadlessProject::init);
let headless = server_cx.new(|cx| {
- client::init_settings(cx);
-
HeadlessProject::new(
crate::HeadlessAppState {
session: ssh_server_client,
@@ -1906,7 +1901,6 @@ fn build_project(ssh: Entity<RemoteClient>, cx: &mut TestAppContext) -> Entity<P
cx.update(|cx| {
Project::init(&client, cx);
- language::init(cx);
});
cx.update(|cx| Project::remote(ssh, client, node, user_store, languages, fs, cx))
@@ -397,8 +397,6 @@ pub fn execute_run(
log::info!("gpui app started, initializing server");
let session = start_server(listeners, log_rx, cx);
- client::init_settings(cx);
-
GitHostingProviderRegistry::set_global(git_hosting_provider_registry, cx);
git_hosting_providers::init(cx);
dap_adapters::init(cx);
@@ -2,9 +2,9 @@ use collections::HashMap;
use editor::EditorSettings;
use gpui::App;
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
-#[derive(Debug, Default)]
+#[derive(Debug, Default, RegisterSetting)]
pub struct JupyterSettings {
pub kernel_selections: HashMap<String, String>,
}
@@ -15,7 +15,6 @@ use async_dispatcher::{Dispatcher, Runnable, set_dispatcher};
use gpui::{App, PlatformDispatcher};
use project::Fs;
pub use runtimelib::ExecutionState;
-use settings::Settings as _;
pub use crate::jupyter_settings::JupyterSettings;
pub use crate::kernels::{Kernel, KernelSpecification, KernelStatus};
@@ -31,9 +30,6 @@ pub const KERNEL_DOCS_URL: &str = "https://zed.dev/docs/repl#changing-kernels";
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
set_dispatcher(zed_dispatcher(cx));
- JupyterSettings::register(cx);
- ::editor::init_settings(cx);
- ReplSettings::register(cx);
repl_sessions_ui::init(cx);
ReplStore::init(fs, cx);
}
@@ -1,7 +1,7 @@
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
/// Settings for configuring REPL display and behavior.
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, RegisterSetting)]
pub struct ReplSettings {
/// Maximum number of lines to keep in REPL's scrollback buffer.
/// Clamped with [4, 256] range.
@@ -1532,7 +1532,6 @@ mod tests {
};
use gpui::{Hsla, TestAppContext, UpdateGlobal, VisualTestContext};
use language::{Buffer, Point};
- use project::Project;
use settings::{SearchSettingsContent, SettingsStore};
use smol::stream::StreamExt as _;
use unindent::Unindent as _;
@@ -1542,11 +1541,8 @@ mod tests {
cx.update(|cx| {
let store = settings::SettingsStore::test(cx);
cx.set_global(store);
- workspace::init_settings(cx);
editor::init(cx);
- language::init(cx);
- Project::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
crate::init(cx);
});
@@ -1685,7 +1681,9 @@ mod tests {
assert_eq!(search_bar.active_match_index, Some(0));
search_bar.select_next_match(&SelectNextMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(0), 41)..DisplayPoint::new(DisplayRow(0), 43)]
);
});
@@ -1696,7 +1694,9 @@ mod tests {
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.select_next_match(&SelectNextMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(3), 11)..DisplayPoint::new(DisplayRow(3), 13)]
);
});
@@ -1707,7 +1707,9 @@ mod tests {
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.select_next_match(&SelectNextMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(3), 56)..DisplayPoint::new(DisplayRow(3), 58)]
);
});
@@ -1718,7 +1720,9 @@ mod tests {
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.select_next_match(&SelectNextMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(0), 41)..DisplayPoint::new(DisplayRow(0), 43)]
);
});
@@ -1729,7 +1733,9 @@ mod tests {
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.select_prev_match(&SelectPreviousMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(3), 56)..DisplayPoint::new(DisplayRow(3), 58)]
);
});
@@ -1740,7 +1746,9 @@ mod tests {
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.select_prev_match(&SelectPreviousMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(3), 11)..DisplayPoint::new(DisplayRow(3), 13)]
);
});
@@ -1751,7 +1759,9 @@ mod tests {
search_bar.update_in(cx, |search_bar, window, cx| {
search_bar.select_prev_match(&SelectPreviousMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(0), 41)..DisplayPoint::new(DisplayRow(0), 43)]
);
});
@@ -1772,7 +1782,9 @@ mod tests {
assert_eq!(search_bar.active_match_index, Some(1));
search_bar.select_prev_match(&SelectPreviousMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(0), 41)..DisplayPoint::new(DisplayRow(0), 43)]
);
});
@@ -1793,7 +1805,9 @@ mod tests {
assert_eq!(search_bar.active_match_index, Some(1));
search_bar.select_next_match(&SelectNextMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(3), 11)..DisplayPoint::new(DisplayRow(3), 13)]
);
});
@@ -1814,7 +1828,9 @@ mod tests {
assert_eq!(search_bar.active_match_index, Some(2));
search_bar.select_prev_match(&SelectPreviousMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(3), 56)..DisplayPoint::new(DisplayRow(3), 58)]
);
});
@@ -1835,7 +1851,9 @@ mod tests {
assert_eq!(search_bar.active_match_index, Some(2));
search_bar.select_next_match(&SelectNextMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(0), 41)..DisplayPoint::new(DisplayRow(0), 43)]
);
});
@@ -1856,7 +1874,9 @@ mod tests {
assert_eq!(search_bar.active_match_index, Some(0));
search_bar.select_prev_match(&SelectPreviousMatch, window, cx);
assert_eq!(
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(3), 56)..DisplayPoint::new(DisplayRow(3), 58)]
);
});
@@ -1989,7 +2009,7 @@ mod tests {
"Initially, the editor should not be focused"
);
let initial_selections = editor.update(cx, |editor, cx| {
- let initial_selections = editor.selections.display_ranges(cx);
+ let initial_selections = editor.selections.display_ranges(&editor.display_snapshot(cx));
assert_eq!(
initial_selections.len(), 1,
"Expected to have only one selection before adding carets to all matches, but got: {initial_selections:?}",
@@ -2008,7 +2028,7 @@ mod tests {
);
search_bar.update(cx, |search_bar, cx| {
let all_selections =
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
+ editor.update(cx, |editor, cx| editor.selections.display_ranges(&editor.display_snapshot(cx)));
assert_eq!(
all_selections.len(),
expected_query_matches_count,
@@ -2032,8 +2052,11 @@ mod tests {
"Should still have editor focused after SelectNextMatch"
);
search_bar.update(cx, |search_bar, cx| {
- let all_selections =
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
+ let all_selections = editor.update(cx, |editor, cx| {
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))
+ });
assert_eq!(
all_selections.len(),
1,
@@ -2062,7 +2085,7 @@ mod tests {
);
search_bar.update(cx, |search_bar, cx| {
let all_selections =
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
+ editor.update(cx, |editor, cx| editor.selections.display_ranges(&editor.display_snapshot(cx)));
assert_eq!(
all_selections.len(),
expected_query_matches_count,
@@ -2087,8 +2110,11 @@ mod tests {
);
search_bar.update(cx, |search_bar, cx| {
- let all_selections =
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
+ let all_selections = editor.update(cx, |editor, cx| {
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))
+ });
assert_eq!(
all_selections.len(),
1,
@@ -2130,7 +2156,7 @@ mod tests {
);
search_bar.update(cx, |search_bar, cx| {
let all_selections =
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
+ editor.update(cx, |editor, cx| editor.selections.display_ranges(&editor.display_snapshot(cx)));
assert_eq!(
all_selections, last_match_selections,
"Should not select anything new if there are no matches"
@@ -2194,8 +2220,11 @@ mod tests {
search_bar.select_all_matches(&SelectAllMatches, window, cx);
});
search_bar.update(cx, |_, cx| {
- let all_selections =
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
+ let all_selections = editor.update(cx, |editor, cx| {
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))
+ });
assert_eq!(
all_selections.len(),
2,
@@ -2220,8 +2249,11 @@ mod tests {
search_bar.select_all_matches(&SelectAllMatches, window, cx);
});
search_bar.update(cx, |_, cx| {
- let all_selections =
- editor.update(cx, |editor, cx| editor.selections.display_ranges(cx));
+ let all_selections = editor.update(cx, |editor, cx| {
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))
+ });
assert_eq!(
all_selections.len(),
2,
@@ -2526,7 +2526,7 @@ pub mod tests {
assert_eq!(
search_view
.results_editor
- .update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ .update(cx, |editor, cx| editor.selections.display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(2), 32)..DisplayPoint::new(DisplayRow(2), 35)]
);
@@ -2537,9 +2537,9 @@ pub mod tests {
.update(cx, |search_view, window, cx| {
assert_eq!(search_view.active_match_index, Some(1));
assert_eq!(
- search_view
- .results_editor
- .update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ search_view.results_editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(2), 37)..DisplayPoint::new(DisplayRow(2), 40)]
);
search_view.select_match(Direction::Next, window, cx);
@@ -2550,9 +2550,9 @@ pub mod tests {
.update(cx, |search_view, window, cx| {
assert_eq!(search_view.active_match_index, Some(2));
assert_eq!(
- search_view
- .results_editor
- .update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ search_view.results_editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(5), 6)..DisplayPoint::new(DisplayRow(5), 9)]
);
search_view.select_match(Direction::Next, window, cx);
@@ -2563,9 +2563,9 @@ pub mod tests {
.update(cx, |search_view, window, cx| {
assert_eq!(search_view.active_match_index, Some(0));
assert_eq!(
- search_view
- .results_editor
- .update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ search_view.results_editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(2), 32)..DisplayPoint::new(DisplayRow(2), 35)]
);
search_view.select_match(Direction::Prev, window, cx);
@@ -2576,9 +2576,9 @@ pub mod tests {
.update(cx, |search_view, window, cx| {
assert_eq!(search_view.active_match_index, Some(2));
assert_eq!(
- search_view
- .results_editor
- .update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ search_view.results_editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(5), 6)..DisplayPoint::new(DisplayRow(5), 9)]
);
search_view.select_match(Direction::Prev, window, cx);
@@ -2589,9 +2589,9 @@ pub mod tests {
.update(cx, |search_view, _, cx| {
assert_eq!(search_view.active_match_index, Some(1));
assert_eq!(
- search_view
- .results_editor
- .update(cx, |editor, cx| editor.selections.display_ranges(cx)),
+ search_view.results_editor.update(cx, |editor, cx| editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx))),
[DisplayPoint::new(DisplayRow(2), 37)..DisplayPoint::new(DisplayRow(2), 40)]
);
})
@@ -4547,11 +4547,7 @@ pub mod tests {
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
- client::init_settings(cx);
editor::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
crate::init(cx);
});
}
@@ -25,6 +25,7 @@ futures.workspace = true
gpui.workspace = true
inventory.workspace = true
log.workspace = true
+migrator.workspace = true
paths.workspace = true
release_channel.workspace = true
rust-embed.workspace = true
@@ -40,7 +41,6 @@ smallvec.workspace = true
strum.workspace = true
util.workspace = true
zlog.workspace = true
-migrator.workspace = true
[dev-dependencies]
fs = { workspace = true, features = ["test-support"] }
@@ -3,12 +3,14 @@ use std::fmt::{Display, Formatter};
use crate::{self as settings, settings_content::BaseKeymapContent};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
/// Base key bindings scheme. Base keymaps can be overridden with user keymaps.
///
/// Default: VSCode
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default)]
+#[derive(
+ Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default, RegisterSetting,
+)]
pub enum BaseKeymap {
#[default]
VSCode,
@@ -9,6 +9,13 @@ mod settings_store;
mod vscode_import;
pub use settings_content::*;
+pub use settings_macros::RegisterSetting;
+
+#[doc(hidden)]
+pub mod private {
+ pub use crate::settings_store::{RegisteredSetting, SettingValue};
+ pub use inventory;
+}
use gpui::{App, Global};
use rust_embed::RustEmbed;
@@ -81,7 +88,6 @@ pub struct SettingsAssets;
pub fn init(cx: &mut App) {
let settings = SettingsStore::new(cx, &default_settings());
cx.set_global(settings);
- BaseKeymap::register(cx);
SettingsStore::observe_active_settings_profile_name(cx).detach();
}
@@ -348,6 +348,22 @@ pub struct TerminalToolbarContent {
pub breadcrumbs: Option<bool>,
}
+#[derive(
+ Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom,
+)]
+#[serde(rename_all = "snake_case")]
+pub enum CondaManager {
+ /// Automatically detect the conda manager
+ #[default]
+ Auto,
+ /// Use conda
+ Conda,
+ /// Use mamba
+ Mamba,
+ /// Use micromamba
+ Micromamba,
+}
+
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
#[serde(rename_all = "snake_case")]
pub enum VenvSettings {
@@ -360,6 +376,10 @@ pub enum VenvSettings {
activate_script: Option<ActivateScript>,
venv_name: Option<String>,
directories: Option<Vec<PathBuf>>,
+ /// Preferred Conda manager to use when activating Conda environments.
+ ///
+ /// Default: auto
+ conda_manager: Option<CondaManager>,
},
}
#[skip_serializing_none]
@@ -367,6 +387,7 @@ pub struct VenvSettingsContent<'a> {
pub activate_script: ActivateScript,
pub venv_name: &'a str,
pub directories: &'a [PathBuf],
+ pub conda_manager: CondaManager,
}
impl VenvSettings {
@@ -377,10 +398,12 @@ impl VenvSettings {
activate_script,
venv_name,
directories,
+ conda_manager,
} => Some(VenvSettingsContent {
activate_script: activate_script.unwrap_or(ActivateScript::Default),
venv_name: venv_name.as_deref().unwrap_or(""),
directories: directories.as_deref().unwrap_or(&[]),
+ conda_manager: conda_manager.unwrap_or(CondaManager::Auto),
}),
}
}
@@ -124,6 +124,14 @@ pub trait Settings: 'static + Send + Sync + Sized {
}
}
+pub struct RegisteredSetting {
+ pub settings_value: fn() -> Box<dyn AnySettingValue>,
+ pub from_settings: fn(&SettingsContent) -> Box<dyn Any>,
+ pub id: fn() -> TypeId,
+}
+
+inventory::collect!(RegisteredSetting);
+
#[derive(Clone, Copy, Debug)]
pub struct SettingsLocation<'a> {
pub worktree_id: WorktreeId,
@@ -220,13 +228,17 @@ pub enum LocalSettingsKind {
impl Global for SettingsStore {}
+#[doc(hidden)]
#[derive(Debug)]
-struct SettingValue<T> {
- global_value: Option<T>,
- local_values: Vec<(WorktreeId, Arc<RelPath>, T)>,
+pub struct SettingValue<T> {
+ #[doc(hidden)]
+ pub global_value: Option<T>,
+ #[doc(hidden)]
+ pub local_values: Vec<(WorktreeId, Arc<RelPath>, T)>,
}
-trait AnySettingValue: 'static + Send + Sync {
+#[doc(hidden)]
+pub trait AnySettingValue: 'static + Send + Sync {
fn setting_type_name(&self) -> &'static str;
fn from_settings(&self, s: &SettingsContent) -> Box<dyn Any>;
@@ -250,7 +262,7 @@ impl SettingsStore {
let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded();
let default_settings: Rc<SettingsContent> =
parse_json_with_comments(default_settings).unwrap();
- Self {
+ let mut this = Self {
setting_values: Default::default(),
default_settings: default_settings.clone(),
global_settings: None,
@@ -268,7 +280,11 @@ impl SettingsStore {
}
}),
file_errors: BTreeMap::default(),
- }
+ };
+
+ this.load_settings_types();
+
+ this
}
pub fn observe_active_settings_profile_name(cx: &mut App) -> gpui::Subscription {
@@ -288,19 +304,34 @@ impl SettingsStore {
/// Add a new type of setting to the store.
pub fn register_setting<T: Settings>(&mut self) {
- let setting_type_id = TypeId::of::<T>();
- let entry = self.setting_values.entry(setting_type_id);
+ self.register_setting_internal(&RegisteredSetting {
+ settings_value: || {
+ Box::new(SettingValue::<T> {
+ global_value: None,
+ local_values: Vec::new(),
+ })
+ },
+ from_settings: |content| Box::new(T::from_settings(content)),
+ id: || TypeId::of::<T>(),
+ });
+ }
+
+ fn load_settings_types(&mut self) {
+ for registered_setting in inventory::iter::<RegisteredSetting>() {
+ self.register_setting_internal(registered_setting);
+ }
+ }
+
+ fn register_setting_internal(&mut self, registered_setting: &RegisteredSetting) {
+ let entry = self.setting_values.entry((registered_setting.id)());
if matches!(entry, hash_map::Entry::Occupied(_)) {
return;
}
- let setting_value = entry.or_insert(Box::new(SettingValue::<T> {
- global_value: None,
- local_values: Vec::new(),
- }));
- let value = T::from_settings(&self.merged_settings);
- setting_value.set_global_value(Box::new(value));
+ let setting_value = entry.or_insert((registered_setting.settings_value)());
+ let value = (registered_setting.from_settings)(&self.merged_settings);
+ setting_value.set_global_value(value);
}
/// Get the value of a setting.
@@ -76,3 +76,27 @@ pub fn derive_merge_from(input: TokenStream) -> TokenStream {
TokenStream::from(expanded)
}
+
+/// Registers the setting type with the SettingsStore. Note that you need to
+/// have `gpui` in your dependencies for this to work.
+#[proc_macro_derive(RegisterSetting)]
+pub fn derive_register_setting(input: TokenStream) -> TokenStream {
+ let input = syn::parse_macro_input!(input as DeriveInput);
+ let type_name = &input.ident;
+
+ quote! {
+ settings::private::inventory::submit! {
+ settings::private::RegisteredSetting {
+ settings_value: || {
+ Box::new(settings::private::SettingValue::<#type_name> {
+ global_value: None,
+ local_values: Vec::new(),
+ })
+ },
+ from_settings: |content| Box::new(<#type_name as settings::Settings>::from_settings(content)),
+ id: || std::any::TypeId::of::<#type_name>(),
+ }
+ }
+ }
+ .into()
+}
@@ -280,10 +280,8 @@ fn display_name(profile_name: &Option<String>) -> String {
#[cfg(test)]
mod tests {
use super::*;
- use client;
use editor;
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext};
- use language;
use menu::{Cancel, Confirm, SelectNext, SelectPrevious};
use project::{FakeFs, Project};
use serde_json::json;
@@ -302,12 +300,8 @@ mod tests {
cx.set_global(settings_store);
settings::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- client::init_settings(cx);
- language::init(cx);
super::init(cx);
editor::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
state
});
@@ -3651,9 +3651,6 @@ pub mod test {
pub fn register_settings(cx: &mut App) {
settings::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- workspace::init_settings(cx);
- project::Project::init_settings(cx);
- language::init(cx);
editor::init(cx);
menu::init();
}
@@ -26,7 +26,6 @@ language.workspace = true
log.workspace = true
menu.workspace = true
picker.workspace = true
-project.workspace = true
reqwest_client.workspace = true
rust-embed.workspace = true
settings.workspace = true
@@ -36,7 +35,6 @@ strum = { workspace = true, features = ["derive"] }
theme.workspace = true
title_bar = { workspace = true, features = ["stories"] }
ui = { workspace = true, features = ["stories"] }
-workspace.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
@@ -14,14 +14,12 @@ use gpui::{
div, px, size,
};
use log::LevelFilter;
-use project::Project;
use reqwest_client::ReqwestClient;
use settings::{KeymapFile, Settings};
use simplelog::SimpleLogger;
use strum::IntoEnumIterator;
use theme::ThemeSettings;
use ui::prelude::*;
-use workspace;
use crate::app_menus::app_menus;
use crate::assets::Assets;
@@ -85,10 +83,7 @@ fn main() {
theme::ThemeSelection::Static(settings::ThemeName(theme_name.into()));
ThemeSettings::override_global(theme_settings, cx);
- language::init(cx);
editor::init(cx);
- Project::init_settings(cx);
- workspace::init_settings(cx);
init(cx);
load_storybook_keymap(cx);
cx.set_menus(app_menus());
@@ -7,6 +7,7 @@ use language::{Anchor, Buffer, BufferSnapshot};
use std::{
ops::{AddAssign, Range},
path::Path,
+ sync::Arc,
time::Duration,
};
use text::{ToOffset, ToPoint};
@@ -51,7 +52,7 @@ fn completion_from_diff(
) -> EditPrediction {
let buffer_text = snapshot.text_for_range(delete_range).collect::<String>();
- let mut edits: Vec<(Range<language::Anchor>, String)> = Vec::new();
+ let mut edits: Vec<(Range<language::Anchor>, Arc<str>)> = Vec::new();
let completion_graphemes: Vec<&str> = completion_text.graphemes(true).collect();
let buffer_graphemes: Vec<&str> = buffer_text.graphemes(true).collect();
@@ -70,7 +71,10 @@ fn completion_from_diff(
if k != 0 {
let offset = snapshot.anchor_after(offset);
// the range from the current position to item is an inlay.
- let edit = (offset..offset, completion_graphemes[i..i + k].join(""));
+ let edit = (
+ offset..offset,
+ completion_graphemes[i..i + k].join("").into(),
+ );
edits.push(edit);
}
i += k + 1;
@@ -90,7 +94,7 @@ fn completion_from_diff(
// there is leftover completion text, so drop it as an inlay.
let edit_range = offset..offset;
let edit_text = completion_graphemes[i..].join("");
- edits.push((edit_range, edit_text));
+ edits.push((edit_range, edit_text.into()));
}
EditPrediction::Local {
@@ -537,7 +537,7 @@ impl TabSwitcherDelegate {
let Some(tab_match) = self.matches.get(ix) else {
return;
};
- let Some(pane) = self.pane.upgrade() else {
+ let Some(pane) = tab_match.pane.upgrade() else {
return;
};
pane.update(cx, |pane, cx| {
@@ -284,11 +284,8 @@ fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
cx.update(|cx| {
let state = AppState::test(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
super::init(cx);
editor::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
state
})
}
@@ -602,11 +602,8 @@ mod tests {
pub(crate) fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
cx.update(|cx| {
let state = AppState::test(cx);
- language::init(cx);
crate::init(cx);
editor::init(cx);
- workspace::init_settings(cx);
- Project::init_settings(cx);
TaskStore::init(None);
state
})
@@ -182,10 +182,6 @@ impl EventListener for ZedListener {
}
}
-pub fn init(cx: &mut App) {
- TerminalSettings::register(cx);
-}
-
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct TerminalBounds {
pub cell_width: Pixels,
@@ -426,7 +422,7 @@ impl TerminalBuilder {
activation_script: Vec<String>,
) -> Task<Result<TerminalBuilder>> {
let version = release_channel::AppVersion::global(cx);
- cx.background_spawn(async move {
+ let fut = async move {
// If the parent environment doesn't have a locale set
// (As is the case when launched from a .app on MacOS),
// and the Project doesn't have a locale set, then
@@ -648,7 +644,13 @@ impl TerminalBuilder {
terminal,
events_rx,
})
- })
+ };
+ // the thread we spawn things on has an effect on signal handling
+ if cfg!(target_os = "unix") {
+ cx.spawn(async move |_| fut.await)
+ } else {
+ cx.background_spawn(fut)
+ }
}
pub fn subscribe(mut self, cx: &Context<Terminal>) -> Terminal {
@@ -8,8 +8,8 @@ use serde::{Deserialize, Serialize};
pub use settings::AlternateScroll;
use settings::{
- ShowScrollbar, TerminalBlink, TerminalDockPosition, TerminalLineHeight, VenvSettings,
- WorkingDirectory, merge_from::MergeFrom,
+ RegisterSetting, ShowScrollbar, TerminalBlink, TerminalDockPosition, TerminalLineHeight,
+ VenvSettings, WorkingDirectory, merge_from::MergeFrom,
};
use task::Shell;
use theme::FontFamilyName;
@@ -19,7 +19,7 @@ pub struct Toolbar {
pub breadcrumbs: bool,
}
-#[derive(Clone, Debug, Deserialize)]
+#[derive(Clone, Debug, Deserialize, RegisterSetting)]
pub struct TerminalSettings {
pub shell: Shell,
pub working_directory: WorkingDirectory,
@@ -1971,10 +1971,6 @@ mod tests {
let store = SettingsStore::test(cx);
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
- client::init_settings(cx);
- language::init(cx);
- Project::init_settings(cx);
- workspace::init_settings(cx);
editor::init(cx);
crate::init(cx);
});
@@ -534,10 +534,7 @@ mod tests {
let fs = app_cx.update(AppState::test).fs.as_fake().clone();
app_cx.update(|cx| {
- terminal::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- Project::init_settings(cx);
- language::init(cx);
editor::init(cx);
});
@@ -95,7 +95,6 @@ actions!(
pub fn init(cx: &mut App) {
assistant_slash_command::init(cx);
terminal_panel::init(cx);
- terminal::init(cx);
register_serializable_item::<TerminalView>(cx);
@@ -1692,10 +1691,7 @@ mod tests {
pub async fn init_test(cx: &mut TestAppContext) -> (Entity<Project>, Entity<Workspace>) {
let params = cx.update(AppState::test);
cx.update(|cx| {
- terminal::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
- Project::init_settings(cx);
- language::init(cx);
});
let project = Project::test(params.fs.clone(), [], cx).await;
@@ -12,7 +12,7 @@ use refineable::Refineable;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
pub use settings::{FontFamilyName, IconThemeName, ThemeMode, ThemeName};
-use settings::{Settings, SettingsContent};
+use settings::{RegisterSetting, Settings, SettingsContent};
use std::sync::Arc;
const MIN_FONT_SIZE: Pixels = px(6.0);
@@ -94,7 +94,7 @@ impl From<settings::UiDensity> for UiDensity {
}
/// Customizable settings for the UI and theme system.
-#[derive(Clone, PartialEq)]
+#[derive(Clone, PartialEq, RegisterSetting)]
pub struct ThemeSettings {
/// The UI font size. Determines the size of text in the UI,
/// as well as the size of a [gpui::Rems] unit.
@@ -108,7 +108,6 @@ pub fn init(themes_to_load: LoadThemes, cx: &mut App) {
ThemeRegistry::global(cx).load_bundled_themes();
}
- ThemeSettings::register(cx);
FontFamilyCache::init_global(cx);
let theme = GlobalTheme::configured_theme(cx);
@@ -110,16 +110,24 @@ impl ApplicationMenu {
.into_iter()
.fold(menu, |menu, item| match item {
OwnedMenuItem::Separator => menu.separator(),
- OwnedMenuItem::Action { name, action, .. } => menu.action(name, action),
+ OwnedMenuItem::Action {
+ name,
+ action,
+ checked,
+ ..
+ } => menu.action_checked(name, action, checked),
OwnedMenuItem::Submenu(submenu) => {
submenu
.items
.into_iter()
.fold(menu, |menu, item| match item {
OwnedMenuItem::Separator => menu.separator(),
- OwnedMenuItem::Action { name, action, .. } => {
- menu.action(name, action)
- }
+ OwnedMenuItem::Action {
+ name,
+ action,
+ checked,
+ ..
+ } => menu.action_checked(name, action, checked),
OwnedMenuItem::Submenu(_) => menu,
OwnedMenuItem::SystemMenu(_) => {
// A system menu doesn't make sense in this context, so ignore it
@@ -66,7 +66,6 @@ actions!(
);
pub fn init(cx: &mut App) {
- TitleBarSettings::register(cx);
SystemWindowTabs::init(cx);
cx.observe_new(|workspace: &mut Workspace, window, cx| {
@@ -1,6 +1,6 @@
-use settings::{Settings, SettingsContent};
+use settings::{RegisterSetting, Settings, SettingsContent};
-#[derive(Copy, Clone, Debug)]
+#[derive(Copy, Clone, Debug, RegisterSetting)]
pub struct TitleBarSettings {
pub show_branch_icon: bool,
pub show_onboarding_banner: bool,
@@ -542,9 +542,22 @@ impl ContextMenu {
self
}
- pub fn action(mut self, label: impl Into<SharedString>, action: Box<dyn Action>) -> Self {
+ pub fn action(self, label: impl Into<SharedString>, action: Box<dyn Action>) -> Self {
+ self.action_checked(label, action, false)
+ }
+
+ pub fn action_checked(
+ mut self,
+ label: impl Into<SharedString>,
+ action: Box<dyn Action>,
+ checked: bool,
+ ) -> Self {
self.items.push(ContextMenuItem::Entry(ContextMenuEntry {
- toggle: None,
+ toggle: if checked {
+ Some((IconPosition::Start, true))
+ } else {
+ None
+ },
label: label.into(),
action: Some(action.boxed_clone()),
handler: Rc::new(move |context, window, cx| {
@@ -199,7 +199,11 @@ pub fn get_windows_system_shell() -> String {
.or_else(|| find_pwsh_in_programfiles(true, true))
.or_else(find_pwsh_in_scoop)
.map(|p| p.to_string_lossy().into_owned())
- .unwrap_or("powershell.exe".to_string())
+ .inspect(|shell| log::info!("Found powershell in: {}", shell))
+ .unwrap_or_else(|| {
+ log::warn!("Powershell not found, falling back to `cmd`");
+ "cmd.exe".to_string()
+ })
});
(*SYSTEM_SHELL).clone()
@@ -136,25 +136,24 @@ async fn capture_windows(
std::env::current_exe().context("Failed to determine current zed executable path.")?;
let shell_kind = ShellKind::new(shell_path, true);
- if let ShellKind::Posix
- | ShellKind::Csh
- | ShellKind::Tcsh
- | ShellKind::Rc
- | ShellKind::Fish
- | ShellKind::Xonsh = shell_kind
+ if let ShellKind::Csh | ShellKind::Tcsh | ShellKind::Rc | ShellKind::Fish | ShellKind::Xonsh =
+ shell_kind
{
return Err(anyhow::anyhow!("unsupported shell kind"));
}
let mut cmd = crate::command::new_smol_command(shell_path);
let cmd = match shell_kind {
- ShellKind::Posix
- | ShellKind::Csh
- | ShellKind::Tcsh
- | ShellKind::Rc
- | ShellKind::Fish
- | ShellKind::Xonsh => {
+ ShellKind::Csh | ShellKind::Tcsh | ShellKind::Rc | ShellKind::Fish | ShellKind::Xonsh => {
unreachable!()
}
+ ShellKind::Posix => cmd.args([
+ "-c",
+ &format!(
+ "cd '{}'; '{}' --printenv",
+ directory.display(),
+ zed_path.display()
+ ),
+ ]),
ShellKind::PowerShell => cmd.args([
"-NonInteractive",
"-NoProfile",
@@ -168,7 +167,7 @@ async fn capture_windows(
ShellKind::Elvish => cmd.args([
"-c",
&format!(
- "cd '{}'; {} --printenv",
+ "cd '{}'; '{}' --printenv",
directory.display(),
zed_path.display()
),
@@ -208,7 +207,6 @@ async fn capture_windows(
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr),
);
- // "cmd" "/c" "cd \'C:\\Workspace\\salsa\\\'; \'C:\\Workspace\\zed\\zed\\target\\debug\\zed.exe\' --printenv"
let env_output = String::from_utf8_lossy(&output.stdout);
// Parse the JSON output from zed --printenv
@@ -611,17 +611,21 @@ where
let file = caller.file().replace('\\', "/");
// In this codebase all crates reside in a `crates` directory,
// so discard the prefix up to that segment to find the crate name
- let target = file
- .split_once("crates/")
- .and_then(|(_, s)| s.split_once("/src/"));
+ let file = file.split_once("crates/");
+ let target = file.as_ref().and_then(|(_, s)| s.split_once("/src/"));
let module_path = target.map(|(krate, module)| {
- krate.to_owned() + "::" + &module.trim_end_matches(".rs").replace('/', "::")
+ if module.starts_with(krate) {
+ module.trim_end_matches(".rs").replace('/', "::")
+ } else {
+ krate.to_owned() + "::" + &module.trim_end_matches(".rs").replace('/', "::")
+ }
});
+ let file = file.map(|(_, file)| format!("crates/{file}"));
log::logger().log(
&log::Record::builder()
- .target(target.map_or("", |(krate, _)| krate))
- .module_path(module_path.as_deref())
+ .target(module_path.as_deref().unwrap_or(""))
+ .module_path(file.as_deref())
.args(format_args!("{:?}", error))
.file(Some(caller.file()))
.line(Some(caller.line()))
@@ -38,7 +38,7 @@ impl Vim {
.map(|s| s.to_vec())
{
editor.change_selections(Default::default(), window, cx, |s| {
- let map = s.display_map();
+ let map = s.display_snapshot();
s.select_display_ranges(selections.iter().map(|a| {
let point = a.to_display_point(&map);
point..point
@@ -682,8 +682,9 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
.disjoint_anchor_ranges()
.collect::<Vec<_>>()
});
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
- let end = Point::new(range.end.0, s.buffer().line_len(range.end));
+ let end = Point::new(range.end.0, snapshot.line_len(range.end));
s.select_ranges([end..Point::new(range.start.0, 0)]);
});
selections
@@ -120,8 +120,8 @@ impl Vim {
editor.edit(edits, cx);
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
editor.change_selections(Default::default(), window, cx, |s| {
- let snapshot = s.buffer().clone();
s.select_ranges(new_selections.into_iter().map(|(anchor, len)| {
let offset = anchor.to_offset(&snapshot);
if action.before {
@@ -25,10 +25,6 @@ impl VimTestContext {
git_ui::init(cx);
crate::init(cx);
search::init(cx);
- workspace::init_settings(cx);
- language::init(cx);
- editor::init_settings(cx);
- project::Project::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
settings_ui::init(cx);
});
@@ -40,6 +40,7 @@ use normal::search::SearchSubmit;
use object::Object;
use schemars::JsonSchema;
use serde::Deserialize;
+use settings::RegisterSetting;
pub use settings::{
ModeContent, Settings, SettingsStore, UseSystemClipboard, update_settings_file,
};
@@ -268,8 +269,6 @@ actions!(
/// Initializes the `vim` crate.
pub fn init(cx: &mut App) {
- vim_mode_setting::init(cx);
- VimSettings::register(cx);
VimGlobals::register(cx);
cx.observe_new(Vim::register).detach();
@@ -1210,7 +1209,7 @@ impl Vim {
s.select_anchor_ranges(vec![pos..pos])
}
- let snapshot = s.display_map();
+ let snapshot = s.display_snapshot();
if let Some(pending) = s.pending_anchor_mut()
&& pending.reversed
&& mode.is_visual()
@@ -1943,6 +1942,7 @@ impl Vim {
}
}
+#[derive(RegisterSetting)]
struct VimSettings {
pub default_mode: Mode,
pub toggle_relative_line_numbers: bool,
@@ -179,7 +179,7 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
vim.update_editor(cx, |_, editor, cx| {
editor.set_clip_at_line_ends(false, cx);
editor.change_selections(Default::default(), window, cx, |s| {
- let map = s.display_map();
+ let map = s.display_snapshot();
let ranges = ranges
.into_iter()
.map(|(start, end, reversed)| {
@@ -304,7 +304,7 @@ impl Vim {
) {
let text_layout_details = editor.text_layout_details(window);
editor.change_selections(Default::default(), window, cx, |s| {
- let map = &s.display_map();
+ let map = &s.display_snapshot();
let mut head = s.newest_anchor().head().to_display_point(map);
let mut tail = s.oldest_anchor().tail().to_display_point(map);
@@ -371,12 +371,10 @@ impl Vim {
loop {
let laid_out_line = map.layout_row(row, &text_layout_details);
- let start = DisplayPoint::new(
- row,
- laid_out_line.closest_index_for_x(positions.start) as u32,
- );
+ let start =
+ DisplayPoint::new(row, laid_out_line.index_for_x(positions.start) as u32);
let mut end =
- DisplayPoint::new(row, laid_out_line.closest_index_for_x(positions.end) as u32);
+ DisplayPoint::new(row, laid_out_line.index_for_x(positions.end) as u32);
if end <= start {
if start.column() == map.line_len(start.row()) {
end = start;
@@ -12,5 +12,4 @@ workspace = true
path = "src/vim_mode_setting.rs"
[dependencies]
-gpui.workspace = true
settings.workspace = true
@@ -4,15 +4,9 @@
//! disable Vim/Helix modes without having to depend on the `vim` crate in its
//! entirety.
-use gpui::App;
-use settings::{Settings, SettingsContent};
-
-/// Initializes the `vim_mode_setting` crate.
-pub fn init(cx: &mut App) {
- VimModeSetting::register(cx);
- HelixModeSetting::register(cx);
-}
+use settings::{RegisterSetting, Settings, SettingsContent};
+#[derive(RegisterSetting)]
pub struct VimModeSetting(pub bool);
impl Settings for VimModeSetting {
@@ -21,6 +15,7 @@ impl Settings for VimModeSetting {
}
}
+#[derive(RegisterSetting)]
pub struct HelixModeSetting(pub bool);
impl Settings for HelixModeSetting {
@@ -17,7 +17,8 @@ use gpui::{
};
use project::{Project, ProjectEntryId, ProjectPath};
pub use settings::{
- ActivateOnClose, ClosePosition, Settings, SettingsLocation, ShowCloseButton, ShowDiagnostics,
+ ActivateOnClose, ClosePosition, RegisterSetting, Settings, SettingsLocation, ShowCloseButton,
+ ShowDiagnostics,
};
use smallvec::SmallVec;
use std::{
@@ -50,6 +51,7 @@ impl Default for SaveOptions {
}
}
+#[derive(RegisterSetting)]
pub struct ItemSettings {
pub git_status: bool,
pub close_position: ClosePosition,
@@ -59,6 +61,7 @@ pub struct ItemSettings {
pub show_close_button: ShowCloseButton,
}
+#[derive(RegisterSetting)]
pub struct PreviewTabsSettings {
pub enabled: bool,
pub enable_preview_from_file_finder: bool,
@@ -6873,8 +6873,6 @@ mod tests {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
theme::init(LoadThemes::JustBase, cx);
- crate::init_settings(cx);
- Project::init_settings(cx);
});
}
@@ -791,12 +791,11 @@ impl WorkspaceDb {
remote_connection_id IS ?
LIMIT 1
})
- .map(|mut prepared_statement| {
+ .and_then(|mut prepared_statement| {
(prepared_statement)((
root_paths.serialize().paths,
remote_connection_id.map(|id| id.0 as i32),
))
- .unwrap()
})
.context("No workspaces found")
.warn_on_err()
@@ -527,14 +527,6 @@ impl From<WorkspaceId> for i64 {
}
}
-pub fn init_settings(cx: &mut App) {
- WorkspaceSettings::register(cx);
- ItemSettings::register(cx);
- PreviewTabsSettings::register(cx);
- TabBarSettings::register(cx);
- StatusBarSettings::register(cx);
-}
-
fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, cx: &mut App) {
let paths = cx.prompt_for_paths(options);
cx.spawn(
@@ -568,7 +560,6 @@ fn prompt_and_open_paths(app_state: Arc<AppState>, options: PathPromptOptions, c
}
pub fn init(app_state: Arc<AppState>, cx: &mut App) {
- init_settings(cx);
component::init();
theme_preview::init(cx);
toast_layer::init(cx);
@@ -987,7 +978,6 @@ impl AppState {
theme::init(theme::LoadThemes::JustBase, cx);
client::init(&client, cx);
- crate::init_settings(cx);
Arc::new(Self {
client,
@@ -11341,9 +11331,6 @@ mod tests {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
theme::init(theme::LoadThemes::JustBase, cx);
- language::init(cx);
- crate::init_settings(cx);
- Project::init_settings(cx);
});
}
@@ -3,13 +3,12 @@ use std::num::NonZeroUsize;
use crate::DockPosition;
use collections::HashMap;
use serde::Deserialize;
-pub use settings::AutosaveSetting;
pub use settings::{
- BottomDockLayout, PaneSplitDirectionHorizontal, PaneSplitDirectionVertical,
- RestoreOnStartupBehavior,
+ AutosaveSetting, BottomDockLayout, InactiveOpacity, PaneSplitDirectionHorizontal,
+ PaneSplitDirectionVertical, RegisterSetting, RestoreOnStartupBehavior, Settings,
};
-use settings::{InactiveOpacity, Settings};
+#[derive(RegisterSetting)]
pub struct WorkspaceSettings {
pub active_pane_modifiers: ActivePanelModifiers,
pub bottom_dock_layout: settings::BottomDockLayout,
@@ -53,7 +52,7 @@ pub struct ActivePanelModifiers {
pub inactive_opacity: Option<InactiveOpacity>,
}
-#[derive(Deserialize)]
+#[derive(Deserialize, RegisterSetting)]
pub struct TabBarSettings {
pub show: bool,
pub show_nav_history_buttons: bool,
@@ -121,7 +120,7 @@ impl Settings for TabBarSettings {
}
}
-#[derive(Deserialize)]
+#[derive(Deserialize, RegisterSetting)]
pub struct StatusBarSettings {
pub show: bool,
pub active_language_button: bool,
@@ -1,14 +1,14 @@
use std::path::Path;
use anyhow::Context as _;
-use settings::Settings;
+use settings::{RegisterSetting, Settings};
use util::{
ResultExt,
paths::{PathMatcher, PathStyle},
rel_path::RelPath,
};
-#[derive(Clone, PartialEq, Eq)]
+#[derive(Clone, PartialEq, Eq, RegisterSetting)]
pub struct WorktreeSettings {
pub project_name: Option<String>,
/// Whether to prevent this project from being shared in public channels.
@@ -1,7 +1,4 @@
-use crate::{
- Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle,
- worktree_settings::WorktreeSettings,
-};
+use crate::{Entry, EntryKind, Event, PathChange, Worktree, WorktreeModelHandle};
use anyhow::Result;
use fs::{FakeFs, Fs, RealFs, RemoveOptions};
use git::GITIGNORE;
@@ -12,7 +9,7 @@ use pretty_assertions::assert_eq;
use rand::prelude::*;
use serde_json::json;
-use settings::{Settings, SettingsStore};
+use settings::SettingsStore;
use std::{
env,
fmt::Write,
@@ -2269,6 +2266,5 @@ fn init_test(cx: &mut gpui::TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- WorktreeSettings::register(cx);
});
}
@@ -19,7 +19,6 @@ fn main() {
app.run(|cx| {
settings::init(cx);
- WorktreeSettings::register(cx);
let fs = Arc::new(RealFs::new(None, cx.background_executor().clone()));
cx.spawn(async move |cx| {
@@ -409,7 +409,7 @@ pub fn main() {
handle_settings_changed,
);
handle_keymap_file_changes(user_keymap_file_rx, cx);
- client::init_settings(cx);
+
let user_agent = format!(
"Zed/{} ({}; {})",
AppVersion::global(cx),
@@ -468,7 +468,6 @@ pub fn main() {
let node_runtime = NodeRuntime::new(client.http_client(), Some(shell_env_loaded_rx), rx);
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
- language::init(cx);
languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));
@@ -573,7 +572,6 @@ pub fn main() {
supermaven::init(app_state.client.clone(), cx);
language_model::init(app_state.client.clone(), cx);
language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
- agent_settings::init(cx);
acp_tools::init(cx);
zeta2_tools::init(cx);
web_search::init(cx);
@@ -859,15 +857,19 @@ fn handle_open_request(request: OpenRequest, app_state: Arc<AppState>, cx: &mut
// zed://settings/languages/Rust/tab_size - SUPPORT
// languages.$(language).tab_size
// [ languages $(language) tab_size]
- workspace::with_active_or_new_workspace(cx, |_workspace, window, cx| {
- match setting_path {
+ cx.spawn(async move |cx| {
+ let workspace =
+ workspace::get_any_active_workspace(app_state, cx.clone()).await?;
+
+ workspace.update(cx, |_, window, cx| match setting_path {
None => window.dispatch_action(Box::new(zed_actions::OpenSettings), cx),
Some(setting_path) => window.dispatch_action(
Box::new(zed_actions::OpenSettingsAt { path: setting_path }),
cx,
),
- }
- });
+ })
+ })
+ .detach_and_log_err(cx);
}
}
@@ -1408,9 +1408,6 @@ pub fn handle_keymap_file_changes(
mut user_keymap_file_rx: mpsc::UnboundedReceiver<String>,
cx: &mut App,
) {
- BaseKeymap::register(cx);
- vim_mode_setting::init(cx);
-
let (base_keymap_tx, mut base_keymap_rx) = mpsc::unbounded();
let (keyboard_layout_tx, mut keyboard_layout_rx) = mpsc::unbounded();
let mut old_base_keymap = *BaseKeymap::get_global(cx);
@@ -4072,7 +4069,9 @@ mod tests {
let editor = item.downcast::<Editor>().unwrap();
let (selections, scroll_position) = editor.update(cx, |editor, cx| {
(
- editor.selections.display_ranges(cx),
+ editor
+ .selections
+ .display_ranges(&editor.display_snapshot(cx)),
editor.scroll_position(cx),
)
});
@@ -4329,10 +4328,8 @@ mod tests {
theme::init(theme::LoadThemes::JustBase, cx);
client::init(&app_state.client, cx);
- language::init(cx);
workspace::init(app_state.clone(), cx);
onboarding::init(cx);
- Project::init_settings(cx);
app_state
})
}
@@ -4795,21 +4792,17 @@ mod tests {
let state = Arc::get_mut(&mut app_state).unwrap();
state.build_window_options = build_window_options;
-
app_state.languages.add(markdown_language());
gpui_tokio::init(cx);
- vim_mode_setting::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
audio::init(cx);
channel::init(&app_state.client, app_state.user_store.clone(), cx);
call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx);
workspace::init(app_state.clone(), cx);
- Project::init_settings(cx);
release_channel::init(SemanticVersion::default(), cx);
command_palette::init(cx);
- language::init(cx);
editor::init(cx);
collab_ui::init(&app_state, cx);
git_ui::init(cx);
@@ -665,9 +665,6 @@ mod tests {
#[gpui::test]
fn test_parse_ssh_url(cx: &mut TestAppContext) {
let _app_state = init_test(cx);
- cx.update(|cx| {
- SshSettings::register(cx);
- });
let request = cx.update(|cx| {
OpenRequest::parse(
RawOpenRequest {
@@ -390,8 +390,7 @@ mod tests {
use gpui::TestAppContext;
use rand::Rng as _;
use serde_json::json;
- use settings::{Settings as _, SettingsStore};
- use worktree::WorktreeSettings;
+ use settings::SettingsStore;
use super::*;
@@ -720,7 +719,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- WorktreeSettings::register(cx);
});
}
@@ -133,7 +133,7 @@ pub struct EditPrediction {
path: Arc<Path>,
excerpt_range: Range<usize>,
cursor_offset: usize,
- edits: Arc<[(Range<Anchor>, String)]>,
+ edits: Arc<[(Range<Anchor>, Arc<str>)]>,
snapshot: BufferSnapshot,
edit_preview: EditPreview,
input_outline: Arc<str>,
@@ -150,7 +150,7 @@ impl EditPrediction {
.duration_since(self.buffer_snapshotted_at)
}
- fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, String)>> {
+ fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, Arc<str>)>> {
edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
}
}
@@ -711,7 +711,7 @@ impl Zeta {
cx.spawn(async move |cx| {
let output_excerpt: Arc<str> = output_excerpt.into();
- let edits: Arc<[(Range<Anchor>, String)]> = cx
+ let edits: Arc<[(Range<Anchor>, Arc<str>)]> = cx
.background_spawn({
let output_excerpt = output_excerpt.clone();
let editable_range = editable_range.clone();
@@ -725,7 +725,7 @@ impl Zeta {
let edits = edits.clone();
move |buffer, cx| {
let new_snapshot = buffer.snapshot();
- let edits: Arc<[(Range<Anchor>, String)]> =
+ let edits: Arc<[(Range<Anchor>, Arc<str>)]> =
edit_prediction::interpolate_edits(&snapshot, &new_snapshot, &edits)?
.into();
Some((edits.clone(), new_snapshot, buffer.preview_edits(edits, cx)))
@@ -759,7 +759,7 @@ impl Zeta {
output_excerpt: Arc<str>,
editable_range: Range<usize>,
snapshot: &BufferSnapshot,
- ) -> Result<Vec<(Range<Anchor>, String)>> {
+ ) -> Result<Vec<(Range<Anchor>, Arc<str>)>> {
let content = output_excerpt.replace(CURSOR_MARKER, "");
let start_markers = content
@@ -817,7 +817,7 @@ impl Zeta {
new_text: &str,
offset: usize,
snapshot: &BufferSnapshot,
- ) -> Vec<(Range<Anchor>, String)> {
+ ) -> Vec<(Range<Anchor>, Arc<str>)> {
text_diff(&old_text, new_text)
.into_iter()
.map(|(mut old_range, new_text)| {
@@ -836,7 +836,7 @@ impl Zeta {
);
old_range.end = old_range.end.saturating_sub(suffix_len);
- let new_text = new_text[prefix_len..new_text.len() - suffix_len].to_string();
+ let new_text = new_text[prefix_len..new_text.len() - suffix_len].into();
let range = if old_range.is_empty() {
let anchor = snapshot.anchor_after(old_range.start);
anchor..anchor
@@ -1183,7 +1183,7 @@ impl CurrentEditPrediction {
if old_edits.len() == 1 && new_edits.len() == 1 {
let (old_range, old_text) = &old_edits[0];
let (new_range, new_text) = &new_edits[0];
- new_range == old_range && new_text.starts_with(old_text)
+ new_range == old_range && new_text.starts_with(old_text.as_ref())
} else {
true
}
@@ -1599,13 +1599,8 @@ mod tests {
#[gpui::test]
async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) {
let buffer = cx.new(|cx| Buffer::local("Lorem ipsum dolor", cx));
- let edits: Arc<[(Range<Anchor>, String)]> = cx.update(|cx| {
- to_completion_edits(
- [(2..5, "REM".to_string()), (9..11, "".to_string())],
- &buffer,
- cx,
- )
- .into()
+ let edits: Arc<[(Range<Anchor>, Arc<str>)]> = cx.update(|cx| {
+ to_completion_edits([(2..5, "REM".into()), (9..11, "".into())], &buffer, cx).into()
});
let edit_preview = cx
@@ -1635,7 +1630,7 @@ mod tests {
&buffer,
cx
),
- vec![(2..5, "REM".to_string()), (9..11, "".to_string())]
+ vec![(2..5, "REM".into()), (9..11, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "")], None, cx));
@@ -1645,7 +1640,7 @@ mod tests {
&buffer,
cx
),
- vec![(2..2, "REM".to_string()), (6..8, "".to_string())]
+ vec![(2..2, "REM".into()), (6..8, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.undo(cx));
@@ -1655,7 +1650,7 @@ mod tests {
&buffer,
cx
),
- vec![(2..5, "REM".to_string()), (9..11, "".to_string())]
+ vec![(2..5, "REM".into()), (9..11, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "R")], None, cx));
@@ -1665,7 +1660,7 @@ mod tests {
&buffer,
cx
),
- vec![(3..3, "EM".to_string()), (7..9, "".to_string())]
+ vec![(3..3, "EM".into()), (7..9, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "E")], None, cx));
@@ -1675,7 +1670,7 @@ mod tests {
&buffer,
cx
),
- vec![(4..4, "M".to_string()), (8..10, "".to_string())]
+ vec![(4..4, "M".into()), (8..10, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "M")], None, cx));
@@ -1685,7 +1680,7 @@ mod tests {
&buffer,
cx
),
- vec![(9..11, "".to_string())]
+ vec![(9..11, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "")], None, cx));
@@ -1695,7 +1690,7 @@ mod tests {
&buffer,
cx
),
- vec![(4..4, "M".to_string()), (8..10, "".to_string())]
+ vec![(4..4, "M".into()), (8..10, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(8..10, "")], None, cx));
@@ -1705,7 +1700,7 @@ mod tests {
&buffer,
cx
),
- vec![(4..4, "M".to_string())]
+ vec![(4..4, "M".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(4..6, "")], None, cx));
@@ -2084,9 +2079,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- client::init_settings(cx);
- Project::init_settings(cx);
});
}
@@ -2211,10 +2203,10 @@ mod tests {
}
fn to_completion_edits(
- iterator: impl IntoIterator<Item = (Range<usize>, String)>,
+ iterator: impl IntoIterator<Item = (Range<usize>, Arc<str>)>,
buffer: &Entity<Buffer>,
cx: &App,
- ) -> Vec<(Range<Anchor>, String)> {
+ ) -> Vec<(Range<Anchor>, Arc<str>)> {
let buffer = buffer.read(cx);
iterator
.into_iter()
@@ -2228,10 +2220,10 @@ mod tests {
}
fn from_completion_edits(
- editor_edits: &[(Range<Anchor>, String)],
+ editor_edits: &[(Range<Anchor>, Arc<str>)],
buffer: &Entity<Buffer>,
cx: &App,
- ) -> Vec<(Range<usize>, String)> {
+ ) -> Vec<(Range<usize>, Arc<str>)> {
let buffer = buffer.read(cx);
editor_edits
.iter()
@@ -28,9 +28,9 @@ indoc.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
+open_ai.workspace = true
project.workspace = true
release_channel.workspace = true
-schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
thiserror.workspace = true
@@ -50,3 +50,4 @@ language_model = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
+zlog.workspace = true
@@ -1,17 +1,11 @@
-use std::{borrow::Cow, ops::Range, path::Path, sync::Arc};
-
-use anyhow::Context as _;
-use cloud_llm_client::predict_edits_v3;
-use gpui::{App, AsyncApp, Entity};
-use language::{
- Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, TextBufferSnapshot, text_diff,
-};
-use project::Project;
-use util::ResultExt;
+use std::{ops::Range, sync::Arc};
+
+use gpui::{AsyncApp, Entity};
+use language::{Anchor, Buffer, BufferSnapshot, EditPreview, OffsetRangeExt, TextBufferSnapshot};
use uuid::Uuid;
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)]
-pub struct EditPredictionId(Uuid);
+pub struct EditPredictionId(pub Uuid);
impl Into<Uuid> for EditPredictionId {
fn into(self) -> Uuid {
@@ -34,8 +28,7 @@ impl std::fmt::Display for EditPredictionId {
#[derive(Clone)]
pub struct EditPrediction {
pub id: EditPredictionId,
- pub path: Arc<Path>,
- pub edits: Arc<[(Range<Anchor>, String)]>,
+ pub edits: Arc<[(Range<Anchor>, Arc<str>)]>,
pub snapshot: BufferSnapshot,
pub edit_preview: EditPreview,
// We keep a reference to the buffer so that we do not need to reload it from disk when applying the prediction.
@@ -43,90 +36,43 @@ pub struct EditPrediction {
}
impl EditPrediction {
- pub async fn from_response(
- response: predict_edits_v3::PredictEditsResponse,
- active_buffer_old_snapshot: &TextBufferSnapshot,
- active_buffer: &Entity<Buffer>,
- project: &Entity<Project>,
+ pub async fn new(
+ id: EditPredictionId,
+ edited_buffer: &Entity<Buffer>,
+ edited_buffer_snapshot: &BufferSnapshot,
+ edits: Vec<(Range<Anchor>, Arc<str>)>,
cx: &mut AsyncApp,
) -> Option<Self> {
- // TODO only allow cloud to return one path
- let Some(path) = response.edits.first().map(|e| e.path.clone()) else {
- return None;
- };
+ let (edits, snapshot, edit_preview_task) = edited_buffer
+ .read_with(cx, |buffer, cx| {
+ let new_snapshot = buffer.snapshot();
+ let edits: Arc<[_]> =
+ interpolate_edits(&edited_buffer_snapshot, &new_snapshot, edits.into())?.into();
- let is_same_path = active_buffer
- .read_with(cx, |buffer, cx| buffer_path_eq(buffer, &path, cx))
- .ok()?;
-
- let (buffer, edits, snapshot, edit_preview_task) = if is_same_path {
- active_buffer
- .read_with(cx, |buffer, cx| {
- let new_snapshot = buffer.snapshot();
- let edits = edits_from_response(&response.edits, &active_buffer_old_snapshot);
- let edits: Arc<[_]> =
- interpolate_edits(active_buffer_old_snapshot, &new_snapshot, edits)?.into();
-
- Some((
- active_buffer.clone(),
- edits.clone(),
- new_snapshot,
- buffer.preview_edits(edits, cx),
- ))
- })
- .ok()??
- } else {
- let buffer_handle = project
- .update(cx, |project, cx| {
- let project_path = project
- .find_project_path(&path, cx)
- .context("Failed to find project path for zeta edit")?;
- anyhow::Ok(project.open_buffer(project_path, cx))
- })
- .ok()?
- .log_err()?
- .await
- .context("Failed to open buffer for zeta edit")
- .log_err()?;
-
- buffer_handle
- .read_with(cx, |buffer, cx| {
- let snapshot = buffer.snapshot();
- let edits = edits_from_response(&response.edits, &snapshot);
- if edits.is_empty() {
- return None;
- }
- Some((
- buffer_handle.clone(),
- edits.clone(),
- snapshot,
- buffer.preview_edits(edits, cx),
- ))
- })
- .ok()??
- };
+ Some((edits.clone(), new_snapshot, buffer.preview_edits(edits, cx)))
+ })
+ .ok()??;
let edit_preview = edit_preview_task.await;
Some(EditPrediction {
- id: EditPredictionId(response.request_id),
- path,
+ id,
edits,
snapshot,
edit_preview,
- buffer,
+ buffer: edited_buffer.clone(),
})
}
pub fn interpolate(
&self,
new_snapshot: &TextBufferSnapshot,
- ) -> Option<Vec<(Range<Anchor>, String)>> {
+ ) -> Option<Vec<(Range<Anchor>, Arc<str>)>> {
interpolate_edits(&self.snapshot, new_snapshot, self.edits.clone())
}
- pub fn targets_buffer(&self, buffer: &Buffer, cx: &App) -> bool {
- buffer_path_eq(buffer, &self.path, cx)
+ pub fn targets_buffer(&self, buffer: &Buffer) -> bool {
+ self.snapshot.remote_id() == buffer.remote_id()
}
}
@@ -134,21 +80,16 @@ impl std::fmt::Debug for EditPrediction {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("EditPrediction")
.field("id", &self.id)
- .field("path", &self.path)
.field("edits", &self.edits)
.finish()
}
}
-pub fn buffer_path_eq(buffer: &Buffer, path: &Path, cx: &App) -> bool {
- buffer.file().map(|p| p.full_path(cx)).as_deref() == Some(path)
-}
-
pub fn interpolate_edits(
old_snapshot: &TextBufferSnapshot,
new_snapshot: &TextBufferSnapshot,
- current_edits: Arc<[(Range<Anchor>, String)]>,
-) -> Option<Vec<(Range<Anchor>, String)>> {
+ current_edits: Arc<[(Range<Anchor>, Arc<str>)]>,
+) -> Option<Vec<(Range<Anchor>, Arc<str>)>> {
let mut edits = Vec::new();
let mut model_edits = current_edits.iter().peekable();
@@ -173,7 +114,7 @@ pub fn interpolate_edits(
if let Some(model_suffix) = model_new_text.strip_prefix(&user_new_text) {
if !model_suffix.is_empty() {
let anchor = old_snapshot.anchor_after(user_edit.old.end);
- edits.push((anchor..anchor, model_suffix.to_string()));
+ edits.push((anchor..anchor, model_suffix.into()));
}
model_edits.next();
@@ -190,135 +131,17 @@ pub fn interpolate_edits(
if edits.is_empty() { None } else { Some(edits) }
}
-pub fn line_range_to_point_range(range: Range<predict_edits_v3::Line>) -> Range<language::Point> {
- language::Point::new(range.start.0, 0)..language::Point::new(range.end.0, 0)
-}
-
-fn edits_from_response(
- edits: &[predict_edits_v3::Edit],
- snapshot: &TextBufferSnapshot,
-) -> Arc<[(Range<Anchor>, String)]> {
- edits
- .iter()
- .flat_map(|edit| {
- let point_range = line_range_to_point_range(edit.range.clone());
- let offset = point_range.to_offset(snapshot).start;
- let old_text = snapshot.text_for_range(point_range);
-
- excerpt_edits_from_response(
- old_text.collect::<Cow<str>>(),
- &edit.content,
- offset,
- &snapshot,
- )
- })
- .collect::<Vec<_>>()
- .into()
-}
-
-fn excerpt_edits_from_response(
- old_text: Cow<str>,
- new_text: &str,
- offset: usize,
- snapshot: &TextBufferSnapshot,
-) -> impl Iterator<Item = (Range<Anchor>, String)> {
- text_diff(&old_text, new_text)
- .into_iter()
- .map(move |(mut old_range, new_text)| {
- old_range.start += offset;
- old_range.end += offset;
-
- let prefix_len = common_prefix(
- snapshot.chars_for_range(old_range.clone()),
- new_text.chars(),
- );
- old_range.start += prefix_len;
-
- let suffix_len = common_prefix(
- snapshot.reversed_chars_for_range(old_range.clone()),
- new_text[prefix_len..].chars().rev(),
- );
- old_range.end = old_range.end.saturating_sub(suffix_len);
-
- let new_text = new_text[prefix_len..new_text.len() - suffix_len].to_string();
- let range = if old_range.is_empty() {
- let anchor = snapshot.anchor_after(old_range.start);
- anchor..anchor
- } else {
- snapshot.anchor_after(old_range.start)..snapshot.anchor_before(old_range.end)
- };
- (range, new_text)
- })
-}
-
-fn common_prefix<T1: Iterator<Item = char>, T2: Iterator<Item = char>>(a: T1, b: T2) -> usize {
- a.zip(b)
- .take_while(|(a, b)| a == b)
- .map(|(a, _)| a.len_utf8())
- .sum()
-}
-
#[cfg(test)]
mod tests {
- use std::path::PathBuf;
-
use super::*;
- use cloud_llm_client::predict_edits_v3;
- use edit_prediction_context::Line;
use gpui::{App, Entity, TestAppContext, prelude::*};
- use indoc::indoc;
use language::{Buffer, ToOffset as _};
- #[gpui::test]
- async fn test_compute_edits(cx: &mut TestAppContext) {
- let old = indoc! {r#"
- fn main() {
- let args =
- println!("{}", args[1])
- }
- "#};
-
- let new = indoc! {r#"
- fn main() {
- let args = std::env::args();
- println!("{}", args[1]);
- }
- "#};
-
- let buffer = cx.new(|cx| Buffer::local(old, cx));
- let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
-
- // TODO cover more cases when multi-file is supported
- let big_edits = vec![predict_edits_v3::Edit {
- path: PathBuf::from("test.txt").into(),
- range: Line(0)..Line(old.lines().count() as u32),
- content: new.into(),
- }];
-
- let edits = edits_from_response(&big_edits, &snapshot);
- assert_eq!(edits.len(), 2);
- assert_eq!(
- edits[0].0.to_point(&snapshot).start,
- language::Point::new(1, 14)
- );
- assert_eq!(edits[0].1, " std::env::args();");
- assert_eq!(
- edits[1].0.to_point(&snapshot).start,
- language::Point::new(2, 27)
- );
- assert_eq!(edits[1].1, ";");
- }
-
#[gpui::test]
async fn test_edit_prediction_basic_interpolation(cx: &mut TestAppContext) {
let buffer = cx.new(|cx| Buffer::local("Lorem ipsum dolor", cx));
- let edits: Arc<[(Range<Anchor>, String)]> = cx.update(|cx| {
- to_prediction_edits(
- [(2..5, "REM".to_string()), (9..11, "".to_string())],
- &buffer,
- cx,
- )
- .into()
+ let edits: Arc<[(Range<Anchor>, Arc<str>)]> = cx.update(|cx| {
+ to_prediction_edits([(2..5, "REM".into()), (9..11, "".into())], &buffer, cx).into()
});
let edit_preview = cx
@@ -329,7 +152,6 @@ mod tests {
id: EditPredictionId(Uuid::new_v4()),
edits,
snapshot: cx.read(|cx| buffer.read(cx).snapshot()),
- path: Path::new("test.txt").into(),
buffer: buffer.clone(),
edit_preview,
};
@@ -341,7 +163,7 @@ mod tests {
&buffer,
cx
),
- vec![(2..5, "REM".to_string()), (9..11, "".to_string())]
+ vec![(2..5, "REM".into()), (9..11, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "")], None, cx));
@@ -351,7 +173,7 @@ mod tests {
&buffer,
cx
),
- vec![(2..2, "REM".to_string()), (6..8, "".to_string())]
+ vec![(2..2, "REM".into()), (6..8, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.undo(cx));
@@ -361,7 +183,7 @@ mod tests {
&buffer,
cx
),
- vec![(2..5, "REM".to_string()), (9..11, "".to_string())]
+ vec![(2..5, "REM".into()), (9..11, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(2..5, "R")], None, cx));
@@ -371,7 +193,7 @@ mod tests {
&buffer,
cx
),
- vec![(3..3, "EM".to_string()), (7..9, "".to_string())]
+ vec![(3..3, "EM".into()), (7..9, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(3..3, "E")], None, cx));
@@ -381,7 +203,7 @@ mod tests {
&buffer,
cx
),
- vec![(4..4, "M".to_string()), (8..10, "".to_string())]
+ vec![(4..4, "M".into()), (8..10, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "M")], None, cx));
@@ -391,7 +213,7 @@ mod tests {
&buffer,
cx
),
- vec![(9..11, "".to_string())]
+ vec![(9..11, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(4..5, "")], None, cx));
@@ -401,7 +223,7 @@ mod tests {
&buffer,
cx
),
- vec![(4..4, "M".to_string()), (8..10, "".to_string())]
+ vec![(4..4, "M".into()), (8..10, "".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(8..10, "")], None, cx));
@@ -411,7 +233,7 @@ mod tests {
&buffer,
cx
),
- vec![(4..4, "M".to_string())]
+ vec![(4..4, "M".into())]
);
buffer.update(cx, |buffer, cx| buffer.edit([(4..6, "")], None, cx));
@@ -420,10 +242,10 @@ mod tests {
}
fn to_prediction_edits(
- iterator: impl IntoIterator<Item = (Range<usize>, String)>,
+ iterator: impl IntoIterator<Item = (Range<usize>, Arc<str>)>,
buffer: &Entity<Buffer>,
cx: &App,
- ) -> Vec<(Range<Anchor>, String)> {
+ ) -> Vec<(Range<Anchor>, Arc<str>)> {
let buffer = buffer.read(cx);
iterator
.into_iter()
@@ -437,10 +259,10 @@ mod tests {
}
fn from_prediction_edits(
- editor_edits: &[(Range<Anchor>, String)],
+ editor_edits: &[(Range<Anchor>, Arc<str>)],
buffer: &Entity<Buffer>,
cx: &App,
- ) -> Vec<(Range<usize>, String)> {
+ ) -> Vec<(Range<usize>, Arc<str>)> {
let buffer = buffer.read(cx);
editor_edits
.iter()
@@ -1,717 +0,0 @@
-use std::{
- cmp::Reverse, collections::hash_map::Entry, ops::Range, path::PathBuf, sync::Arc, time::Instant,
-};
-
-use crate::{
- ZetaContextRetrievalDebugInfo, ZetaContextRetrievalStartedDebugInfo, ZetaDebugInfo,
- ZetaSearchQueryDebugInfo, merge_excerpts::merge_excerpts,
-};
-use anyhow::{Result, anyhow};
-use cloud_zeta2_prompt::write_codeblock;
-use collections::HashMap;
-use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions, Line};
-use futures::{
- StreamExt,
- channel::mpsc::{self, UnboundedSender},
- stream::BoxStream,
-};
-use gpui::{App, AppContext, AsyncApp, Entity, Task};
-use indoc::indoc;
-use language::{
- Anchor, Bias, Buffer, BufferSnapshot, OffsetRangeExt, Point, TextBufferSnapshot, ToPoint as _,
-};
-use language_model::{
- LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelId,
- LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
- LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
- LanguageModelToolUse, MessageContent, Role,
-};
-use project::{
- Project, WorktreeSettings,
- search::{SearchQuery, SearchResult},
-};
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
-use util::{
- ResultExt as _,
- paths::{PathMatcher, PathStyle},
-};
-use workspace::item::Settings as _;
-
-const SEARCH_PROMPT: &str = indoc! {r#"
- ## Task
-
- You are part of an edit prediction system in a code editor. Your role is to identify relevant code locations
- that will serve as context for predicting the next required edit.
-
- **Your task:**
- - Analyze the user's recent edits and current cursor context
- - Use the `search` tool to find code that may be relevant for predicting the next edit
- - Focus on finding:
- - Code patterns that might need similar changes based on the recent edits
- - Functions, variables, types, and constants referenced in the current cursor context
- - Related implementations, usages, or dependencies that may require consistent updates
-
- **Important constraints:**
- - This conversation has exactly 2 turns
- - You must make ALL search queries in your first response via the `search` tool
- - All queries will be executed in parallel and results returned together
- - In the second turn, you will select the most relevant results via the `select` tool.
-
- ## User Edits
-
- {edits}
-
- ## Current cursor context
-
- `````{current_file_path}
- {cursor_excerpt}
- `````
-
- --
- Use the `search` tool now
-"#};
-
-const SEARCH_TOOL_NAME: &str = "search";
-
-/// Search for relevant code
-///
-/// For the best results, run multiple queries at once with a single invocation of this tool.
-#[derive(Clone, Deserialize, Serialize, JsonSchema)]
-pub struct SearchToolInput {
- /// An array of queries to run for gathering context relevant to the next prediction
- #[schemars(length(max = 5))]
- pub queries: Box<[SearchToolQuery]>,
-}
-
-#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
-pub struct SearchToolQuery {
- /// A glob pattern to match file paths in the codebase
- pub glob: String,
- /// A regular expression to match content within the files matched by the glob pattern
- pub regex: String,
-}
-
-const RESULTS_MESSAGE: &str = indoc! {"
- Here are the results of your queries combined and grouped by file:
-
-"};
-
-const SELECT_TOOL_NAME: &str = "select";
-
-const SELECT_PROMPT: &str = indoc! {"
- Use the `select` tool now to pick the most relevant line ranges according to the user state provided in the first message.
- Make sure to include enough lines of context so that the edit prediction model can suggest accurate edits.
- Include up to 200 lines in total.
-"};
-
-/// Select line ranges from search results
-#[derive(Deserialize, JsonSchema)]
-struct SelectToolInput {
- /// The line ranges to select from search results.
- ranges: Vec<SelectLineRange>,
-}
-
-/// A specific line range to select from a file
-#[derive(Debug, Deserialize, JsonSchema)]
-struct SelectLineRange {
- /// The file path containing the lines to select
- /// Exactly as it appears in the search result codeblocks.
- path: PathBuf,
- /// The starting line number (1-based)
- #[schemars(range(min = 1))]
- start_line: u32,
- /// The ending line number (1-based, inclusive)
- #[schemars(range(min = 1))]
- end_line: u32,
-}
-
-#[derive(Debug, Clone, PartialEq)]
-pub struct LlmContextOptions {
- pub excerpt: EditPredictionExcerptOptions,
-}
-
-pub const MODEL_PROVIDER_ID: LanguageModelProviderId = language_model::ANTHROPIC_PROVIDER_ID;
-
-pub fn find_related_excerpts(
- buffer: Entity<language::Buffer>,
- cursor_position: Anchor,
- project: &Entity<Project>,
- mut edit_history_unified_diff: String,
- options: &LlmContextOptions,
- debug_tx: Option<mpsc::UnboundedSender<ZetaDebugInfo>>,
- cx: &App,
-) -> Task<Result<HashMap<Entity<Buffer>, Vec<Range<Anchor>>>>> {
- let language_model_registry = LanguageModelRegistry::global(cx);
- let Some(model) = language_model_registry
- .read(cx)
- .available_models(cx)
- .find(|model| {
- model.provider_id() == MODEL_PROVIDER_ID
- && model.id() == LanguageModelId("claude-haiku-4-5-latest".into())
- // model.provider_id() == LanguageModelProviderId::new("zeta-ctx-qwen-30b")
- // model.provider_id() == LanguageModelProviderId::new("ollama")
- // && model.id() == LanguageModelId("gpt-oss:20b".into())
- })
- else {
- return Task::ready(Err(anyhow!("could not find context model")));
- };
-
- if edit_history_unified_diff.is_empty() {
- edit_history_unified_diff.push_str("(No user edits yet)");
- }
-
- // TODO [zeta2] include breadcrumbs?
- let snapshot = buffer.read(cx).snapshot();
- let cursor_point = cursor_position.to_point(&snapshot);
- let Some(cursor_excerpt) =
- EditPredictionExcerpt::select_from_buffer(cursor_point, &snapshot, &options.excerpt, None)
- else {
- return Task::ready(Ok(HashMap::default()));
- };
-
- let current_file_path = snapshot
- .file()
- .map(|f| f.full_path(cx).display().to_string())
- .unwrap_or_else(|| "untitled".to_string());
-
- let prompt = SEARCH_PROMPT
- .replace("{edits}", &edit_history_unified_diff)
- .replace("{current_file_path}", ¤t_file_path)
- .replace("{cursor_excerpt}", &cursor_excerpt.text(&snapshot).body);
-
- if let Some(debug_tx) = &debug_tx {
- debug_tx
- .unbounded_send(ZetaDebugInfo::ContextRetrievalStarted(
- ZetaContextRetrievalStartedDebugInfo {
- project: project.clone(),
- timestamp: Instant::now(),
- search_prompt: prompt.clone(),
- },
- ))
- .ok();
- }
-
- let path_style = project.read(cx).path_style(cx);
-
- let exclude_matcher = {
- let global_settings = WorktreeSettings::get_global(cx);
- let exclude_patterns = global_settings
- .file_scan_exclusions
- .sources()
- .iter()
- .chain(global_settings.private_files.sources().iter());
-
- match PathMatcher::new(exclude_patterns, path_style) {
- Ok(matcher) => matcher,
- Err(err) => {
- return Task::ready(Err(anyhow!(err)));
- }
- }
- };
-
- let project = project.clone();
- cx.spawn(async move |cx| {
- let initial_prompt_message = LanguageModelRequestMessage {
- role: Role::User,
- content: vec![prompt.into()],
- cache: false,
- };
-
- let mut search_stream = request_tool_call::<SearchToolInput>(
- vec![initial_prompt_message.clone()],
- SEARCH_TOOL_NAME,
- &model,
- cx,
- )
- .await?;
-
- let mut select_request_messages = Vec::with_capacity(5); // initial prompt, LLM response/thinking, tool use, tool result, select prompt
- select_request_messages.push(initial_prompt_message);
-
- let mut regex_by_glob: HashMap<String, String> = HashMap::default();
- let mut search_calls = Vec::new();
-
- while let Some(event) = search_stream.next().await {
- match event? {
- LanguageModelCompletionEvent::ToolUse(tool_use) => {
- if !tool_use.is_input_complete {
- continue;
- }
-
- if tool_use.name.as_ref() == SEARCH_TOOL_NAME {
- let input =
- serde_json::from_value::<SearchToolInput>(tool_use.input.clone())?;
-
- for query in input.queries {
- let regex = regex_by_glob.entry(query.glob).or_default();
- if !regex.is_empty() {
- regex.push('|');
- }
- regex.push_str(&query.regex);
- }
-
- search_calls.push(tool_use);
- } else {
- log::warn!(
- "context gathering model tried to use unknown tool: {}",
- tool_use.name
- );
- }
- }
- LanguageModelCompletionEvent::Text(txt) => {
- if let Some(LanguageModelRequestMessage {
- role: Role::Assistant,
- content,
- ..
- }) = select_request_messages.last_mut()
- {
- if let Some(MessageContent::Text(existing_text)) = content.last_mut() {
- existing_text.push_str(&txt);
- } else {
- content.push(MessageContent::Text(txt));
- }
- } else {
- select_request_messages.push(LanguageModelRequestMessage {
- role: Role::Assistant,
- content: vec![MessageContent::Text(txt)],
- cache: false,
- });
- }
- }
- LanguageModelCompletionEvent::Thinking { text, signature } => {
- if let Some(LanguageModelRequestMessage {
- role: Role::Assistant,
- content,
- ..
- }) = select_request_messages.last_mut()
- {
- if let Some(MessageContent::Thinking {
- text: existing_text,
- signature: existing_signature,
- }) = content.last_mut()
- {
- existing_text.push_str(&text);
- *existing_signature = signature;
- } else {
- content.push(MessageContent::Thinking { text, signature });
- }
- } else {
- select_request_messages.push(LanguageModelRequestMessage {
- role: Role::Assistant,
- content: vec![MessageContent::Thinking { text, signature }],
- cache: false,
- });
- }
- }
- LanguageModelCompletionEvent::RedactedThinking { data } => {
- if let Some(LanguageModelRequestMessage {
- role: Role::Assistant,
- content,
- ..
- }) = select_request_messages.last_mut()
- {
- if let Some(MessageContent::RedactedThinking(existing_data)) =
- content.last_mut()
- {
- existing_data.push_str(&data);
- } else {
- content.push(MessageContent::RedactedThinking(data));
- }
- } else {
- select_request_messages.push(LanguageModelRequestMessage {
- role: Role::Assistant,
- content: vec![MessageContent::RedactedThinking(data)],
- cache: false,
- });
- }
- }
- ev @ LanguageModelCompletionEvent::ToolUseJsonParseError { .. } => {
- log::error!("{ev:?}");
- }
- ev => {
- log::trace!("context search event: {ev:?}")
- }
- }
- }
-
- let search_tool_use = if search_calls.is_empty() {
- log::warn!("context model ran 0 searches");
- return anyhow::Ok(Default::default());
- } else if search_calls.len() == 1 {
- search_calls.swap_remove(0)
- } else {
- // In theory, the model could perform multiple search calls
- // Dealing with them separately is not worth it when it doesn't happen in practice.
- // If it were to happen, here we would combine them into one.
- // The second request doesn't need to know it was actually two different calls ;)
- let input = serde_json::to_value(&SearchToolInput {
- queries: regex_by_glob
- .iter()
- .map(|(glob, regex)| SearchToolQuery {
- glob: glob.clone(),
- regex: regex.clone(),
- })
- .collect(),
- })
- .unwrap_or_default();
-
- LanguageModelToolUse {
- id: search_calls.swap_remove(0).id,
- name: SELECT_TOOL_NAME.into(),
- raw_input: serde_json::to_string(&input).unwrap_or_default(),
- input,
- is_input_complete: true,
- }
- };
-
- if let Some(debug_tx) = &debug_tx {
- debug_tx
- .unbounded_send(ZetaDebugInfo::SearchQueriesGenerated(
- ZetaSearchQueryDebugInfo {
- project: project.clone(),
- timestamp: Instant::now(),
- queries: regex_by_glob
- .iter()
- .map(|(glob, regex)| SearchToolQuery {
- glob: glob.clone(),
- regex: regex.clone(),
- })
- .collect(),
- },
- ))
- .ok();
- }
-
- let (results_tx, mut results_rx) = mpsc::unbounded();
-
- for (glob, regex) in regex_by_glob {
- let exclude_matcher = exclude_matcher.clone();
- let results_tx = results_tx.clone();
- let project = project.clone();
- cx.spawn(async move |cx| {
- run_query(
- &glob,
- ®ex,
- results_tx.clone(),
- path_style,
- exclude_matcher,
- &project,
- cx,
- )
- .await
- .log_err();
- })
- .detach()
- }
- drop(results_tx);
-
- struct ResultBuffer {
- buffer: Entity<Buffer>,
- snapshot: TextBufferSnapshot,
- }
-
- let (result_buffers_by_path, merged_result) = cx
- .background_spawn(async move {
- let mut excerpts_by_buffer: HashMap<Entity<Buffer>, MatchedBuffer> =
- HashMap::default();
-
- while let Some((buffer, matched)) = results_rx.next().await {
- match excerpts_by_buffer.entry(buffer) {
- Entry::Occupied(mut entry) => {
- let entry = entry.get_mut();
- entry.full_path = matched.full_path;
- entry.snapshot = matched.snapshot;
- entry.line_ranges.extend(matched.line_ranges);
- }
- Entry::Vacant(entry) => {
- entry.insert(matched);
- }
- }
- }
-
- let mut result_buffers_by_path = HashMap::default();
- let mut merged_result = RESULTS_MESSAGE.to_string();
-
- for (buffer, mut matched) in excerpts_by_buffer {
- matched
- .line_ranges
- .sort_unstable_by_key(|range| (range.start, Reverse(range.end)));
-
- write_codeblock(
- &matched.full_path,
- merge_excerpts(&matched.snapshot, matched.line_ranges).iter(),
- &[],
- Line(matched.snapshot.max_point().row),
- true,
- &mut merged_result,
- );
-
- result_buffers_by_path.insert(
- matched.full_path,
- ResultBuffer {
- buffer,
- snapshot: matched.snapshot.text,
- },
- );
- }
-
- (result_buffers_by_path, merged_result)
- })
- .await;
-
- if let Some(debug_tx) = &debug_tx {
- debug_tx
- .unbounded_send(ZetaDebugInfo::SearchQueriesExecuted(
- ZetaContextRetrievalDebugInfo {
- project: project.clone(),
- timestamp: Instant::now(),
- },
- ))
- .ok();
- }
-
- let tool_result = LanguageModelToolResult {
- tool_use_id: search_tool_use.id.clone(),
- tool_name: SEARCH_TOOL_NAME.into(),
- is_error: false,
- content: merged_result.into(),
- output: None,
- };
-
- select_request_messages.extend([
- LanguageModelRequestMessage {
- role: Role::Assistant,
- content: vec![MessageContent::ToolUse(search_tool_use)],
- cache: false,
- },
- LanguageModelRequestMessage {
- role: Role::User,
- content: vec![MessageContent::ToolResult(tool_result)],
- cache: false,
- },
- ]);
-
- if result_buffers_by_path.is_empty() {
- log::trace!("context gathering queries produced no results");
- return anyhow::Ok(HashMap::default());
- }
-
- select_request_messages.push(LanguageModelRequestMessage {
- role: Role::User,
- content: vec![SELECT_PROMPT.into()],
- cache: false,
- });
-
- let mut select_stream = request_tool_call::<SelectToolInput>(
- select_request_messages,
- SELECT_TOOL_NAME,
- &model,
- cx,
- )
- .await?;
-
- cx.background_spawn(async move {
- let mut selected_ranges = Vec::new();
-
- while let Some(event) = select_stream.next().await {
- match event? {
- LanguageModelCompletionEvent::ToolUse(tool_use) => {
- if !tool_use.is_input_complete {
- continue;
- }
-
- if tool_use.name.as_ref() == SELECT_TOOL_NAME {
- let call =
- serde_json::from_value::<SelectToolInput>(tool_use.input.clone())?;
- selected_ranges.extend(call.ranges);
- } else {
- log::warn!(
- "context gathering model tried to use unknown tool: {}",
- tool_use.name
- );
- }
- }
- ev @ LanguageModelCompletionEvent::ToolUseJsonParseError { .. } => {
- log::error!("{ev:?}");
- }
- ev => {
- log::trace!("context select event: {ev:?}")
- }
- }
- }
-
- if let Some(debug_tx) = &debug_tx {
- debug_tx
- .unbounded_send(ZetaDebugInfo::SearchResultsFiltered(
- ZetaContextRetrievalDebugInfo {
- project: project.clone(),
- timestamp: Instant::now(),
- },
- ))
- .ok();
- }
-
- if selected_ranges.is_empty() {
- log::trace!("context gathering selected no ranges")
- }
-
- selected_ranges.sort_unstable_by(|a, b| {
- a.start_line
- .cmp(&b.start_line)
- .then(b.end_line.cmp(&a.end_line))
- });
-
- let mut related_excerpts_by_buffer: HashMap<_, Vec<_>> = HashMap::default();
-
- for selected_range in selected_ranges {
- if let Some(ResultBuffer { buffer, snapshot }) =
- result_buffers_by_path.get(&selected_range.path)
- {
- let start_point = Point::new(selected_range.start_line.saturating_sub(1), 0);
- let end_point =
- snapshot.clip_point(Point::new(selected_range.end_line, 0), Bias::Left);
- let range =
- snapshot.anchor_after(start_point)..snapshot.anchor_before(end_point);
-
- related_excerpts_by_buffer
- .entry(buffer.clone())
- .or_default()
- .push(range);
- } else {
- log::warn!(
- "selected path that wasn't included in search results: {}",
- selected_range.path.display()
- );
- }
- }
-
- anyhow::Ok(related_excerpts_by_buffer)
- })
- .await
- })
-}
-
-async fn request_tool_call<T: JsonSchema>(
- messages: Vec<LanguageModelRequestMessage>,
- tool_name: &'static str,
- model: &Arc<dyn LanguageModel>,
- cx: &mut AsyncApp,
-) -> Result<BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>>
-{
- let schema = schemars::schema_for!(T);
-
- let request = LanguageModelRequest {
- messages,
- tools: vec![LanguageModelRequestTool {
- name: tool_name.into(),
- description: schema
- .get("description")
- .and_then(|description| description.as_str())
- .unwrap()
- .to_string(),
- input_schema: serde_json::to_value(schema).unwrap(),
- }],
- ..Default::default()
- };
-
- Ok(model.stream_completion(request, cx).await?)
-}
-
-const MIN_EXCERPT_LEN: usize = 16;
-const MAX_EXCERPT_LEN: usize = 768;
-const MAX_RESULT_BYTES_PER_QUERY: usize = MAX_EXCERPT_LEN * 5;
-
-struct MatchedBuffer {
- snapshot: BufferSnapshot,
- line_ranges: Vec<Range<Line>>,
- full_path: PathBuf,
-}
-
-async fn run_query(
- glob: &str,
- regex: &str,
- results_tx: UnboundedSender<(Entity<Buffer>, MatchedBuffer)>,
- path_style: PathStyle,
- exclude_matcher: PathMatcher,
- project: &Entity<Project>,
- cx: &mut AsyncApp,
-) -> Result<()> {
- let include_matcher = PathMatcher::new(vec![glob], path_style)?;
-
- let query = SearchQuery::regex(
- regex,
- false,
- true,
- false,
- true,
- include_matcher,
- exclude_matcher,
- true,
- None,
- )?;
-
- let results = project.update(cx, |project, cx| project.search(query, cx))?;
- futures::pin_mut!(results);
-
- let mut total_bytes = 0;
-
- while let Some(SearchResult::Buffer { buffer, ranges }) = results.next().await {
- if ranges.is_empty() {
- continue;
- }
-
- let Some((snapshot, full_path)) = buffer.read_with(cx, |buffer, cx| {
- Some((buffer.snapshot(), buffer.file()?.full_path(cx)))
- })?
- else {
- continue;
- };
-
- let results_tx = results_tx.clone();
- cx.background_spawn(async move {
- let mut line_ranges = Vec::with_capacity(ranges.len());
-
- for range in ranges {
- let offset_range = range.to_offset(&snapshot);
- let query_point = (offset_range.start + offset_range.len() / 2).to_point(&snapshot);
-
- if total_bytes + MIN_EXCERPT_LEN >= MAX_RESULT_BYTES_PER_QUERY {
- break;
- }
-
- let excerpt = EditPredictionExcerpt::select_from_buffer(
- query_point,
- &snapshot,
- &EditPredictionExcerptOptions {
- max_bytes: MAX_EXCERPT_LEN.min(MAX_RESULT_BYTES_PER_QUERY - total_bytes),
- min_bytes: MIN_EXCERPT_LEN,
- target_before_cursor_over_total_bytes: 0.5,
- },
- None,
- );
-
- if let Some(excerpt) = excerpt {
- total_bytes += excerpt.range.len();
- if !excerpt.line_range.is_empty() {
- line_ranges.push(excerpt.line_range);
- }
- }
- }
-
- results_tx
- .unbounded_send((
- buffer,
- MatchedBuffer {
- snapshot,
- line_ranges,
- full_path,
- },
- ))
- .log_err();
- })
- .detach();
- }
-
- anyhow::Ok(())
-}
@@ -0,0 +1,194 @@
+use std::ops::Range;
+
+use anyhow::Result;
+use collections::HashMap;
+use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions};
+use futures::{
+ StreamExt,
+ channel::mpsc::{self, UnboundedSender},
+};
+use gpui::{AppContext, AsyncApp, Entity};
+use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt, ToPoint as _};
+use project::{
+ Project, WorktreeSettings,
+ search::{SearchQuery, SearchResult},
+};
+use util::{
+ ResultExt as _,
+ paths::{PathMatcher, PathStyle},
+};
+use workspace::item::Settings as _;
+
+pub async fn run_retrieval_searches(
+ project: Entity<Project>,
+ regex_by_glob: HashMap<String, String>,
+ cx: &mut AsyncApp,
+) -> Result<HashMap<Entity<Buffer>, Vec<Range<Anchor>>>> {
+ let (exclude_matcher, path_style) = project.update(cx, |project, cx| {
+ let global_settings = WorktreeSettings::get_global(cx);
+ let exclude_patterns = global_settings
+ .file_scan_exclusions
+ .sources()
+ .iter()
+ .chain(global_settings.private_files.sources().iter());
+ let path_style = project.path_style(cx);
+ anyhow::Ok((PathMatcher::new(exclude_patterns, path_style)?, path_style))
+ })??;
+
+ let (results_tx, mut results_rx) = mpsc::unbounded();
+
+ for (glob, regex) in regex_by_glob {
+ let exclude_matcher = exclude_matcher.clone();
+ let results_tx = results_tx.clone();
+ let project = project.clone();
+ cx.spawn(async move |cx| {
+ run_query(
+ &glob,
+ ®ex,
+ results_tx.clone(),
+ path_style,
+ exclude_matcher,
+ &project,
+ cx,
+ )
+ .await
+ .log_err();
+ })
+ .detach()
+ }
+ drop(results_tx);
+
+ cx.background_spawn(async move {
+ let mut results: HashMap<Entity<Buffer>, Vec<Range<Anchor>>> = HashMap::default();
+ let mut snapshots = HashMap::default();
+
+ let mut total_bytes = 0;
+ 'outer: while let Some((buffer, snapshot, excerpts)) = results_rx.next().await {
+ snapshots.insert(buffer.entity_id(), snapshot);
+ let existing = results.entry(buffer).or_default();
+ existing.reserve(excerpts.len());
+
+ for (range, size) in excerpts {
+ // Blunt trimming of the results until we have a proper algorithmic filtering step
+ if (total_bytes + size) > MAX_RESULTS_LEN {
+ log::trace!("Combined results reached limit of {MAX_RESULTS_LEN}B");
+ break 'outer;
+ }
+ total_bytes += size;
+ existing.push(range);
+ }
+ }
+
+ for (buffer, ranges) in results.iter_mut() {
+ if let Some(snapshot) = snapshots.get(&buffer.entity_id()) {
+ ranges.sort_unstable_by(|a, b| {
+ a.start
+ .cmp(&b.start, snapshot)
+ .then(b.end.cmp(&b.end, snapshot))
+ });
+
+ let mut index = 1;
+ while index < ranges.len() {
+ if ranges[index - 1]
+ .end
+ .cmp(&ranges[index].start, snapshot)
+ .is_gt()
+ {
+ let removed = ranges.remove(index);
+ ranges[index - 1].end = removed.end;
+ } else {
+ index += 1;
+ }
+ }
+ }
+ }
+
+ Ok(results)
+ })
+ .await
+}
+
/// Smallest excerpt extracted around a single search match.
const MIN_EXCERPT_LEN: usize = 16;
/// Largest excerpt extracted around a single search match.
const MAX_EXCERPT_LEN: usize = 768;
/// Cap on the combined byte size of all excerpts kept across every query.
const MAX_RESULTS_LEN: usize = MAX_EXCERPT_LEN * 5;
+
/// Executes a single regex search restricted to `glob`, expands every match
/// into an excerpt, and streams `(buffer, snapshot, excerpt ranges with byte
/// sizes)` tuples through `results_tx`.
async fn run_query(
    glob: &str,
    regex: &str,
    results_tx: UnboundedSender<(Entity<Buffer>, BufferSnapshot, Vec<(Range<Anchor>, usize)>)>,
    path_style: PathStyle,
    exclude_matcher: PathMatcher,
    project: &Entity<Project>,
    cx: &mut AsyncApp,
) -> Result<()> {
    let include_matcher = PathMatcher::new(vec![glob], path_style)?;

    // NOTE(review): the positional boolean arguments follow `SearchQuery::regex`'s
    // signature; confirm their meaning against that definition.
    let query = SearchQuery::regex(
        regex,
        false,
        true,
        false,
        true,
        include_matcher,
        exclude_matcher,
        true,
        None,
    )?;

    let results = project.update(cx, |project, cx| project.search(query, cx))?;
    futures::pin_mut!(results);

    while let Some(SearchResult::Buffer { buffer, ranges }) = results.next().await {
        // The receiver may be gone (e.g. the consumer hit its result limit); stop early.
        if results_tx.is_closed() {
            break;
        }

        if ranges.is_empty() {
            continue;
        }

        let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
        let results_tx = results_tx.clone();

        // Excerpt expansion only needs the snapshot, so push it off the main thread.
        cx.background_spawn(async move {
            let mut excerpts = Vec::with_capacity(ranges.len());

            for range in ranges {
                let offset_range = range.to_offset(&snapshot);
                // Center the excerpt on the midpoint of the match.
                let query_point = (offset_range.start + offset_range.len() / 2).to_point(&snapshot);

                let excerpt = EditPredictionExcerpt::select_from_buffer(
                    query_point,
                    &snapshot,
                    &EditPredictionExcerptOptions {
                        max_bytes: MAX_EXCERPT_LEN,
                        min_bytes: MIN_EXCERPT_LEN,
                        target_before_cursor_over_total_bytes: 0.5,
                    },
                    None,
                );

                if let Some(excerpt) = excerpt
                    && !excerpt.line_range.is_empty()
                {
                    excerpts.push((
                        snapshot.anchor_after(excerpt.range.start)
                            ..snapshot.anchor_before(excerpt.range.end),
                        excerpt.range.len(),
                    ));
                }
            }

            let send_result = results_tx.unbounded_send((buffer, snapshot, excerpts));

            // Disconnection just means the consumer stopped listening; any other
            // send failure is unexpected and worth logging.
            if let Err(err) = send_result
                && !err.is_disconnected()
            {
                log::error!("{err}");
            }
        })
        .detach();
    }

    anyhow::Ok(())
}
@@ -0,0 +1,1022 @@
+use std::borrow::Cow;
+use std::fmt::Display;
+use std::sync::Arc;
+use std::{
+ fmt::{Debug, Write},
+ mem,
+ ops::Range,
+ path::Path,
+};
+
+use anyhow::Context as _;
+use anyhow::Result;
+use anyhow::anyhow;
+use collections::HashMap;
+use gpui::AsyncApp;
+use gpui::Entity;
+use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt as _, TextBufferSnapshot};
+use project::Project;
+
+pub async fn parse_diff<'a>(
+ diff: &'a str,
+ get_buffer: impl Fn(&Path) -> Option<(&'a BufferSnapshot, &'a [Range<Anchor>])> + Send,
+) -> Result<(&'a BufferSnapshot, Vec<(Range<Anchor>, Arc<str>)>)> {
+ let mut diff = DiffParser::new(diff);
+ let mut edited_buffer = None;
+ let mut edits = Vec::new();
+
+ while let Some(event) = diff.next()? {
+ match event {
+ DiffEvent::Hunk {
+ path: file_path,
+ hunk,
+ } => {
+ let (buffer, ranges) = match edited_buffer {
+ None => {
+ edited_buffer = get_buffer(&Path::new(file_path.as_ref()));
+ edited_buffer
+ .as_ref()
+ .context("Model tried to edit a file that wasn't included")?
+ }
+ Some(ref current) => current,
+ };
+
+ edits.extend(resolve_hunk_edits_in_buffer(hunk, &buffer.text, ranges)?);
+ }
+ DiffEvent::FileEnd { renamed_to } => {
+ let (buffer, _) = edited_buffer
+ .take()
+ .expect("Got a FileEnd event before an Hunk event");
+
+ if renamed_to.is_some() {
+ anyhow::bail!("edit predictions cannot rename files");
+ }
+
+ if diff.next()?.is_some() {
+ anyhow::bail!("Edited more than one file");
+ }
+
+ return Ok((buffer, edits));
+ }
+ }
+ }
+
+ Err(anyhow::anyhow!("No EOF"))
+}
+
/// Holds the buffers opened while applying a diff, keeping them alive for as
/// long as the caller retains this value.
#[derive(Debug)]
pub struct OpenedBuffers<'a>(#[allow(unused)] HashMap<Cow<'a, str>, Entity<Buffer>>);
+
/// Applies a (possibly multi-file) unified diff to the project's buffers,
/// opening each file named by a `---` header, resolving hunks against the
/// whole buffer, performing renames when the `+++` path differs, and editing
/// the buffers in place.
///
/// Returns the opened buffers so the caller controls how long they stay alive.
#[must_use]
pub async fn apply_diff<'a>(
    diff: &'a str,
    project: &Entity<Project>,
    cx: &mut AsyncApp,
) -> Result<OpenedBuffers<'a>> {
    let mut included_files = HashMap::default();

    // First pass: open every file mentioned by a `---` header.
    for line in diff.lines() {
        let diff_line = DiffLine::parse(line);

        if let DiffLine::OldPath { path } = diff_line {
            let buffer = project
                .update(cx, |project, cx| {
                    let project_path =
                        project
                            .find_project_path(path.as_ref(), cx)
                            .with_context(|| {
                                format!("Failed to find worktree for new path: {}", path)
                            })?;
                    anyhow::Ok(project.open_buffer(project_path, cx))
                })??
                .await?;

            included_files.insert(path, buffer);
        }
    }

    // Hunk context may match anywhere in the buffer.
    let ranges = [Anchor::MIN..Anchor::MAX];

    // Second pass: parse and apply the hunks file by file.
    let mut diff = DiffParser::new(diff);
    let mut current_file = None;
    let mut edits = vec![];

    while let Some(event) = diff.next()? {
        match event {
            DiffEvent::Hunk {
                path: file_path,
                hunk,
            } => {
                let (buffer, ranges) = match current_file {
                    None => {
                        let buffer = included_files
                            .get_mut(&file_path)
                            .expect("Opened all files in diff");

                        current_file = Some((buffer, ranges.as_slice()));
                        current_file.as_ref().unwrap()
                    }
                    Some(ref current) => current,
                };

                buffer.read_with(cx, |buffer, _| {
                    edits.extend(resolve_hunk_edits_in_buffer(hunk, buffer, ranges)?);
                    anyhow::Ok(())
                })??;
            }
            DiffEvent::FileEnd { renamed_to } => {
                // NOTE(review): a file section with headers but zero hunks would
                // hit this `expect` — confirm the parser can't emit that here.
                let (buffer, _) = current_file
                    .take()
                    .expect("Got a FileEnd event before an Hunk event");

                if let Some(renamed_to) = renamed_to {
                    project
                        .update(cx, |project, cx| {
                            let new_project_path = project
                                .find_project_path(Path::new(renamed_to.as_ref()), cx)
                                .with_context(|| {
                                    format!("Failed to find worktree for new path: {}", renamed_to)
                                })?;

                            let project_file = project::File::from_dyn(buffer.read(cx).file())
                                .expect("Wrong file type");

                            anyhow::Ok(project.rename_entry(
                                project_file.entry_id.unwrap(),
                                new_project_path,
                                cx,
                            ))
                        })??
                        .await?;
                }

                // Apply the accumulated edits for this file in one batch.
                let edits = mem::take(&mut edits);
                buffer.update(cx, |buffer, cx| {
                    buffer.edit(edits, None, cx);
                })?;
            }
        }
    }

    Ok(OpenedBuffers(included_files))
}
+
/// The `---`/`+++` header pair of the file section currently being parsed.
struct PatchFile<'a> {
    old_path: Cow<'a, str>,
    new_path: Cow<'a, str>,
}
+
/// Incremental parser over a unified diff, yielding one `DiffEvent` at a time.
struct DiffParser<'a> {
    // File whose hunks are currently being accumulated, if any.
    current_file: Option<PatchFile<'a>>,
    // The raw line plus its parsed form; `None` once the input is exhausted.
    current_line: Option<(&'a str, DiffLine<'a>)>,
    // Hunk built up from context/deletion/addition lines seen so far.
    hunk: Hunk,
    diff: std::str::Lines<'a>,
}
+
/// Events produced by `DiffParser::next`.
#[derive(Debug, PartialEq)]
enum DiffEvent<'a> {
    /// A completed hunk belonging to the file at `path` (the `---` path).
    Hunk { path: Cow<'a, str>, hunk: Hunk },
    /// The current file section ended; `renamed_to` is set when the `+++`
    /// path differed from the `---` path.
    FileEnd { renamed_to: Option<Cow<'a, str>> },
}
+
/// A hunk flattened into its old text (`context`) plus the edits to apply to it.
#[derive(Debug, Default, PartialEq)]
struct Hunk {
    // The hunk's old content: context lines plus deleted lines, newline-terminated.
    context: String,
    // Replacements expressed as byte ranges into `context`.
    edits: Vec<Edit>,
}

impl Hunk {
    // True when no content lines have been accumulated yet.
    fn is_empty(&self) -> bool {
        self.context.is_empty() && self.edits.is_empty()
    }
}
+
/// A single replacement: `range` (byte offsets into `Hunk::context`) becomes `text`.
#[derive(Debug, PartialEq)]
struct Edit {
    range: Range<usize>,
    text: String,
}
+
impl<'a> DiffParser<'a> {
    /// Creates a parser primed with the first line of `diff`.
    fn new(diff: &'a str) -> Self {
        let mut diff = diff.lines();
        let current_line = diff.next().map(|line| (line, DiffLine::parse(line)));
        DiffParser {
            current_file: None,
            hunk: Hunk::default(),
            current_line,
            diff,
        }
    }

    /// Advances the parser, returning the next `DiffEvent`, or `None` once the
    /// input is exhausted. Garbage lines outside file sections are skipped.
    fn next(&mut self) -> Result<Option<DiffEvent<'a>>> {
        loop {
            // Decide whether the current line terminates the pending hunk
            // and/or file section *before* consuming it.
            let (hunk_done, file_done) = match self.current_line.as_ref().map(|e| &e.1) {
                Some(DiffLine::OldPath { .. }) | Some(DiffLine::Garbage(_)) | None => (true, true),
                Some(DiffLine::HunkHeader(_)) => (true, false),
                _ => (false, false),
            };

            if hunk_done {
                if let Some(file) = &self.current_file
                    && !self.hunk.is_empty()
                {
                    return Ok(Some(DiffEvent::Hunk {
                        path: file.old_path.clone(),
                        hunk: mem::take(&mut self.hunk),
                    }));
                }
            }

            if file_done {
                if let Some(PatchFile { old_path, new_path }) = self.current_file.take() {
                    return Ok(Some(DiffEvent::FileEnd {
                        renamed_to: if old_path != new_path {
                            Some(new_path)
                        } else {
                            None
                        },
                    }));
                }
            }

            let Some((line, parsed_line)) = self.current_line.take() else {
                break;
            };

            util::maybe!({
                match parsed_line {
                    DiffLine::OldPath { path } => {
                        self.current_file = Some(PatchFile {
                            old_path: path,
                            new_path: "".into(),
                        });
                    }
                    DiffLine::NewPath { path } => {
                        if let Some(current_file) = &mut self.current_file {
                            current_file.new_path = path
                        }
                    }
                    DiffLine::HunkHeader(_) => {}
                    DiffLine::Context(ctx) => {
                        if self.current_file.is_some() {
                            writeln!(&mut self.hunk.context, "{ctx}")?;
                        }
                    }
                    DiffLine::Deletion(del) => {
                        if self.current_file.is_some() {
                            // Deleted text is part of the hunk's old content: extend
                            // `context` and cover it with an empty-replacement edit.
                            let range = self.hunk.context.len()
                                ..self.hunk.context.len() + del.len() + '\n'.len_utf8();
                            if let Some(last_edit) = self.hunk.edits.last_mut()
                                && last_edit.range.end == range.start
                            {
                                // Coalesce with a directly-adjacent preceding edit.
                                last_edit.range.end = range.end;
                            } else {
                                self.hunk.edits.push(Edit {
                                    range,
                                    text: String::new(),
                                });
                            }
                            writeln!(&mut self.hunk.context, "{del}")?;
                        }
                    }
                    DiffLine::Addition(add) => {
                        if self.current_file.is_some() {
                            // Added text consumes no old content: an empty range at
                            // the current context position, accumulating new text.
                            let range = self.hunk.context.len()..self.hunk.context.len();
                            if let Some(last_edit) = self.hunk.edits.last_mut()
                                && last_edit.range.end == range.start
                            {
                                writeln!(&mut last_edit.text, "{add}").unwrap();
                            } else {
                                self.hunk.edits.push(Edit {
                                    range,
                                    text: format!("{add}\n"),
                                });
                            }
                        }
                    }
                    DiffLine::Garbage(_) => {}
                }

                anyhow::Ok(())
            })
            .with_context(|| format!("on line:\n\n```\n{}```", line))?;

            self.current_line = self.diff.next().map(|line| (line, DiffLine::parse(line)));
        }

        anyhow::Ok(None)
    }
}
+
/// Locates `hunk.context` inside the allowed `ranges` of `buffer` and converts
/// the hunk's context-relative edits into anchored buffer edits.
///
/// Fails if the context matches nowhere, or matches more than once (ambiguous).
fn resolve_hunk_edits_in_buffer(
    hunk: Hunk,
    buffer: &TextBufferSnapshot,
    ranges: &[Range<Anchor>],
) -> Result<impl Iterator<Item = (Range<Anchor>, Arc<str>)>, anyhow::Error> {
    // Byte offset in `buffer` where the hunk's context begins. An empty
    // context resolves to offset 0.
    let context_offset = if hunk.context.is_empty() {
        Ok(0)
    } else {
        let mut offset = None;
        for range in ranges {
            let range = range.to_offset(buffer);
            let text = buffer.text_for_range(range.clone()).collect::<String>();
            for (ix, _) in text.match_indices(&hunk.context) {
                if offset.is_some() {
                    anyhow::bail!("Context is not unique enough:\n{}", hunk.context);
                }
                offset = Some(range.start + ix);
            }
        }
        offset.ok_or_else(|| {
            anyhow!(
                "Failed to match context:\n{}\n\nBuffer:\n{}",
                hunk.context,
                buffer.text(),
            )
        })
    }?;
    // Re-diff each edit's old text against its replacement so the resulting
    // anchored edits are as fine-grained as possible.
    let iter = hunk.edits.into_iter().flat_map(move |edit| {
        let old_text = buffer
            .text_for_range(context_offset + edit.range.start..context_offset + edit.range.end)
            .collect::<String>();
        let edits_within_hunk = language::text_diff(&old_text, &edit.text);
        edits_within_hunk
            .into_iter()
            .map(move |(inner_range, inner_text)| {
                (
                    buffer.anchor_after(context_offset + edit.range.start + inner_range.start)
                        ..buffer.anchor_before(context_offset + edit.range.start + inner_range.end),
                    inner_text,
                )
            })
    });
    Ok(iter)
}
+
/// One classified line of a unified diff.
#[derive(Debug, PartialEq)]
pub enum DiffLine<'a> {
    OldPath { path: Cow<'a, str> },
    NewPath { path: Cow<'a, str> },
    /// `@@ … @@`; `None` when the location numbers were elided (`@@ ... @@`).
    HunkHeader(Option<HunkLocation>),
    Context(&'a str),
    Deletion(&'a str),
    Addition(&'a str),
    /// Anything that isn't valid diff syntax (e.g. surrounding prose).
    Garbage(&'a str),
}
+
/// Parsed `@@ -old,count +new,count @@` header; start lines stored zero-based.
#[derive(Debug, PartialEq)]
pub struct HunkLocation {
    start_line_old: u32,
    count_old: u32,
    start_line_new: u32,
    count_new: u32,
}
+
impl<'a> DiffLine<'a> {
    /// Classifies `line`, falling back to `Garbage` for anything that isn't
    /// valid diff syntax.
    pub fn parse(line: &'a str) -> Self {
        Self::try_parse(line).unwrap_or(Self::Garbage(line))
    }

    // NOTE: check order is semantic — "---"/"+++" must be tried before the
    // bare "-"/"+" deletion/addition prefixes, and the empty-line case before
    // the " " context prefix.
    fn try_parse(line: &'a str) -> Option<Self> {
        if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) {
            let path = parse_header_path("a/", header);
            Some(Self::OldPath { path })
        } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) {
            Some(Self::NewPath {
                path: parse_header_path("b/", header),
            })
        } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) {
            // `@@ ... @@` — location numbers elided.
            if header.starts_with("...") {
                return Some(Self::HunkHeader(None));
            }

            let (start_line_old, header) = header.strip_prefix('-')?.split_once(',')?;
            let mut parts = header.split_ascii_whitespace();
            let count_old = parts.next()?;
            let (start_line_new, count_new) = parts.next()?.strip_prefix('+')?.split_once(',')?;

            // Headers are one-based; stored zero-based.
            Some(Self::HunkHeader(Some(HunkLocation {
                start_line_old: start_line_old.parse::<u32>().ok()?.saturating_sub(1),
                count_old: count_old.parse().ok()?,
                start_line_new: start_line_new.parse::<u32>().ok()?.saturating_sub(1),
                count_new: count_new.parse().ok()?,
            })))
        } else if let Some(deleted_header) = line.strip_prefix("-") {
            Some(Self::Deletion(deleted_header))
        } else if line.is_empty() {
            // Treat a fully-empty line as empty context (the leading space is
            // often dropped in practice).
            Some(Self::Context(""))
        } else if let Some(context) = line.strip_prefix(" ") {
            Some(Self::Context(context))
        } else {
            Some(Self::Addition(line.strip_prefix("+")?))
        }
    }
}
+
+impl<'a> Display for DiffLine<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ DiffLine::OldPath { path } => write!(f, "--- {path}"),
+ DiffLine::NewPath { path } => write!(f, "+++ {path}"),
+ DiffLine::HunkHeader(Some(hunk_location)) => {
+ write!(
+ f,
+ "@@ -{},{} +{},{} @@",
+ hunk_location.start_line_old + 1,
+ hunk_location.count_old,
+ hunk_location.start_line_new + 1,
+ hunk_location.count_new
+ )
+ }
+ DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"),
+ DiffLine::Context(content) => write!(f, " {content}"),
+ DiffLine::Deletion(content) => write!(f, "-{content}"),
+ DiffLine::Addition(content) => write!(f, "+{content}"),
+ DiffLine::Garbage(line) => write!(f, "{line}"),
+ }
+ }
+}
+
/// Extracts the path from a `---`/`+++` header body: strips a leading
/// `a/`/`b/` prefix, stops at the first unquoted whitespace, and honors
/// double-quoting and backslash escapes.
fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> {
    // Fast path: no quoting or escaping involved, so the path is simply
    // everything up to the first whitespace, borrowed from the input.
    if !header.contains(['"', '\\']) {
        let path = header.split_ascii_whitespace().next().unwrap_or(header);
        return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path));
    }

    // Slow path: build the path character by character.
    let mut out = String::with_capacity(header.len());
    let mut quoted = false;
    let mut pending_prefix = Some(strip_prefix);
    let mut iter = header.chars();

    while let Some(c) = iter.next() {
        match c {
            // Quotes toggle quoting and are dropped from the output.
            '"' => quoted = !quoted,
            // A backslash escapes the following character verbatim.
            '\\' => match iter.next() {
                Some(escaped) => out.push(escaped),
                None => break,
            },
            // Unquoted whitespace terminates the path.
            c if c.is_ascii_whitespace() && !quoted => break,
            c => out.push(c),
        }

        // Once the accumulated output equals the `a/`/`b/` prefix, drop it
        // (at most once).
        if pending_prefix == Some(out.as_str()) {
            pending_prefix = None;
            out.clear();
        }
    }

    Cow::Owned(out)
}
+
/// Strips leading ASCII whitespace from `header`, but only if there is at
/// least one whitespace character to strip; otherwise returns `None`.
fn eat_required_whitespace(header: &str) -> Option<&str> {
    let trimmed = header.trim_ascii_start();
    (trimmed.len() < header.len()).then_some(trimmed)
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use gpui::TestAppContext;
+ use indoc::indoc;
+ use language::Point;
+ use pretty_assertions::assert_eq;
+ use project::{FakeFs, Project};
+ use serde_json::json;
+ use settings::SettingsStore;
+ use util::path;
+
+ #[test]
+ fn parse_lines_simple() {
+ let input = indoc! {"
+ diff --git a/text.txt b/text.txt
+ index 86c770d..a1fd855 100644
+ --- a/file.txt
+ +++ b/file.txt
+ @@ -1,2 +1,3 @@
+ context
+ -deleted
+ +inserted
+ garbage
+
+ --- b/file.txt
+ +++ a/file.txt
+ "};
+
+ let lines = input.lines().map(DiffLine::parse).collect::<Vec<_>>();
+
+ pretty_assertions::assert_eq!(
+ lines,
+ &[
+ DiffLine::Garbage("diff --git a/text.txt b/text.txt"),
+ DiffLine::Garbage("index 86c770d..a1fd855 100644"),
+ DiffLine::OldPath {
+ path: "file.txt".into()
+ },
+ DiffLine::NewPath {
+ path: "file.txt".into()
+ },
+ DiffLine::HunkHeader(Some(HunkLocation {
+ start_line_old: 0,
+ count_old: 2,
+ start_line_new: 0,
+ count_new: 3
+ })),
+ DiffLine::Context("context"),
+ DiffLine::Deletion("deleted"),
+ DiffLine::Addition("inserted"),
+ DiffLine::Garbage("garbage"),
+ DiffLine::Context(""),
+ DiffLine::OldPath {
+ path: "b/file.txt".into()
+ },
+ DiffLine::NewPath {
+ path: "a/file.txt".into()
+ },
+ ]
+ );
+ }
+
+ #[test]
+ fn file_header_extra_space() {
+ let options = ["--- file", "--- file", "---\tfile"];
+
+ for option in options {
+ pretty_assertions::assert_eq!(
+ DiffLine::parse(option),
+ DiffLine::OldPath {
+ path: "file".into()
+ },
+ "{option}",
+ );
+ }
+ }
+
+ #[test]
+ fn hunk_header_extra_space() {
+ let options = [
+ "@@ -1,2 +1,3 @@",
+ "@@ -1,2 +1,3 @@",
+ "@@\t-1,2\t+1,3\t@@",
+ "@@ -1,2 +1,3 @@",
+ "@@ -1,2 +1,3 @@",
+ "@@ -1,2 +1,3 @@",
+ "@@ -1,2 +1,3 @@ garbage",
+ ];
+
+ for option in options {
+ pretty_assertions::assert_eq!(
+ DiffLine::parse(option),
+ DiffLine::HunkHeader(Some(HunkLocation {
+ start_line_old: 0,
+ count_old: 2,
+ start_line_new: 0,
+ count_new: 3
+ })),
+ "{option}",
+ );
+ }
+ }
+
    // A header whose line numbers were elided parses as `HunkHeader(None)`.
    #[test]
    fn hunk_header_without_location() {
        pretty_assertions::assert_eq!(DiffLine::parse("@@ ... @@"), DiffLine::HunkHeader(None));
    }
+
    // Exercises `parse_header_path`: prefix stripping, whitespace termination,
    // quoting, and backslash escapes.
    #[test]
    fn test_parse_path() {
        assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt");
        assert_eq!(
            parse_header_path("a/", "foo/bar/baz.txt"),
            "foo/bar/baz.txt"
        );
        assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt");
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/baz.txt"),
            "foo/bar/baz.txt"
        );

        // Extra
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/baz.txt 2025"),
            "foo/bar/baz.txt"
        );
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/baz.txt\t2025"),
            "foo/bar/baz.txt"
        );
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/baz.txt \""),
            "foo/bar/baz.txt"
        );

        // Quoted
        assert_eq!(
            parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""),
            "foo/bar/baz quox.txt"
        );
        assert_eq!(
            parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""),
            "foo/bar/baz quox.txt"
        );
        assert_eq!(
            parse_header_path("a/", "\"foo/bar/baz quox.txt\""),
            "foo/bar/baz quox.txt"
        );
        assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷");
        assert_eq!(
            parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"),
            "foo/bar/baz quox.txt"
        );
        // unescaped quotes are dropped
        assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar");

        // Escaped
        assert_eq!(
            parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""),
            "foo/\"bar\"/baz.txt"
        );
        assert_eq!(
            parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""),
            "C:\\Projects\\My App\\old file.txt"
        );
    }
+
+ #[test]
+ fn test_parse_diff_with_leading_and_trailing_garbage() {
+ let diff = indoc! {"
+ I need to make some changes.
+
+ I'll change the following things:
+ - one
+ - two
+ - three
+
+ ```
+ --- a/file.txt
+ +++ b/file.txt
+ one
+ +AND
+ two
+ ```
+
+ Summary of what I did:
+ - one
+ - two
+ - three
+
+ That's about it.
+ "};
+
+ let mut events = Vec::new();
+ let mut parser = DiffParser::new(diff);
+ while let Some(event) = parser.next().unwrap() {
+ events.push(event);
+ }
+
+ assert_eq!(
+ events,
+ &[
+ DiffEvent::Hunk {
+ path: "file.txt".into(),
+ hunk: Hunk {
+ context: "one\ntwo\n".into(),
+ edits: vec![Edit {
+ range: 4..4,
+ text: "AND\n".into()
+ }],
+ }
+ },
+ DiffEvent::FileEnd { renamed_to: None }
+ ],
+ )
+ }
+
+ #[gpui::test]
+ async fn test_apply_diff_successful(cx: &mut TestAppContext) {
+ let fs = init_test(cx);
+
+ let buffer_1_text = indoc! {r#"
+ one
+ two
+ three
+ four
+ five
+ "# };
+
+ let buffer_1_text_final = indoc! {r#"
+ 3
+ 4
+ 5
+ "# };
+
+ let buffer_2_text = indoc! {r#"
+ six
+ seven
+ eight
+ nine
+ ten
+ "# };
+
+ let buffer_2_text_final = indoc! {r#"
+ 5
+ six
+ seven
+ 7.5
+ eight
+ nine
+ ten
+ 11
+ "# };
+
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "file1": buffer_1_text,
+ "file2": buffer_2_text,
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+
+ let diff = indoc! {r#"
+ --- a/root/file1
+ +++ b/root/file1
+ one
+ two
+ -three
+ +3
+ four
+ five
+ --- a/root/file1
+ +++ b/root/file1
+ 3
+ -four
+ -five
+ +4
+ +5
+ --- a/root/file1
+ +++ b/root/file1
+ -one
+ -two
+ 3
+ 4
+ --- a/root/file2
+ +++ b/root/file2
+ +5
+ six
+ --- a/root/file2
+ +++ b/root/file2
+ seven
+ +7.5
+ eight
+ --- a/root/file2
+ +++ b/root/file2
+ ten
+ +11
+ "#};
+
+ let _buffers = apply_diff(diff, &project, &mut cx.to_async())
+ .await
+ .unwrap();
+ let buffer_1 = project
+ .update(cx, |project, cx| {
+ let project_path = project.find_project_path(path!("/root/file1"), cx).unwrap();
+ project.open_buffer(project_path, cx)
+ })
+ .await
+ .unwrap();
+
+ buffer_1.read_with(cx, |buffer, _cx| {
+ assert_eq!(buffer.text(), buffer_1_text_final);
+ });
+ let buffer_2 = project
+ .update(cx, |project, cx| {
+ let project_path = project.find_project_path(path!("/root/file2"), cx).unwrap();
+ project.open_buffer(project_path, cx)
+ })
+ .await
+ .unwrap();
+
+ buffer_2.read_with(cx, |buffer, _cx| {
+ assert_eq!(buffer.text(), buffer_2_text_final);
+ });
+ }
+
+ #[gpui::test]
+ async fn test_apply_diff_non_unique(cx: &mut TestAppContext) {
+ let fs = init_test(cx);
+
+ let buffer_1_text = indoc! {r#"
+ one
+ two
+ three
+ four
+ five
+ one
+ two
+ three
+ four
+ five
+ "# };
+
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "file1": buffer_1_text,
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/root/file1"), cx)
+ })
+ .await
+ .unwrap();
+ let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+
+ let diff = indoc! {r#"
+ --- a/root/file1
+ +++ b/root/file1
+ one
+ two
+ -three
+ +3
+ four
+ five
+ "#};
+
+ let final_text = indoc! {r#"
+ one
+ two
+ three
+ four
+ five
+ one
+ two
+ 3
+ four
+ five
+ "#};
+
+ apply_diff(diff, &project, &mut cx.to_async())
+ .await
+ .expect_err("Non-unique edits should fail");
+
+ let ranges = [buffer_snapshot.anchor_before(Point::new(1, 0))
+ ..buffer_snapshot.anchor_after(buffer_snapshot.max_point())];
+
+ let (edited_snapshot, edits) = parse_diff(diff, |_path| Some((&buffer_snapshot, &ranges)))
+ .await
+ .unwrap();
+
+ assert_eq!(edited_snapshot.remote_id(), buffer_snapshot.remote_id());
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit(edits, None, cx);
+ assert_eq!(buffer.text(), final_text);
+ });
+ }
+
+ #[gpui::test]
+ async fn test_parse_diff_with_edits_within_line(cx: &mut TestAppContext) {
+ let fs = init_test(cx);
+
+ let buffer_1_text = indoc! {r#"
+ one two three four
+ five six seven eight
+ nine ten eleven twelve
+ "# };
+
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "file1": buffer_1_text,
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+ let buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer(path!("/root/file1"), cx)
+ })
+ .await
+ .unwrap();
+ let buffer_snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+
+ let diff = indoc! {r#"
+ --- a/root/file1
+ +++ b/root/file1
+ one two three four
+ -five six seven eight
+ +five SIX seven eight!
+ nine ten eleven twelve
+ "#};
+
+ let (buffer, edits) = parse_diff(diff, |_path| {
+ Some((&buffer_snapshot, &[(Anchor::MIN..Anchor::MAX)] as &[_]))
+ })
+ .await
+ .unwrap();
+
+ let edits = edits
+ .into_iter()
+ .map(|(range, text)| (range.to_point(&buffer), text))
+ .collect::<Vec<_>>();
+ assert_eq!(
+ edits,
+ &[
+ (Point::new(1, 5)..Point::new(1, 8), "SIX".into()),
+ (Point::new(1, 20)..Point::new(1, 20), "!".into())
+ ]
+ );
+ }
+
+ #[gpui::test]
+ async fn test_apply_diff_unique_via_previous_context(cx: &mut TestAppContext) {
+ let fs = init_test(cx);
+
+ let start = indoc! {r#"
+ one
+ two
+ three
+ four
+ five
+
+ four
+ five
+ "# };
+
+ let end = indoc! {r#"
+ one
+ two
+ 3
+ four
+ 5
+
+ four
+ five
+ "# };
+
+ fs.insert_tree(
+ path!("/root"),
+ json!({
+ "file1": start,
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+
+ let diff = indoc! {r#"
+ --- a/root/file1
+ +++ b/root/file1
+ one
+ two
+ -three
+ +3
+ four
+ -five
+ +5
+ "#};
+
+ let _buffers = apply_diff(diff, &project, &mut cx.to_async())
+ .await
+ .unwrap();
+
+ let buffer_1 = project
+ .update(cx, |project, cx| {
+ let project_path = project.find_project_path(path!("/root/file1"), cx).unwrap();
+ project.open_buffer(project_path, cx)
+ })
+ .await
+ .unwrap();
+
+ buffer_1.read_with(cx, |buffer, _cx| {
+ assert_eq!(buffer.text(), end);
+ });
+ }
+
+ fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
+ cx.update(|cx| {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ });
+
+ FakeFs::new(cx.background_executor.clone())
+ }
+}
@@ -6,7 +6,8 @@ use cloud_llm_client::{
AcceptEditPredictionBody, EXPIRED_LLM_TOKEN_HEADER_NAME, MINIMUM_REQUIRED_VERSION_HEADER_NAME,
ZED_VERSION_HEADER_NAME,
};
-use cloud_zeta2_prompt::{DEFAULT_MAX_PROMPT_BYTES, build_prompt};
+use cloud_zeta2_prompt::DEFAULT_MAX_PROMPT_BYTES;
+use cloud_zeta2_prompt::retrieval_prompt::SearchToolInput;
use collections::HashMap;
use edit_prediction_context::{
DeclarationId, DeclarationStyle, EditPredictionContext, EditPredictionContextOptions,
@@ -24,11 +25,13 @@ use gpui::{
use language::{Anchor, Buffer, DiagnosticSet, LanguageServerId, ToOffset as _, ToPoint};
use language::{BufferSnapshot, OffsetRangeExt};
use language_model::{LlmApiToken, RefreshLlmTokenListener};
+use open_ai::FunctionDefinition;
use project::Project;
use release_channel::AppVersion;
use serde::de::DeserializeOwned;
use std::collections::{VecDeque, hash_map};
-use std::fmt::Write;
+use uuid::Uuid;
+
use std::ops::Range;
use std::path::Path;
use std::str::FromStr as _;
@@ -42,12 +45,12 @@ use workspace::notifications::{ErrorMessagePrompt, NotificationId, show_app_noti
pub mod merge_excerpts;
mod prediction;
mod provider;
-pub mod related_excerpts;
+pub mod retrieval_search;
+pub mod udiff;
use crate::merge_excerpts::merge_excerpts;
use crate::prediction::EditPrediction;
-use crate::related_excerpts::find_related_excerpts;
-pub use crate::related_excerpts::{LlmContextOptions, SearchToolQuery};
+pub use crate::prediction::EditPredictionId;
pub use provider::ZetaEditPredictionProvider;
/// Maximum number of events to track.
@@ -59,9 +62,10 @@ pub const DEFAULT_EXCERPT_OPTIONS: EditPredictionExcerptOptions = EditPrediction
target_before_cursor_over_total_bytes: 0.5,
};
-pub const DEFAULT_CONTEXT_OPTIONS: ContextMode = ContextMode::Llm(DEFAULT_LLM_CONTEXT_OPTIONS);
+pub const DEFAULT_CONTEXT_OPTIONS: ContextMode =
+ ContextMode::Agentic(DEFAULT_AGENTIC_CONTEXT_OPTIONS);
-pub const DEFAULT_LLM_CONTEXT_OPTIONS: LlmContextOptions = LlmContextOptions {
+pub const DEFAULT_AGENTIC_CONTEXT_OPTIONS: AgenticContextOptions = AgenticContextOptions {
excerpt: DEFAULT_EXCERPT_OPTIONS,
};
@@ -122,14 +126,19 @@ pub struct ZetaOptions {
#[derive(Debug, Clone, PartialEq)]
pub enum ContextMode {
- Llm(LlmContextOptions),
+ Agentic(AgenticContextOptions),
Syntax(EditPredictionContextOptions),
}
+#[derive(Debug, Clone, PartialEq)]
+pub struct AgenticContextOptions {
+ pub excerpt: EditPredictionExcerptOptions,
+}
+
impl ContextMode {
pub fn excerpt(&self) -> &EditPredictionExcerptOptions {
match self {
- ContextMode::Llm(options) => &options.excerpt,
+ ContextMode::Agentic(options) => &options.excerpt,
ContextMode::Syntax(options) => &options.excerpt,
}
}
@@ -140,9 +149,8 @@ pub enum ZetaDebugInfo {
ContextRetrievalStarted(ZetaContextRetrievalStartedDebugInfo),
SearchQueriesGenerated(ZetaSearchQueryDebugInfo),
SearchQueriesExecuted(ZetaContextRetrievalDebugInfo),
- SearchResultsFiltered(ZetaContextRetrievalDebugInfo),
ContextRetrievalFinished(ZetaContextRetrievalDebugInfo),
- EditPredicted(ZetaEditPredictionDebugInfo),
+ EditPredictionRequested(ZetaEditPredictionDebugInfo),
}
#[derive(Debug)]
@@ -165,14 +173,14 @@ pub struct ZetaEditPredictionDebugInfo {
pub buffer: WeakEntity<Buffer>,
pub position: language::Anchor,
pub local_prompt: Result<String, String>,
- pub response_rx: oneshot::Receiver<Result<predict_edits_v3::PredictEditsResponse, String>>,
+ pub response_rx: oneshot::Receiver<(Result<open_ai::Response, String>, TimeDelta)>,
}
#[derive(Debug)]
pub struct ZetaSearchQueryDebugInfo {
pub project: Entity<Project>,
pub timestamp: Instant,
- pub queries: Vec<SearchToolQuery>,
+ pub regex_by_glob: HashMap<String, String>,
}
pub type RequestDebugInfo = predict_edits_v3::DebugInfo;
@@ -224,7 +232,7 @@ impl CurrentEditPrediction {
{
let (old_range, old_text) = &old_edits[0];
let (new_range, new_text) = &new_edits[0];
- new_range == old_range && new_text.starts_with(old_text)
+ new_range == old_range && new_text.starts_with(old_text.as_ref())
} else {
true
}
@@ -539,7 +547,7 @@ impl Zeta {
prediction,
} = project_state.current_prediction.as_ref()?;
- if prediction.targets_buffer(buffer.read(cx), cx) {
+ if prediction.targets_buffer(buffer.read(cx)) {
Some(BufferEditPrediction::Local { prediction })
} else if *requested_by_buffer_id == buffer.entity_id() {
Some(BufferEditPrediction::Jump { prediction })
@@ -639,7 +647,7 @@ impl Zeta {
pub fn request_prediction(
&mut self,
project: &Entity<Project>,
- buffer: &Entity<Buffer>,
+ active_buffer: &Entity<Buffer>,
position: language::Anchor,
cx: &mut Context<Self>,
) -> Task<Result<Option<EditPrediction>>> {
@@ -651,8 +659,8 @@ impl Zeta {
.read_with(cx, |index, _cx| index.state().clone())
});
let options = self.options.clone();
- let snapshot = buffer.read(cx).snapshot();
- let Some(excerpt_path) = snapshot
+ let active_snapshot = active_buffer.read(cx).snapshot();
+ let Some(excerpt_path) = active_snapshot
.file()
.map(|path| -> Arc<Path> { path.full_path(cx).into() })
else {
@@ -678,12 +686,13 @@ impl Zeta {
})
.unwrap_or_default();
- let diagnostics = snapshot.diagnostic_sets().clone();
+ let diagnostics = active_snapshot.diagnostic_sets().clone();
- let parent_abs_path = project::File::from_dyn(buffer.read(cx).file()).and_then(|f| {
- let mut path = f.worktree.read(cx).absolutize(&f.path);
- if path.pop() { Some(path) } else { None }
- });
+ let parent_abs_path =
+ project::File::from_dyn(active_buffer.read(cx).file()).and_then(|f| {
+ let mut path = f.worktree.read(cx).absolutize(&f.path);
+ if path.pop() { Some(path) } else { None }
+ });
// TODO data collection
let can_collect_data = cx.is_staff();
@@ -692,9 +701,10 @@ impl Zeta {
.and_then(|project_state| project_state.context.as_ref())
.unwrap_or(&HashMap::default())
.iter()
- .filter_map(|(buffer, ranges)| {
- let buffer = buffer.read(cx);
+ .filter_map(|(buffer_entity, ranges)| {
+ let buffer = buffer_entity.read(cx);
Some((
+ buffer_entity.clone(),
buffer.snapshot(),
buffer.file()?.full_path(cx).into(),
ranges.clone(),
@@ -703,8 +713,7 @@ impl Zeta {
.collect::<Vec<_>>();
let request_task = cx.background_spawn({
- let snapshot = snapshot.clone();
- let buffer = buffer.clone();
+ let active_buffer = active_buffer.clone();
async move {
let index_state = if let Some(index_state) = index_state {
Some(index_state.lock_owned().await)
@@ -712,8 +721,8 @@ impl Zeta {
None
};
- let cursor_offset = position.to_offset(&snapshot);
- let cursor_point = cursor_offset.to_point(&snapshot);
+ let cursor_offset = position.to_offset(&active_snapshot);
+ let cursor_point = cursor_offset.to_point(&active_snapshot);
let before_retrieval = chrono::Utc::now();
@@ -721,29 +730,30 @@ impl Zeta {
Self::gather_nearby_diagnostics(
cursor_offset,
&diagnostics,
- &snapshot,
+ &active_snapshot,
options.max_diagnostic_bytes,
);
- let request = match options.context {
- ContextMode::Llm(context_options) => {
+ let cloud_request = match options.context {
+ ContextMode::Agentic(context_options) => {
let Some(excerpt) = EditPredictionExcerpt::select_from_buffer(
cursor_point,
- &snapshot,
+ &active_snapshot,
&context_options.excerpt,
index_state.as_deref(),
) else {
return Ok((None, None));
};
- let excerpt_anchor_range = snapshot.anchor_after(excerpt.range.start)
- ..snapshot.anchor_before(excerpt.range.end);
+ let excerpt_anchor_range = active_snapshot.anchor_after(excerpt.range.start)
+ ..active_snapshot.anchor_before(excerpt.range.end);
- if let Some(buffer_ix) = included_files
- .iter()
- .position(|(buffer, _, _)| buffer.remote_id() == snapshot.remote_id())
+ if let Some(buffer_ix) =
+ included_files.iter().position(|(_, snapshot, _, _)| {
+ snapshot.remote_id() == active_snapshot.remote_id()
+ })
{
- let (buffer, _, ranges) = &mut included_files[buffer_ix];
+ let (_, buffer, _, ranges) = &mut included_files[buffer_ix];
let range_ix = ranges
.binary_search_by(|probe| {
probe
@@ -758,15 +768,16 @@ impl Zeta {
included_files.swap(buffer_ix, last_ix);
} else {
included_files.push((
- snapshot,
+ active_buffer.clone(),
+ active_snapshot,
excerpt_path.clone(),
vec![excerpt_anchor_range],
));
}
let included_files = included_files
- .into_iter()
- .map(|(buffer, path, ranges)| {
+ .iter()
+ .map(|(_, buffer, path, ranges)| {
let excerpts = merge_excerpts(
&buffer,
ranges.iter().map(|range| {
@@ -775,7 +786,7 @@ impl Zeta {
}),
);
predict_edits_v3::IncludedFile {
- path,
+ path: path.clone(),
max_row: Line(buffer.max_point().row),
excerpts,
}
@@ -809,7 +820,7 @@ impl Zeta {
ContextMode::Syntax(context_options) => {
let Some(context) = EditPredictionContext::gather_context(
cursor_point,
- &snapshot,
+ &active_snapshot,
parent_abs_path.as_deref(),
&context_options,
index_state.as_deref(),
@@ -834,24 +845,27 @@ impl Zeta {
}
};
+ let prompt_result = cloud_zeta2_prompt::build_prompt(&cloud_request);
+
let retrieval_time = chrono::Utc::now() - before_retrieval;
let debug_response_tx = if let Some(debug_tx) = &debug_tx {
let (response_tx, response_rx) = oneshot::channel();
- let local_prompt = build_prompt(&request)
- .map(|(prompt, _)| prompt)
- .map_err(|err| err.to_string());
-
debug_tx
- .unbounded_send(ZetaDebugInfo::EditPredicted(ZetaEditPredictionDebugInfo {
- request: request.clone(),
- retrieval_time,
- buffer: buffer.downgrade(),
- local_prompt,
- position,
- response_rx,
- }))
+ .unbounded_send(ZetaDebugInfo::EditPredictionRequested(
+ ZetaEditPredictionDebugInfo {
+ request: cloud_request.clone(),
+ retrieval_time,
+ buffer: active_buffer.downgrade(),
+ local_prompt: match prompt_result.as_ref() {
+ Ok((prompt, _)) => Ok(prompt.clone()),
+ Err(err) => Err(err.to_string()),
+ },
+ position,
+ response_rx,
+ },
+ ))
.ok();
Some(response_tx)
} else {
@@ -861,61 +875,114 @@ impl Zeta {
if cfg!(debug_assertions) && std::env::var("ZED_ZETA2_SKIP_REQUEST").is_ok() {
if let Some(debug_response_tx) = debug_response_tx {
debug_response_tx
- .send(Err("Request skipped".to_string()))
+ .send((Err("Request skipped".to_string()), TimeDelta::zero()))
.ok();
}
anyhow::bail!("Skipping request because ZED_ZETA2_SKIP_REQUEST is set")
}
+ let (prompt, _) = prompt_result?;
+ let request = open_ai::Request {
+ model: std::env::var("ZED_ZETA2_MODEL").unwrap_or("yqvev8r3".to_string()),
+ messages: vec![open_ai::RequestMessage::User {
+ content: open_ai::MessageContent::Plain(prompt),
+ }],
+ stream: false,
+ max_completion_tokens: None,
+ stop: Default::default(),
+ temperature: 0.7,
+ tool_choice: None,
+ parallel_tool_calls: None,
+ tools: vec![],
+ prompt_cache_key: None,
+ reasoning_effort: None,
+ };
+
+ log::trace!("Sending edit prediction request");
+
+ let before_request = chrono::Utc::now();
let response =
- Self::send_prediction_request(client, llm_token, app_version, request).await;
+ Self::send_raw_llm_request(client, llm_token, app_version, request).await;
+ let request_time = chrono::Utc::now() - before_request;
+
+ log::trace!("Got edit prediction response");
if let Some(debug_response_tx) = debug_response_tx {
debug_response_tx
- .send(
+ .send((
response
.as_ref()
.map_err(|err| err.to_string())
.map(|response| response.0.clone()),
- )
+ request_time,
+ ))
.ok();
}
- response.map(|(res, usage)| (Some(res), usage))
+ let (res, usage) = response?;
+ let request_id = EditPredictionId(Uuid::from_str(&res.id)?);
+ let Some(output_text) = text_from_response(res) else {
+ return Ok((None, usage))
+ };
+
+ let (edited_buffer_snapshot, edits) =
+ crate::udiff::parse_diff(&output_text, |path| {
+ included_files
+ .iter()
+ .find_map(|(_, buffer, probe_path, ranges)| {
+ if probe_path.as_ref() == path {
+ Some((buffer, ranges.as_slice()))
+ } else {
+ None
+ }
+ })
+ })
+ .await?;
+
+ let edited_buffer = included_files
+ .iter()
+ .find_map(|(buffer, snapshot, _, _)| {
+ if snapshot.remote_id() == edited_buffer_snapshot.remote_id() {
+ Some(buffer.clone())
+ } else {
+ None
+ }
+ })
+ .context("Failed to find buffer in included_buffers, even though we just found the snapshot")?;
+
+ anyhow::Ok((Some((request_id, edited_buffer, edited_buffer_snapshot.clone(), edits)), usage))
}
});
- let buffer = buffer.clone();
-
cx.spawn({
- let project = project.clone();
async move |this, cx| {
- let Some(response) = Self::handle_api_response(&this, request_task.await, cx)?
+ let Some((id, edited_buffer, edited_buffer_snapshot, edits)) =
+ Self::handle_api_response(&this, request_task.await, cx)?
else {
return Ok(None);
};
// TODO telemetry: duration, etc
- Ok(EditPrediction::from_response(response, &snapshot, &buffer, &project, cx).await)
+ Ok(
+ EditPrediction::new(id, &edited_buffer, &edited_buffer_snapshot, edits, cx)
+ .await,
+ )
}
})
}
- async fn send_prediction_request(
+ async fn send_raw_llm_request(
client: Arc<Client>,
llm_token: LlmApiToken,
app_version: SemanticVersion,
- request: predict_edits_v3::PredictEditsRequest,
- ) -> Result<(
- predict_edits_v3::PredictEditsResponse,
- Option<EditPredictionUsage>,
- )> {
+ request: open_ai::Request,
+ ) -> Result<(open_ai::Response, Option<EditPredictionUsage>)> {
let url = if let Ok(predict_edits_url) = std::env::var("ZED_PREDICT_EDITS_URL") {
http_client::Url::parse(&predict_edits_url)?
} else {
client
.http_client()
- .build_zed_llm_url("/predict_edits/v3", &[])?
+ .build_zed_llm_url("/predict_edits/raw", &[])?
};
Self::send_api_request(
@@ -1052,7 +1119,7 @@ impl Zeta {
cursor_position: language::Anchor,
cx: &mut Context<Self>,
) {
- if !matches!(&self.options().context, ContextMode::Llm { .. }) {
+ if !matches!(&self.options().context, ContextMode::Agentic { .. }) {
return;
}
@@ -1100,36 +1167,160 @@ impl Zeta {
cursor_position: language::Anchor,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
+ let Some(zeta_project) = self.projects.get(&project.entity_id()) else {
+ return Task::ready(anyhow::Ok(()));
+ };
+
+ let ContextMode::Agentic(options) = &self.options().context else {
+ return Task::ready(anyhow::Ok(()));
+ };
+
+ let snapshot = buffer.read(cx).snapshot();
+ let cursor_point = cursor_position.to_point(&snapshot);
+ let Some(cursor_excerpt) = EditPredictionExcerpt::select_from_buffer(
+ cursor_point,
+ &snapshot,
+ &options.excerpt,
+ None,
+ ) else {
+ return Task::ready(Ok(()));
+ };
+
+ let app_version = AppVersion::global(cx);
+ let client = self.client.clone();
+ let llm_token = self.llm_token.clone();
+ let debug_tx = self.debug_tx.clone();
+ let current_file_path: Arc<Path> = snapshot
+ .file()
+ .map(|f| f.full_path(cx).into())
+ .unwrap_or_else(|| Path::new("untitled").into());
+
+ let prompt = match cloud_zeta2_prompt::retrieval_prompt::build_prompt(
+ predict_edits_v3::PlanContextRetrievalRequest {
+ excerpt: cursor_excerpt.text(&snapshot).body,
+ excerpt_path: current_file_path,
+ excerpt_line_range: cursor_excerpt.line_range,
+ cursor_file_max_row: Line(snapshot.max_point().row),
+ events: zeta_project
+ .events
+ .iter()
+ .filter_map(|ev| ev.to_request_event(cx))
+ .collect(),
+ },
+ ) {
+ Ok(prompt) => prompt,
+ Err(err) => {
+ return Task::ready(Err(err));
+ }
+ };
+
+ if let Some(debug_tx) = &debug_tx {
+ debug_tx
+ .unbounded_send(ZetaDebugInfo::ContextRetrievalStarted(
+ ZetaContextRetrievalStartedDebugInfo {
+ project: project.clone(),
+ timestamp: Instant::now(),
+ search_prompt: prompt.clone(),
+ },
+ ))
+ .ok();
+ }
+
+ let (tool_schema, tool_description) = &*cloud_zeta2_prompt::retrieval_prompt::TOOL_SCHEMA;
+
+ let request = open_ai::Request {
+ model: std::env::var("ZED_ZETA2_MODEL").unwrap_or("2327jz9q".to_string()),
+ messages: vec![open_ai::RequestMessage::User {
+ content: open_ai::MessageContent::Plain(prompt),
+ }],
+ stream: false,
+ max_completion_tokens: None,
+ stop: Default::default(),
+ temperature: 0.7,
+ tool_choice: None,
+ parallel_tool_calls: None,
+ tools: vec![open_ai::ToolDefinition::Function {
+ function: FunctionDefinition {
+ name: cloud_zeta2_prompt::retrieval_prompt::TOOL_NAME.to_string(),
+ description: Some(tool_description.clone()),
+ parameters: Some(tool_schema.clone()),
+ },
+ }],
+ prompt_cache_key: None,
+ reasoning_effort: None,
+ };
+
cx.spawn(async move |this, cx| {
- let related_excerpts_result = this
- .update(cx, |this, cx| {
- let Some(zeta_project) = this.projects.get(&project.entity_id()) else {
- return Task::ready(anyhow::Ok(HashMap::default()));
- };
+ log::trace!("Sending search planning request");
+ let response =
+ Self::send_raw_llm_request(client, llm_token, app_version, request).await;
+ let mut response = Self::handle_api_response(&this, response, cx)?;
+
+ log::trace!("Got search planning response");
+
+ let choice = response
+ .choices
+ .pop()
+ .context("No choices in retrieval response")?;
+ let open_ai::RequestMessage::Assistant {
+ content: _,
+ tool_calls,
+ } = choice.message
+ else {
+ anyhow::bail!("Retrieval response didn't include an assistant message");
+ };
- let ContextMode::Llm(options) = &this.options().context else {
- return Task::ready(anyhow::Ok(HashMap::default()));
- };
+ let mut regex_by_glob: HashMap<String, String> = HashMap::default();
+ for tool_call in tool_calls {
+ let open_ai::ToolCallContent::Function { function } = tool_call.content;
+ if function.name != cloud_zeta2_prompt::retrieval_prompt::TOOL_NAME {
+ log::warn!(
+ "Context retrieval response tried to call an unknown tool: {}",
+ function.name
+ );
- let mut edit_history_unified_diff = String::new();
+ continue;
+ }
- for event in zeta_project.events.iter() {
- if let Some(event) = event.to_request_event(cx) {
- writeln!(&mut edit_history_unified_diff, "{event}").ok();
- }
+ let input: SearchToolInput = serde_json::from_str(&function.arguments)?;
+ for query in input.queries {
+ let regex = regex_by_glob.entry(query.glob).or_default();
+ if !regex.is_empty() {
+ regex.push('|');
}
+ regex.push_str(&query.regex);
+ }
+ }
- find_related_excerpts(
- buffer.clone(),
- cursor_position,
- &project,
- edit_history_unified_diff,
- options,
- this.debug_tx.clone(),
- cx,
- )
- })?
- .await;
+ if let Some(debug_tx) = &debug_tx {
+ debug_tx
+ .unbounded_send(ZetaDebugInfo::SearchQueriesGenerated(
+ ZetaSearchQueryDebugInfo {
+ project: project.clone(),
+ timestamp: Instant::now(),
+ regex_by_glob: regex_by_glob.clone(),
+ },
+ ))
+ .ok();
+ }
+
+ log::trace!("Running retrieval search: {regex_by_glob:#?}");
+
+ let related_excerpts_result =
+ retrieval_search::run_retrieval_searches(project.clone(), regex_by_glob, cx).await;
+
+ log::trace!("Search queries executed");
+
+ if let Some(debug_tx) = &debug_tx {
+ debug_tx
+ .unbounded_send(ZetaDebugInfo::SearchQueriesExecuted(
+ ZetaContextRetrievalDebugInfo {
+ project: project.clone(),
+ timestamp: Instant::now(),
+ },
+ ))
+ .ok();
+ }
this.update(cx, |this, _cx| {
let Some(zeta_project) = this.projects.get_mut(&project.entity_id()) else {
@@ -1249,7 +1440,7 @@ impl Zeta {
&snapshot,
parent_abs_path.as_deref(),
match &options.context {
- ContextMode::Llm(_) => {
+ ContextMode::Agentic(_) => {
// TODO
panic!("Llm mode not supported in zeta cli yet");
}
@@ -1293,6 +1484,38 @@ impl Zeta {
}
}
+pub fn text_from_response(mut res: open_ai::Response) -> Option<String> {
+ let choice = res.choices.pop()?;
+ let output_text = match choice.message {
+ open_ai::RequestMessage::Assistant {
+ content: Some(open_ai::MessageContent::Plain(content)),
+ ..
+ } => content,
+ open_ai::RequestMessage::Assistant {
+ content: Some(open_ai::MessageContent::Multipart(mut content)),
+ ..
+ } => {
+ if content.is_empty() {
+ log::error!("No output from Baseten completion response");
+ return None;
+ }
+
+ match content.remove(0) {
+ open_ai::MessagePart::Text { text } => text,
+ open_ai::MessagePart::Image { .. } => {
+ log::error!("Expected text, got an image");
+ return None;
+ }
+ }
+ }
+ _ => {
+ log::error!("Invalid response message: {:?}", choice.message);
+ return None;
+ }
+ };
+ Some(output_text)
+}
+
#[derive(Error, Debug)]
#[error(
"You must update to Zed version {minimum_version} or higher to continue using edit predictions."
@@ -1426,15 +1649,11 @@ fn add_signature(
#[cfg(test)]
mod tests {
- use std::{
- path::{Path, PathBuf},
- sync::Arc,
- };
+ use std::{path::Path, sync::Arc};
use client::UserStore;
use clock::FakeSystemClock;
- use cloud_llm_client::predict_edits_v3::{self, Point};
- use edit_prediction_context::Line;
+ use cloud_zeta2_prompt::retrieval_prompt::{SearchToolInput, SearchToolQuery};
use futures::{
AsyncReadExt, StreamExt,
channel::{mpsc, oneshot},
@@ -1445,7 +1664,8 @@ mod tests {
prelude::*,
};
use indoc::indoc;
- use language::{LanguageServerId, OffsetRangeExt as _};
+ use language::OffsetRangeExt as _;
+ use open_ai::Usage;
use pretty_assertions::{assert_eq, assert_matches};
use project::{FakeFs, Project};
use serde_json::json;
@@ -1462,8 +1682,8 @@ mod tests {
fs.insert_tree(
"/root",
json!({
- "1.txt": "Hello!\nHow\nBye",
- "2.txt": "Hola!\nComo\nAdios"
+ "1.txt": "Hello!\nHow\nBye\n",
+ "2.txt": "Hola!\nComo\nAdios\n"
}),
)
.await;
@@ -1489,16 +1709,17 @@ mod tests {
zeta.refresh_prediction(&project, &buffer1, position, cx)
});
let (_request, respond_tx) = req_rx.next().await.unwrap();
+
respond_tx
- .send(predict_edits_v3::PredictEditsResponse {
- request_id: Uuid::new_v4(),
- edits: vec![predict_edits_v3::Edit {
- path: Path::new(path!("root/1.txt")).into(),
- range: Line(0)..Line(snapshot1.max_point().row + 1),
- content: "Hello!\nHow are you?\nBye".into(),
- }],
- debug_info: None,
- })
+ .send(model_response(indoc! {r"
+ --- a/root/1.txt
+ +++ b/root/1.txt
+ @@ ... @@
+ Hello!
+ -How
+ +How are you?
+ Bye
+ "}))
.unwrap();
prediction_task.await.unwrap();
@@ -1509,21 +1730,67 @@ mod tests {
assert_matches!(prediction, BufferEditPrediction::Local { .. });
});
+ // Context refresh
+ let refresh_task = zeta.update(cx, |zeta, cx| {
+ zeta.refresh_context(project.clone(), buffer1.clone(), position, cx)
+ });
+ let (_request, respond_tx) = req_rx.next().await.unwrap();
+ respond_tx
+ .send(open_ai::Response {
+ id: Uuid::new_v4().to_string(),
+ object: "response".into(),
+ created: 0,
+ model: "model".into(),
+ choices: vec![open_ai::Choice {
+ index: 0,
+ message: open_ai::RequestMessage::Assistant {
+ content: None,
+ tool_calls: vec![open_ai::ToolCall {
+ id: "search".into(),
+ content: open_ai::ToolCallContent::Function {
+ function: open_ai::FunctionContent {
+ name: cloud_zeta2_prompt::retrieval_prompt::TOOL_NAME
+ .to_string(),
+ arguments: serde_json::to_string(&SearchToolInput {
+ queries: Box::new([SearchToolQuery {
+ glob: "root/2.txt".to_string(),
+ regex: ".".to_string(),
+ }]),
+ })
+ .unwrap(),
+ },
+ },
+ }],
+ },
+ finish_reason: None,
+ }],
+ usage: Usage {
+ prompt_tokens: 0,
+ completion_tokens: 0,
+ total_tokens: 0,
+ },
+ })
+ .unwrap();
+ refresh_task.await.unwrap();
+
+ zeta.update(cx, |zeta, _cx| {
+ zeta.discard_current_prediction(&project);
+ });
+
// Prediction for another file
let prediction_task = zeta.update(cx, |zeta, cx| {
zeta.refresh_prediction(&project, &buffer1, position, cx)
});
let (_request, respond_tx) = req_rx.next().await.unwrap();
respond_tx
- .send(predict_edits_v3::PredictEditsResponse {
- request_id: Uuid::new_v4(),
- edits: vec![predict_edits_v3::Edit {
- path: Path::new(path!("root/2.txt")).into(),
- range: Line(0)..Line(snapshot1.max_point().row + 1),
- content: "Hola!\nComo estas?\nAdios".into(),
- }],
- debug_info: None,
- })
+ .send(model_response(indoc! {r#"
+ --- a/root/2.txt
+ +++ b/root/2.txt
+ Hola!
+ -Como
+ +Como estas?
+ Adios
+ "#}))
.unwrap();
prediction_task.await.unwrap();
zeta.read_with(cx, |zeta, cx| {
@@ -1532,7 +1799,7 @@ mod tests {
.unwrap();
assert_matches!(
prediction,
- BufferEditPrediction::Jump { prediction } if prediction.path.as_ref() == Path::new(path!("root/2.txt"))
+ BufferEditPrediction::Jump { prediction } if prediction.snapshot.file().unwrap().full_path(cx) == Path::new(path!("root/2.txt"))
);
});
@@ -1559,7 +1826,7 @@ mod tests {
fs.insert_tree(
"/root",
json!({
- "foo.md": "Hello!\nHow\nBye"
+ "foo.md": "Hello!\nHow\nBye\n"
}),
)
.await;
@@ -1579,29 +1846,31 @@ mod tests {
zeta.request_prediction(&project, &buffer, position, cx)
});
- let (request, respond_tx) = req_rx.next().await.unwrap();
- assert_eq!(
- request.excerpt_path.as_ref(),
- Path::new(path!("root/foo.md"))
- );
- assert_eq!(
- request.cursor_point,
- Point {
- line: Line(1),
- column: 3
- }
- );
+ let (_, respond_tx) = req_rx.next().await.unwrap();
+
+ // TODO Put back when we have a structured request again
+ // assert_eq!(
+ // request.excerpt_path.as_ref(),
+ // Path::new(path!("root/foo.md"))
+ // );
+ // assert_eq!(
+ // request.cursor_point,
+ // Point {
+ // line: Line(1),
+ // column: 3
+ // }
+ // );
respond_tx
- .send(predict_edits_v3::PredictEditsResponse {
- request_id: Uuid::new_v4(),
- edits: vec![predict_edits_v3::Edit {
- path: Path::new(path!("root/foo.md")).into(),
- range: Line(0)..Line(snapshot.max_point().row + 1),
- content: "Hello!\nHow are you?\nBye".into(),
- }],
- debug_info: None,
- })
+ .send(model_response(indoc! { r"
+ --- a/root/foo.md
+ +++ b/root/foo.md
+ @@ ... @@
+ Hello!
+ -How
+ +How are you?
+ Bye
+ "}))
.unwrap();
let prediction = prediction_task.await.unwrap().unwrap();
@@ -1611,7 +1880,7 @@ mod tests {
prediction.edits[0].0.to_point(&snapshot).start,
language::Point::new(1, 3)
);
- assert_eq!(prediction.edits[0].1, " are you?");
+ assert_eq!(prediction.edits[0].1.as_ref(), " are you?");
}
#[gpui::test]
@@ -1621,7 +1890,7 @@ mod tests {
fs.insert_tree(
"/root",
json!({
- "foo.md": "Hello!\n\nBye"
+ "foo.md": "Hello!\n\nBye\n"
}),
)
.await;
@@ -1652,34 +1921,30 @@ mod tests {
let (request, respond_tx) = req_rx.next().await.unwrap();
- assert_eq!(request.events.len(), 1);
- assert_eq!(
- request.events[0],
- predict_edits_v3::Event::BufferChange {
- path: Some(PathBuf::from(path!("root/foo.md"))),
- old_path: None,
- diff: indoc! {"
- @@ -1,3 +1,3 @@
- Hello!
- -
- +How
- Bye
- "}
- .to_string(),
- predicted: false
- }
+ let prompt = prompt_from_request(&request);
+ assert!(
+ prompt.contains(indoc! {"
+ --- a/root/foo.md
+ +++ b/root/foo.md
+ @@ -1,3 +1,3 @@
+ Hello!
+ -
+ +How
+ Bye
+ "}),
+ "{prompt}"
);
respond_tx
- .send(predict_edits_v3::PredictEditsResponse {
- request_id: Uuid::new_v4(),
- edits: vec![predict_edits_v3::Edit {
- path: Path::new(path!("root/foo.md")).into(),
- range: Line(0)..Line(snapshot.max_point().row + 1),
- content: "Hello!\nHow are you?\nBye".into(),
- }],
- debug_info: None,
- })
+ .send(model_response(indoc! {r#"
+ --- a/root/foo.md
+ +++ b/root/foo.md
+ @@ ... @@
+ Hello!
+ -How
+ +How are you?
+ Bye
+ "#}))
.unwrap();
let prediction = prediction_task.await.unwrap().unwrap();
@@ -1689,114 +1954,148 @@ mod tests {
prediction.edits[0].0.to_point(&snapshot).start,
language::Point::new(1, 3)
);
- assert_eq!(prediction.edits[0].1, " are you?");
+ assert_eq!(prediction.edits[0].1.as_ref(), " are you?");
}
- #[gpui::test]
- async fn test_request_diagnostics(cx: &mut TestAppContext) {
- let (zeta, mut req_rx) = init_test(cx);
- let fs = FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "foo.md": "Hello!\nBye"
- }),
- )
- .await;
- let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;
+ // Skipped until we start including diagnostics in prompt
+ // #[gpui::test]
+ // async fn test_request_diagnostics(cx: &mut TestAppContext) {
+ // let (zeta, mut req_rx) = init_test(cx);
+ // let fs = FakeFs::new(cx.executor());
+ // fs.insert_tree(
+ // "/root",
+ // json!({
+ // "foo.md": "Hello!\nBye"
+ // }),
+ // )
+ // .await;
+ // let project = Project::test(fs, vec![path!("/root").as_ref()], cx).await;
+
+ // let path_to_buffer_uri = lsp::Uri::from_file_path(path!("/root/foo.md")).unwrap();
+ // let diagnostic = lsp::Diagnostic {
+ // range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
+ // severity: Some(lsp::DiagnosticSeverity::ERROR),
+ // message: "\"Hello\" deprecated. Use \"Hi\" instead".to_string(),
+ // ..Default::default()
+ // };
+
+ // project.update(cx, |project, cx| {
+ // project.lsp_store().update(cx, |lsp_store, cx| {
+ // // Create some diagnostics
+ // lsp_store
+ // .update_diagnostics(
+ // LanguageServerId(0),
+ // lsp::PublishDiagnosticsParams {
+ // uri: path_to_buffer_uri.clone(),
+ // diagnostics: vec![diagnostic],
+ // version: None,
+ // },
+ // None,
+ // language::DiagnosticSourceKind::Pushed,
+ // &[],
+ // cx,
+ // )
+ // .unwrap();
+ // });
+ // });
+
+ // let buffer = project
+ // .update(cx, |project, cx| {
+ // let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
+ // project.open_buffer(path, cx)
+ // })
+ // .await
+ // .unwrap();
+
+ // let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+ // let position = snapshot.anchor_before(language::Point::new(0, 0));
+
+ // let _prediction_task = zeta.update(cx, |zeta, cx| {
+ // zeta.request_prediction(&project, &buffer, position, cx)
+ // });
+
+ // let (request, _respond_tx) = req_rx.next().await.unwrap();
+
+ // assert_eq!(request.diagnostic_groups.len(), 1);
+ // let value = serde_json::from_str::<serde_json::Value>(request.diagnostic_groups[0].0.get())
+ // .unwrap();
+ // // We probably don't need all of this. TODO define a specific diagnostic type in predict_edits_v3
+ // assert_eq!(
+ // value,
+ // json!({
+ // "entries": [{
+ // "range": {
+ // "start": 8,
+ // "end": 10
+ // },
+ // "diagnostic": {
+ // "source": null,
+ // "code": null,
+ // "code_description": null,
+ // "severity": 1,
+ // "message": "\"Hello\" deprecated. Use \"Hi\" instead",
+ // "markdown": null,
+ // "group_id": 0,
+ // "is_primary": true,
+ // "is_disk_based": false,
+ // "is_unnecessary": false,
+ // "source_kind": "Pushed",
+ // "data": null,
+ // "underline": true
+ // }
+ // }],
+ // "primary_ix": 0
+ // })
+ // );
+ // }
+
+ fn model_response(text: &str) -> open_ai::Response {
+ open_ai::Response {
+ id: Uuid::new_v4().to_string(),
+ object: "response".into(),
+ created: 0,
+ model: "model".into(),
+ choices: vec![open_ai::Choice {
+ index: 0,
+ message: open_ai::RequestMessage::Assistant {
+ content: Some(open_ai::MessageContent::Plain(text.to_string())),
+ tool_calls: vec![],
+ },
+ finish_reason: None,
+ }],
+ usage: Usage {
+ prompt_tokens: 0,
+ completion_tokens: 0,
+ total_tokens: 0,
+ },
+ }
+ }
- let path_to_buffer_uri = lsp::Uri::from_file_path(path!("/root/foo.md")).unwrap();
- let diagnostic = lsp::Diagnostic {
- range: lsp::Range::new(lsp::Position::new(1, 1), lsp::Position::new(1, 5)),
- severity: Some(lsp::DiagnosticSeverity::ERROR),
- message: "\"Hello\" deprecated. Use \"Hi\" instead".to_string(),
- ..Default::default()
+ fn prompt_from_request(request: &open_ai::Request) -> &str {
+ assert_eq!(request.messages.len(), 1);
+ let open_ai::RequestMessage::User {
+ content: open_ai::MessageContent::Plain(content),
+ ..
+ } = &request.messages[0]
+ else {
+ panic!(
+ "Request does not have single user message of type Plain. {:#?}",
+ request
+ );
};
-
- project.update(cx, |project, cx| {
- project.lsp_store().update(cx, |lsp_store, cx| {
- // Create some diagnostics
- lsp_store
- .update_diagnostics(
- LanguageServerId(0),
- lsp::PublishDiagnosticsParams {
- uri: path_to_buffer_uri.clone(),
- diagnostics: vec![diagnostic],
- version: None,
- },
- None,
- language::DiagnosticSourceKind::Pushed,
- &[],
- cx,
- )
- .unwrap();
- });
- });
-
- let buffer = project
- .update(cx, |project, cx| {
- let path = project.find_project_path(path!("root/foo.md"), cx).unwrap();
- project.open_buffer(path, cx)
- })
- .await
- .unwrap();
-
- let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
- let position = snapshot.anchor_before(language::Point::new(0, 0));
-
- let _prediction_task = zeta.update(cx, |zeta, cx| {
- zeta.request_prediction(&project, &buffer, position, cx)
- });
-
- let (request, _respond_tx) = req_rx.next().await.unwrap();
-
- assert_eq!(request.diagnostic_groups.len(), 1);
- let value = serde_json::from_str::<serde_json::Value>(request.diagnostic_groups[0].0.get())
- .unwrap();
- // We probably don't need all of this. TODO define a specific diagnostic type in predict_edits_v3
- assert_eq!(
- value,
- json!({
- "entries": [{
- "range": {
- "start": 8,
- "end": 10
- },
- "diagnostic": {
- "source": null,
- "code": null,
- "code_description": null,
- "severity": 1,
- "message": "\"Hello\" deprecated. Use \"Hi\" instead",
- "markdown": null,
- "group_id": 0,
- "is_primary": true,
- "is_disk_based": false,
- "is_unnecessary": false,
- "source_kind": "Pushed",
- "data": null,
- "underline": true
- }
- }],
- "primary_ix": 0
- })
- );
+ content
}
fn init_test(
cx: &mut TestAppContext,
) -> (
Entity<Zeta>,
- mpsc::UnboundedReceiver<(
- predict_edits_v3::PredictEditsRequest,
- oneshot::Sender<predict_edits_v3::PredictEditsResponse>,
- )>,
+ mpsc::UnboundedReceiver<(open_ai::Request, oneshot::Sender<open_ai::Response>)>,
) {
cx.update(move |cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
- language::init(cx);
- Project::init_settings(cx);
+ zlog::init_test();
let (req_tx, req_rx) = mpsc::unbounded();
@@ -27,10 +27,11 @@ log.workspace = true
multi_buffer.workspace = true
ordered-float.workspace = true
project.workspace = true
+regex-syntax = "0.8.8"
serde.workspace = true
+serde_json.workspace = true
telemetry.workspace = true
text.workspace = true
-regex-syntax = "0.8.8"
ui.workspace = true
ui_input.workspace = true
util.workspace = true
@@ -45,7 +45,6 @@ struct RetrievalRun {
started_at: Instant,
search_results_generated_at: Option<Instant>,
search_results_executed_at: Option<Instant>,
- search_results_filtered_at: Option<Instant>,
finished_at: Option<Instant>,
}
@@ -117,17 +116,12 @@ impl Zeta2ContextView {
self.handle_search_queries_executed(info, window, cx);
}
}
- ZetaDebugInfo::SearchResultsFiltered(info) => {
- if info.project == self.project {
- self.handle_search_results_filtered(info, window, cx);
- }
- }
ZetaDebugInfo::ContextRetrievalFinished(info) => {
if info.project == self.project {
self.handle_context_retrieval_finished(info, window, cx);
}
}
- ZetaDebugInfo::EditPredicted(_) => {}
+ ZetaDebugInfo::EditPredictionRequested(_) => {}
}
}
@@ -159,7 +153,6 @@ impl Zeta2ContextView {
started_at: info.timestamp,
search_results_generated_at: None,
search_results_executed_at: None,
- search_results_filtered_at: None,
finished_at: None,
});
@@ -218,18 +211,18 @@ impl Zeta2ContextView {
run.search_results_generated_at = Some(info.timestamp);
run.search_queries = info
- .queries
+ .regex_by_glob
.into_iter()
- .map(|query| {
+ .map(|(glob, regex)| {
let mut regex_parser = regex_syntax::ast::parse::Parser::new();
GlobQueries {
- glob: query.glob,
- alternations: match regex_parser.parse(&query.regex) {
+ glob,
+ alternations: match regex_parser.parse(&regex) {
Ok(regex_syntax::ast::Ast::Alternation(ref alt)) => {
alt.asts.iter().map(|ast| ast.to_string()).collect()
}
- _ => vec![query.regex],
+ _ => vec![regex],
},
}
})
@@ -256,20 +249,6 @@ impl Zeta2ContextView {
cx.notify();
}
- fn handle_search_results_filtered(
- &mut self,
- info: ZetaContextRetrievalDebugInfo,
- _window: &mut Window,
- cx: &mut Context<Self>,
- ) {
- let Some(run) = self.runs.back_mut() else {
- return;
- };
-
- run.search_results_filtered_at = Some(info.timestamp);
- cx.notify();
- }
-
fn handle_go_back(
&mut self,
_: &Zeta2ContextGoBack,
@@ -398,19 +377,10 @@ impl Zeta2ContextView {
};
div = div.child(format!("Ran search: {:>5} ms", (t2 - t1).as_millis()));
- let Some(t3) = run.search_results_filtered_at else {
- return pending_message(div, "Filtering results...");
- };
- div =
- div.child(format!("Filtered results: {:>5} ms", (t3 - t2).as_millis()));
-
- let Some(t4) = run.finished_at else {
- return pending_message(div, "Building excerpts");
- };
- div = div
- .child(format!("Build excerpts: {:>5} µs", (t4 - t3).as_micros()))
- .child(format!("Total: {:>5} ms", (t4 - t0).as_millis()));
- div
+ div.child(format!(
+ "Total: {:>5} ms",
+ (run.finished_at.unwrap_or(t0) - t0).as_millis()
+ ))
}),
)
}
@@ -5,7 +5,7 @@ use std::{cmp::Reverse, path::PathBuf, str::FromStr, sync::Arc, time::Duration};
use chrono::TimeDelta;
use client::{Client, UserStore};
use cloud_llm_client::predict_edits_v3::{
- self, DeclarationScoreComponents, PredictEditsRequest, PredictEditsResponse, PromptFormat,
+ DeclarationScoreComponents, PredictEditsRequest, PromptFormat,
};
use collections::HashMap;
use editor::{Editor, EditorEvent, EditorMode, ExcerptRange, MultiBuffer};
@@ -23,7 +23,7 @@ use ui_input::InputField;
use util::{ResultExt, paths::PathStyle, rel_path::RelPath};
use workspace::{Item, SplitDirection, Workspace};
use zeta2::{
- ContextMode, DEFAULT_SYNTAX_CONTEXT_OPTIONS, LlmContextOptions, Zeta, Zeta2FeatureFlag,
+ AgenticContextOptions, ContextMode, DEFAULT_SYNTAX_CONTEXT_OPTIONS, Zeta, Zeta2FeatureFlag,
ZetaDebugInfo, ZetaEditPredictionDebugInfo, ZetaOptions,
};
@@ -123,6 +123,7 @@ struct LastPrediction {
context_editor: Entity<Editor>,
prompt_editor: Entity<Editor>,
retrieval_time: TimeDelta,
+ request_time: Option<TimeDelta>,
buffer: WeakEntity<Buffer>,
position: language::Anchor,
state: LastPredictionState,
@@ -143,7 +144,7 @@ enum LastPredictionState {
model_response_editor: Entity<Editor>,
feedback_editor: Entity<Editor>,
feedback: Option<Feedback>,
- response: predict_edits_v3::PredictEditsResponse,
+ request_id: String,
},
Failed {
message: String,
@@ -217,7 +218,7 @@ impl Zeta2Inspector {
});
match &options.context {
- ContextMode::Llm(_) => {
+ ContextMode::Agentic(_) => {
self.context_mode = ContextModeState::Llm;
}
ContextMode::Syntax(_) => {
@@ -307,9 +308,11 @@ impl Zeta2Inspector {
};
let context = match zeta_options.context {
- ContextMode::Llm(_context_options) => ContextMode::Llm(LlmContextOptions {
- excerpt: excerpt_options,
- }),
+ ContextMode::Agentic(_context_options) => {
+ ContextMode::Agentic(AgenticContextOptions {
+ excerpt: excerpt_options,
+ })
+ }
ContextMode::Syntax(context_options) => {
let max_retrieved_declarations = match &this.context_mode {
ContextModeState::Llm => {
@@ -368,7 +371,7 @@ impl Zeta2Inspector {
let language_registry = self.project.read(cx).languages().clone();
async move |this, cx| {
let mut languages = HashMap::default();
- let ZetaDebugInfo::EditPredicted(prediction) = prediction else {
+ let ZetaDebugInfo::EditPredictionRequested(prediction) = prediction else {
return;
};
for ext in prediction
@@ -396,6 +399,8 @@ impl Zeta2Inspector {
.await
.log_err();
+ let json_language = language_registry.language_for_name("Json").await.log_err();
+
this.update_in(cx, |this, window, cx| {
let context_editor = cx.new(|cx| {
let mut excerpt_score_components = HashMap::default();
@@ -492,25 +497,15 @@ impl Zeta2Inspector {
let task = cx.spawn_in(window, {
let markdown_language = markdown_language.clone();
+ let json_language = json_language.clone();
async move |this, cx| {
let response = response_rx.await;
this.update_in(cx, |this, window, cx| {
if let Some(prediction) = this.last_prediction.as_mut() {
prediction.state = match response {
- Ok(Ok(response)) => {
- if let Some(debug_info) = &response.debug_info {
- prediction.prompt_editor.update(
- cx,
- |prompt_editor, cx| {
- prompt_editor.set_text(
- debug_info.prompt.as_str(),
- window,
- cx,
- );
- },
- );
- }
+ Ok((Ok(response), request_time)) => {
+ prediction.request_time = Some(request_time);
let feedback_editor = cx.new(|cx| {
let buffer = cx.new(|cx| {
@@ -577,16 +572,11 @@ impl Zeta2Inspector {
model_response_editor: cx.new(|cx| {
let buffer = cx.new(|cx| {
let mut buffer = Buffer::local(
- response
- .debug_info
- .as_ref()
- .map(|p| p.model_response.as_str())
- .unwrap_or(
- "(Debug info not available)",
- ),
+ serde_json::to_string_pretty(&response)
+ .unwrap_or_default(),
cx,
);
- buffer.set_language(markdown_language, cx);
+ buffer.set_language(json_language, cx);
buffer
});
let buffer = cx.new(|cx| {
@@ -607,10 +597,11 @@ impl Zeta2Inspector {
}),
feedback_editor,
feedback: None,
- response,
+ request_id: response.id.clone(),
}
}
- Ok(Err(err)) => {
+ Ok((Err(err), request_time)) => {
+ prediction.request_time = Some(request_time);
LastPredictionState::Failed { message: err }
}
Err(oneshot::Canceled) => LastPredictionState::Failed {
@@ -644,6 +635,7 @@ impl Zeta2Inspector {
editor
}),
retrieval_time,
+ request_time: None,
buffer,
position,
state: LastPredictionState::Requested,
@@ -700,7 +692,7 @@ impl Zeta2Inspector {
feedback: feedback_state,
feedback_editor,
model_response_editor,
- response,
+ request_id,
..
} = &mut last_prediction.state
else {
@@ -734,11 +726,10 @@ impl Zeta2Inspector {
telemetry::event!(
"Zeta2 Prediction Rated",
- id = response.request_id,
+ id = request_id,
kind = kind,
text = text,
request = last_prediction.request,
- response = response,
project_snapshot = project_snapshot,
);
})
@@ -834,11 +825,11 @@ impl Zeta2Inspector {
let current_options =
this.zeta.read(cx).options().clone();
match current_options.context.clone() {
- ContextMode::Llm(_) => {}
+ ContextMode::Agentic(_) => {}
ContextMode::Syntax(context_options) => {
let options = ZetaOptions {
- context: ContextMode::Llm(
- LlmContextOptions {
+ context: ContextMode::Agentic(
+ AgenticContextOptions {
excerpt: context_options.excerpt,
},
),
@@ -865,7 +856,7 @@ impl Zeta2Inspector {
let current_options =
this.zeta.read(cx).options().clone();
match current_options.context.clone() {
- ContextMode::Llm(context_options) => {
+ ContextMode::Agentic(context_options) => {
let options = ZetaOptions {
context: ContextMode::Syntax(
EditPredictionContextOptions {
@@ -976,25 +967,6 @@ impl Zeta2Inspector {
return None;
};
- let (prompt_planning_time, inference_time, parsing_time) =
- if let LastPredictionState::Success {
- response:
- PredictEditsResponse {
- debug_info: Some(debug_info),
- ..
- },
- ..
- } = &prediction.state
- {
- (
- Some(debug_info.prompt_planning_time),
- Some(debug_info.inference_time),
- Some(debug_info.parsing_time),
- )
- } else {
- (None, None, None)
- };
-
Some(
v_flex()
.p_4()
@@ -1005,12 +977,7 @@ impl Zeta2Inspector {
"Context retrieval",
Some(prediction.retrieval_time),
))
- .child(Self::render_duration(
- "Prompt planning",
- prompt_planning_time,
- ))
- .child(Self::render_duration("Inference", inference_time))
- .child(Self::render_duration("Parsing", parsing_time)),
+ .child(Self::render_duration("Request", prediction.request_time)),
)
}
@@ -7,13 +7,14 @@ use std::{
use anyhow::Result;
use clap::Args;
-use cloud_llm_client::udiff::DiffLine;
use collections::HashSet;
use gpui::AsyncApp;
+use zeta2::udiff::DiffLine;
use crate::{
example::{Example, NamedExample},
headless::ZetaCliAppState,
+ paths::CACHE_DIR,
predict::{PredictionDetails, zeta2_predict},
};
@@ -54,10 +55,8 @@ pub async fn run_evaluate_one(
app_state: Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
) -> Result<EvaluationResult> {
- let cache_dir = Path::new(&std::env::var("CARGO_MANIFEST_DIR").unwrap_or_default())
- .join("../../target/zeta-prediction-cache");
let example = NamedExample::load(&example_path).unwrap();
- let example_cache_path = cache_dir.join(&example_path.file_name().unwrap());
+ let example_cache_path = CACHE_DIR.join(&example_path.file_name().unwrap());
let predictions = if !re_run && example_cache_path.exists() {
let file_contents = fs::read_to_string(&example_cache_path)?;
@@ -74,7 +73,7 @@ pub async fn run_evaluate_one(
};
if !example_cache_path.exists() {
- fs::create_dir_all(&cache_dir).unwrap();
+ fs::create_dir_all(&*CACHE_DIR).unwrap();
fs::write(
example_cache_path,
serde_json::to_string(&predictions).unwrap(),
@@ -84,11 +83,6 @@ pub async fn run_evaluate_one(
let evaluation_result = evaluate(&example.example, &predictions);
- println!("# {}\n", example.name);
- println!(
- "## Expected Context: \n\n```\n{}\n```\n\n",
- compare_context(&example.example, &predictions)
- );
println!(
"## Expected edit prediction:\n\n```diff\n{}\n```\n",
compare_diffs(&example.example.expected_patch, &predictions.diff)
@@ -105,21 +99,30 @@ pub async fn run_evaluate_one(
#[derive(Debug, Default)]
pub struct EvaluationResult {
- pub context: Scores,
pub edit_prediction: Scores,
+ pub context: Scores,
}
#[derive(Default, Debug)]
pub struct Scores {
- pub precision: f64,
- pub recall: f64,
- pub f1_score: f64,
pub true_positives: usize,
pub false_positives: usize,
pub false_negatives: usize,
}
impl Scores {
+ pub fn new(expected: &HashSet<String>, actual: &HashSet<String>) -> Scores {
+ let true_positives = expected.intersection(actual).count();
+ let false_positives = actual.difference(expected).count();
+ let false_negatives = expected.difference(actual).count();
+
+ Scores {
+ true_positives,
+ false_positives,
+ false_negatives,
+ }
+ }
+
pub fn to_markdown(&self) -> String {
format!(
"
@@ -129,17 +132,15 @@ F1 Score : {:.4}
True Positives : {}
False Positives : {}
False Negatives : {}",
- self.precision,
- self.recall,
- self.f1_score,
+ self.precision(),
+ self.recall(),
+ self.f1_score(),
self.true_positives,
self.false_positives,
self.false_negatives
)
}
-}
-impl Scores {
pub fn aggregate<'a>(scores: impl Iterator<Item = &'a Scores>) -> Scores {
let mut true_positives = 0;
let mut false_positives = 0;
@@ -151,22 +152,38 @@ impl Scores {
false_negatives += score.false_negatives;
}
- let precision = true_positives as f64 / (true_positives + false_positives) as f64;
- let recall = true_positives as f64 / (true_positives + false_negatives) as f64;
- let mut f1_score = 2.0 * precision * recall / (precision + recall);
- if f1_score.is_nan() {
- f1_score = 0.0;
- }
-
Scores {
- precision,
- recall,
- f1_score,
true_positives,
false_positives,
false_negatives,
}
}
+
+ pub fn precision(&self) -> f64 {
+ if self.true_positives + self.false_positives == 0 {
+ 0.0
+ } else {
+ self.true_positives as f64 / (self.true_positives + self.false_positives) as f64
+ }
+ }
+
+ pub fn recall(&self) -> f64 {
+ if self.true_positives + self.false_negatives == 0 {
+ 0.0
+ } else {
+ self.true_positives as f64 / (self.true_positives + self.false_negatives) as f64
+ }
+ }
+
+ pub fn f1_score(&self) -> f64 {
+ let recall = self.recall();
+ let precision = self.precision();
+ if precision + recall == 0.0 {
+ 0.0
+ } else {
+ 2.0 * precision * recall / (precision + recall)
+ }
+ }
}
impl EvaluationResult {
@@ -186,19 +203,9 @@ impl EvaluationResult {
}
pub fn evaluate(example: &Example, preds: &PredictionDetails) -> EvaluationResult {
- let mut result = EvaluationResult::default();
+ let mut eval_result = EvaluationResult::default();
- let expected_context_lines = example
- .expected_excerpts
- .iter()
- .flat_map(|excerpt| {
- excerpt
- .text
- .lines()
- .map(|line| format!("{}: {line}", excerpt.path.display()))
- })
- .collect();
- let actual_context_lines = preds
+ let actual_context_lines: HashSet<_> = preds
.excerpts
.iter()
.flat_map(|excerpt| {
@@ -209,8 +216,39 @@ pub fn evaluate(example: &Example, preds: &PredictionDetails) -> EvaluationResul
})
.collect();
- result.context = precision_recall(&expected_context_lines, &actual_context_lines);
+ let mut false_positive_lines = actual_context_lines.clone();
+
+ for entry in &example.expected_context {
+ let mut best_alternative_score = Scores::default();
+
+ for alternative in &entry.alternatives {
+ let expected: HashSet<_> = alternative
+ .excerpts
+ .iter()
+ .flat_map(|excerpt| {
+ excerpt
+ .text
+ .lines()
+ .map(|line| format!("{}: {line}", excerpt.path.display()))
+ })
+ .collect();
+
+ let scores = Scores::new(&expected, &actual_context_lines);
+ false_positive_lines.retain(|line| !actual_context_lines.contains(line));
+
+ if scores.recall() > best_alternative_score.recall() {
+ best_alternative_score = scores;
+ }
+ }
+
+ eval_result.context.false_negatives += best_alternative_score.false_negatives;
+ eval_result.context.true_positives += best_alternative_score.true_positives;
+ }
+
+ eval_result.context.false_positives = false_positive_lines.len();
+
+ // todo: alternatives for patches
let expected_patch_lines = example
.expected_patch
.lines()
@@ -227,86 +265,8 @@ pub fn evaluate(example: &Example, preds: &PredictionDetails) -> EvaluationResul
.map(|line| line.to_string())
.collect();
- result.edit_prediction = precision_recall(&expected_patch_lines, &actual_patch_lines);
-
- result
-}
-
-fn precision_recall(expected: &HashSet<String>, actual: &HashSet<String>) -> Scores {
- let true_positives = expected.intersection(actual).count();
- let false_positives = actual.difference(expected).count();
- let false_negatives = expected.difference(actual).count();
-
- let precision = if true_positives + false_positives == 0 {
- 0.0
- } else {
- true_positives as f64 / (true_positives + false_positives) as f64
- };
- let recall = if true_positives + false_negatives == 0 {
- 0.0
- } else {
- true_positives as f64 / (true_positives + false_negatives) as f64
- };
- let f1_score = if precision + recall == 0.0 {
- 0.0
- } else {
- 2.0 * precision * recall / (precision + recall)
- };
-
- Scores {
- precision,
- recall,
- f1_score,
- true_positives,
- false_positives,
- false_negatives,
- }
-}
-
-/// Compare actual and expected context.
-///
-/// Return expected context annotated with these markers:
-///
-/// `✓ context line` -- line was correctly predicted
-/// `✗ context line` -- line is missing from predictions
-pub fn compare_context(example: &Example, preds: &PredictionDetails) -> String {
- let use_color = std::io::stdout().is_terminal();
- let green = if use_color { "\x1b[32m" } else { "" };
- let red = if use_color { "\x1b[31m" } else { "" };
- let reset = if use_color { "\x1b[0m" } else { "" };
- let expected: Vec<_> = example
- .expected_excerpts
- .iter()
- .flat_map(|excerpt| {
- excerpt
- .text
- .lines()
- .map(|line| (excerpt.path.clone(), line))
- })
- .collect();
- let actual: HashSet<_> = preds
- .excerpts
- .iter()
- .flat_map(|excerpt| {
- excerpt
- .text
- .lines()
- .map(|line| (excerpt.path.clone(), line))
- })
- .collect();
-
- let annotated = expected
- .iter()
- .map(|(path, line)| {
- if actual.contains(&(path.to_path_buf(), line)) {
- format!("{green}✓ {line}{reset}")
- } else {
- format!("{red}✗ {line}{reset}")
- }
- })
- .collect::<Vec<String>>();
-
- annotated.join("\n")
+ eval_result.edit_prediction = Scores::new(&expected_patch_lines, &actual_patch_lines);
+ eval_result
}
/// Return annotated `patch_a` so that:
@@ -1,34 +1,38 @@
use std::{
borrow::Cow,
cell::RefCell,
- env,
fmt::{self, Display},
fs,
io::Write,
mem,
- ops::Range,
path::{Path, PathBuf},
sync::Arc,
};
-use anyhow::{Context as _, Result};
+use anyhow::{Context as _, Result, anyhow};
use clap::ValueEnum;
-use collections::{HashMap, HashSet};
+use cloud_zeta2_prompt::CURSOR_MARKER;
+use collections::HashMap;
+use edit_prediction_context::Line;
use futures::{
AsyncWriteExt as _,
lock::{Mutex, OwnedMutexGuard},
};
use gpui::{AsyncApp, Entity, http_client::Url};
-use language::Buffer;
+use language::{Anchor, Buffer};
use project::{Project, ProjectPath};
use pulldown_cmark::CowStr;
use serde::{Deserialize, Serialize};
+use util::{paths::PathStyle, rel_path::RelPath};
+use zeta2::udiff::OpenedBuffers;
+
+use crate::paths::{REPOS_DIR, WORKTREES_DIR};
const UNCOMMITTED_DIFF_HEADING: &str = "Uncommitted Diff";
const EDIT_HISTORY_HEADING: &str = "Edit History";
const CURSOR_POSITION_HEADING: &str = "Cursor Position";
const EXPECTED_PATCH_HEADING: &str = "Expected Patch";
-const EXPECTED_EXCERPTS_HEADING: &str = "Expected Excerpts";
+const EXPECTED_CONTEXT_HEADING: &str = "Expected Context";
const REPOSITORY_URL_FIELD: &str = "repository_url";
const REVISION_FIELD: &str = "revision";
@@ -47,10 +51,9 @@ pub struct Example {
pub cursor_position: String,
pub edit_history: String,
pub expected_patch: String,
- pub expected_excerpts: Vec<ExpectedExcerpt>,
+ pub expected_context: Vec<ExpectedContextEntry>,
}
-pub type ExpectedExcerpt = Excerpt;
pub type ActualExcerpt = Excerpt;
#[derive(Clone, Debug, Serialize, Deserialize)]
@@ -59,6 +62,25 @@ pub struct Excerpt {
pub text: String,
}
+#[derive(Default, Clone, Debug, Serialize, Deserialize)]
+pub struct ExpectedContextEntry {
+ pub heading: String,
+ pub alternatives: Vec<ExpectedExcerptSet>,
+}
+
+#[derive(Default, Clone, Debug, Serialize, Deserialize)]
+pub struct ExpectedExcerptSet {
+ pub heading: String,
+ pub excerpts: Vec<ExpectedExcerpt>,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct ExpectedExcerpt {
+ pub path: PathBuf,
+ pub text: String,
+ pub required_lines: Vec<Line>,
+}
+
#[derive(ValueEnum, Debug, Clone)]
pub enum ExampleFormat {
Json,
@@ -108,21 +130,32 @@ impl NamedExample {
cursor_position: String::new(),
edit_history: String::new(),
expected_patch: String::new(),
- expected_excerpts: Vec::new(),
+ expected_context: Vec::new(),
},
};
let mut text = String::new();
- let mut current_section = String::new();
let mut block_info: CowStr = "".into();
+ #[derive(PartialEq)]
+ enum Section {
+ UncommittedDiff,
+ EditHistory,
+ CursorPosition,
+ ExpectedExcerpts,
+ ExpectedPatch,
+ Other,
+ }
+
+ let mut current_section = Section::Other;
+
for event in parser {
match event {
Event::Text(line) => {
text.push_str(&line);
if !named.name.is_empty()
- && current_section.is_empty()
+ && current_section == Section::Other
// in h1 section
&& let Some((field, value)) = line.split_once('=')
{
@@ -133,9 +166,7 @@ impl NamedExample {
REVISION_FIELD => {
named.example.revision = value.trim().to_string();
}
- _ => {
- eprintln!("Warning: Unrecognized field `{field}`");
- }
+ _ => {}
}
}
}
@@ -148,7 +179,46 @@ impl NamedExample {
named.name = mem::take(&mut text);
}
Event::End(TagEnd::Heading(HeadingLevel::H2)) => {
- current_section = mem::take(&mut text);
+ let title = mem::take(&mut text);
+ current_section = if title.eq_ignore_ascii_case(UNCOMMITTED_DIFF_HEADING) {
+ Section::UncommittedDiff
+ } else if title.eq_ignore_ascii_case(EDIT_HISTORY_HEADING) {
+ Section::EditHistory
+ } else if title.eq_ignore_ascii_case(CURSOR_POSITION_HEADING) {
+ Section::CursorPosition
+ } else if title.eq_ignore_ascii_case(EXPECTED_PATCH_HEADING) {
+ Section::ExpectedPatch
+ } else if title.eq_ignore_ascii_case(EXPECTED_CONTEXT_HEADING) {
+ Section::ExpectedExcerpts
+ } else {
+ Section::Other
+ };
+ }
+ Event::End(TagEnd::Heading(HeadingLevel::H3)) => {
+ let heading = mem::take(&mut text);
+ match current_section {
+ Section::ExpectedExcerpts => {
+ named.example.expected_context.push(ExpectedContextEntry {
+ heading,
+ alternatives: Vec::new(),
+ });
+ }
+ _ => {}
+ }
+ }
+ Event::End(TagEnd::Heading(HeadingLevel::H4)) => {
+ let heading = mem::take(&mut text);
+ match current_section {
+ Section::ExpectedExcerpts => {
+ let expected_context = &mut named.example.expected_context;
+ let last_entry = expected_context.last_mut().unwrap();
+ last_entry.alternatives.push(ExpectedExcerptSet {
+ heading,
+ excerpts: Vec::new(),
+ })
+ }
+ _ => {}
+ }
}
Event::End(TagEnd::Heading(level)) => {
anyhow::bail!("Unexpected heading level: {level}");
@@ -169,23 +239,53 @@ impl NamedExample {
}
Event::End(TagEnd::CodeBlock) => {
let block_info = block_info.trim();
- if current_section.eq_ignore_ascii_case(UNCOMMITTED_DIFF_HEADING) {
- named.example.uncommitted_diff = mem::take(&mut text);
- } else if current_section.eq_ignore_ascii_case(EDIT_HISTORY_HEADING) {
- named.example.edit_history.push_str(&mem::take(&mut text));
- } else if current_section.eq_ignore_ascii_case(CURSOR_POSITION_HEADING) {
- named.example.cursor_path = block_info.into();
- named.example.cursor_position = mem::take(&mut text);
- } else if current_section.eq_ignore_ascii_case(EXPECTED_PATCH_HEADING) {
- named.example.expected_patch = mem::take(&mut text);
- } else if current_section.eq_ignore_ascii_case(EXPECTED_EXCERPTS_HEADING) {
- // TODO: "…" should not be a part of the excerpt
- named.example.expected_excerpts.push(ExpectedExcerpt {
- path: block_info.into(),
- text: mem::take(&mut text),
- });
- } else {
- eprintln!("Warning: Unrecognized section `{current_section:?}`")
+ match current_section {
+ Section::UncommittedDiff => {
+ named.example.uncommitted_diff = mem::take(&mut text);
+ }
+ Section::EditHistory => {
+ named.example.edit_history.push_str(&mem::take(&mut text));
+ }
+ Section::CursorPosition => {
+ named.example.cursor_path = block_info.into();
+ named.example.cursor_position = mem::take(&mut text);
+ }
+ Section::ExpectedExcerpts => {
+ let text = mem::take(&mut text);
+ for excerpt in text.split("\n…\n") {
+ let (mut text, required_lines) = extract_required_lines(&excerpt);
+ if !text.ends_with('\n') {
+ text.push('\n');
+ }
+ let alternatives = &mut named
+ .example
+ .expected_context
+ .last_mut()
+ .unwrap()
+ .alternatives;
+
+ if alternatives.is_empty() {
+ alternatives.push(ExpectedExcerptSet {
+ heading: String::new(),
+ excerpts: vec![],
+ });
+ }
+
+ alternatives
+ .last_mut()
+ .unwrap()
+ .excerpts
+ .push(ExpectedExcerpt {
+ path: block_info.into(),
+ text,
+ required_lines,
+ });
+ }
+ }
+ Section::ExpectedPatch => {
+ named.example.expected_patch = mem::take(&mut text);
+ }
+ Section::Other => {}
}
}
_ => {}
@@ -215,12 +315,10 @@ impl NamedExample {
let (repo_owner, repo_name) = self.repo_name()?;
let file_name = self.file_name();
- let worktrees_dir = env::current_dir()?.join("target").join("zeta-worktrees");
- let repos_dir = env::current_dir()?.join("target").join("zeta-repos");
- fs::create_dir_all(&repos_dir)?;
- fs::create_dir_all(&worktrees_dir)?;
+ fs::create_dir_all(&*REPOS_DIR)?;
+ fs::create_dir_all(&*WORKTREES_DIR)?;
- let repo_dir = repos_dir.join(repo_owner.as_ref()).join(repo_name.as_ref());
+ let repo_dir = REPOS_DIR.join(repo_owner.as_ref()).join(repo_name.as_ref());
let repo_lock = lock_repo(&repo_dir).await;
if !repo_dir.is_dir() {
@@ -251,7 +349,7 @@ impl NamedExample {
};
// Create the worktree for this example if needed.
- let worktree_path = worktrees_dir.join(&file_name);
+ let worktree_path = WORKTREES_DIR.join(&file_name);
if worktree_path.is_dir() {
run_git(&worktree_path, &["clean", "--force", "-d"]).await?;
run_git(&worktree_path, &["reset", "--hard", "HEAD"]).await?;
@@ -309,7 +407,6 @@ impl NamedExample {
.collect()
}
- #[allow(unused)]
fn repo_name(&self) -> Result<(Cow<'_, str>, Cow<'_, str>)> {
// git@github.com:owner/repo.git
if self.example.repository_url.contains('@') {
@@ -344,16 +441,107 @@ impl NamedExample {
}
}
+ pub async fn cursor_position(
+ &self,
+ project: &Entity<Project>,
+ cx: &mut AsyncApp,
+ ) -> Result<(Entity<Buffer>, Anchor)> {
+ let worktree = project.read_with(cx, |project, cx| {
+ project.visible_worktrees(cx).next().unwrap()
+ })?;
+ let cursor_path = RelPath::new(&self.example.cursor_path, PathStyle::Posix)?.into_arc();
+ let cursor_buffer = project
+ .update(cx, |project, cx| {
+ project.open_buffer(
+ ProjectPath {
+ worktree_id: worktree.read(cx).id(),
+ path: cursor_path,
+ },
+ cx,
+ )
+ })?
+ .await?;
+ let cursor_offset_within_excerpt = self
+ .example
+ .cursor_position
+ .find(CURSOR_MARKER)
+ .ok_or_else(|| anyhow!("missing cursor marker"))?;
+ let mut cursor_excerpt = self.example.cursor_position.clone();
+ cursor_excerpt.replace_range(
+ cursor_offset_within_excerpt..(cursor_offset_within_excerpt + CURSOR_MARKER.len()),
+ "",
+ );
+ let excerpt_offset = cursor_buffer.read_with(cx, |buffer, _cx| {
+ let text = buffer.text();
+
+ let mut matches = text.match_indices(&cursor_excerpt);
+ let Some((excerpt_offset, _)) = matches.next() else {
+ anyhow::bail!(
+ "Cursor excerpt did not exist in buffer.\nExcerpt:\n\n{cursor_excerpt}\nBuffer text:\n{text}\n"
+ );
+ };
+ assert!(matches.next().is_none());
+
+ Ok(excerpt_offset)
+ })??;
+
+ let cursor_offset = excerpt_offset + cursor_offset_within_excerpt;
+ let cursor_anchor =
+ cursor_buffer.read_with(cx, |buffer, _| buffer.anchor_after(cursor_offset))?;
+ Ok((cursor_buffer, cursor_anchor))
+ }
+
#[must_use]
pub async fn apply_edit_history(
&self,
project: &Entity<Project>,
cx: &mut AsyncApp,
- ) -> Result<HashSet<Entity<Buffer>>> {
- apply_diff(&self.example.edit_history, project, cx).await
+ ) -> Result<OpenedBuffers<'_>> {
+ zeta2::udiff::apply_diff(&self.example.edit_history, project, cx).await
}
}
+fn extract_required_lines(text: &str) -> (String, Vec<Line>) {
+ const MARKER: &str = "[ZETA]";
+ let mut new_text = String::new();
+ let mut required_lines = Vec::new();
+ let mut skipped_lines = 0_u32;
+
+ for (row, mut line) in text.split('\n').enumerate() {
+ if let Some(marker_column) = line.find(MARKER) {
+ let mut strip_column = marker_column;
+
+ while strip_column > 0 {
+ let prev_char = line[strip_column - 1..].chars().next().unwrap();
+ if prev_char.is_whitespace() || ['/', '#'].contains(&prev_char) {
+ strip_column -= 1;
+ } else {
+ break;
+ }
+ }
+
+ let metadata = &line[marker_column + MARKER.len()..];
+ if metadata.contains("required") {
+ required_lines.push(Line(row as u32 - skipped_lines));
+ }
+
+ if strip_column == 0 {
+ skipped_lines += 1;
+ continue;
+ }
+
+ line = &line[..strip_column];
+ }
+
+ new_text.push_str(line);
+ new_text.push('\n');
+ }
+
+ new_text.pop();
+
+ (new_text, required_lines)
+}
+
async fn run_git(repo_path: &Path, args: &[&str]) -> Result<String> {
let output = smol::process::Command::new("git")
.current_dir(repo_path)
@@ -408,21 +596,34 @@ impl Display for NamedExample {
)?;
}
- if !self.example.expected_excerpts.is_empty() {
- write!(f, "\n## {EXPECTED_EXCERPTS_HEADING}\n\n")?;
-
- for excerpt in &self.example.expected_excerpts {
- write!(
- f,
- "`````{}{}\n{}`````\n\n",
- excerpt
- .path
- .extension()
- .map(|ext| format!("{} ", ext.to_string_lossy()))
- .unwrap_or_default(),
- excerpt.path.display(),
- excerpt.text
- )?;
+ if !self.example.expected_context.is_empty() {
+ write!(f, "\n## {EXPECTED_CONTEXT_HEADING}\n\n")?;
+
+ for entry in &self.example.expected_context {
+ write!(f, "\n### {}\n\n", entry.heading)?;
+
+ let skip_h4 =
+ entry.alternatives.len() == 1 && entry.alternatives[0].heading.is_empty();
+
+ for excerpt_set in &entry.alternatives {
+ if !skip_h4 {
+ write!(f, "\n#### {}\n\n", excerpt_set.heading)?;
+ }
+
+ for excerpt in &excerpt_set.excerpts {
+ write!(
+ f,
+ "`````{}{}\n{}`````\n\n",
+ excerpt
+ .path
+ .extension()
+ .map(|ext| format!("{} ", ext.to_string_lossy()))
+ .unwrap_or_default(),
+ excerpt.path.display(),
+ excerpt.text
+ )?;
+ }
+ }
}
}
@@ -447,403 +648,37 @@ pub async fn lock_repo(path: impl AsRef<Path>) -> OwnedMutexGuard<()> {
.await
}
-#[must_use]
-pub async fn apply_diff(
- diff: &str,
- project: &Entity<Project>,
- cx: &mut AsyncApp,
-) -> Result<HashSet<Entity<Buffer>>> {
- use cloud_llm_client::udiff::DiffLine;
- use std::fmt::Write;
-
- #[derive(Debug, Default)]
- struct HunkState {
- context: String,
- edits: Vec<Edit>,
- }
-
- #[derive(Debug)]
- struct Edit {
- range: Range<usize>,
- text: String,
- }
-
- let mut old_path = None;
- let mut new_path = None;
- let mut hunk = HunkState::default();
- let mut diff_lines = diff.lines().map(DiffLine::parse).peekable();
- let mut open_buffers = HashSet::default();
-
- while let Some(diff_line) = diff_lines.next() {
- match diff_line {
- DiffLine::OldPath { path } => old_path = Some(path),
- DiffLine::NewPath { path } => {
- if old_path.is_none() {
- anyhow::bail!(
- "Found a new path header (`+++`) before an (`---`) old path header"
- );
- }
- new_path = Some(path)
- }
- DiffLine::Context(ctx) => {
- writeln!(&mut hunk.context, "{ctx}")?;
- }
- DiffLine::Deletion(del) => {
- let range = hunk.context.len()..hunk.context.len() + del.len() + '\n'.len_utf8();
- if let Some(last_edit) = hunk.edits.last_mut()
- && last_edit.range.end == range.start
- {
- last_edit.range.end = range.end;
- } else {
- hunk.edits.push(Edit {
- range,
- text: String::new(),
- });
- }
- writeln!(&mut hunk.context, "{del}")?;
- }
- DiffLine::Addition(add) => {
- let range = hunk.context.len()..hunk.context.len();
- if let Some(last_edit) = hunk.edits.last_mut()
- && last_edit.range.end == range.start
- {
- writeln!(&mut last_edit.text, "{add}").unwrap();
- } else {
- hunk.edits.push(Edit {
- range,
- text: format!("{add}\n"),
- });
- }
- }
- DiffLine::HunkHeader(_) | DiffLine::Garbage(_) => {}
- }
-
- let at_hunk_end = match diff_lines.peek() {
- Some(DiffLine::OldPath { .. }) | Some(DiffLine::HunkHeader(_)) | None => true,
- _ => false,
- };
-
- if at_hunk_end {
- let hunk = mem::take(&mut hunk);
-
- let Some(old_path) = old_path.as_deref() else {
- anyhow::bail!("Missing old path (`---`) header")
- };
-
- let Some(new_path) = new_path.as_deref() else {
- anyhow::bail!("Missing new path (`+++`) header")
- };
-
- let buffer = project
- .update(cx, |project, cx| {
- let project_path = project
- .find_project_path(old_path, cx)
- .context("Failed to find old_path in project")?;
-
- anyhow::Ok(project.open_buffer(project_path, cx))
- })??
- .await?;
- open_buffers.insert(buffer.clone());
-
- if old_path != new_path {
- project
- .update(cx, |project, cx| {
- let project_file = project::File::from_dyn(buffer.read(cx).file()).unwrap();
- let new_path = ProjectPath {
- worktree_id: project_file.worktree_id(cx),
- path: project_file.path.clone(),
- };
- project.rename_entry(project_file.entry_id.unwrap(), new_path, cx)
- })?
- .await?;
- }
-
- // TODO is it worth using project search?
- buffer.update(cx, |buffer, cx| {
- let context_offset = if hunk.context.is_empty() {
- 0
- } else {
- let text = buffer.text();
- if let Some(offset) = text.find(&hunk.context) {
- if text[offset + 1..].contains(&hunk.context) {
- anyhow::bail!("Context is not unique enough:\n{}", hunk.context);
- }
- offset
- } else {
- anyhow::bail!(
- "Failed to match context:\n{}\n\nBuffer:\n{}",
- hunk.context,
- text
- );
- }
- };
-
- buffer.edit(
- hunk.edits.into_iter().map(|edit| {
- (
- context_offset + edit.range.start..context_offset + edit.range.end,
- edit.text,
- )
- }),
- None,
- cx,
- );
-
- anyhow::Ok(())
- })??;
- }
- }
-
- anyhow::Ok(open_buffers)
-}
-
#[cfg(test)]
mod tests {
use super::*;
- use ::fs::FakeFs;
- use gpui::TestAppContext;
use indoc::indoc;
use pretty_assertions::assert_eq;
- use project::Project;
- use serde_json::json;
- use settings::SettingsStore;
- use util::path;
-
- #[gpui::test]
- async fn test_apply_diff_successful(cx: &mut TestAppContext) {
- let buffer_1_text = indoc! {r#"
- one
- two
- three
- four
- five
- "# };
-
- let buffer_1_text_final = indoc! {r#"
- 3
- 4
- 5
- "# };
-
- let buffer_2_text = indoc! {r#"
- six
- seven
- eight
- nine
- ten
- "# };
-
- let buffer_2_text_final = indoc! {r#"
- 5
- six
- seven
- 7.5
- eight
- nine
- ten
- 11
- "# };
-
- cx.update(|cx| {
- let settings_store = SettingsStore::test(cx);
- cx.set_global(settings_store);
- Project::init_settings(cx);
- language::init(cx);
- });
-
- let fs = FakeFs::new(cx.background_executor.clone());
- fs.insert_tree(
- path!("/root"),
- json!({
- "file1": buffer_1_text,
- "file2": buffer_2_text,
- }),
- )
- .await;
-
- let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
-
- let diff = indoc! {r#"
- --- a/root/file1
- +++ b/root/file1
- one
- two
- -three
- +3
- four
- five
- --- a/root/file1
- +++ b/root/file1
- 3
- -four
- -five
- +4
- +5
- --- a/root/file1
- +++ b/root/file1
- -one
- -two
- 3
- 4
- --- a/root/file2
- +++ b/root/file2
- +5
- six
- --- a/root/file2
- +++ b/root/file2
- seven
- +7.5
- eight
- --- a/root/file2
- +++ b/root/file2
- ten
- +11
- "#};
-
- let _buffers = apply_diff(diff, &project, &mut cx.to_async())
- .await
- .unwrap();
- let buffer_1 = project
- .update(cx, |project, cx| {
- let project_path = project.find_project_path(path!("/root/file1"), cx).unwrap();
- project.open_buffer(project_path, cx)
- })
- .await
- .unwrap();
- buffer_1.read_with(cx, |buffer, _cx| {
- assert_eq!(buffer.text(), buffer_1_text_final);
- });
- let buffer_2 = project
- .update(cx, |project, cx| {
- let project_path = project.find_project_path(path!("/root/file2"), cx).unwrap();
- project.open_buffer(project_path, cx)
- })
- .await
- .unwrap();
-
- buffer_2.read_with(cx, |buffer, _cx| {
- assert_eq!(buffer.text(), buffer_2_text_final);
- });
- }
-
- #[gpui::test]
- async fn test_apply_diff_non_unique(cx: &mut TestAppContext) {
- let buffer_1_text = indoc! {r#"
- one
+ #[test]
+ fn test_extract_required_lines() {
+ let input = indoc! {"
+ zero
+ one // [ZETA] required
two
+ // [ZETA] something
three
- four
+ four # [ZETA] required
five
- one
- two
- three
- four
- five
- "# };
-
- cx.update(|cx| {
- let settings_store = SettingsStore::test(cx);
- cx.set_global(settings_store);
- Project::init_settings(cx);
- language::init(cx);
- });
-
- let fs = FakeFs::new(cx.background_executor.clone());
- fs.insert_tree(
- path!("/root"),
- json!({
- "file1": buffer_1_text,
- }),
- )
- .await;
-
- let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
-
- let diff = indoc! {r#"
- --- a/root/file1
- +++ b/root/file1
- one
- two
- -three
- +3
- four
- five
- "#};
-
- apply_diff(diff, &project, &mut cx.to_async())
- .await
- .expect_err("Non-unique edits should fail");
- }
+ "};
- #[gpui::test]
- async fn test_apply_diff_unique_via_previous_context(cx: &mut TestAppContext) {
- let start = indoc! {r#"
+ let expected_updated_input = indoc! {"
+ zero
one
two
three
four
five
+ "};
- four
- five
- "# };
-
- let end = indoc! {r#"
- one
- two
- 3
- four
- 5
-
- four
- five
- "# };
-
- cx.update(|cx| {
- let settings_store = SettingsStore::test(cx);
- cx.set_global(settings_store);
- Project::init_settings(cx);
- language::init(cx);
- });
-
- let fs = FakeFs::new(cx.background_executor.clone());
- fs.insert_tree(
- path!("/root"),
- json!({
- "file1": start,
- }),
- )
- .await;
-
- let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
-
- let diff = indoc! {r#"
- --- a/root/file1
- +++ b/root/file1
- one
- two
- -three
- +3
- four
- -five
- +5
- "#};
-
- let _buffers = apply_diff(diff, &project, &mut cx.to_async())
- .await
- .unwrap();
-
- let buffer_1 = project
- .update(cx, |project, cx| {
- let project_path = project.find_project_path(path!("/root/file1"), cx).unwrap();
- project.open_buffer(project_path, cx)
- })
- .await
- .unwrap();
+ let expected_required_lines = vec![Line(1), Line(4)];
- buffer_1.read_with(cx, |buffer, _cx| {
- assert_eq!(buffer.text(), end);
- });
+ let (updated_input, required_lines) = extract_required_lines(input);
+ assert_eq!(updated_input, expected_updated_input);
+ assert_eq!(required_lines, expected_required_lines);
}
}
@@ -7,7 +7,6 @@ use gpui_tokio::Tokio;
use language::LanguageRegistry;
use language_extension::LspAccess;
use node_runtime::{NodeBinaryOptions, NodeRuntime};
-use project::Project;
use project::project_settings::ProjectSettings;
use release_channel::AppVersion;
use reqwest_client::ReqwestClient;
@@ -33,7 +32,6 @@ pub fn init(cx: &mut App) -> ZetaCliAppState {
let settings_store = SettingsStore::new(cx, &settings::default_settings());
cx.set_global(settings_store);
- client::init_settings(cx);
// Set User-Agent so we can download language servers from GitHub
let user_agent = format!(
@@ -55,8 +53,6 @@ pub fn init(cx: &mut App) -> ZetaCliAppState {
};
cx.set_http_client(Arc::new(http));
- Project::init_settings(cx);
-
let client = Client::production(cx);
cx.set_http_client(client.http_client());
@@ -102,7 +98,6 @@ pub fn init(cx: &mut App) -> ZetaCliAppState {
let extension_host_proxy = ExtensionHostProxy::global(cx);
- language::init(cx);
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
language_extension::init(LspAccess::Noop, extension_host_proxy, languages.clone());
language_model::init(client.clone(), cx);
@@ -1,6 +1,7 @@
mod evaluate;
mod example;
mod headless;
+mod paths;
mod predict;
mod source_location;
mod syntax_retrieval_stats;
@@ -10,28 +11,22 @@ use crate::evaluate::{EvaluateArguments, run_evaluate};
use crate::example::{ExampleFormat, NamedExample};
use crate::predict::{PredictArguments, run_zeta2_predict};
use crate::syntax_retrieval_stats::retrieval_stats;
-use ::serde::Serialize;
use ::util::paths::PathStyle;
-use anyhow::{Context as _, Result, anyhow};
+use anyhow::{Result, anyhow};
use clap::{Args, Parser, Subcommand};
-use cloud_llm_client::predict_edits_v3::{self, Excerpt};
-use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock};
+use cloud_llm_client::predict_edits_v3;
use edit_prediction_context::{
- EditPredictionContextOptions, EditPredictionExcerpt, EditPredictionExcerptOptions,
- EditPredictionScoreOptions, Line,
+ EditPredictionContextOptions, EditPredictionExcerptOptions, EditPredictionScoreOptions,
};
-use futures::StreamExt as _;
-use futures::channel::mpsc;
use gpui::{Application, AsyncApp, Entity, prelude::*};
-use language::{Bias, Buffer, BufferSnapshot, OffsetRangeExt, Point};
-use language_model::LanguageModelRegistry;
+use language::{Bias, Buffer, BufferSnapshot, Point};
use project::{Project, Worktree};
use reqwest_client::ReqwestClient;
use serde_json::json;
use std::io::{self};
use std::time::Duration;
use std::{collections::HashSet, path::PathBuf, str::FromStr, sync::Arc};
-use zeta2::{ContextMode, LlmContextOptions, SearchToolQuery};
+use zeta2::ContextMode;
use crate::headless::ZetaCliAppState;
use crate::source_location::SourceLocation;
@@ -79,12 +74,6 @@ enum Zeta2Command {
#[command(subcommand)]
command: Zeta2SyntaxCommand,
},
- Llm {
- #[clap(flatten)]
- args: Zeta2Args,
- #[command(subcommand)]
- command: Zeta2LlmCommand,
- },
Predict(PredictArguments),
Eval(EvaluateArguments),
}
@@ -107,14 +96,6 @@ enum Zeta2SyntaxCommand {
},
}
-#[derive(Subcommand, Debug)]
-enum Zeta2LlmCommand {
- Context {
- #[clap(flatten)]
- context_args: ContextArgs,
- },
-}
-
#[derive(Debug, Args)]
#[group(requires = "worktree")]
struct ContextArgs {
@@ -388,197 +369,6 @@ async fn zeta2_syntax_context(
Ok(output)
}
-async fn zeta2_llm_context(
- zeta2_args: Zeta2Args,
- context_args: ContextArgs,
- app_state: &Arc<ZetaCliAppState>,
- cx: &mut AsyncApp,
-) -> Result<String> {
- let LoadedContext {
- buffer,
- clipped_cursor,
- snapshot: cursor_snapshot,
- project,
- ..
- } = load_context(&context_args, app_state, cx).await?;
-
- let cursor_position = cursor_snapshot.anchor_after(clipped_cursor);
-
- cx.update(|cx| {
- LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
- registry
- .provider(&zeta2::related_excerpts::MODEL_PROVIDER_ID)
- .unwrap()
- .authenticate(cx)
- })
- })?
- .await?;
-
- let edit_history_unified_diff = match context_args.edit_history {
- Some(events) => events.read_to_string().await?,
- None => String::new(),
- };
-
- let (debug_tx, mut debug_rx) = mpsc::unbounded();
-
- let excerpt_options = EditPredictionExcerptOptions {
- max_bytes: zeta2_args.max_excerpt_bytes,
- min_bytes: zeta2_args.min_excerpt_bytes,
- target_before_cursor_over_total_bytes: zeta2_args.target_before_cursor_over_total_bytes,
- };
-
- let related_excerpts = cx
- .update(|cx| {
- zeta2::related_excerpts::find_related_excerpts(
- buffer,
- cursor_position,
- &project,
- edit_history_unified_diff,
- &LlmContextOptions {
- excerpt: excerpt_options.clone(),
- },
- Some(debug_tx),
- cx,
- )
- })?
- .await?;
-
- let cursor_excerpt = EditPredictionExcerpt::select_from_buffer(
- clipped_cursor,
- &cursor_snapshot,
- &excerpt_options,
- None,
- )
- .context("line didn't fit")?;
-
- #[derive(Serialize)]
- struct Output {
- excerpts: Vec<OutputExcerpt>,
- formatted_excerpts: String,
- meta: OutputMeta,
- }
-
- #[derive(Default, Serialize)]
- struct OutputMeta {
- search_prompt: String,
- search_queries: Vec<SearchToolQuery>,
- }
-
- #[derive(Serialize)]
- struct OutputExcerpt {
- path: PathBuf,
- #[serde(flatten)]
- excerpt: Excerpt,
- }
-
- let mut meta = OutputMeta::default();
-
- while let Some(debug_info) = debug_rx.next().await {
- match debug_info {
- zeta2::ZetaDebugInfo::ContextRetrievalStarted(info) => {
- meta.search_prompt = info.search_prompt;
- }
- zeta2::ZetaDebugInfo::SearchQueriesGenerated(info) => {
- meta.search_queries = info.queries
- }
- _ => {}
- }
- }
-
- cx.update(|cx| {
- let mut excerpts = Vec::new();
- let mut formatted_excerpts = String::new();
-
- let cursor_insertions = [(
- predict_edits_v3::Point {
- line: Line(clipped_cursor.row),
- column: clipped_cursor.column,
- },
- CURSOR_MARKER,
- )];
-
- let mut cursor_excerpt_added = false;
-
- for (buffer, ranges) in related_excerpts {
- let excerpt_snapshot = buffer.read(cx).snapshot();
-
- let mut line_ranges = ranges
- .into_iter()
- .map(|range| {
- let point_range = range.to_point(&excerpt_snapshot);
- Line(point_range.start.row)..Line(point_range.end.row)
- })
- .collect::<Vec<_>>();
-
- let Some(file) = excerpt_snapshot.file() else {
- continue;
- };
- let path = file.full_path(cx);
-
- let is_cursor_file = path == cursor_snapshot.file().unwrap().full_path(cx);
- if is_cursor_file {
- let insertion_ix = line_ranges
- .binary_search_by(|probe| {
- probe
- .start
- .cmp(&cursor_excerpt.line_range.start)
- .then(cursor_excerpt.line_range.end.cmp(&probe.end))
- })
- .unwrap_or_else(|ix| ix);
- line_ranges.insert(insertion_ix, cursor_excerpt.line_range.clone());
- cursor_excerpt_added = true;
- }
-
- let merged_excerpts =
- zeta2::merge_excerpts::merge_excerpts(&excerpt_snapshot, line_ranges)
- .into_iter()
- .map(|excerpt| OutputExcerpt {
- path: path.clone(),
- excerpt,
- });
-
- let excerpt_start_ix = excerpts.len();
- excerpts.extend(merged_excerpts);
-
- write_codeblock(
- &path,
- excerpts[excerpt_start_ix..].iter().map(|e| &e.excerpt),
- if is_cursor_file {
- &cursor_insertions
- } else {
- &[]
- },
- Line(excerpt_snapshot.max_point().row),
- true,
- &mut formatted_excerpts,
- );
- }
-
- if !cursor_excerpt_added {
- write_codeblock(
- &cursor_snapshot.file().unwrap().full_path(cx),
- &[Excerpt {
- start_line: cursor_excerpt.line_range.start,
- text: cursor_excerpt.text(&cursor_snapshot).body.into(),
- }],
- &cursor_insertions,
- Line(cursor_snapshot.max_point().row),
- true,
- &mut formatted_excerpts,
- );
- }
-
- let output = Output {
- excerpts,
- formatted_excerpts,
- meta,
- };
-
- Ok(serde_json::to_string_pretty(&output)?)
- })
- .unwrap()
-}
-
async fn zeta1_context(
args: ContextArgs,
app_state: &Arc<ZetaCliAppState>,
@@ -670,13 +460,6 @@ fn main() {
};
println!("{}", result.unwrap());
}
- Zeta2Command::Llm { args, command } => match command {
- Zeta2LlmCommand::Context { context_args } => {
- let result =
- zeta2_llm_context(args, context_args, &app_state, cx).await;
- println!("{}", result.unwrap());
- }
- },
},
Command::ConvertExample {
path,
@@ -0,0 +1,8 @@
+use std::{env, path::PathBuf, sync::LazyLock};
+
+static TARGET_DIR: LazyLock<PathBuf> = LazyLock::new(|| env::current_dir().unwrap().join("target"));
+pub static CACHE_DIR: LazyLock<PathBuf> =
+ LazyLock::new(|| TARGET_DIR.join("zeta-prediction-cache"));
+pub static REPOS_DIR: LazyLock<PathBuf> = LazyLock::new(|| TARGET_DIR.join("zeta-repos"));
+pub static WORKTREES_DIR: LazyLock<PathBuf> = LazyLock::new(|| TARGET_DIR.join("zeta-worktrees"));
+pub static LOGS_DIR: LazyLock<PathBuf> = LazyLock::new(|| TARGET_DIR.join("zeta-logs"));
@@ -1,22 +1,20 @@
use crate::example::{ActualExcerpt, NamedExample};
-
use crate::headless::ZetaCliAppState;
+use crate::paths::LOGS_DIR;
use ::serde::Serialize;
-use ::util::paths::PathStyle;
use anyhow::{Context as _, Result, anyhow};
use clap::Args;
use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock};
use futures::StreamExt as _;
use gpui::AsyncApp;
-use language_model::LanguageModelRegistry;
-use project::{Project, ProjectPath};
+use project::Project;
use serde::Deserialize;
use std::cell::Cell;
+use std::fs;
use std::io::Write;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::{Duration, Instant};
-use util::rel_path::RelPath;
#[derive(Debug, Args)]
pub struct PredictArguments {
@@ -50,21 +48,12 @@ pub async fn zeta2_predict(
app_state: &Arc<ZetaCliAppState>,
cx: &mut AsyncApp,
) -> Result<PredictionDetails> {
+ fs::create_dir_all(&*LOGS_DIR)?;
let worktree_path = example.setup_worktree().await?;
if !AUTHENTICATED.get() {
AUTHENTICATED.set(true);
- cx.update(|cx| {
- LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
- registry
- .provider(&zeta2::related_excerpts::MODEL_PROVIDER_ID)
- .unwrap()
- .authenticate(cx)
- })
- })?
- .await?;
-
app_state
.client
.sign_in_with_optional_connect(true, cx)
@@ -83,6 +72,8 @@ pub async fn zeta2_predict(
)
})?;
+ let buffer_store = project.read_with(cx, |project, _| project.buffer_store().clone())?;
+
let worktree = project
.update(cx, |project, cx| {
project.create_worktree(&worktree_path, true, cx)
@@ -94,58 +85,30 @@ pub async fn zeta2_predict(
})?
.await;
- let _edited_buffers = example.apply_edit_history(&project, cx).await?;
-
- let cursor_path = RelPath::new(&example.example.cursor_path, PathStyle::Posix)?.into_arc();
-
- let cursor_buffer = project
- .update(cx, |project, cx| {
- project.open_buffer(
- ProjectPath {
- worktree_id: worktree.read(cx).id(),
- path: cursor_path,
- },
- cx,
- )
- })?
- .await?;
-
- let cursor_offset_within_excerpt = example
- .example
- .cursor_position
- .find(CURSOR_MARKER)
- .ok_or_else(|| anyhow!("missing cursor marker"))?;
- let mut cursor_excerpt = example.example.cursor_position.clone();
- cursor_excerpt.replace_range(
- cursor_offset_within_excerpt..(cursor_offset_within_excerpt + CURSOR_MARKER.len()),
- "",
- );
- let excerpt_offset = cursor_buffer.read_with(cx, |buffer, _cx| {
- let text = buffer.text();
-
- let mut matches = text.match_indices(&cursor_excerpt);
- let Some((excerpt_offset, _)) = matches.next() else {
- anyhow::bail!(
- "Cursor excerpt did not exist in buffer.\nExcerpt:\n\n{cursor_excerpt}\nBuffer text:\n{text}\n"
- );
- };
- assert!(matches.next().is_none());
+ let zeta = cx.update(|cx| zeta2::Zeta::global(&app_state.client, &app_state.user_store, cx))?;
- Ok(excerpt_offset)
- })??;
+ cx.subscribe(&buffer_store, {
+ let project = project.clone();
+ move |_, event, cx| match event {
+ project::buffer_store::BufferStoreEvent::BufferAdded(buffer) => {
+ zeta2::Zeta::try_global(cx)
+ .unwrap()
+ .update(cx, |zeta, cx| zeta.register_buffer(&buffer, &project, cx));
+ }
+ _ => {}
+ }
+ })?
+ .detach();
- let cursor_offset = excerpt_offset + cursor_offset_within_excerpt;
- let cursor_anchor =
- cursor_buffer.read_with(cx, |buffer, _| buffer.anchor_after(cursor_offset))?;
+ let _edited_buffers = example.apply_edit_history(&project, cx).await?;
+ let (cursor_buffer, cursor_anchor) = example.cursor_position(&project, cx).await?;
- let zeta = cx.update(|cx| zeta2::Zeta::global(&app_state.client, &app_state.user_store, cx))?;
+ let mut debug_rx = zeta.update(cx, |zeta, _| zeta.debug_info())?;
let refresh_task = zeta.update(cx, |zeta, cx| {
- zeta.register_buffer(&cursor_buffer, &project, cx);
zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx)
})?;
- let mut debug_rx = zeta.update(cx, |zeta, _| zeta.debug_info())?;
let mut context_retrieval_started_at = None;
let mut context_retrieval_finished_at = None;
let mut search_queries_generated_at = None;
@@ -159,9 +122,14 @@ pub async fn zeta2_predict(
match event {
zeta2::ZetaDebugInfo::ContextRetrievalStarted(info) => {
context_retrieval_started_at = Some(info.timestamp);
+ fs::write(LOGS_DIR.join("search_prompt.md"), &info.search_prompt)?;
}
zeta2::ZetaDebugInfo::SearchQueriesGenerated(info) => {
search_queries_generated_at = Some(info.timestamp);
+ fs::write(
+ LOGS_DIR.join("search_queries.json"),
+ serde_json::to_string_pretty(&info.regex_by_glob).unwrap(),
+ )?;
}
zeta2::ZetaDebugInfo::SearchQueriesExecuted(info) => {
search_queries_executed_at = Some(info.timestamp);
@@ -173,10 +141,12 @@ pub async fn zeta2_predict(
zeta.request_prediction(&project, &cursor_buffer, cursor_anchor, cx)
})?);
}
- zeta2::ZetaDebugInfo::EditPredicted(request) => {
+ zeta2::ZetaDebugInfo::EditPredictionRequested(request) => {
prediction_started_at = Some(Instant::now());
- request.response_rx.await?.map_err(|err| anyhow!(err))?;
- prediction_finished_at = Some(Instant::now());
+ fs::write(
+ LOGS_DIR.join("prediction_prompt.md"),
+ &request.local_prompt.unwrap_or_default(),
+ )?;
for included_file in request.request.included_files {
let insertions = vec![(request.request.cursor_point, CURSOR_MARKER)];
@@ -199,9 +169,14 @@ pub async fn zeta2_predict(
&mut excerpts_text,
);
}
+
+ let response = request.response_rx.await?.0.map_err(|err| anyhow!(err))?;
+ let response = zeta2::text_from_response(response).unwrap_or_default();
+ prediction_finished_at = Some(Instant::now());
+ fs::write(LOGS_DIR.join("prediction_response.md"), &response)?;
+
break;
}
- _ => {}
}
}
@@ -39,6 +39,7 @@ pub struct Record<'a> {
pub level: log::Level,
pub message: &'a std::fmt::Arguments<'a>,
pub module_path: Option<&'a str>,
+ pub line: Option<u32>,
}
pub fn init_output_stdout() {
@@ -105,7 +106,11 @@ static LEVEL_ANSI_COLORS: [&str; 6] = [
];
// PERF: batching
-pub fn submit(record: Record) {
+pub fn submit(mut record: Record) {
+ if record.module_path.is_none_or(|p| !p.ends_with(".rs")) {
+ // Only render line numbers for actual rust files emitted by `log_err` and friends
+ record.line.take();
+ }
if ENABLED_SINKS_STDOUT.load(Ordering::Acquire) {
let mut stdout = std::io::stdout().lock();
_ = writeln!(
@@ -117,6 +122,7 @@ pub fn submit(record: Record) {
SourceFmt {
scope: record.scope,
module_path: record.module_path,
+ line: record.line,
ansi: true,
},
record.message
@@ -132,6 +138,7 @@ pub fn submit(record: Record) {
SourceFmt {
scope: record.scope,
module_path: record.module_path,
+ line: record.line,
ansi: true,
},
record.message
@@ -167,6 +174,7 @@ pub fn submit(record: Record) {
SourceFmt {
scope: record.scope,
module_path: record.module_path,
+ line: record.line,
ansi: false,
},
record.message
@@ -202,6 +210,7 @@ pub fn flush() {
struct SourceFmt<'a> {
scope: Scope,
module_path: Option<&'a str>,
+ line: Option<u32>,
ansi: bool,
}
@@ -225,6 +234,10 @@ impl std::fmt::Display for SourceFmt<'_> {
f.write_str(subscope)?;
}
}
+ if let Some(line) = self.line {
+ f.write_char(':')?;
+ line.fmt(f)?;
+ }
if self.ansi {
f.write_str(ANSI_RESET)?;
}
@@ -80,7 +80,7 @@ impl log::Log for Zlog {
None => (private::scope_new(&[]), private::scope_new(&["*unknown*"])),
};
let level = record.metadata().level();
- if !filter::is_scope_enabled(&crate_name_scope, record.module_path(), level) {
+ if !filter::is_scope_enabled(&crate_name_scope, Some(record.target()), level) {
return;
}
sink::submit(sink::Record {
@@ -89,6 +89,7 @@ impl log::Log for Zlog {
message: record.args(),
// PERF(batching): store non-static paths in a cache + leak them and pass static str here
module_path: record.module_path().or(record.file()),
+ line: record.line(),
});
}
@@ -109,6 +110,7 @@ macro_rules! log {
level,
message: &format_args!($($arg)+),
module_path: Some(module_path!()),
+ line: Some(line!()),
});
}
}
@@ -291,7 +293,7 @@ impl log::Log for Logger {
return;
}
let level = record.metadata().level();
- if !filter::is_scope_enabled(&self.scope, record.module_path(), level) {
+ if !filter::is_scope_enabled(&self.scope, Some(record.target()), level) {
return;
}
sink::submit(sink::Record {
@@ -299,6 +301,7 @@ impl log::Log for Logger {
level,
message: record.args(),
module_path: record.module_path(),
+ line: record.line(),
});
}
@@ -2,11 +2,9 @@
use collections::HashMap;
use gpui::App;
-use settings::{Settings, SettingsStore};
+use settings::{RegisterSetting, Settings, SettingsStore};
pub fn init(cx: &mut App) {
- ZlogSettings::register(cx);
-
cx.observe_global::<SettingsStore>(|cx| {
let zlog_settings = ZlogSettings::get_global(cx);
zlog::filter::refresh_from_settings(&zlog_settings.scopes);
@@ -14,7 +12,7 @@ pub fn init(cx: &mut App) {
.detach();
}
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, RegisterSetting)]
pub struct ZlogSettings {
/// A map of log scopes to the desired log level.
/// Useful for filtering out noisy logs or enabling more verbose logging.
@@ -75,6 +75,7 @@
- [Overview](./extensions.md)
- [Installing Extensions](./extensions/installing-extensions.md)
- [Developing Extensions](./extensions/developing-extensions.md)
+- [Extension Capabilities](./extensions/capabilities.md)
- [Language Extensions](./extensions/languages.md)
- [Debugger Extensions](./extensions/debugger-extensions.md)
- [Theme Extensions](./extensions/themes.md)
@@ -316,7 +316,7 @@ You should be able to sign-in to Supermaven by clicking on the Supermaven icon i
### Codestral {#codestral}
-To use Mistral's Codestral as your provider, start by going to the the Agent Panel settings view by running the {#action agent::OpenSettings} action.
+To use Mistral's Codestral as your provider, start by going to the Agent Panel settings view by running the {#action agent::OpenSettings} action.
Look for the Mistral item and add a Codestral API key in the corresponding text input.
After that, you should be able to switch your provider to it in your `settings.json` file:
@@ -18,6 +18,8 @@ It is entirely possible to use Zed, including Zed's AI capabilities, without sha
- [Accounts](../authentication.md): When and why you'd need to authenticate into Zed, how to do so, and what scope we need from you.
+- [Collab](https://zed.dev/faq#data-and-privacy): How Zed's live collaboration works, and how data flows to provide the experience (we don't store your code!).
+
## Legal Links
- [Terms of Service](https://zed.dev/terms-of-service)
@@ -3,6 +3,7 @@
Zed lets you add new functionality using user-defined extensions.
- [Installing Extensions](./extensions/installing-extensions.md)
+- [Extension Capabilities](./extensions/capabilities.md)
- [Developing Extensions](./extensions/developing-extensions.md)
- [Developing Language Extensions](./extensions/languages.md)
- [Developing Debugger Extensions](./extensions/debugger-extensions.md)
@@ -0,0 +1,96 @@
+# Extension Capabilities
+
+The operations that Zed extensions are able to perform are governed by a capability system.
+
+## Restricting capabilities
+
+As a user, you have the option of restricting the capabilities that are granted to extensions.
+
+This is controlled via the `granted_extension_capabilities` setting.
+
+Restricting or removing a capability will cause an error to be returned when an extension attempts to call the corresponding extension API without sufficient capabilities.
+
+For instance, if you wanted to restrict downloads to just files from GitHub, you could modify `host` for the `download_file` capability:
+
+```diff
+{
+ "granted_extension_capabilities": [
+ { "kind": "process:exec", "command": "*", "args": ["**"] },
+- { "kind": "download_file", "host": "*", "path": ["**"] },
++ { "kind": "download_file", "host": "github.com", "path": ["**"] },
+ { "kind": "npm:install", "package": "*" }
+ ]
+}
+```
+
+If you don't want extensions to be able to perform _any_ capabilities, you can remove all granted capabilities:
+
+```json
+{
+ "granted_extension_capabilities": []
+}
+```
+
+> Note that this will likely make many extensions non-functional, at least in their default configuration.
+
+## Capabilities
+
+### `process:exec`
+
+The `process:exec` capability grants extensions the ability to invoke commands using [`zed_extension_api::process::Command`](https://docs.rs/zed_extension_api/latest/zed_extension_api/process/struct.Command.html).
+
+#### Examples
+
+To allow any command to be executed with any arguments:
+
+```toml
+{ kind = "process:exec", command = "*", args = ["**"] }
+```
+
+To allow a specific command (e.g., `gem`) to be executed with any arguments:
+
+```toml
+{ kind = "process:exec", command = "gem", args = ["**"] }
+```
+
+### `download_file`
+
+The `download_file` capability grants extensions the ability to download files using [`zed_extension_api::download_file`](https://docs.rs/zed_extension_api/latest/zed_extension_api/fn.download_file.html).
+
+#### Examples
+
+To allow any file to be downloaded:
+
+```toml
+{ kind = "download_file", host = "*", path = ["**"] }
+```
+
+To allow any file to be downloaded from `github.com`:
+
+```toml
+{ kind = "download_file", host = "github.com", path = ["**"] }
+```
+
+To allow any file to be downloaded from a specific GitHub repository:
+
+```toml
+{ kind = "download_file", host = "github.com", path = ["zed-industries", "zed", "**"] }
+```
+
+### `npm:install`
+
+The `npm:install` capability grants extensions the ability to install npm packages using [`zed_extension_api::npm_install_package`](https://docs.rs/zed_extension_api/latest/zed_extension_api/fn.npm_install_package.html).
+
+#### Examples
+
+To allow any npm package to be installed:
+
+```toml
+{ kind = "npm:install", package = "*" }
+```
+
+To allow a specific npm package (e.g., `typescript`) to be installed:
+
+```toml
+{ kind = "npm:install", package = "typescript" }
+```
@@ -1,8 +1,8 @@
# Developing Extensions
-## Extension Capabilities
+## Extension Features
-Extensions can add the following capabilities to Zed:
+Extensions are able to provide the following features to Zed:
- [Languages](./languages.md)
- [Debuggers](./debugger-extensions.md)
@@ -280,7 +280,9 @@ function sign_binary() {
}
function upload_debug_symbols() {
- if [[ -n "${SENTRY_AUTH_TOKEN:-}" ]]; then
+ if [ "$local_install" = true ]; then
+ echo "local install; skipping sentry upload."
+ elif [[ -n "${SENTRY_AUTH_TOKEN:-}" ]]; then
echo "Uploading zed debug symbols to sentry..."
exe_path="target/${target_triple}/release/Zed"
if ! dsymutil --flat "target/${target_triple}/${target_dir}/zed" 2> target/dsymutil.log; then
@@ -5,14 +5,13 @@ source script/lib/blob-store.sh
bucket_name="zed-nightly-host"
-
for file_to_upload in ./release-artifacts/*; do
[ -f "$file_to_upload" ] || continue
- upload_to_blob_store $bucket_name "$file_to_upload" "nightly/$(basename "$file_to_upload")"
- upload_to_blob_store $bucket_name "$file_to_upload" "${GITHUB_SHA}/$(basename "$file_to_upload")"
+ upload_to_blob_store_public $bucket_name "$file_to_upload" "nightly/$(basename "$file_to_upload")"
+ upload_to_blob_store_public $bucket_name "$file_to_upload" "${GITHUB_SHA}/$(basename "$file_to_upload")"
rm -f "$file_to_upload"
done
sha=$(git rev-parse HEAD)
echo -n ${sha} > ./release-artifacts/latest-sha
-upload_to_blob_store $bucket_name "release-artifacts/latest-sha" "nightly/latest-sha"
+upload_to_blob_store_public $bucket_name "release-artifacts/latest-sha" "nightly/latest-sha"
@@ -1,8 +1,8 @@
use gh_workflow::*;
use crate::tasks::workflows::{
- runners,
- steps::{NamedJob, dependant_job, named},
+ release, runners,
+ steps::{NamedJob, checkout_repo, dependant_job, named},
vars::{self, StepOutput},
};
@@ -10,24 +10,36 @@ pub fn after_release() -> Workflow {
let refresh_zed_dev = rebuild_releases_page();
let post_to_discord = post_to_discord(&[&refresh_zed_dev]);
let publish_winget = publish_winget();
+ let create_sentry_release = create_sentry_release();
named::workflow()
.on(Event::default().release(Release::default().types(vec![ReleaseType::Published])))
.add_job(refresh_zed_dev.name, refresh_zed_dev.job)
.add_job(post_to_discord.name, post_to_discord.job)
.add_job(publish_winget.name, publish_winget.job)
+ .add_job(create_sentry_release.name, create_sentry_release.job)
}
fn rebuild_releases_page() -> NamedJob {
+ fn refresh_cloud_releases() -> Step<Run> {
+ named::bash(
+ "curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name }}",
+ )
+ }
+
+ fn redeploy_zed_dev() -> Step<Run> {
+ named::bash("npm exec --yes -- vercel@37 --token=\"$VERCEL_TOKEN\" --scope zed-industries redeploy https://zed.dev")
+ .add_env(("VERCEL_TOKEN", vars::VERCEL_TOKEN))
+ }
+
named::job(
Job::default()
.runs_on(runners::LINUX_SMALL)
.cond(Expression::new(
"github.repository_owner == 'zed-industries'",
))
- .add_step(named::bash(
- "curl https://zed.dev/api/revalidate-releases -H \"Authorization: Bearer ${RELEASE_NOTES_API_TOKEN}\"",
- ).add_env(("RELEASE_NOTES_API_TOKEN", vars::RELEASE_NOTES_API_TOKEN))),
+ .add_step(refresh_cloud_releases())
+ .add_step(redeploy_zed_dev()),
)
}
@@ -116,8 +128,19 @@ fn publish_winget() -> NamedJob {
named::job(
Job::default()
- .runs_on(runners::LINUX_SMALL)
+ .runs_on(runners::WINDOWS_DEFAULT)
.add_step(set_package_name)
.add_step(winget_releaser(&package_name)),
)
}
+
+fn create_sentry_release() -> NamedJob {
+ let job = Job::default()
+ .runs_on(runners::LINUX_SMALL)
+ .cond(Expression::new(
+ "github.repository_owner == 'zed-industries'",
+ ))
+ .add_step(checkout_repo())
+ .add_step(release::create_sentry_release());
+ named::job(job)
+}
@@ -107,7 +107,6 @@ fn auto_release_preview(deps: &[&NamedJob; 1]) -> NamedJob {
)
.add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)),
)
- .add_step(create_sentry_release()),
)
}
@@ -38,7 +38,7 @@ secret!(ZED_ZIPPY_APP_ID);
secret!(ZED_ZIPPY_APP_PRIVATE_KEY);
secret!(DISCORD_WEBHOOK_RELEASE_NOTES);
secret!(WINGET_TOKEN);
-secret!(RELEASE_NOTES_API_TOKEN);
+secret!(VERCEL_TOKEN);
// todo(ci) make these secrets too...
var!(AZURE_SIGNING_ACCOUNT_NAME);